From fca91d1024a1c00e6cc4b6b1f30d8db4fa744469 Mon Sep 17 00:00:00 2001
From: GuanMu
Date: Wed, 11 Jun 2025 12:43:06 +0000
Subject: [PATCH] fix: update output handling in task state metadata and remove
 debug print statements

---
 api/core/app/apps/advanced_chat/generate_task_pipeline.py  | 6 +++---
 api/core/app/entities/task_entities.py                     | 2 +-
 .../workflow/nodes/answer/answer_stream_generate_router.py | 1 -
 3 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index 367d12c43b..a298f451a1 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -165,7 +165,6 @@ class AdvancedChatAppGenerateTaskPipeline:
         )
 
         generator = self._wrapper_process_stream_response(trace_manager=self._application_generate_entity.trace_manager)
-        print(f"generator: {generator}=======")
         if self._base_task_pipeline._stream:
             return self._to_stream_response(generator)
         else:
@@ -183,12 +182,13 @@ class AdvancedChatAppGenerateTaskPipeline:
         extras = {}
         if stream_response.metadata:
             extras["metadata"] = stream_response.metadata
+        final_outputs = self._task_state.metadata.outputs if self._task_state.metadata and hasattr(self._task_state.metadata, 'outputs') else {}
         return ChatbotAppBlockingResponse(
             task_id=stream_response.task_id,
             data=ChatbotAppBlockingResponse.Data(
                 id=self._message_id,
                 mode=self._conversation_mode,
-                outputs=stream_response.data.outputs,
+                outputs=final_outputs,
                 conversation_id=self._conversation_id,
                 message_id=self._message_id,
                 answer=self._task_state.answer,
@@ -505,7 +505,7 @@ class AdvancedChatAppGenerateTaskPipeline:
                     task_id=self._application_generate_entity.task_id,
                     workflow_execution=workflow_execution,
                 )
-                self._task_state.metadata.data = workflow_finish_resp.data.outputs.get('outputs', {}).get('outputs')
+                self._task_state.metadata.outputs = workflow_finish_resp.data.outputs.get('outputs', {}).get('outputs')
                 yield workflow_finish_resp
                 self._base_task_pipeline._queue_manager.publish(
                     QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE
diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py
index 7e628fb62d..cca8ce7737 100644
--- a/api/core/app/entities/task_entities.py
+++ b/api/core/app/entities/task_entities.py
@@ -24,7 +24,7 @@ class AnnotationReply(BaseModel):
 class TaskStateMetadata(BaseModel):
     annotation_reply: AnnotationReply | None = None
     retriever_resources: Sequence[RetrievalSourceMetadata] = Field(default_factory=list)
-    data: Optional[Mapping[str, Any]] = None
+    outputs: Optional[Mapping[str, Any]] = None
     usage: LLMUsage | None = None
diff --git a/api/core/workflow/nodes/answer/answer_stream_generate_router.py b/api/core/workflow/nodes/answer/answer_stream_generate_router.py
index 628476677c..ce6095a34f 100644
--- a/api/core/workflow/nodes/answer/answer_stream_generate_router.py
+++ b/api/core/workflow/nodes/answer/answer_stream_generate_router.py
@@ -100,7 +100,6 @@ class AnswerStreamGeneratorRouter:
         :param config: node config
         :return:
         """
-        print(f"config: {config}=======")
         node_data = AnswerNodeData(**config.get("data", {}))
         return cls.extract_generate_route_from_node_data(node_data)