|
|
|
|
@ -5,6 +5,7 @@ from datetime import UTC, datetime
|
|
|
|
|
from typing import Any, Optional, Union, cast
|
|
|
|
|
from uuid import uuid4
|
|
|
|
|
|
|
|
|
|
from sqlalchemy import func, select
|
|
|
|
|
from sqlalchemy.orm import Session
|
|
|
|
|
|
|
|
|
|
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
|
|
|
|
|
@ -47,7 +48,6 @@ from core.workflow.enums import SystemVariableKey
|
|
|
|
|
from core.workflow.nodes import NodeType
|
|
|
|
|
from core.workflow.nodes.tool.entities import ToolNodeData
|
|
|
|
|
from core.workflow.workflow_entry import WorkflowEntry
|
|
|
|
|
from extensions.ext_database import db
|
|
|
|
|
from models.account import Account
|
|
|
|
|
from models.enums import CreatedByRole, WorkflowRunTriggeredFrom
|
|
|
|
|
from models.model import EndUser
|
|
|
|
|
@ -65,28 +65,33 @@ from .exc import WorkflowNodeExecutionNotFoundError, WorkflowRunNotFoundError
|
|
|
|
|
|
|
|
|
|
class WorkflowCycleManage:
|
|
|
|
|
_application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity]
|
|
|
|
|
_workflow: Workflow
|
|
|
|
|
_user: Union[Account, EndUser]
|
|
|
|
|
_task_state: WorkflowTaskState
|
|
|
|
|
_workflow_system_variables: dict[SystemVariableKey, Any]
|
|
|
|
|
_wip_workflow_node_executions: dict[str, WorkflowNodeExecution]
|
|
|
|
|
_wip_workflow_agent_logs: dict[str, list[AgentLogStreamResponse.Data]]
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_run_start(self) -> WorkflowRun:
|
|
|
|
|
max_sequence = (
|
|
|
|
|
db.session.query(db.func.max(WorkflowRun.sequence_number))
|
|
|
|
|
.filter(WorkflowRun.tenant_id == self._workflow.tenant_id)
|
|
|
|
|
.filter(WorkflowRun.app_id == self._workflow.app_id)
|
|
|
|
|
.scalar()
|
|
|
|
|
or 0
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_run_start(
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
workflow_id: str,
|
|
|
|
|
user_id: str,
|
|
|
|
|
created_by_role: CreatedByRole,
|
|
|
|
|
) -> WorkflowRun:
|
|
|
|
|
workflow_stmt = select(Workflow).where(Workflow.id == workflow_id)
|
|
|
|
|
workflow = session.scalar(workflow_stmt)
|
|
|
|
|
if not workflow:
|
|
|
|
|
raise ValueError(f"Workflow not found: {workflow_id}")
|
|
|
|
|
|
|
|
|
|
max_sequence_stmt = select(func.max(WorkflowRun.sequence_number)).where(
|
|
|
|
|
WorkflowRun.tenant_id == workflow.tenant_id,
|
|
|
|
|
WorkflowRun.app_id == workflow.app_id,
|
|
|
|
|
)
|
|
|
|
|
max_sequence = session.scalar(max_sequence_stmt) or 0
|
|
|
|
|
new_sequence_number = max_sequence + 1
|
|
|
|
|
|
|
|
|
|
inputs = {**self._application_generate_entity.inputs}
|
|
|
|
|
for key, value in (self._workflow_system_variables or {}).items():
|
|
|
|
|
if key.value == "conversation":
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
inputs[f"sys.{key.value}"] = value
|
|
|
|
|
|
|
|
|
|
triggered_from = (
|
|
|
|
|
@ -99,34 +104,33 @@ class WorkflowCycleManage:
|
|
|
|
|
inputs = dict(WorkflowEntry.handle_special_values(inputs) or {})
|
|
|
|
|
|
|
|
|
|
# init workflow run
|
|
|
|
|
with Session(db.engine, expire_on_commit=False) as session:
|
|
|
|
|
workflow_run = WorkflowRun()
|
|
|
|
|
system_id = self._workflow_system_variables[SystemVariableKey.WORKFLOW_RUN_ID]
|
|
|
|
|
workflow_run.id = system_id or str(uuid4())
|
|
|
|
|
workflow_run.tenant_id = self._workflow.tenant_id
|
|
|
|
|
workflow_run.app_id = self._workflow.app_id
|
|
|
|
|
workflow_run.sequence_number = new_sequence_number
|
|
|
|
|
workflow_run.workflow_id = self._workflow.id
|
|
|
|
|
workflow_run.type = self._workflow.type
|
|
|
|
|
workflow_run.triggered_from = triggered_from.value
|
|
|
|
|
workflow_run.version = self._workflow.version
|
|
|
|
|
workflow_run.graph = self._workflow.graph
|
|
|
|
|
workflow_run.inputs = json.dumps(inputs)
|
|
|
|
|
workflow_run.status = WorkflowRunStatus.RUNNING
|
|
|
|
|
workflow_run.created_by_role = (
|
|
|
|
|
CreatedByRole.ACCOUNT if isinstance(self._user, Account) else CreatedByRole.END_USER
|
|
|
|
|
)
|
|
|
|
|
workflow_run.created_by = self._user.id
|
|
|
|
|
workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
session.add(workflow_run)
|
|
|
|
|
session.commit()
|
|
|
|
|
workflow_run_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID, uuid4()))
|
|
|
|
|
|
|
|
|
|
workflow_run = WorkflowRun()
|
|
|
|
|
workflow_run.id = workflow_run_id
|
|
|
|
|
workflow_run.tenant_id = workflow.tenant_id
|
|
|
|
|
workflow_run.app_id = workflow.app_id
|
|
|
|
|
workflow_run.sequence_number = new_sequence_number
|
|
|
|
|
workflow_run.workflow_id = workflow.id
|
|
|
|
|
workflow_run.type = workflow.type
|
|
|
|
|
workflow_run.triggered_from = triggered_from.value
|
|
|
|
|
workflow_run.version = workflow.version
|
|
|
|
|
workflow_run.graph = workflow.graph
|
|
|
|
|
workflow_run.inputs = json.dumps(inputs)
|
|
|
|
|
workflow_run.status = WorkflowRunStatus.RUNNING
|
|
|
|
|
workflow_run.created_by_role = created_by_role
|
|
|
|
|
workflow_run.created_by = user_id
|
|
|
|
|
workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
session.add(workflow_run)
|
|
|
|
|
|
|
|
|
|
return workflow_run
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_run_success(
|
|
|
|
|
self,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
workflow_run_id: str,
|
|
|
|
|
start_at: float,
|
|
|
|
|
total_tokens: int,
|
|
|
|
|
total_steps: int,
|
|
|
|
|
@ -144,7 +148,7 @@ class WorkflowCycleManage:
|
|
|
|
|
:param conversation_id: conversation id
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_run = self._refetch_workflow_run(workflow_run.id)
|
|
|
|
|
workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
|
|
|
|
|
|
|
|
|
|
outputs = WorkflowEntry.handle_special_values(outputs)
|
|
|
|
|
|
|
|
|
|
@ -155,9 +159,6 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_run.total_steps = total_steps
|
|
|
|
|
workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
db.session.commit()
|
|
|
|
|
db.session.refresh(workflow_run)
|
|
|
|
|
|
|
|
|
|
if trace_manager:
|
|
|
|
|
trace_manager.add_trace_task(
|
|
|
|
|
TraceTask(
|
|
|
|
|
@ -168,13 +169,13 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
db.session.close()
|
|
|
|
|
|
|
|
|
|
return workflow_run
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_run_partial_success(
|
|
|
|
|
self,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
workflow_run_id: str,
|
|
|
|
|
start_at: float,
|
|
|
|
|
total_tokens: int,
|
|
|
|
|
total_steps: int,
|
|
|
|
|
@ -183,18 +184,7 @@ class WorkflowCycleManage:
|
|
|
|
|
conversation_id: Optional[str] = None,
|
|
|
|
|
trace_manager: Optional[TraceQueueManager] = None,
|
|
|
|
|
) -> WorkflowRun:
|
|
|
|
|
"""
|
|
|
|
|
Workflow run success
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param start_at: start time
|
|
|
|
|
:param total_tokens: total tokens
|
|
|
|
|
:param total_steps: total steps
|
|
|
|
|
:param outputs: outputs
|
|
|
|
|
:param conversation_id: conversation id
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_run = self._refetch_workflow_run(workflow_run.id)
|
|
|
|
|
|
|
|
|
|
workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
|
|
|
|
|
outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None)
|
|
|
|
|
|
|
|
|
|
workflow_run.status = WorkflowRunStatus.PARTIAL_SUCCESSED.value
|
|
|
|
|
@ -204,8 +194,6 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_run.total_steps = total_steps
|
|
|
|
|
workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
workflow_run.exceptions_count = exceptions_count
|
|
|
|
|
db.session.commit()
|
|
|
|
|
db.session.refresh(workflow_run)
|
|
|
|
|
|
|
|
|
|
if trace_manager:
|
|
|
|
|
trace_manager.add_trace_task(
|
|
|
|
|
@ -217,13 +205,13 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
db.session.close()
|
|
|
|
|
|
|
|
|
|
return workflow_run
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_run_failed(
|
|
|
|
|
self,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
workflow_run_id: str,
|
|
|
|
|
start_at: float,
|
|
|
|
|
total_tokens: int,
|
|
|
|
|
total_steps: int,
|
|
|
|
|
@ -243,7 +231,7 @@ class WorkflowCycleManage:
|
|
|
|
|
:param error: error message
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_run = self._refetch_workflow_run(workflow_run.id)
|
|
|
|
|
workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
|
|
|
|
|
|
|
|
|
|
workflow_run.status = status.value
|
|
|
|
|
workflow_run.error = error
|
|
|
|
|
@ -252,21 +240,18 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_run.total_steps = total_steps
|
|
|
|
|
workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
workflow_run.exceptions_count = exceptions_count
|
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
|
|
running_workflow_node_executions = (
|
|
|
|
|
db.session.query(WorkflowNodeExecution)
|
|
|
|
|
.filter(
|
|
|
|
|
WorkflowNodeExecution.tenant_id == workflow_run.tenant_id,
|
|
|
|
|
WorkflowNodeExecution.app_id == workflow_run.app_id,
|
|
|
|
|
WorkflowNodeExecution.workflow_id == workflow_run.workflow_id,
|
|
|
|
|
WorkflowNodeExecution.triggered_from == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
|
|
|
|
|
WorkflowNodeExecution.workflow_run_id == workflow_run.id,
|
|
|
|
|
WorkflowNodeExecution.status == WorkflowNodeExecutionStatus.RUNNING.value,
|
|
|
|
|
)
|
|
|
|
|
.all()
|
|
|
|
|
|
|
|
|
|
stmt = select(WorkflowNodeExecution).where(
|
|
|
|
|
WorkflowNodeExecution.tenant_id == workflow_run.tenant_id,
|
|
|
|
|
WorkflowNodeExecution.app_id == workflow_run.app_id,
|
|
|
|
|
WorkflowNodeExecution.workflow_id == workflow_run.workflow_id,
|
|
|
|
|
WorkflowNodeExecution.triggered_from == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
|
|
|
|
|
WorkflowNodeExecution.workflow_run_id == workflow_run.id,
|
|
|
|
|
WorkflowNodeExecution.status == WorkflowNodeExecutionStatus.RUNNING.value,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
running_workflow_node_executions = session.scalars(stmt).all()
|
|
|
|
|
|
|
|
|
|
for workflow_node_execution in running_workflow_node_executions:
|
|
|
|
|
workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
|
|
|
|
|
workflow_node_execution.error = error
|
|
|
|
|
@ -274,13 +259,6 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_node_execution.elapsed_time = (
|
|
|
|
|
workflow_node_execution.finished_at - workflow_node_execution.created_at
|
|
|
|
|
).total_seconds()
|
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
|
|
db.session.close()
|
|
|
|
|
|
|
|
|
|
# with Session(db.engine, expire_on_commit=False) as session:
|
|
|
|
|
# session.add(workflow_run)
|
|
|
|
|
# session.refresh(workflow_run)
|
|
|
|
|
|
|
|
|
|
if trace_manager:
|
|
|
|
|
trace_manager.add_trace_task(
|
|
|
|
|
@ -295,79 +273,49 @@ class WorkflowCycleManage:
|
|
|
|
|
return workflow_run
|
|
|
|
|
|
|
|
|
|
def _handle_node_execution_start(
|
|
|
|
|
self, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
|
|
|
|
|
self, *, session: Session, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
|
|
|
|
|
) -> WorkflowNodeExecution:
|
|
|
|
|
# init workflow node execution
|
|
|
|
|
|
|
|
|
|
with Session(db.engine, expire_on_commit=False) as session:
|
|
|
|
|
workflow_node_execution = WorkflowNodeExecution()
|
|
|
|
|
workflow_node_execution.tenant_id = workflow_run.tenant_id
|
|
|
|
|
workflow_node_execution.app_id = workflow_run.app_id
|
|
|
|
|
workflow_node_execution.workflow_id = workflow_run.workflow_id
|
|
|
|
|
workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
|
|
|
|
|
workflow_node_execution.workflow_run_id = workflow_run.id
|
|
|
|
|
workflow_node_execution.predecessor_node_id = event.predecessor_node_id
|
|
|
|
|
workflow_node_execution.index = event.node_run_index
|
|
|
|
|
workflow_node_execution.node_execution_id = event.node_execution_id
|
|
|
|
|
workflow_node_execution.node_id = event.node_id
|
|
|
|
|
workflow_node_execution.node_type = event.node_type.value
|
|
|
|
|
workflow_node_execution.title = event.node_data.title
|
|
|
|
|
workflow_node_execution.status = WorkflowNodeExecutionStatus.RUNNING.value
|
|
|
|
|
workflow_node_execution.created_by_role = workflow_run.created_by_role
|
|
|
|
|
workflow_node_execution.created_by = workflow_run.created_by
|
|
|
|
|
workflow_node_execution.execution_metadata = json.dumps(
|
|
|
|
|
{
|
|
|
|
|
NodeRunMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id,
|
|
|
|
|
NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
session.add(workflow_node_execution)
|
|
|
|
|
session.commit()
|
|
|
|
|
session.refresh(workflow_node_execution)
|
|
|
|
|
workflow_node_execution = WorkflowNodeExecution()
|
|
|
|
|
workflow_node_execution.id = event.node_execution_id
|
|
|
|
|
workflow_node_execution.tenant_id = workflow_run.tenant_id
|
|
|
|
|
workflow_node_execution.app_id = workflow_run.app_id
|
|
|
|
|
workflow_node_execution.workflow_id = workflow_run.workflow_id
|
|
|
|
|
workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
|
|
|
|
|
workflow_node_execution.workflow_run_id = workflow_run.id
|
|
|
|
|
workflow_node_execution.predecessor_node_id = event.predecessor_node_id
|
|
|
|
|
workflow_node_execution.index = event.node_run_index
|
|
|
|
|
workflow_node_execution.node_execution_id = event.node_execution_id
|
|
|
|
|
workflow_node_execution.node_id = event.node_id
|
|
|
|
|
workflow_node_execution.node_type = event.node_type.value
|
|
|
|
|
workflow_node_execution.title = event.node_data.title
|
|
|
|
|
workflow_node_execution.status = WorkflowNodeExecutionStatus.RUNNING.value
|
|
|
|
|
workflow_node_execution.created_by_role = workflow_run.created_by_role
|
|
|
|
|
workflow_node_execution.created_by = workflow_run.created_by
|
|
|
|
|
workflow_node_execution.execution_metadata = json.dumps(
|
|
|
|
|
{
|
|
|
|
|
NodeRunMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id,
|
|
|
|
|
NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
|
|
|
|
|
self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution
|
|
|
|
|
session.add(workflow_node_execution)
|
|
|
|
|
return workflow_node_execution
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node execution success
|
|
|
|
|
:param event: queue node succeeded event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_node_execution = self._refetch_workflow_node_execution(event.node_execution_id)
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_node_execution_success(
|
|
|
|
|
self, *, session: Session, event: QueueNodeSucceededEvent
|
|
|
|
|
) -> WorkflowNodeExecution:
|
|
|
|
|
workflow_node_execution = self._get_workflow_node_execution(
|
|
|
|
|
session=session, node_execution_id=event.node_execution_id
|
|
|
|
|
)
|
|
|
|
|
inputs = WorkflowEntry.handle_special_values(event.inputs)
|
|
|
|
|
process_data = WorkflowEntry.handle_special_values(event.process_data)
|
|
|
|
|
outputs = WorkflowEntry.handle_special_values(event.outputs)
|
|
|
|
|
execution_metadata_dict = dict(event.execution_metadata or {})
|
|
|
|
|
if self._wip_workflow_agent_logs.get(workflow_node_execution.id):
|
|
|
|
|
if not execution_metadata_dict:
|
|
|
|
|
execution_metadata_dict = {}
|
|
|
|
|
|
|
|
|
|
execution_metadata_dict[NodeRunMetadataKey.AGENT_LOG] = self._wip_workflow_agent_logs.get(
|
|
|
|
|
workflow_node_execution.id, []
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
execution_metadata = json.dumps(jsonable_encoder(execution_metadata_dict)) if execution_metadata_dict else None
|
|
|
|
|
finished_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
elapsed_time = (finished_at - event.start_at).total_seconds()
|
|
|
|
|
|
|
|
|
|
db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
|
|
|
|
|
{
|
|
|
|
|
WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value,
|
|
|
|
|
WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
|
|
|
|
|
WorkflowNodeExecution.process_data: json.dumps(process_data) if process_data else None,
|
|
|
|
|
WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
|
|
|
|
|
WorkflowNodeExecution.execution_metadata: execution_metadata,
|
|
|
|
|
WorkflowNodeExecution.finished_at: finished_at,
|
|
|
|
|
WorkflowNodeExecution.elapsed_time: elapsed_time,
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
db.session.commit()
|
|
|
|
|
db.session.close()
|
|
|
|
|
process_data = WorkflowEntry.handle_special_values(event.process_data)
|
|
|
|
|
|
|
|
|
|
workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
|
|
|
|
|
@ -378,54 +326,31 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_node_execution.finished_at = finished_at
|
|
|
|
|
workflow_node_execution.elapsed_time = elapsed_time
|
|
|
|
|
|
|
|
|
|
self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id)
|
|
|
|
|
|
|
|
|
|
return workflow_node_execution
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_node_execution_failed(
|
|
|
|
|
self, event: QueueNodeFailedEvent | QueueNodeInIterationFailedEvent | QueueNodeExceptionEvent
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
event: QueueNodeFailedEvent | QueueNodeInIterationFailedEvent | QueueNodeExceptionEvent,
|
|
|
|
|
) -> WorkflowNodeExecution:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node execution failed
|
|
|
|
|
:param event: queue node failed event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_node_execution = self._refetch_workflow_node_execution(event.node_execution_id)
|
|
|
|
|
workflow_node_execution = self._get_workflow_node_execution(
|
|
|
|
|
session=session, node_execution_id=event.node_execution_id
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
inputs = WorkflowEntry.handle_special_values(event.inputs)
|
|
|
|
|
process_data = WorkflowEntry.handle_special_values(event.process_data)
|
|
|
|
|
outputs = WorkflowEntry.handle_special_values(event.outputs)
|
|
|
|
|
finished_at = datetime.now(UTC).replace(tzinfo=None)
|
|
|
|
|
elapsed_time = (finished_at - event.start_at).total_seconds()
|
|
|
|
|
execution_metadata_dict = dict(event.execution_metadata or {})
|
|
|
|
|
if self._wip_workflow_agent_logs.get(workflow_node_execution.id):
|
|
|
|
|
if not execution_metadata_dict:
|
|
|
|
|
execution_metadata_dict = {}
|
|
|
|
|
|
|
|
|
|
execution_metadata_dict[NodeRunMetadataKey.AGENT_LOG] = self._wip_workflow_agent_logs.get(
|
|
|
|
|
workflow_node_execution.id, []
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
execution_metadata = json.dumps(jsonable_encoder(execution_metadata_dict)) if execution_metadata_dict else None
|
|
|
|
|
db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
|
|
|
|
|
{
|
|
|
|
|
WorkflowNodeExecution.status: (
|
|
|
|
|
WorkflowNodeExecutionStatus.FAILED.value
|
|
|
|
|
if not isinstance(event, QueueNodeExceptionEvent)
|
|
|
|
|
else WorkflowNodeExecutionStatus.EXCEPTION.value
|
|
|
|
|
),
|
|
|
|
|
WorkflowNodeExecution.error: event.error,
|
|
|
|
|
WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
|
|
|
|
|
WorkflowNodeExecution.process_data: json.dumps(process_data) if process_data else None,
|
|
|
|
|
WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
|
|
|
|
|
WorkflowNodeExecution.finished_at: finished_at,
|
|
|
|
|
WorkflowNodeExecution.elapsed_time: elapsed_time,
|
|
|
|
|
WorkflowNodeExecution.execution_metadata: execution_metadata,
|
|
|
|
|
}
|
|
|
|
|
execution_metadata = (
|
|
|
|
|
json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
db.session.commit()
|
|
|
|
|
db.session.close()
|
|
|
|
|
process_data = WorkflowEntry.handle_special_values(event.process_data)
|
|
|
|
|
workflow_node_execution.status = (
|
|
|
|
|
WorkflowNodeExecutionStatus.FAILED.value
|
|
|
|
|
@ -440,12 +365,10 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_node_execution.elapsed_time = elapsed_time
|
|
|
|
|
workflow_node_execution.execution_metadata = execution_metadata
|
|
|
|
|
|
|
|
|
|
self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id)
|
|
|
|
|
|
|
|
|
|
return workflow_node_execution
|
|
|
|
|
|
|
|
|
|
def _handle_workflow_node_execution_retried(
|
|
|
|
|
self, workflow_run: WorkflowRun, event: QueueNodeRetryEvent
|
|
|
|
|
self, *, session: Session, workflow_run: WorkflowRun, event: QueueNodeRetryEvent
|
|
|
|
|
) -> WorkflowNodeExecution:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node execution failed
|
|
|
|
|
@ -469,6 +392,7 @@ class WorkflowCycleManage:
|
|
|
|
|
execution_metadata = json.dumps(merged_metadata)
|
|
|
|
|
|
|
|
|
|
workflow_node_execution = WorkflowNodeExecution()
|
|
|
|
|
workflow_node_execution.id = event.node_execution_id
|
|
|
|
|
workflow_node_execution.tenant_id = workflow_run.tenant_id
|
|
|
|
|
workflow_node_execution.app_id = workflow_run.app_id
|
|
|
|
|
workflow_node_execution.workflow_id = workflow_run.workflow_id
|
|
|
|
|
@ -491,10 +415,7 @@ class WorkflowCycleManage:
|
|
|
|
|
workflow_node_execution.execution_metadata = execution_metadata
|
|
|
|
|
workflow_node_execution.index = event.node_run_index
|
|
|
|
|
|
|
|
|
|
db.session.add(workflow_node_execution)
|
|
|
|
|
db.session.commit()
|
|
|
|
|
db.session.refresh(workflow_node_execution)
|
|
|
|
|
|
|
|
|
|
session.add(workflow_node_execution)
|
|
|
|
|
return workflow_node_execution
|
|
|
|
|
|
|
|
|
|
#################################################
|
|
|
|
|
@ -502,14 +423,14 @@ class WorkflowCycleManage:
|
|
|
|
|
#################################################
|
|
|
|
|
|
|
|
|
|
def _workflow_start_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
) -> WorkflowStartStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow start to stream response.
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return WorkflowStartStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -523,36 +444,32 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_finish_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
) -> WorkflowFinishStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow finish to stream response.
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# Attach WorkflowRun to an active session so "created_by_role" can be accessed.
|
|
|
|
|
workflow_run = db.session.merge(workflow_run)
|
|
|
|
|
|
|
|
|
|
# Refresh to ensure any expired attributes are fully loaded
|
|
|
|
|
db.session.refresh(workflow_run)
|
|
|
|
|
|
|
|
|
|
created_by = None
|
|
|
|
|
if workflow_run.created_by_role == CreatedByRole.ACCOUNT.value:
|
|
|
|
|
created_by_account = workflow_run.created_by_account
|
|
|
|
|
if created_by_account:
|
|
|
|
|
if workflow_run.created_by_role == CreatedByRole.ACCOUNT:
|
|
|
|
|
stmt = select(Account).where(Account.id == workflow_run.created_by)
|
|
|
|
|
account = session.scalar(stmt)
|
|
|
|
|
if account:
|
|
|
|
|
created_by = {
|
|
|
|
|
"id": created_by_account.id,
|
|
|
|
|
"name": created_by_account.name,
|
|
|
|
|
"email": created_by_account.email,
|
|
|
|
|
"id": account.id,
|
|
|
|
|
"name": account.name,
|
|
|
|
|
"email": account.email,
|
|
|
|
|
}
|
|
|
|
|
else:
|
|
|
|
|
created_by_end_user = workflow_run.created_by_end_user
|
|
|
|
|
if created_by_end_user:
|
|
|
|
|
elif workflow_run.created_by_role == CreatedByRole.END_USER:
|
|
|
|
|
stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
|
|
|
|
|
end_user = session.scalar(stmt)
|
|
|
|
|
if end_user:
|
|
|
|
|
created_by = {
|
|
|
|
|
"id": created_by_end_user.id,
|
|
|
|
|
"user": created_by_end_user.session_id,
|
|
|
|
|
"id": end_user.id,
|
|
|
|
|
"user": end_user.session_id,
|
|
|
|
|
}
|
|
|
|
|
else:
|
|
|
|
|
raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}")
|
|
|
|
|
|
|
|
|
|
return WorkflowFinishStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
@ -576,17 +493,20 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_node_start_to_stream_response(
|
|
|
|
|
self, event: QueueNodeStartedEvent, task_id: str, workflow_node_execution: WorkflowNodeExecution
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
event: QueueNodeStartedEvent,
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_node_execution: WorkflowNodeExecution,
|
|
|
|
|
) -> Optional[NodeStartStreamResponse]:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node start to stream response.
|
|
|
|
|
:param event: queue node started event
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_node_execution: workflow node execution
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
|
|
|
|
|
if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
|
|
|
|
|
return None
|
|
|
|
|
if not workflow_node_execution.workflow_run_id:
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
response = NodeStartStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
@ -622,6 +542,8 @@ class WorkflowCycleManage:
|
|
|
|
|
|
|
|
|
|
def _workflow_node_finish_to_stream_response(
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
event: QueueNodeSucceededEvent
|
|
|
|
|
| QueueNodeFailedEvent
|
|
|
|
|
| QueueNodeInIterationFailedEvent
|
|
|
|
|
@ -629,15 +551,14 @@ class WorkflowCycleManage:
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_node_execution: WorkflowNodeExecution,
|
|
|
|
|
) -> Optional[NodeFinishStreamResponse]:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node finish to stream response.
|
|
|
|
|
:param event: queue node succeeded or failed event
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_node_execution: workflow node execution
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
|
|
|
|
|
return None
|
|
|
|
|
if not workflow_node_execution.workflow_run_id:
|
|
|
|
|
return None
|
|
|
|
|
if not workflow_node_execution.finished_at:
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
return NodeFinishStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
@ -669,19 +590,20 @@ class WorkflowCycleManage:
|
|
|
|
|
|
|
|
|
|
def _workflow_node_retry_to_stream_response(
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
event: QueueNodeRetryEvent,
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_node_execution: WorkflowNodeExecution,
|
|
|
|
|
) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
|
|
|
|
|
"""
|
|
|
|
|
Workflow node finish to stream response.
|
|
|
|
|
:param event: queue node succeeded or failed event
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_node_execution: workflow node execution
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
|
|
|
|
|
return None
|
|
|
|
|
if not workflow_node_execution.workflow_run_id:
|
|
|
|
|
return None
|
|
|
|
|
if not workflow_node_execution.finished_at:
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
return NodeRetryStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
@ -713,15 +635,10 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_parallel_branch_start_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun, event: QueueParallelBranchRunStartedEvent
|
|
|
|
|
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueParallelBranchRunStartedEvent
|
|
|
|
|
) -> ParallelBranchStartStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow parallel branch start to stream response
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param event: parallel branch run started event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return ParallelBranchStartStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -737,17 +654,14 @@ class WorkflowCycleManage:
|
|
|
|
|
|
|
|
|
|
def _workflow_parallel_branch_finished_to_stream_response(
|
|
|
|
|
self,
|
|
|
|
|
*,
|
|
|
|
|
session: Session,
|
|
|
|
|
task_id: str,
|
|
|
|
|
workflow_run: WorkflowRun,
|
|
|
|
|
event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
|
|
|
|
|
) -> ParallelBranchFinishedStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow parallel branch finished to stream response
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param event: parallel branch run succeeded or failed event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return ParallelBranchFinishedStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -764,15 +678,10 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_iteration_start_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun, event: QueueIterationStartEvent
|
|
|
|
|
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationStartEvent
|
|
|
|
|
) -> IterationNodeStartStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow iteration start to stream response
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param event: iteration start event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return IterationNodeStartStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -791,15 +700,10 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_iteration_next_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun, event: QueueIterationNextEvent
|
|
|
|
|
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationNextEvent
|
|
|
|
|
) -> IterationNodeNextStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow iteration next to stream response
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param event: iteration next event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return IterationNodeNextStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -820,15 +724,10 @@ class WorkflowCycleManage:
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
def _workflow_iteration_completed_to_stream_response(
|
|
|
|
|
self, task_id: str, workflow_run: WorkflowRun, event: QueueIterationCompletedEvent
|
|
|
|
|
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationCompletedEvent
|
|
|
|
|
) -> IterationNodeCompletedStreamResponse:
|
|
|
|
|
"""
|
|
|
|
|
Workflow iteration completed to stream response
|
|
|
|
|
:param task_id: task id
|
|
|
|
|
:param workflow_run: workflow run
|
|
|
|
|
:param event: iteration completed event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
# receive session to make sure the workflow_run won't be expired, need a more elegant way to handle this
|
|
|
|
|
_ = session
|
|
|
|
|
return IterationNodeCompletedStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
workflow_run_id=workflow_run.id,
|
|
|
|
|
@ -912,27 +811,22 @@ class WorkflowCycleManage:
|
|
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
def _get_workflow_run(self, *, session: Session, workflow_run_id: str) -> WorkflowRun:
    """
    Load the WorkflowRun row for the given id through the caller-supplied session.

    :param session: active SQLAlchemy session used for the lookup (keeps the
        returned ORM object attached to the caller's unit of work)
    :param workflow_run_id: primary key of the workflow run to fetch
    :return: the matching WorkflowRun
    :raises WorkflowRunNotFoundError: if no workflow run with that id exists
    """
    workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_run_id))
    if workflow_run is None:
        raise WorkflowRunNotFoundError(workflow_run_id)
    return workflow_run
|
|
|
|
|
|
|
|
|
|
def _refetch_workflow_node_execution(self, node_execution_id: str) -> WorkflowNodeExecution:
|
|
|
|
|
"""
|
|
|
|
|
Refetch workflow node execution
|
|
|
|
|
:param node_execution_id: workflow node execution id
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
workflow_node_execution = self._wip_workflow_node_executions.get(node_execution_id)
|
|
|
|
|
|
|
|
|
|
def _get_workflow_node_execution(self, session: Session, node_execution_id: str) -> WorkflowNodeExecution:
|
|
|
|
|
stmt = select(WorkflowNodeExecution).where(WorkflowNodeExecution.id == node_execution_id)
|
|
|
|
|
workflow_node_execution = session.scalar(stmt)
|
|
|
|
|
if not workflow_node_execution:
|
|
|
|
|
raise WorkflowNodeExecutionNotFoundError(node_execution_id)
|
|
|
|
|
|
|
|
|
|
@ -945,41 +839,10 @@ class WorkflowCycleManage:
|
|
|
|
|
:param event: agent log event
|
|
|
|
|
:return:
|
|
|
|
|
"""
|
|
|
|
|
node_execution = self._wip_workflow_node_executions.get(event.node_execution_id)
|
|
|
|
|
if not node_execution:
|
|
|
|
|
raise Exception(f"Workflow node execution not found: {event.node_execution_id}")
|
|
|
|
|
|
|
|
|
|
node_execution_id = node_execution.id
|
|
|
|
|
original_agent_logs = self._wip_workflow_agent_logs.get(node_execution_id, [])
|
|
|
|
|
|
|
|
|
|
# try to find the log with the same id
|
|
|
|
|
for log in original_agent_logs:
|
|
|
|
|
if log.id == event.id:
|
|
|
|
|
# update the log
|
|
|
|
|
log.status = event.status
|
|
|
|
|
log.error = event.error
|
|
|
|
|
log.data = event.data
|
|
|
|
|
break
|
|
|
|
|
else:
|
|
|
|
|
# append the log
|
|
|
|
|
original_agent_logs.append(
|
|
|
|
|
AgentLogStreamResponse.Data(
|
|
|
|
|
id=event.id,
|
|
|
|
|
parent_id=event.parent_id,
|
|
|
|
|
node_execution_id=node_execution_id,
|
|
|
|
|
error=event.error,
|
|
|
|
|
status=event.status,
|
|
|
|
|
data=event.data,
|
|
|
|
|
label=event.label,
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
self._wip_workflow_agent_logs[node_execution_id] = original_agent_logs
|
|
|
|
|
|
|
|
|
|
return AgentLogStreamResponse(
|
|
|
|
|
task_id=task_id,
|
|
|
|
|
data=AgentLogStreamResponse.Data(
|
|
|
|
|
node_execution_id=node_execution_id,
|
|
|
|
|
node_execution_id=event.node_execution_id,
|
|
|
|
|
id=event.id,
|
|
|
|
|
parent_id=event.parent_id,
|
|
|
|
|
label=event.label,
|
|
|
|
|
|