merge main
commit
01566035e3
@ -1,5 +1,11 @@
|
|||||||
blank_issues_enabled: false
contact_links:
  - name: "\U0001F4A1 Model Providers & Plugins"
    url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
    about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.
  - name: "\U0001F4AC Documentation Issues"
    url: "https://github.com/langgenius/dify-docs/issues/new"
    about: Report issues with the documentation, such as typos, outdated information, or missing content. Please provide the specific section and details of the issue.
  - name: "\U0001F4E7 Discussions"
    url: https://github.com/langgenius/dify/discussions/categories/general
    about: General discussions and seek help from the community
|
|||||||
@ -1,52 +0,0 @@
|
|||||||
import base64
|
|
||||||
import hashlib
|
|
||||||
import hmac
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from configs import dify_config
|
|
||||||
|
|
||||||
|
|
||||||
class SignedUrlParams(BaseModel):
    """Query parameters attached to a signed URL (see ``UrlSigner``)."""

    sign_key: str = Field(..., description="The sign key")
    timestamp: str = Field(..., description="Timestamp")
    nonce: str = Field(..., description="Nonce")
    sign: str = Field(..., description="Signature")
|
|
||||||
|
|
||||||
|
|
||||||
class UrlSigner:
    """Create and verify HMAC-SHA256 signed URL query parameters.

    The signature covers ``prefix|sign_key|timestamp|nonce`` and is keyed with
    the application ``SECRET_KEY``.
    """

    @classmethod
    def get_signed_url(cls, url: str, sign_key: str, prefix: str) -> str:
        """Return *url* with ``timestamp``, ``nonce`` and ``sign`` query params appended."""
        signed_url_params = cls.get_signed_url_params(sign_key, prefix)
        return (
            f"{url}?timestamp={signed_url_params.timestamp}"
            f"&nonce={signed_url_params.nonce}&sign={signed_url_params.sign}"
        )

    @classmethod
    def get_signed_url_params(cls, sign_key: str, prefix: str) -> SignedUrlParams:
        """Build a fresh parameter set (current timestamp, random 16-byte nonce, signature)."""
        timestamp = str(int(time.time()))
        nonce = os.urandom(16).hex()
        sign = cls._sign(sign_key, timestamp, nonce, prefix)

        return SignedUrlParams(sign_key=sign_key, timestamp=timestamp, nonce=nonce, sign=sign)

    @classmethod
    def verify(cls, sign_key: str, timestamp: str, nonce: str, sign: str, prefix: str) -> bool:
        """Recompute the signature and compare it with the provided one.

        Uses hmac.compare_digest instead of ``==`` so the comparison runs in
        constant time and does not leak signature bytes via timing.
        """
        recalculated_sign = cls._sign(sign_key, timestamp, nonce, prefix)

        return hmac.compare_digest(sign, recalculated_sign)

    @classmethod
    def _sign(cls, sign_key: str, timestamp: str, nonce: str, prefix: str) -> str:
        """HMAC-SHA256 over ``prefix|sign_key|timestamp|nonce``, urlsafe-base64 encoded.

        Raises:
            Exception: if ``SECRET_KEY`` is not configured.
        """
        if not dify_config.SECRET_KEY:
            raise Exception("SECRET_KEY is not set")

        data_to_sign = f"{prefix}|{sign_key}|{timestamp}|{nonce}"
        secret_key = dify_config.SECRET_KEY.encode()
        sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest()
        encoded_sign = base64.urlsafe_b64encode(sign).decode()

        return encoded_sign
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
"""Abstract interface for document clean implementations."""
|
|
||||||
|
|
||||||
from core.rag.cleaner.cleaner_base import BaseCleaner
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredNonAsciiCharsCleaner(BaseCleaner):
    # NOTE(review): despite the class name, this cleaner collapses extra
    # whitespace (it delegates to `clean_extra_whitespace`) — confirm the
    # intended name against the sibling cleaner modules.
    def clean(self, content) -> str:
        """Collapse runs of extra whitespace in the document content."""
        from unstructured.cleaners.core import clean_extra_whitespace

        # e.g. "ITEM 1A:     RISK FACTORS" -> "ITEM 1A: RISK FACTORS"
        return clean_extra_whitespace(content)
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
"""Abstract interface for document clean implementations."""
|
|
||||||
|
|
||||||
from core.rag.cleaner.cleaner_base import BaseCleaner
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredGroupBrokenParagraphsCleaner(BaseCleaner):
    def clean(self, content) -> str:
        """Rejoin paragraphs that were broken across multiple lines."""
        import re

        from unstructured.cleaners.core import group_broken_paragraphs

        # Three consecutive (possibly whitespace-padded) newlines mark a real
        # paragraph boundary; anything less is treated as a broken paragraph.
        splitter = re.compile(r"(\s*\n\s*){3}")

        return group_broken_paragraphs(content, paragraph_split=splitter)
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
"""Abstract interface for document clean implementations."""
|
|
||||||
|
|
||||||
from core.rag.cleaner.cleaner_base import BaseCleaner
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredNonAsciiCharsCleaner(BaseCleaner):
    def clean(self, content) -> str:
        """Strip non-ASCII characters from the document content."""
        from unstructured.cleaners.core import clean_non_ascii_chars

        # e.g. "\x88This text contains non-ascii characters!\x88"
        #   -> "This text contains non-ascii characters!"
        return clean_non_ascii_chars(content)
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
"""Abstract interface for document clean implementations."""
|
|
||||||
|
|
||||||
from core.rag.cleaner.cleaner_base import BaseCleaner
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredNonAsciiCharsCleaner(BaseCleaner):
    # NOTE(review): the class name does not match the behavior — this cleaner
    # replaces unicode quote characters, not non-ASCII characters in general.
    # Confirm the intended name against the sibling cleaner modules.
    def clean(self, content) -> str:
        """Replace unicode quote characters, such as the \x91 character, in a string."""
        from unstructured.cleaners.core import replace_unicode_quotes

        return replace_unicode_quotes(content)
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
"""Abstract interface for document clean implementations."""
|
|
||||||
|
|
||||||
from core.rag.cleaner.cleaner_base import BaseCleaner
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredTranslateTextCleaner(BaseCleaner):
    def clean(self, content) -> str:
        """Translate the document content via unstructured's translate helper."""
        from unstructured.cleaners.translate import translate_text

        return translate_text(content)
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class ClusterEntity(BaseModel):
    """
    Cluster entity.
    """

    name: str
    cluster_id: str
    # NOTE(review): camelCase field names presumably mirror an external API
    # payload — confirm before renaming to snake_case.
    displayName: str
    region: str
    spendingLimit: Optional[int] = 1000  # default spending limit
    version: str
    createdBy: str
|
|
||||||
@ -1,47 +0,0 @@
|
|||||||
import logging
|
|
||||||
|
|
||||||
from core.rag.extractor.extractor_base import BaseExtractor
|
|
||||||
from core.rag.models.document import Document
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredPDFExtractor(BaseExtractor):
    """Load pdf files.

    Args:
        file_path: Path to the file to load.

        api_url: Unstructured API URL

        api_key: Unstructured API Key
    """

    def __init__(self, file_path: str, api_url: str, api_key: str):
        """Initialize with file path."""
        self._file_path = file_path
        self._api_url = api_url
        self._api_key = api_key

    def extract(self) -> list[Document]:
        """Partition the PDF (remotely when an API URL is set, locally otherwise),
        chunk the elements by title, and wrap each chunk in a Document."""
        if self._api_url:
            # Remote partitioning through the Unstructured API.
            from unstructured.partition.api import partition_via_api

            elements = partition_via_api(
                filename=self._file_path, api_url=self._api_url, api_key=self._api_key, strategy="auto"
            )
        else:
            # Local partitioning.
            from unstructured.partition.pdf import partition_pdf

            elements = partition_pdf(filename=self._file_path, strategy="auto")

        from unstructured.chunking.title import chunk_by_title

        # Group elements into title-delimited chunks of at most 2000 characters.
        chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
        return [Document(page_content=chunk.text.strip()) for chunk in chunks]
|
|
||||||
@ -1,34 +0,0 @@
|
|||||||
import logging
|
|
||||||
|
|
||||||
from core.rag.extractor.extractor_base import BaseExtractor
|
|
||||||
from core.rag.models.document import Document
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class UnstructuredTextExtractor(BaseExtractor):
    """Load plain-text files.

    (The previous docstring said "msg files" — apparently copied from the
    msg extractor; this class partitions text files.)

    Args:
        file_path: Path to the file to load.
        api_url: Unstructured API URL. Accepted for interface parity with the
            other extractors but not used here — partitioning always runs
            locally. TODO(review): confirm whether remote partitioning was
            intended.
    """

    def __init__(self, file_path: str, api_url: str):
        """Initialize with file path."""
        self._file_path = file_path
        self._api_url = api_url

    def extract(self) -> list[Document]:
        """Partition the text file, chunk by title, and wrap chunks in Documents."""
        from unstructured.chunking.title import chunk_by_title
        from unstructured.partition.text import partition_text

        elements = partition_text(filename=self._file_path)
        # Group elements into title-delimited chunks of at most 2000 characters.
        chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000)
        return [Document(page_content=chunk.text.strip()) for chunk in chunks]
|
|
||||||
@ -0,0 +1,224 @@
|
|||||||
|
"""
|
||||||
|
Repository factory for dynamically creating repository instances based on configuration.
|
||||||
|
|
||||||
|
This module provides a Django-like settings system for repository implementations,
|
||||||
|
allowing users to configure different repository backends through string paths.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import inspect
|
||||||
|
import logging
|
||||||
|
from typing import Protocol, Union
|
||||||
|
|
||||||
|
from sqlalchemy.engine import Engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
from configs import dify_config
|
||||||
|
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
|
||||||
|
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
|
||||||
|
from models import Account, EndUser
|
||||||
|
from models.enums import WorkflowRunTriggeredFrom
|
||||||
|
from models.workflow import WorkflowNodeExecutionTriggeredFrom
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RepositoryImportError(Exception):
    """Raised when a repository implementation cannot be imported or instantiated."""
|
||||||
|
|
||||||
|
|
||||||
|
class DifyCoreRepositoryFactory:
    """
    Factory for creating repository instances based on configuration.

    This factory supports Django-like settings where repository implementations
    are specified as module paths (e.g., 'module.submodule.ClassName').
    """

    @staticmethod
    def _import_class(class_path: str) -> type:
        """
        Import a class from a module path string.

        Args:
            class_path: Full module path to the class (e.g., 'module.submodule.ClassName')

        Returns:
            The imported class

        Raises:
            RepositoryImportError: If the class cannot be imported or the path
                does not refer to a class
        """
        try:
            module_path, class_name = class_path.rsplit(".", 1)
            module = importlib.import_module(module_path)
            repo_class = getattr(module, class_name)
        except (ValueError, ImportError, AttributeError) as e:
            raise RepositoryImportError(f"Cannot import repository class '{class_path}': {e}") from e
        # Explicit check instead of `assert`: asserts are stripped under `python -O`,
        # and a non-class attribute should surface as RepositoryImportError.
        if not isinstance(repo_class, type):
            raise RepositoryImportError(f"Cannot import repository class '{class_path}': not a class")
        return repo_class

    @staticmethod
    def _validate_repository_interface(repository_class: type, expected_interface: type[Protocol]) -> None:  # type: ignore
        """
        Validate that a class implements the expected repository interface.

        Args:
            repository_class: The class to validate
            expected_interface: The expected interface/protocol

        Raises:
            RepositoryImportError: If the class doesn't implement the interface
        """
        # Collect the public callables declared by the protocol.
        required_methods = [
            method
            for method in dir(expected_interface)
            if not method.startswith("_") and callable(getattr(expected_interface, method, None))
        ]

        missing_methods = [name for name in required_methods if not hasattr(repository_class, name)]

        if missing_methods:
            raise RepositoryImportError(
                f"Repository class '{repository_class.__name__}' does not implement required methods "
                f"{missing_methods} from interface '{expected_interface.__name__}'"
            )

    @staticmethod
    def _validate_constructor_signature(repository_class: type, required_params: list[str]) -> None:
        """
        Validate that a repository class constructor accepts required parameters.

        Args:
            repository_class: The class to validate
            required_params: List of required parameter names

        Raises:
            RepositoryImportError: If the constructor doesn't accept required parameters
        """
        try:
            # MyPy may flag the line below with the following error:
            #
            # > Accessing "__init__" on an instance is unsound, since
            # > instance.__init__ could be from an incompatible subclass.
            #
            # Despite this, we need to ensure that the constructor of `repository_class`
            # has a compatible signature.
            signature = inspect.signature(repository_class.__init__)  # type: ignore[misc]
            param_names = [name for name in signature.parameters if name != "self"]
        except Exception as e:
            raise RepositoryImportError(
                f"Failed to validate constructor signature for '{repository_class.__name__}': {e}"
            ) from e

        # Raised OUTSIDE the try-block: previously this error was raised inside
        # the `except Exception` scope and got swallowed and re-wrapped with the
        # generic "Failed to validate constructor signature" message.
        missing_params = [param for param in required_params if param not in param_names]
        if missing_params:
            raise RepositoryImportError(
                f"Repository class '{repository_class.__name__}' constructor does not accept required parameters: "
                f"{missing_params}. Expected parameters: {required_params}"
            )

    @classmethod
    def create_workflow_execution_repository(
        cls,
        session_factory: Union[sessionmaker, Engine],
        user: Union[Account, EndUser],
        app_id: str,
        triggered_from: WorkflowRunTriggeredFrom,
    ) -> WorkflowExecutionRepository:
        """
        Create a WorkflowExecutionRepository instance based on configuration.

        Args:
            session_factory: SQLAlchemy sessionmaker or engine
            user: Account or EndUser object
            app_id: Application ID
            triggered_from: Source of the execution trigger

        Returns:
            Configured WorkflowExecutionRepository instance

        Raises:
            RepositoryImportError: If the configured repository cannot be created
        """
        class_path = dify_config.CORE_WORKFLOW_EXECUTION_REPOSITORY
        # Lazy %-style args: the message is only formatted if DEBUG is enabled.
        logger.debug("Creating WorkflowExecutionRepository from: %s", class_path)

        try:
            repository_class = cls._import_class(class_path)
            cls._validate_repository_interface(repository_class, WorkflowExecutionRepository)
            cls._validate_constructor_signature(
                repository_class, ["session_factory", "user", "app_id", "triggered_from"]
            )

            return repository_class(  # type: ignore[no-any-return]
                session_factory=session_factory,
                user=user,
                app_id=app_id,
                triggered_from=triggered_from,
            )
        except RepositoryImportError:
            # Re-raise our custom errors as-is
            raise
        except Exception as e:
            logger.exception("Failed to create WorkflowExecutionRepository")
            raise RepositoryImportError(f"Failed to create WorkflowExecutionRepository from '{class_path}': {e}") from e

    @classmethod
    def create_workflow_node_execution_repository(
        cls,
        session_factory: Union[sessionmaker, Engine],
        user: Union[Account, EndUser],
        app_id: str,
        triggered_from: WorkflowNodeExecutionTriggeredFrom,
    ) -> WorkflowNodeExecutionRepository:
        """
        Create a WorkflowNodeExecutionRepository instance based on configuration.

        Args:
            session_factory: SQLAlchemy sessionmaker or engine
            user: Account or EndUser object
            app_id: Application ID
            triggered_from: Source of the execution trigger

        Returns:
            Configured WorkflowNodeExecutionRepository instance

        Raises:
            RepositoryImportError: If the configured repository cannot be created
        """
        class_path = dify_config.CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY
        logger.debug("Creating WorkflowNodeExecutionRepository from: %s", class_path)

        try:
            repository_class = cls._import_class(class_path)
            cls._validate_repository_interface(repository_class, WorkflowNodeExecutionRepository)
            cls._validate_constructor_signature(
                repository_class, ["session_factory", "user", "app_id", "triggered_from"]
            )

            return repository_class(  # type: ignore[no-any-return]
                session_factory=session_factory,
                user=user,
                app_id=app_id,
                triggered_from=triggered_from,
            )
        except RepositoryImportError:
            # Re-raise our custom errors as-is
            raise
        except Exception as e:
            logger.exception("Failed to create WorkflowNodeExecutionRepository")
            raise RepositoryImportError(
                f"Failed to create WorkflowNodeExecutionRepository from '{class_path}': {e}"
            ) from e
|
||||||
@ -1,79 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
|
||||||
from core.workflow.nodes.base import BaseIterationState, BaseLoopState, BaseNode
|
|
||||||
from models.enums import UserFrom
|
|
||||||
from models.workflow import Workflow, WorkflowType
|
|
||||||
|
|
||||||
from .node_entities import NodeRunResult
|
|
||||||
from .variable_pool import VariablePool
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowNodeAndResult:
    """Pairs a workflow node with its run result (None until the node has run)."""

    node: BaseNode
    result: Optional[NodeRunResult] = None

    def __init__(self, node: BaseNode, result: Optional[NodeRunResult] = None):
        self.node = node
        self.result = result
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowRunState:
    """Mutable state accumulated while a single workflow run executes."""

    # Identity of the workflow being run.
    tenant_id: str
    app_id: str
    workflow_id: str
    workflow_type: WorkflowType
    # Who triggered the run and from where.
    user_id: str
    user_from: UserFrom
    invoke_from: InvokeFrom

    # Nesting depth for workflows invoking workflows.
    workflow_call_depth: int

    start_at: float
    variable_pool: VariablePool

    total_tokens: int = 0

    # NOTE(review): workflow_nodes_and_results is declared but never
    # initialized in __init__ — confirm that callers assign it before use.
    workflow_nodes_and_results: list[WorkflowNodeAndResult]

    class NodeRun(BaseModel):
        # Identifies one node execution, including its enclosing iteration/loop.
        node_id: str
        iteration_node_id: str
        loop_node_id: str

    workflow_node_runs: list[NodeRun]
    workflow_node_steps: int

    current_iteration_state: Optional[BaseIterationState]
    current_loop_state: Optional[BaseLoopState]

    def __init__(
        self,
        workflow: Workflow,
        start_at: float,
        variable_pool: VariablePool,
        user_id: str,
        user_from: UserFrom,
        invoke_from: InvokeFrom,
        workflow_call_depth: int,
    ):
        # Snapshot workflow identity.
        self.workflow_id = workflow.id
        self.tenant_id = workflow.tenant_id
        self.app_id = workflow.app_id
        self.workflow_type = WorkflowType.value_of(workflow.type)

        # Trigger provenance.
        self.user_id = user_id
        self.user_from = user_from
        self.invoke_from = invoke_from
        self.workflow_call_depth = workflow_call_depth

        # Execution bookkeeping.
        self.start_at = start_at
        self.variable_pool = variable_pool
        self.total_tokens = 0

        # Step counter starts at 1; run log and iteration/loop state start empty.
        self.workflow_node_steps = 1
        self.workflow_node_runs = []
        self.current_iteration_state = None
        self.current_loop_state = None
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue