Merge branch 'main' into feat/plugin-auto-upgrade-fe
commit 784a236280
@@ -0,0 +1,486 @@
import json
import logging
from collections.abc import Sequence
from typing import Optional
from urllib.parse import urljoin

from opentelemetry.trace import Status, StatusCode
from sqlalchemy.orm import Session, sessionmaker

from core.ops.aliyun_trace.data_exporter.traceclient import (
    TraceClient,
    convert_datetime_to_nanoseconds,
    convert_to_span_id,
    convert_to_trace_id,
    generate_span_id,
)
from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData
from core.ops.aliyun_trace.entities.semconv import (
    GEN_AI_COMPLETION,
    GEN_AI_FRAMEWORK,
    GEN_AI_MODEL_NAME,
    GEN_AI_PROMPT,
    GEN_AI_PROMPT_TEMPLATE_TEMPLATE,
    GEN_AI_PROMPT_TEMPLATE_VARIABLE,
    GEN_AI_RESPONSE_FINISH_REASON,
    GEN_AI_SESSION_ID,
    GEN_AI_SPAN_KIND,
    GEN_AI_SYSTEM,
    GEN_AI_USAGE_INPUT_TOKENS,
    GEN_AI_USAGE_OUTPUT_TOKENS,
    GEN_AI_USAGE_TOTAL_TOKENS,
    GEN_AI_USER_ID,
    INPUT_VALUE,
    OUTPUT_VALUE,
    RETRIEVAL_DOCUMENT,
    RETRIEVAL_QUERY,
    TOOL_DESCRIPTION,
    TOOL_NAME,
    TOOL_PARAMETERS,
    GenAISpanKind,
)
from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import AliyunConfig
from core.ops.entities.trace_entity import (
    BaseTraceInfo,
    DatasetRetrievalTraceInfo,
    GenerateNameTraceInfo,
    MessageTraceInfo,
    ModerationTraceInfo,
    SuggestedQuestionTraceInfo,
    ToolTraceInfo,
    WorkflowTraceInfo,
)
from core.rag.models.document import Document
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities.workflow_node_execution import (
    WorkflowNodeExecution,
    WorkflowNodeExecutionMetadataKey,
    WorkflowNodeExecutionStatus,
)
from core.workflow.nodes import NodeType
from models import Account, App, EndUser, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom, db

logger = logging.getLogger(__name__)


class AliyunDataTrace(BaseTraceInstance):
    def __init__(
        self,
        aliyun_config: AliyunConfig,
    ):
        super().__init__(aliyun_config)
        base_url = aliyun_config.endpoint.rstrip("/")
        endpoint = urljoin(base_url, f"adapt_{aliyun_config.license_key}/api/otlp/traces")
        self.trace_client = TraceClient(service_name=aliyun_config.app_name, endpoint=endpoint)

    def trace(self, trace_info: BaseTraceInfo):
        if isinstance(trace_info, WorkflowTraceInfo):
            self.workflow_trace(trace_info)
        if isinstance(trace_info, MessageTraceInfo):
            self.message_trace(trace_info)
        if isinstance(trace_info, ModerationTraceInfo):
            pass
        if isinstance(trace_info, SuggestedQuestionTraceInfo):
            self.suggested_question_trace(trace_info)
        if isinstance(trace_info, DatasetRetrievalTraceInfo):
            self.dataset_retrieval_trace(trace_info)
        if isinstance(trace_info, ToolTraceInfo):
            self.tool_trace(trace_info)
        if isinstance(trace_info, GenerateNameTraceInfo):
            pass

    def api_check(self):
        return self.trace_client.api_check()

    def get_project_url(self):
        try:
            return self.trace_client.get_project_url()
        except Exception as e:
            logger.info(f"Aliyun get run url failed: {str(e)}", exc_info=True)
            raise ValueError(f"Aliyun get run url failed: {str(e)}")

    def workflow_trace(self, trace_info: WorkflowTraceInfo):
        trace_id = convert_to_trace_id(trace_info.workflow_run_id)
        workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow")
        self.add_workflow_span(trace_id, workflow_span_id, trace_info)

        workflow_node_executions = self.get_workflow_node_executions(trace_info)
        for node_execution in workflow_node_executions:
            node_span = self.build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id)
            self.trace_client.add_span(node_span)

    def message_trace(self, trace_info: MessageTraceInfo):
        message_data = trace_info.message_data
        if message_data is None:
            return
        message_id = trace_info.message_id

        user_id = message_data.from_account_id
        if message_data.from_end_user_id:
            end_user_data: Optional[EndUser] = (
                db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first()
            )
            if end_user_data is not None:
                user_id = end_user_data.session_id

        status: Status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)

        trace_id = convert_to_trace_id(message_id)
        message_span_id = convert_to_span_id(message_id, "message")
        message_span = SpanData(
            trace_id=trace_id,
            parent_span_id=None,
            span_id=message_span_id,
            name="message",
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
                GEN_AI_FRAMEWORK: "dify",
                INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
                OUTPUT_VALUE: str(trace_info.outputs),
            },
            status=status,
        )
        self.trace_client.add_span(message_span)

        app_model_config = getattr(trace_info.message_data, "app_model_config", {})
        pre_prompt = getattr(app_model_config, "pre_prompt", "")
        inputs_data = getattr(trace_info.message_data, "inputs", {})
        llm_span = SpanData(
            trace_id=trace_id,
            parent_span_id=message_span_id,
            span_id=convert_to_span_id(message_id, "llm"),
            name="llm",
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""),
                GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""),
                GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens),
                GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens),
                GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens),
                GEN_AI_PROMPT_TEMPLATE_VARIABLE: json.dumps(inputs_data, ensure_ascii=False),
                GEN_AI_PROMPT_TEMPLATE_TEMPLATE: pre_prompt,
                GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False),
                GEN_AI_COMPLETION: str(trace_info.outputs),
                INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
                OUTPUT_VALUE: str(trace_info.outputs),
            },
            status=status,
        )
        self.trace_client.add_span(llm_span)

    def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
        if trace_info.message_data is None:
            return
        message_id = trace_info.message_id

        documents_data = extract_retrieval_documents(trace_info.documents)
        dataset_retrieval_span = SpanData(
            trace_id=convert_to_trace_id(message_id),
            parent_span_id=convert_to_span_id(message_id, "message"),
            span_id=generate_span_id(),
            name="dataset_retrieval",
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
                GEN_AI_FRAMEWORK: "dify",
                RETRIEVAL_QUERY: str(trace_info.inputs),
                RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False),
                INPUT_VALUE: str(trace_info.inputs),
                OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False),
            },
        )
        self.trace_client.add_span(dataset_retrieval_span)

    def tool_trace(self, trace_info: ToolTraceInfo):
        if trace_info.message_data is None:
            return
        message_id = trace_info.message_id

        status: Status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)

        tool_span = SpanData(
            trace_id=convert_to_trace_id(message_id),
            parent_span_id=convert_to_span_id(message_id, "message"),
            span_id=generate_span_id(),
            name=trace_info.tool_name,
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
                GEN_AI_FRAMEWORK: "dify",
                TOOL_NAME: trace_info.tool_name,
                TOOL_DESCRIPTION: json.dumps(trace_info.tool_config, ensure_ascii=False),
                TOOL_PARAMETERS: json.dumps(trace_info.tool_inputs, ensure_ascii=False),
                INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
                OUTPUT_VALUE: str(trace_info.tool_outputs),
            },
            status=status,
        )
        self.trace_client.add_span(tool_span)

    def get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> Sequence[WorkflowNodeExecution]:
        # through workflow_run_id get all_nodes_execution using repository
        session_factory = sessionmaker(bind=db.engine)
        # Find the app's creator account
        with Session(db.engine, expire_on_commit=False) as session:
            # Get the app to find its creator
            app_id = trace_info.metadata.get("app_id")
            if not app_id:
                raise ValueError("No app_id found in trace_info metadata")

            app = session.query(App).filter(App.id == app_id).first()
            if not app:
                raise ValueError(f"App with id {app_id} not found")

            if not app.created_by:
                raise ValueError(f"App with id {app_id} has no creator (created_by is None)")

            service_account = session.query(Account).filter(Account.id == app.created_by).first()
            if not service_account:
                raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
            current_tenant = (
                session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first()
            )
            if not current_tenant:
                raise ValueError(f"Current tenant not found for account {service_account.id}")
            service_account.set_tenant_id(current_tenant.tenant_id)
        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=session_factory,
            user=service_account,
            app_id=trace_info.metadata.get("app_id"),
            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
        )
        # Get all executions for this workflow run
        workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run(
            workflow_run_id=trace_info.workflow_run_id
        )
        return workflow_node_executions

    def build_workflow_node_span(
        self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int
    ):
        try:
            if node_execution.node_type == NodeType.LLM:
                node_span = self.build_workflow_llm_span(trace_id, workflow_span_id, trace_info, node_execution)
            elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL:
                node_span = self.build_workflow_retrieval_span(trace_id, workflow_span_id, trace_info, node_execution)
            elif node_execution.node_type == NodeType.TOOL:
                node_span = self.build_workflow_tool_span(trace_id, workflow_span_id, trace_info, node_execution)
            else:
                node_span = self.build_workflow_task_span(trace_id, workflow_span_id, trace_info, node_execution)
            return node_span
        except Exception:
            return None

    def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status:
        span_status: Status = Status(StatusCode.UNSET)
        if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED:
            span_status = Status(StatusCode.OK)
        elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]:
            span_status = Status(StatusCode.ERROR, str(node_execution.error))
        return span_status

    def build_workflow_task_span(
        self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value,
                GEN_AI_FRAMEWORK: "dify",
                INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
            },
            status=self.get_workflow_node_status(node_execution),
        )

    def build_workflow_tool_span(
        self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        tool_des = {}
        if node_execution.metadata:
            tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {})
        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value,
                GEN_AI_FRAMEWORK: "dify",
                TOOL_NAME: node_execution.title,
                TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False),
                TOOL_PARAMETERS: json.dumps(node_execution.inputs if node_execution.inputs else {}, ensure_ascii=False),
                INPUT_VALUE: json.dumps(node_execution.inputs if node_execution.inputs else {}, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False),
            },
            status=self.get_workflow_node_status(node_execution),
        )

    def build_workflow_retrieval_span(
        self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        input_value = ""
        if node_execution.inputs:
            input_value = str(node_execution.inputs.get("query", ""))
        output_value = ""
        if node_execution.outputs:
            output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False)
        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value,
                GEN_AI_FRAMEWORK: "dify",
                RETRIEVAL_QUERY: input_value,
                RETRIEVAL_DOCUMENT: output_value,
                INPUT_VALUE: input_value,
                OUTPUT_VALUE: output_value,
            },
            status=self.get_workflow_node_status(node_execution),
        )

    def build_workflow_llm_span(
        self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution
    ) -> SpanData:
        process_data = node_execution.process_data or {}
        outputs = node_execution.outputs or {}
        return SpanData(
            trace_id=trace_id,
            parent_span_id=workflow_span_id,
            span_id=convert_to_span_id(node_execution.id, "node"),
            name=node_execution.title,
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_MODEL_NAME: process_data.get("model_name", ""),
                GEN_AI_SYSTEM: process_data.get("model_provider", ""),
                GEN_AI_USAGE_INPUT_TOKENS: str(outputs.get("usage", {}).get("prompt_tokens", 0)),
                GEN_AI_USAGE_OUTPUT_TOKENS: str(outputs.get("usage", {}).get("completion_tokens", 0)),
                GEN_AI_USAGE_TOTAL_TOKENS: str(outputs.get("usage", {}).get("total_tokens", 0)),
                GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
                GEN_AI_COMPLETION: str(outputs.get("text", "")),
                GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""),
                INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False),
                OUTPUT_VALUE: str(outputs.get("text", "")),
            },
            status=self.get_workflow_node_status(node_execution),
        )

    def add_workflow_span(self, trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo):
        message_span_id = None
        if trace_info.message_id:
            message_span_id = convert_to_span_id(trace_info.message_id, "message")
        user_id = trace_info.metadata.get("user_id")
        status: Status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)
        if message_span_id:  # chatflow
            message_span = SpanData(
                trace_id=trace_id,
                parent_span_id=None,
                span_id=message_span_id,
                name="message",
                start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
                end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
                attributes={
                    GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                    GEN_AI_USER_ID: str(user_id),
                    GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
                    GEN_AI_FRAMEWORK: "dify",
                    INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""),
                    OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
                },
                status=status,
            )
            self.trace_client.add_span(message_span)

        workflow_span = SpanData(
            trace_id=trace_id,
            parent_span_id=message_span_id,
            span_id=workflow_span_id,
            name="workflow",
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
                GEN_AI_FRAMEWORK: "dify",
                INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False),
            },
            status=status,
        )
        self.trace_client.add_span(workflow_span)

    def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
        message_id = trace_info.message_id
        status: Status = Status(StatusCode.OK)
        if trace_info.error:
            status = Status(StatusCode.ERROR, trace_info.error)
        suggested_question_span = SpanData(
            trace_id=convert_to_trace_id(message_id),
            parent_span_id=convert_to_span_id(message_id, "message"),
            span_id=convert_to_span_id(message_id, "suggested_question"),
            name="suggested_question",
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""),
                GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""),
                GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False),
                GEN_AI_COMPLETION: json.dumps(trace_info.suggested_question, ensure_ascii=False),
                INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False),
                OUTPUT_VALUE: json.dumps(trace_info.suggested_question, ensure_ascii=False),
            },
            status=status,
        )
        self.trace_client.add_span(suggested_question_span)


def extract_retrieval_documents(documents: list[Document]):
    documents_data = []
    for document in documents:
        document_data = {
            "content": document.page_content,
            "metadata": {
                "dataset_id": document.metadata.get("dataset_id"),
                "doc_id": document.metadata.get("doc_id"),
                "document_id": document.metadata.get("document_id"),
            },
            "score": document.metadata.get("score"),
        }
        documents_data.append(document_data)
    return documents_data
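Reviewer note: the span hierarchy built above leans on the deterministic ID helpers from data_exporter/traceclient.py, so spans emitted at different points for the same workflow run or message still land in the same trace with the expected parent. A minimal sketch of that relationship (the UUID value below is hypothetical, not taken from this diff):

    from core.ops.aliyun_trace.data_exporter.traceclient import convert_to_span_id, convert_to_trace_id

    run_id = "0b7f2d5a-8f7e-4c1d-9a3b-2f6c8d4e1a90"  # hypothetical workflow_run_id

    # The same UUID always maps to the same 128-bit trace id ...
    trace_id = convert_to_trace_id(run_id)
    # ... and the same (UUID, span type) pair always maps to the same 64-bit span id,
    # so add_workflow_span() and every node span agree on the parent without sharing state.
    workflow_span_id = convert_to_span_id(run_id, "workflow")
    assert convert_to_span_id(run_id, "workflow") == workflow_span_id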
@@ -0,0 +1,200 @@
import hashlib
import logging
import random
import socket
import threading
import uuid
from collections import deque
from collections.abc import Sequence
from datetime import datetime
from typing import Optional

import requests
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.semconv.resource import ResourceAttributes

from configs import dify_config
from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData

INVALID_SPAN_ID = 0x0000000000000000
INVALID_TRACE_ID = 0x00000000000000000000000000000000

logger = logging.getLogger(__name__)


class TraceClient:
    def __init__(
        self,
        service_name: str,
        endpoint: str,
        max_queue_size: int = 1000,
        schedule_delay_sec: int = 5,
        max_export_batch_size: int = 50,
    ):
        self.endpoint = endpoint
        self.resource = Resource(
            attributes={
                ResourceAttributes.SERVICE_NAME: service_name,
                ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
                ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
                ResourceAttributes.HOST_NAME: socket.gethostname(),
            }
        )
        self.span_builder = SpanBuilder(self.resource)
        self.exporter = OTLPSpanExporter(endpoint=endpoint)

        self.max_queue_size = max_queue_size
        self.schedule_delay_sec = schedule_delay_sec
        self.max_export_batch_size = max_export_batch_size

        self.queue: deque = deque(maxlen=max_queue_size)
        self.condition = threading.Condition(threading.Lock())
        self.done = False

        self.worker_thread = threading.Thread(target=self._worker, daemon=True)
        self.worker_thread.start()

        self._spans_dropped = False

    def export(self, spans: Sequence[ReadableSpan]):
        self.exporter.export(spans)

    def api_check(self):
        try:
            response = requests.head(self.endpoint, timeout=5)
            if response.status_code == 405:
                return True
            else:
                logger.debug(f"AliyunTrace API check failed: Unexpected status code: {response.status_code}")
                return False
        except requests.exceptions.RequestException as e:
            logger.debug(f"AliyunTrace API check failed: {str(e)}")
            raise ValueError(f"AliyunTrace API check failed: {str(e)}")

    def get_project_url(self):
        return "https://arms.console.aliyun.com/#/llm"

    def add_span(self, span_data: SpanData):
        if span_data is None:
            return
        span: ReadableSpan = self.span_builder.build_span(span_data)
        with self.condition:
            if len(self.queue) == self.max_queue_size:
                if not self._spans_dropped:
                    logger.warning("Queue is full, likely spans will be dropped.")
                    self._spans_dropped = True

            self.queue.appendleft(span)
            if len(self.queue) >= self.max_export_batch_size:
                self.condition.notify()

    def _worker(self):
        while not self.done:
            with self.condition:
                if len(self.queue) < self.max_export_batch_size and not self.done:
                    self.condition.wait(timeout=self.schedule_delay_sec)
            self._export_batch()

    def _export_batch(self):
        spans_to_export: list[ReadableSpan] = []
        with self.condition:
            while len(spans_to_export) < self.max_export_batch_size and self.queue:
                spans_to_export.append(self.queue.pop())

        if spans_to_export:
            try:
                self.exporter.export(spans_to_export)
            except Exception as e:
                logger.debug(f"Error exporting spans: {e}")

    def shutdown(self):
        with self.condition:
            self.done = True
            self.condition.notify_all()
        self.worker_thread.join()
        self._export_batch()
        self.exporter.shutdown()


class SpanBuilder:
    def __init__(self, resource):
        self.resource = resource
        self.instrumentation_scope = InstrumentationScope(
            __name__,
            "",
            None,
            None,
        )

    def build_span(self, span_data: SpanData) -> ReadableSpan:
        span_context = trace_api.SpanContext(
            trace_id=span_data.trace_id,
            span_id=span_data.span_id,
            is_remote=False,
            trace_flags=trace_api.TraceFlags(trace_api.TraceFlags.SAMPLED),
            trace_state=None,
        )

        parent_span_context = None
        if span_data.parent_span_id is not None:
            parent_span_context = trace_api.SpanContext(
                trace_id=span_data.trace_id,
                span_id=span_data.parent_span_id,
                is_remote=False,
                trace_flags=trace_api.TraceFlags(trace_api.TraceFlags.SAMPLED),
                trace_state=None,
            )

        span = ReadableSpan(
            name=span_data.name,
            context=span_context,
            parent=parent_span_context,
            resource=self.resource,
            attributes=span_data.attributes,
            events=span_data.events,
            links=span_data.links,
            kind=trace_api.SpanKind.INTERNAL,
            status=span_data.status,
            start_time=span_data.start_time,
            end_time=span_data.end_time,
            instrumentation_scope=self.instrumentation_scope,
        )
        return span


def generate_span_id() -> int:
    span_id = random.getrandbits(64)
    while span_id == INVALID_SPAN_ID:
        span_id = random.getrandbits(64)
    return span_id


def convert_to_trace_id(uuid_v4: Optional[str]) -> int:
    try:
        uuid_obj = uuid.UUID(uuid_v4)
        return uuid_obj.int
    except Exception as e:
        raise ValueError(f"Invalid UUID input: {e}")


def convert_to_span_id(uuid_v4: Optional[str], span_type: str) -> int:
    try:
        uuid_obj = uuid.UUID(uuid_v4)
    except Exception as e:
        raise ValueError(f"Invalid UUID input: {e}")
    combined_key = f"{uuid_obj.hex}-{span_type}"
    hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest()
    span_id = int.from_bytes(hash_bytes[:8], byteorder="big", signed=False)
    return span_id


def convert_datetime_to_nanoseconds(start_time_a: Optional[datetime]) -> Optional[int]:
    if start_time_a is None:
        return None
    timestamp_in_seconds = start_time_a.timestamp()
    timestamp_in_nanoseconds = int(timestamp_in_seconds * 1e9)
    return timestamp_in_nanoseconds
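Reviewer note: a minimal usage sketch of the exporter above, assuming it runs inside a configured Dify API process (the endpoint and IDs below are illustrative, not real values). add_span() only enqueues a ReadableSpan; the daemon worker batches and exports on a timer or once max_export_batch_size is reached, and shutdown() flushes whatever is left:

    from core.ops.aliyun_trace.data_exporter.traceclient import (
        TraceClient,
        convert_to_span_id,
        convert_to_trace_id,
    )
    from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData

    client = TraceClient(
        service_name="dify_app",
        endpoint="https://collector.example.com/adapt_xxx/api/otlp/traces",  # illustrative endpoint
    )

    message_id = "7c9e6679-7425-40de-944b-e07fc1f90ae7"  # hypothetical message id
    client.add_span(
        SpanData(
            trace_id=convert_to_trace_id(message_id),
            parent_span_id=None,
            span_id=convert_to_span_id(message_id, "message"),
            name="message",
            start_time=None,
            end_time=None,
        )
    )

    client.shutdown()  # flush the queue and stop the worker thread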
@@ -0,0 +1,21 @@
from collections.abc import Sequence
from typing import Optional

from opentelemetry import trace as trace_api
from opentelemetry.sdk.trace import Event, Status, StatusCode
from pydantic import BaseModel, Field


class SpanData(BaseModel):
    model_config = {"arbitrary_types_allowed": True}

    trace_id: int = Field(..., description="The unique identifier for the trace.")
    parent_span_id: Optional[int] = Field(None, description="The ID of the parent span, if any.")
    span_id: int = Field(..., description="The unique identifier for this span.")
    name: str = Field(..., description="The name of the span.")
    attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.")
    events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.")
    links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.")
    status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.")
    start_time: Optional[int] = Field(..., description="The start time of the span in nanoseconds.")
    end_time: Optional[int] = Field(..., description="The end time of the span in nanoseconds.")
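Reviewer note: a small sketch of how SpanData is typically filled in, using the nanosecond helper from traceclient.py; the timestamp and ids are made up for illustration. Fields that are not passed fall back to their declared defaults (empty attributes/events/links, UNSET status):

    from datetime import datetime, timezone

    from core.ops.aliyun_trace.data_exporter.traceclient import (
        convert_datetime_to_nanoseconds,
        generate_span_id,
    )
    from core.ops.aliyun_trace.entities.aliyun_trace_entity import SpanData

    started_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc)  # illustrative timestamp

    span = SpanData(
        trace_id=0x1,                # normally convert_to_trace_id(<uuid>)
        parent_span_id=None,
        span_id=generate_span_id(),  # random non-zero 64-bit id
        name="dataset_retrieval",
        start_time=convert_datetime_to_nanoseconds(started_at),
        end_time=convert_datetime_to_nanoseconds(started_at),
    )
    assert span.attributes == {} and span.status.status_code.name == "UNSET"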
@@ -0,0 +1,64 @@
from enum import Enum

# public
GEN_AI_SESSION_ID = "gen_ai.session.id"

GEN_AI_USER_ID = "gen_ai.user.id"

GEN_AI_USER_NAME = "gen_ai.user.name"

GEN_AI_SPAN_KIND = "gen_ai.span.kind"

GEN_AI_FRAMEWORK = "gen_ai.framework"


# Chain
INPUT_VALUE = "input.value"

OUTPUT_VALUE = "output.value"


# Retriever
RETRIEVAL_QUERY = "retrieval.query"

RETRIEVAL_DOCUMENT = "retrieval.document"


# LLM
GEN_AI_MODEL_NAME = "gen_ai.model_name"

GEN_AI_SYSTEM = "gen_ai.system"

GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"

GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"

GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens"

GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template"

GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable"

GEN_AI_PROMPT = "gen_ai.prompt"

GEN_AI_COMPLETION = "gen_ai.completion"

GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason"

# Tool
TOOL_NAME = "tool.name"

TOOL_DESCRIPTION = "tool.description"

TOOL_PARAMETERS = "tool.parameters"


class GenAISpanKind(Enum):
    CHAIN = "CHAIN"
    RETRIEVER = "RETRIEVER"
    RERANKER = "RERANKER"
    LLM = "LLM"
    EMBEDDING = "EMBEDDING"
    TOOL = "TOOL"
    AGENT = "AGENT"
    TASK = "TASK"
@@ -0,0 +1 @@
# Unit tests for core ops module
@@ -0,0 +1,385 @@
import pytest
from pydantic import ValidationError

from core.ops.entities.config_entity import (
    AliyunConfig,
    ArizeConfig,
    LangfuseConfig,
    LangSmithConfig,
    OpikConfig,
    PhoenixConfig,
    TracingProviderEnum,
    WeaveConfig,
)


class TestTracingProviderEnum:
    """Test cases for TracingProviderEnum"""

    def test_enum_values(self):
        """Test that all expected enum values are present"""
        assert TracingProviderEnum.ARIZE == "arize"
        assert TracingProviderEnum.PHOENIX == "phoenix"
        assert TracingProviderEnum.LANGFUSE == "langfuse"
        assert TracingProviderEnum.LANGSMITH == "langsmith"
        assert TracingProviderEnum.OPIK == "opik"
        assert TracingProviderEnum.WEAVE == "weave"
        assert TracingProviderEnum.ALIYUN == "aliyun"


class TestArizeConfig:
    """Test cases for ArizeConfig"""

    def test_valid_config(self):
        """Test valid Arize configuration"""
        config = ArizeConfig(
            api_key="test_key", space_id="test_space", project="test_project", endpoint="https://custom.arize.com"
        )
        assert config.api_key == "test_key"
        assert config.space_id == "test_space"
        assert config.project == "test_project"
        assert config.endpoint == "https://custom.arize.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = ArizeConfig()
        assert config.api_key is None
        assert config.space_id is None
        assert config.project is None
        assert config.endpoint == "https://otlp.arize.com"

    def test_project_validation_empty(self):
        """Test project validation with empty value"""
        config = ArizeConfig(project="")
        assert config.project == "default"

    def test_project_validation_none(self):
        """Test project validation with None value"""
        config = ArizeConfig(project=None)
        assert config.project == "default"

    def test_endpoint_validation_empty(self):
        """Test endpoint validation with empty value"""
        config = ArizeConfig(endpoint="")
        assert config.endpoint == "https://otlp.arize.com"

    def test_endpoint_validation_with_path(self):
        """Test endpoint validation normalizes URL by removing path"""
        config = ArizeConfig(endpoint="https://custom.arize.com/api/v1")
        assert config.endpoint == "https://custom.arize.com"

    def test_endpoint_validation_invalid_scheme(self):
        """Test endpoint validation rejects invalid schemes"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            ArizeConfig(endpoint="ftp://invalid.com")

    def test_endpoint_validation_no_scheme(self):
        """Test endpoint validation rejects URLs without scheme"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            ArizeConfig(endpoint="invalid.com")


class TestPhoenixConfig:
    """Test cases for PhoenixConfig"""

    def test_valid_config(self):
        """Test valid Phoenix configuration"""
        config = PhoenixConfig(api_key="test_key", project="test_project", endpoint="https://custom.phoenix.com")
        assert config.api_key == "test_key"
        assert config.project == "test_project"
        assert config.endpoint == "https://custom.phoenix.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = PhoenixConfig()
        assert config.api_key is None
        assert config.project is None
        assert config.endpoint == "https://app.phoenix.arize.com"

    def test_project_validation_empty(self):
        """Test project validation with empty value"""
        config = PhoenixConfig(project="")
        assert config.project == "default"

    def test_endpoint_validation_with_path(self):
        """Test endpoint validation normalizes URL by removing path"""
        config = PhoenixConfig(endpoint="https://custom.phoenix.com/api/v1")
        assert config.endpoint == "https://custom.phoenix.com"


class TestLangfuseConfig:
    """Test cases for LangfuseConfig"""

    def test_valid_config(self):
        """Test valid Langfuse configuration"""
        config = LangfuseConfig(public_key="public_key", secret_key="secret_key", host="https://custom.langfuse.com")
        assert config.public_key == "public_key"
        assert config.secret_key == "secret_key"
        assert config.host == "https://custom.langfuse.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = LangfuseConfig(public_key="public", secret_key="secret")
        assert config.host == "https://api.langfuse.com"

    def test_missing_required_fields(self):
        """Test that required fields are enforced"""
        with pytest.raises(ValidationError):
            LangfuseConfig()

        with pytest.raises(ValidationError):
            LangfuseConfig(public_key="public")

        with pytest.raises(ValidationError):
            LangfuseConfig(secret_key="secret")

    def test_host_validation_empty(self):
        """Test host validation with empty value"""
        config = LangfuseConfig(public_key="public", secret_key="secret", host="")
        assert config.host == "https://api.langfuse.com"


class TestLangSmithConfig:
    """Test cases for LangSmithConfig"""

    def test_valid_config(self):
        """Test valid LangSmith configuration"""
        config = LangSmithConfig(api_key="test_key", project="test_project", endpoint="https://custom.smith.com")
        assert config.api_key == "test_key"
        assert config.project == "test_project"
        assert config.endpoint == "https://custom.smith.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = LangSmithConfig(api_key="key", project="project")
        assert config.endpoint == "https://api.smith.langchain.com"

    def test_missing_required_fields(self):
        """Test that required fields are enforced"""
        with pytest.raises(ValidationError):
            LangSmithConfig()

        with pytest.raises(ValidationError):
            LangSmithConfig(api_key="key")

        with pytest.raises(ValidationError):
            LangSmithConfig(project="project")

    def test_endpoint_validation_https_only(self):
        """Test endpoint validation only allows HTTPS"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            LangSmithConfig(api_key="key", project="project", endpoint="http://insecure.com")


class TestOpikConfig:
    """Test cases for OpikConfig"""

    def test_valid_config(self):
        """Test valid Opik configuration"""
        config = OpikConfig(
            api_key="test_key",
            project="test_project",
            workspace="test_workspace",
            url="https://custom.comet.com/opik/api/",
        )
        assert config.api_key == "test_key"
        assert config.project == "test_project"
        assert config.workspace == "test_workspace"
        assert config.url == "https://custom.comet.com/opik/api/"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = OpikConfig()
        assert config.api_key is None
        assert config.project is None
        assert config.workspace is None
        assert config.url == "https://www.comet.com/opik/api/"

    def test_project_validation_empty(self):
        """Test project validation with empty value"""
        config = OpikConfig(project="")
        assert config.project == "Default Project"

    def test_url_validation_empty(self):
        """Test URL validation with empty value"""
        config = OpikConfig(url="")
        assert config.url == "https://www.comet.com/opik/api/"

    def test_url_validation_missing_suffix(self):
        """Test URL validation requires /api/ suffix"""
        with pytest.raises(ValidationError, match="URL should end with /api/"):
            OpikConfig(url="https://custom.comet.com/opik/")

    def test_url_validation_invalid_scheme(self):
        """Test URL validation rejects invalid schemes"""
        with pytest.raises(ValidationError, match="URL must start with https:// or http://"):
            OpikConfig(url="ftp://custom.comet.com/opik/api/")


class TestWeaveConfig:
    """Test cases for WeaveConfig"""

    def test_valid_config(self):
        """Test valid Weave configuration"""
        config = WeaveConfig(
            api_key="test_key",
            entity="test_entity",
            project="test_project",
            endpoint="https://custom.wandb.ai",
            host="https://custom.host.com",
        )
        assert config.api_key == "test_key"
        assert config.entity == "test_entity"
        assert config.project == "test_project"
        assert config.endpoint == "https://custom.wandb.ai"
        assert config.host == "https://custom.host.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = WeaveConfig(api_key="key", project="project")
        assert config.entity is None
        assert config.endpoint == "https://trace.wandb.ai"
        assert config.host is None

    def test_missing_required_fields(self):
        """Test that required fields are enforced"""
        with pytest.raises(ValidationError):
            WeaveConfig()

        with pytest.raises(ValidationError):
            WeaveConfig(api_key="key")

        with pytest.raises(ValidationError):
            WeaveConfig(project="project")

    def test_endpoint_validation_https_only(self):
        """Test endpoint validation only allows HTTPS"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            WeaveConfig(api_key="key", project="project", endpoint="http://insecure.wandb.ai")

    def test_host_validation_optional(self):
        """Test host validation is optional but validates when provided"""
        config = WeaveConfig(api_key="key", project="project", host=None)
        assert config.host is None

        config = WeaveConfig(api_key="key", project="project", host="")
        assert config.host == ""

        config = WeaveConfig(api_key="key", project="project", host="https://valid.host.com")
        assert config.host == "https://valid.host.com"

    def test_host_validation_invalid_scheme(self):
        """Test host validation rejects invalid schemes when provided"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            WeaveConfig(api_key="key", project="project", host="ftp://invalid.host.com")


class TestAliyunConfig:
    """Test cases for AliyunConfig"""

    def test_valid_config(self):
        """Test valid Aliyun configuration"""
        config = AliyunConfig(
            app_name="test_app",
            license_key="test_license_key",
            endpoint="https://custom.tracing-analysis-dc-hz.aliyuncs.com",
        )
        assert config.app_name == "test_app"
        assert config.license_key == "test_license_key"
        assert config.endpoint == "https://custom.tracing-analysis-dc-hz.aliyuncs.com"

    def test_default_values(self):
        """Test default values are set correctly"""
        config = AliyunConfig(license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")
        assert config.app_name == "dify_app"

    def test_missing_required_fields(self):
        """Test that required fields are enforced"""
        with pytest.raises(ValidationError):
            AliyunConfig()

        with pytest.raises(ValidationError):
            AliyunConfig(license_key="test_license")

        with pytest.raises(ValidationError):
            AliyunConfig(endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")

    def test_app_name_validation_empty(self):
        """Test app_name validation with empty value"""
        config = AliyunConfig(
            license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com", app_name=""
        )
        assert config.app_name == "dify_app"

    def test_endpoint_validation_empty(self):
        """Test endpoint validation with empty value"""
        config = AliyunConfig(license_key="test_license", endpoint="")
        assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"

    def test_endpoint_validation_with_path(self):
        """Test endpoint validation normalizes URL by removing path"""
        config = AliyunConfig(
            license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces"
        )
        assert config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"

    def test_endpoint_validation_invalid_scheme(self):
        """Test endpoint validation rejects invalid schemes"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            AliyunConfig(license_key="test_license", endpoint="ftp://invalid.tracing-analysis-dc-hz.aliyuncs.com")

    def test_endpoint_validation_no_scheme(self):
        """Test endpoint validation rejects URLs without scheme"""
        with pytest.raises(ValidationError, match="URL scheme must be one of"):
            AliyunConfig(license_key="test_license", endpoint="invalid.tracing-analysis-dc-hz.aliyuncs.com")

    def test_license_key_required(self):
        """Test that license_key is required and cannot be empty"""
        with pytest.raises(ValidationError):
            AliyunConfig(license_key="", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com")


class TestConfigIntegration:
    """Integration tests for configuration classes"""

    def test_all_configs_can_be_instantiated(self):
        """Test that all config classes can be instantiated with valid data"""
        configs = [
            ArizeConfig(api_key="key"),
            PhoenixConfig(api_key="key"),
            LangfuseConfig(public_key="public", secret_key="secret"),
            LangSmithConfig(api_key="key", project="project"),
            OpikConfig(api_key="key"),
            WeaveConfig(api_key="key", project="project"),
            AliyunConfig(license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com"),
        ]

        for config in configs:
            assert config is not None

    def test_url_normalization_consistency(self):
        """Test that URL normalization works consistently across configs"""
        # Test that paths are removed from endpoints
        arize_config = ArizeConfig(endpoint="https://arize.com/api/v1/test")
        phoenix_config = PhoenixConfig(endpoint="https://phoenix.com/api/v2/")
        aliyun_config = AliyunConfig(
            license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com/api/v1/traces"
        )

        assert arize_config.endpoint == "https://arize.com"
        assert phoenix_config.endpoint == "https://phoenix.com"
        assert aliyun_config.endpoint == "https://tracing-analysis-dc-hz.aliyuncs.com"

    def test_project_default_values(self):
        """Test that project default values are set correctly"""
        arize_config = ArizeConfig(project="")
        phoenix_config = PhoenixConfig(project="")
        opik_config = OpikConfig(project="")
        aliyun_config = AliyunConfig(
            license_key="test_license", endpoint="https://tracing-analysis-dc-hz.aliyuncs.com", app_name=""
        )

        assert arize_config.project == "default"
        assert phoenix_config.project == "default"
        assert opik_config.project == "Default Project"
        assert aliyun_config.app_name == "dify_app"
@@ -0,0 +1,138 @@
import pytest

from core.ops.utils import validate_project_name, validate_url, validate_url_with_path


class TestValidateUrl:
    """Test cases for validate_url function"""

    def test_valid_https_url(self):
        """Test valid HTTPS URL"""
        result = validate_url("https://example.com", "https://default.com")
        assert result == "https://example.com"

    def test_valid_http_url(self):
        """Test valid HTTP URL"""
        result = validate_url("http://example.com", "https://default.com")
        assert result == "http://example.com"

    def test_url_with_path_removed(self):
        """Test that URL path is removed during normalization"""
        result = validate_url("https://example.com/api/v1/test", "https://default.com")
        assert result == "https://example.com"

    def test_url_with_query_removed(self):
        """Test that URL query parameters are removed"""
        result = validate_url("https://example.com?param=value", "https://default.com")
        assert result == "https://example.com"

    def test_url_with_fragment_removed(self):
        """Test that URL fragments are removed"""
        result = validate_url("https://example.com#section", "https://default.com")
        assert result == "https://example.com"

    def test_empty_url_returns_default(self):
        """Test empty URL returns default"""
        result = validate_url("", "https://default.com")
        assert result == "https://default.com"

    def test_none_url_returns_default(self):
        """Test None URL returns default"""
        result = validate_url(None, "https://default.com")
        assert result == "https://default.com"

    def test_whitespace_url_returns_default(self):
        """Test whitespace URL returns default"""
        result = validate_url(" ", "https://default.com")
        assert result == "https://default.com"

    def test_invalid_scheme_raises_error(self):
        """Test invalid scheme raises ValueError"""
        with pytest.raises(ValueError, match="URL scheme must be one of"):
            validate_url("ftp://example.com", "https://default.com")

    def test_no_scheme_raises_error(self):
        """Test URL without scheme raises ValueError"""
        with pytest.raises(ValueError, match="URL scheme must be one of"):
            validate_url("example.com", "https://default.com")

    def test_custom_allowed_schemes(self):
        """Test custom allowed schemes"""
        result = validate_url("https://example.com", "https://default.com", allowed_schemes=("https",))
        assert result == "https://example.com"

        with pytest.raises(ValueError, match="URL scheme must be one of"):
            validate_url("http://example.com", "https://default.com", allowed_schemes=("https",))


class TestValidateUrlWithPath:
    """Test cases for validate_url_with_path function"""

    def test_valid_url_with_path(self):
        """Test valid URL with path"""
        result = validate_url_with_path("https://example.com/api/v1", "https://default.com")
        assert result == "https://example.com/api/v1"

    def test_valid_url_with_required_suffix(self):
        """Test valid URL with required suffix"""
        result = validate_url_with_path("https://example.com/api/", "https://default.com", required_suffix="/api/")
        assert result == "https://example.com/api/"

    def test_url_without_required_suffix_raises_error(self):
        """Test URL without required suffix raises error"""
        with pytest.raises(ValueError, match="URL should end with /api/"):
            validate_url_with_path("https://example.com/api", "https://default.com", required_suffix="/api/")

    def test_empty_url_returns_default(self):
        """Test empty URL returns default"""
        result = validate_url_with_path("", "https://default.com")
        assert result == "https://default.com"

    def test_none_url_returns_default(self):
        """Test None URL returns default"""
        result = validate_url_with_path(None, "https://default.com")
        assert result == "https://default.com"

    def test_invalid_scheme_raises_error(self):
        """Test invalid scheme raises ValueError"""
        with pytest.raises(ValueError, match="URL must start with https:// or http://"):
            validate_url_with_path("ftp://example.com", "https://default.com")

    def test_no_scheme_raises_error(self):
        """Test URL without scheme raises ValueError"""
        with pytest.raises(ValueError, match="URL must start with https:// or http://"):
            validate_url_with_path("example.com", "https://default.com")


class TestValidateProjectName:
    """Test cases for validate_project_name function"""

    def test_valid_project_name(self):
        """Test valid project name"""
        result = validate_project_name("my-project", "default")
        assert result == "my-project"

    def test_empty_project_name_returns_default(self):
        """Test empty project name returns default"""
        result = validate_project_name("", "default")
        assert result == "default"

    def test_none_project_name_returns_default(self):
        """Test None project name returns default"""
        result = validate_project_name(None, "default")
        assert result == "default"

    def test_whitespace_project_name_returns_default(self):
        """Test whitespace project name returns default"""
        result = validate_project_name(" ", "default")
        assert result == "default"

    def test_project_name_with_whitespace_trimmed(self):
        """Test project name with whitespace is trimmed"""
        result = validate_project_name(" my-project ", "default")
        assert result == "my-project"

    def test_custom_default_name(self):
        """Test custom default name"""
        result = validate_project_name("", "Custom Default")
        assert result == "Custom Default"
@@ -0,0 +1,74 @@
import base64
import binascii
import os

import pytest

from libs.password import compare_password, hash_password, valid_password


class TestValidPassword:
    """Test password format validation"""

    def test_should_accept_valid_passwords(self):
        """Test accepting valid password formats"""
        assert valid_password("password123") == "password123"
        assert valid_password("test1234") == "test1234"
        assert valid_password("Test123456") == "Test123456"

    def test_should_reject_invalid_passwords(self):
        """Test rejecting invalid password formats"""
        # Too short
        with pytest.raises(ValueError) as exc_info:
            valid_password("abc123")
        assert "Password must contain letters and numbers" in str(exc_info.value)

        # No numbers
        with pytest.raises(ValueError):
            valid_password("abcdefgh")

        # No letters
        with pytest.raises(ValueError):
            valid_password("12345678")

        # Empty
        with pytest.raises(ValueError):
            valid_password("")


class TestPasswordHashing:
    """Test password hashing and comparison"""

    def setup_method(self):
        """Setup test data"""
        self.password = "test123password"
        self.salt = os.urandom(16)
        self.salt_base64 = base64.b64encode(self.salt).decode()

        password_hash = hash_password(self.password, self.salt)
        self.password_hash_base64 = base64.b64encode(password_hash).decode()

    def test_should_verify_correct_password(self):
        """Test correct password verification"""
        result = compare_password(self.password, self.password_hash_base64, self.salt_base64)
        assert result is True

    def test_should_reject_wrong_password(self):
        """Test rejection of incorrect passwords"""
        result = compare_password("wrongpassword", self.password_hash_base64, self.salt_base64)
        assert result is False

    def test_should_handle_invalid_base64(self):
        """Test handling of invalid base64 data"""
        # Invalid base64 hash
        with pytest.raises(binascii.Error):
            compare_password(self.password, "invalid_base64!", self.salt_base64)

        # Invalid base64 salt
        with pytest.raises(binascii.Error):
            compare_password(self.password, self.password_hash_base64, "invalid_base64!")

    def test_should_be_case_sensitive(self):
        """Test password case sensitivity"""
        result = compare_password(self.password.upper(), self.password_hash_base64, self.salt_base64)
        assert result is False
@ -0,0 +1,145 @@
import type { ReactElement } from 'react'
import { cloneElement, useCallback } from 'react'
import { useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '../base/portal-to-follow-elem'
import { RiMoreLine } from '@remixicon/react'

export type Operation = {
  id: string; title: string; icon: ReactElement; onClick: () => void
}

const AppOperations = ({ operations, gap }: {
  operations: Operation[]
  gap: number
}) => {
  const { t } = useTranslation()
  const [visibleOperations, setVisibleOperations] = useState<Operation[]>([])
  const [moreOperations, setMoreOperations] = useState<Operation[]>([])
  const [showMore, setShowMore] = useState(false)
  const navRef = useRef<HTMLDivElement>(null)
  const handleTriggerMore = useCallback(() => {
    setShowMore(true)
  }, [setShowMore])

  useEffect(() => {
    const moreElement = document.getElementById('more')
    const navElement = document.getElementById('nav')
    let width = 0
    const containerWidth = navElement?.clientWidth ?? 0
    const moreWidth = moreElement?.clientWidth ?? 0

    if (containerWidth === 0 || moreWidth === 0) return

    const updatedEntries: Record<string, boolean> = operations.reduce((pre, cur) => {
      pre[cur.id] = false
      return pre
    }, {} as Record<string, boolean>)
    const childrens = Array.from(navRef.current!.children).slice(0, -1)
    for (let i = 0; i < childrens.length; i++) {
      const child: any = childrens[i]
      const id = child.dataset.targetid
      if (!id) break
      const childWidth = child.clientWidth

      if (width + gap + childWidth + moreWidth <= containerWidth) {
        updatedEntries[id] = true
        width += gap + childWidth
      }
      else {
        if (i === childrens.length - 1 && width + childWidth <= containerWidth)
          updatedEntries[id] = true
        else
          updatedEntries[id] = false
        break
      }
    }
    setVisibleOperations(operations.filter(item => updatedEntries[item.id]))
    setMoreOperations(operations.filter(item => !updatedEntries[item.id]))
  }, [operations, gap])

  return (
    <>
      {!visibleOperations.length && <div
        id="nav"
        ref={navRef}
        className="flex h-0 items-center self-stretch overflow-hidden"
        style={{ gap }}
      >
        {operations.map((operation, index) =>
          <Button
            key={index}
            data-targetid={operation.id}
            size={'small'}
            variant={'secondary'}
            className="gap-[1px]">
            {cloneElement(operation.icon, { className: 'h-3.5 w-3.5 text-components-button-secondary-text' })}
            <span className="system-xs-medium text-components-button-secondary-text">
              {operation.title}
            </span>
          </Button>,
        )}
        <Button
          id="more"
          size={'small'}
          variant={'secondary'}
          className="gap-[1px]"
        >
          <RiMoreLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
          <span className="system-xs-medium text-components-button-secondary-text">
            {t('common.operation.more')}
          </span>
        </Button>
      </div>}
      <div className="flex items-center self-stretch overflow-hidden" style={{ gap }}>
        {visibleOperations.map(operation =>
          <Button
            key={operation.id}
            data-targetid={operation.id}
            size={'small'}
            variant={'secondary'}
            className="gap-[1px]"
            onClick={operation.onClick}>
            {cloneElement(operation.icon, { className: 'h-3.5 w-3.5 text-components-button-secondary-text' })}
            <span className="system-xs-medium text-components-button-secondary-text">
              {operation.title}
            </span>
          </Button>,
        )}
        {visibleOperations.length < operations.length && <PortalToFollowElem
          open={showMore}
          onOpenChange={setShowMore}
          placement='bottom-end'
          offset={{
            mainAxis: 4,
          }}>
          <PortalToFollowElemTrigger onClick={handleTriggerMore}>
            <Button
              size={'small'}
              variant={'secondary'}
              className='gap-[1px]'
            >
              <RiMoreLine className='h-3.5 w-3.5 text-components-button-secondary-text' />
              <span className='system-xs-medium text-components-button-secondary-text'>{t('common.operation.more')}</span>
            </Button>
          </PortalToFollowElemTrigger>
          <PortalToFollowElemContent className='z-[21]'>
            <div className='flex min-w-[264px] flex-col rounded-[12px] border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg backdrop-blur-[5px]'>
              {moreOperations.map(item => <div
                key={item.id}
                className='flex h-8 cursor-pointer items-center gap-x-1 rounded-lg p-1.5 hover:bg-state-base-hover'
                onClick={item.onClick}
              >
                {cloneElement(item.icon, { className: 'h-4 w-4 text-text-tertiary' })}
                <span className='system-md-regular text-text-secondary'>{item.title}</span>
              </div>)}
            </div>
          </PortalToFollowElemContent>
        </PortalToFollowElem>}
      </div>
    </>
  )
}

export default AppOperations
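A minimal usage sketch for the AppOperations component above; the Toolbar wrapper, the RiEditLine icon, the import path and the click handler are illustrative assumptions, not part of this diff:

// Hypothetical consumer of AppOperations (component name, icon and path are assumptions).
import { RiEditLine } from '@remixicon/react'
import AppOperations, { type Operation } from './app-operations'

// Each operation renders as a small secondary button; items that do not fit move into the "more" menu.
const operations: Operation[] = [
  { id: 'edit', title: 'Edit', icon: <RiEditLine />, onClick: () => console.log('edit clicked') },
]

const Toolbar = () => <AppOperations operations={operations} gap={8} />

export default Toolbar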
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 14 KiB
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 10 KiB
@ -0,0 +1,118 @@
{
  "icon": {
    "type": "element",
    "isRootNode": true,
    "name": "svg",
    "attributes": {
      "xmlns": "http://www.w3.org/2000/svg",
      "xmlns:xlink": "http://www.w3.org/1999/xlink",
      "fill": "none",
      "version": "1.1",
      "width": "65",
      "height": "16",
      "viewBox": "0 0 65 16"
    },
    "children": [
      {
        "type": "element",
        "name": "defs",
        "children": [
          {
            "type": "element",
            "name": "clipPath",
            "attributes": {
              "id": "master_svg0_42_34281"
            },
            "children": [
              {
                "type": "element",
                "name": "rect",
                "attributes": {
                  "x": "0",
                  "y": "0",
                  "width": "19",
                  "height": "16",
                  "rx": "0"
                }
              }
            ]
          }
        ]
      },
      {
        "type": "element",
        "name": "g",
        "children": [
          {
            "type": "element",
            "name": "g",
            "attributes": {
              "clip-path": "url(#master_svg0_42_34281)"
            },
            "children": [
              {
                "type": "element",
                "name": "g",
                "children": [
                  {
                    "type": "element",
                    "name": "g",
                    "children": [
                      {
                        "type": "element",
                        "name": "path",
                        "attributes": {
"d": "M4.06862,14.6667C3.79213,14.6667,3.45463,14.5688,3.05614,14.373C2.97908,14.3351,2.92692,14.3105,2.89968,14.2992C2.33193,14.0628,1.82911,13.7294,1.39123,13.2989C0.463742,12.3871,0,11.2874,0,10C0,8.71258,0.463742,7.61293,1.39123,6.70107C2.16172,5.94358,3.06404,5.50073,4.09819,5.37252C4.23172,3.98276,4.81755,2.77756,5.85569,1.75693C7.04708,0.585642,8.4857,0,10.1716,0C11.5256,0,12.743,0.396982,13.8239,1.19095C14.8847,1.97019,15.61,2.97855,16,4.21604L14.7045,4.61063C14.4016,3.64918,13.8374,2.86532,13.0121,2.25905C12.1719,1.64191,11.2251,1.33333,10.1716,1.33333C8.8602,1.33333,7.74124,1.7888,6.81467,2.69974C5.88811,3.61067,5.42483,4.71076,5.42483,6L5.42483,6.66667L4.74673,6.66667C3.81172,6.66667,3.01288,6.99242,2.35021,7.64393C1.68754,8.2954,1.35621,9.08076,1.35621,10C1.35621,10.9192,1.68754,11.7046,2.35021,12.3561C2.66354,12.6641,3.02298,12.9026,3.42852,13.0714C3.48193,13.0937,3.55988,13.13,3.66237,13.1803C3.87004,13.2823,4.00545,13.3333,4.06862,13.3333L4.06862,14.6667Z",
                          "fill-rule": "evenodd",
                          "fill": "#FF6A00",
                          "fill-opacity": "1"
                        }
                      }
                    ]
                  },
                  {
                    "type": "element",
                    "name": "g",
                    "children": [
                      {
                        "type": "element",
                        "name": "path",
                        "attributes": {
"d": "M13.458613505859375,7.779393492279053C12.975613505859375,7.717463492279053,12.484813505859375,7.686503492279053,11.993983505859376,7.686503492279053C11.152583505859376,7.686503492279053,10.303403505859375,7.779393492279053,9.493183505859374,7.941943492279053C8.682953505859375,8.104503492279052,7.903893505859375,8.359943492279053,7.155983505859375,8.654083492279053C6.657383505859375,8.870823492279053,6.158783505859375,9.128843492279053,5.660181505859375,9.428153492279053C5.332974751859375,9.621673492279053,5.239486705859375,10.070633492279054,5.434253505859375,10.395743492279053L7.413073505859375,13.298533492279052C7.639003505859375,13.623603492279052,8.090863505859375,13.716463492279052,8.418073505859375,13.523003492279052C8.547913505859375,13.435263492279052,8.763453505859374,13.326893492279053,9.064693505859374,13.197863492279053C9.516553505859374,13.004333492279052,9.976203505859374,12.872733492279053,10.459223505859375,12.779863492279052C10.942243505859375,12.679263492279052,11.433053505859375,12.617333492279052,11.955023505859375,12.617333492279052L13.380683505859375,7.810353492279052L13.458613505859375,7.779393492279053ZM15.273813505859374,8.135463492279053L15.016753505859375,5.333333492279053L13.458613505859375,7.787133492279053C13.817013505859375,7.818093492279052,14.144213505859375,7.880023492279053,14.494743505859375,7.949683492279053C14.494743505859375,7.944523492279053,14.754433505859375,8.006453492279054,15.273813505859374,8.135463492279053ZM12.064083505859376,12.648273492279053L11.378523505859375,14.970463492279054L12.515943505859376,16.00003349227905L14.074083505859376,15.643933492279054L14.525943505859376,13.027603492279052C14.198743505859374,12.934663492279054,13.879283505859375,12.834063492279054,13.552083505859375,12.772133492279053C13.069083505859375,12.717933492279052,12.578283505859375,12.648273492279053,12.064083505859376,12.648273492279053ZM18.327743505859374,9.428153492279053C17.829143505859374,9.128843492279053,17.330543505859374,8.870823492279053,16.831943505859375,8.654083492279053C16.348943505859374,8.460573492279053,15.826943505859376,8.267053492279054,15.305013505859375,8.135463492279053L15.305013505859375,8.267053492279054L14.463613505859374,13.043063492279053C14.596083505859376,13.105003492279053,14.759683505859375,13.135933492279053,14.884283505859376,13.205603492279053C15.185523505859376,13.334623492279052,15.401043505859375,13.443003492279052,15.530943505859375,13.530733492279053C15.858143505859376,13.724263492279054,16.341143505859375,13.623603492279052,16.535943505859375,13.306263492279053L18.514743505859375,10.403483492279053C18.779643505859376,10.039673492279054,18.686143505859377,9.621673492279053,18.327743505859374,9.428153492279053Z",
                          "fill": "#FF6A00",
                          "fill-opacity": "1"
                        }
                      }
                    ]
                  }
                ]
              }
            ]
          },
          {
            "type": "element",
            "name": "g",
            "children": [
              {
                "type": "element",
                "name": "g",
                "children": [
                  {
                    "type": "element",
                    "name": "path",
                    "attributes": {
"d": "M25.044,2.668L34.676,2.668L34.676,4.04L25.044,4.04L25.044,2.668ZM29.958,7.82Q29.258,9.066,28.355,10.41Q27.451999999999998,11.754,26.92,12.3L32.506,11.782Q31.442,10.158,30.84,9.346L32.058,8.562000000000001Q32.786,9.5,33.843,11.012Q34.9,12.524,35.516,13.546L34.214,14.526Q33.891999999999996,13.966,33.346000000000004,13.098Q32.016,13.182,29.734,13.378Q27.451999999999998,13.574,25.87,13.742L25.31,13.812L24.834,13.882L24.414,12.468Q24.708,12.37,24.862000000000002,12.265Q25.016,12.16,25.121,12.069Q25.226,11.978,25.268,11.936Q25.912,11.32,26.724,10.165Q27.536,9.01,28.208,7.82L23.854,7.82L23.854,6.434L35.866,6.434L35.866,7.82L29.958,7.82ZM42.656,7.414L42.656,8.576L41.354,8.576L41.354,1.814L42.656,1.87L42.656,7.036Q43.314,5.846,43.888000000000005,4.369Q44.462,2.892,44.714,1.6600000000000001L46.086,1.981999Q45.96,2.612,45.722,3.41L49.6,3.41L49.6,4.74L45.274,4.74Q44.616,6.56,43.706,8.128L42.656,7.414ZM38.596000000000004,2.346L39.884,2.402L39.884,8.212L38.596000000000004,8.212L38.596000000000004,2.346ZM46.184,4.964Q46.688,5.356,47.5,6.175Q48.312,6.994,48.788,7.582L47.751999999999995,8.59Q47.346000000000004,8.072,46.576,7.274Q45.806,6.476,45.204,5.902L46.184,4.964ZM48.41,9.01L48.41,12.706L49.894,12.706L49.894,13.966L37.391999999999996,13.966L37.391999999999996,12.706L38.848,12.706L38.848,9.01L48.41,9.01ZM41.676,10.256L40.164,10.256L40.164,12.706L41.676,12.706L41.676,10.256ZM42.908,12.706L44.364000000000004,12.706L44.364000000000004,10.256L42.908,10.256L42.908,12.706ZM45.582,12.706L47.108000000000004,12.706L47.108000000000004,10.256L45.582,10.256L45.582,12.706ZM54.906,7.456L55.116,8.394L54.178,8.814L54.178,12.818Q54.178,13.434,54.031,13.735Q53.884,14.036,53.534,14.162Q53.184,14.288,52.456,14.358L51.867999999999995,14.414L51.476,13.084L52.162,13.028Q52.512,13,52.652,12.958Q52.792,12.916,52.841,12.797Q52.89,12.678,52.89,12.384L52.89,9.36Q51.980000000000004,9.724,51.322,9.948L51.013999999999996,8.576Q51.798,8.324,52.89,7.876L52.89,5.524L51.42,5.524L51.42,4.166L52.89,4.166L52.89,1.7579989999999999L54.178,1.814L54.178,4.166L55.214,4.166L55.214,5.524L54.178,5.524L54.178,7.316L54.808,7.022L54.906,7.456ZM56.894,4.5440000000000005L56.894,6.098L55.564,6.098L55.564,3.256L58.686,3.256Q58.42,2.346,58.266,1.9260000000000002L59.624,1.7579989999999999Q59.848,2.276,60.142,3.256L63.25,3.256L63.25,6.098L61.962,6.098L61.962,4.5440000000000005L56.894,4.5440000000000005ZM59.008,6.322Q58.392,6.938,57.685,7.512Q56.978,8.086,55.956,8.841999999999999L55.242,7.764Q56.824,6.728,58.126,5.37L59.008,6.322ZM60.422,5.37Q61.024,5.776,62.095,6.581Q63.166,7.386,63.656,7.806L62.942,8.982Q62.368,8.45,61.332,7.652Q60.296,6.854,59.666,6.434L60.422,5.37ZM62.592,10.256L60.044,10.256L60.044,12.566L63.572,12.566L63.572,13.826L55.144,13.826L55.144,12.566L58.63,12.566L58.63,10.256L56.054,10.256L56.054,8.982L62.592,8.982L62.592,10.256Z",
                      "fill": "#FF6A00",
                      "fill-opacity": "1"
                    }
                  }
                ]
              }
            ]
          }
        ]
      }
    ]
  },
  "name": "AliyunIcon"
}
@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './AliyunIcon.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'

const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
  props,
  ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)

Icon.displayName = 'AliyunIcon'

export default Icon
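A quick usage sketch for the generated icon wrapper above; the consuming component name, the relative import path and the className are assumptions:

// Hypothetical usage of the generated AliyunIcon component.
import AliyunIcon from './AliyunIcon'

// IconBase forwards standard props such as className to the rendered SVG.
const TracingProviderLabel = () => <AliyunIcon className='h-4 w-[65px]' />

export default TracingProviderLabel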
@ -0,0 +1,71 @@
{
  "icon": {
    "type": "element",
    "isRootNode": true,
    "name": "svg",
    "attributes": {
      "xmlns": "http://www.w3.org/2000/svg",
      "xmlns:xlink": "http://www.w3.org/1999/xlink",
      "fill": "none",
      "version": "1.1",
      "width": "96",
      "height": "24",
      "viewBox": "0 0 96 24"
    },
    "children": [
      {
        "type": "element",
        "name": "g",
        "children": [
          {
            "type": "element",
            "name": "g",
            "children": [
              {
                "type": "element",
                "name": "path",
                "attributes": {
"d": "M6.10294,22C5.68819,22,5.18195,21.8532,4.58421,21.5595C4.46861,21.5027,4.39038,21.4658,4.34951,21.4488C3.49789,21.0943,2.74367,20.5941,2.08684,19.9484C0.695613,18.5806,0,16.9311,0,15C0,13.0689,0.695612,11.4194,2.08684,10.0516C3.24259,8.91537,4.59607,8.2511,6.14728,8.05878C6.34758,5.97414,7.22633,4.16634,8.78354,2.63539C10.5706,0.878463,12.7286,0,15.2573,0C17.2884,0,19.1146,0.595472,20.7358,1.78642C22.327,2.95528,23.4151,4.46783,24,6.32406L22.0568,6.91594C21.6024,5.47377,20.7561,4.29798,19.5181,3.38858C18.2579,2.46286,16.8377,2,15.2573,2C13.2903,2,11.6119,2.6832,10.222,4.04961C8.83217,5.41601,8.13725,7.06614,8.13725,9L8.13725,10L7.12009,10C5.71758,10,4.51932,10.4886,3.52532,11.4659C2.53132,12.4431,2.03431,13.6211,2.03431,15C2.03431,16.3789,2.53132,17.5569,3.52532,18.5341C3.99531,18.9962,4.53447,19.3538,5.14278,19.6071C5.2229,19.6405,5.33983,19.695,5.49356,19.7705C5.80505,19.9235,6.00818,20,6.10294,20L6.10294,22Z",
                  "fill-rule": "evenodd",
                  "fill": "#FF6A00",
                  "fill-opacity": "1"
                }
              }
            ]
          },
          {
            "type": "element",
            "name": "g",
            "children": [
              {
                "type": "element",
                "name": "path",
                "attributes": {
"d": "M20.18796103515625,11.66909C19.46346103515625,11.5762,18.72726103515625,11.52975,17.991011035156248,11.52975C16.728921035156247,11.52975,15.45515103515625,11.66909,14.23981103515625,11.91292C13.02447103515625,12.156749999999999,11.85588103515625,12.539909999999999,10.73402103515625,12.98113C9.98612103515625,13.306239999999999,9.23822103515625,13.69327,8.49031803515625,14.14223C7.99950790415625,14.43251,7.85927603515625,15.10595,8.15142503515625,15.59361L11.11966103515625,19.9478C11.45855103515625,20.4354,12.13634103515625,20.5747,12.627151035156249,20.2845C12.821921035156251,20.152900000000002,13.14523103515625,19.990299999999998,13.59708103515625,19.796799999999998C14.27487103515625,19.506500000000003,14.964341035156249,19.3091,15.68887103515625,19.169800000000002C16.413401035156248,19.018900000000002,17.14962103515625,18.926000000000002,17.93258103515625,18.926000000000002L20.071061035156248,11.715530000000001L20.18796103515625,11.66909ZM22.91076103515625,12.20319L22.525161035156252,8L20.18796103515625,11.6807C20.72556103515625,11.72714,21.21636103515625,11.82003,21.74216103515625,11.92453C21.74216103515625,11.91679,22.13166103515625,12.00968,22.91076103515625,12.20319ZM18.09616103515625,18.9724L17.06782103515625,22.4557L18.773961035156248,24L21.11116103515625,23.465899999999998L21.788961035156248,19.5414C21.298161035156248,19.402,20.81896103515625,19.2511,20.32816103515625,19.1582C19.60366103515625,19.076900000000002,18.86746103515625,18.9724,18.09616103515625,18.9724ZM27.49166103515625,14.14223C26.74376103515625,13.69327,25.99586103515625,13.306239999999999,25.24796103515625,12.98113C24.52346103515625,12.69086,23.74046103515625,12.40058,22.95756103515625,12.20319L22.95756103515625,12.40058L21.69546103515625,19.5646C21.89416103515625,19.6575,22.139561035156248,19.7039,22.32646103515625,19.8084C22.77836103515625,20.0019,23.101661035156248,20.1645,23.29646103515625,20.2961C23.78726103515625,20.586399999999998,24.51176103515625,20.4354,24.80396103515625,19.959400000000002L27.77216103515625,15.605229999999999C28.16946103515625,15.05951,28.02926103515625,14.43251,27.49166103515625,14.14223Z",
                  "fill": "#FF6A00",
                  "fill-opacity": "1"
                }
              }
            ]
          },
          {
            "type": "element",
            "name": "g",
            "children": [
              {
                "type": "element",
                "name": "path",
                "attributes": {
"d": "M35.785,3.8624638671875L50.233000000000004,3.8624638671875L50.233000000000004,5.9204638671875L35.785,5.9204638671875L35.785,3.8624638671875ZM43.156,11.5904638671875Q42.106,13.4594638671875,40.7515,15.4754638671875Q39.397,17.4914638671875,38.599000000000004,18.3104638671875L46.978,17.5334638671875Q45.382,15.0974638671875,44.479,13.8794638671875L46.306,12.7034638671875Q47.397999999999996,14.1104638671875,48.9835,16.3784638671875Q50.569,18.6464638671875,51.492999999999995,20.1794638671875L49.54,21.6494638671875Q49.057,20.8094638671875,48.238,19.5074638671875Q46.243,19.6334638671875,42.82,19.9274638671875Q39.397,20.2214638671875,37.024,20.4734638671875L36.184,20.5784638671875L35.47,20.6834638671875L34.84,18.5624638671875Q35.281,18.4154638671875,35.512,18.2579638671875Q35.743,18.1004638671875,35.9005,17.963963867187502Q36.058,17.8274638671875,36.121,17.7644638671875Q37.087,16.840463867187502,38.305,15.1079638671875Q39.522999999999996,13.3754638671875,40.531,11.5904638671875L34,11.5904638671875L34,9.5114638671875L52.018,9.5114638671875L52.018,11.5904638671875L43.156,11.5904638671875ZM62.203,10.9814638671875L62.203,12.7244638671875L60.25,12.7244638671875L60.25,2.5814638671875L62.203,2.6654638671875L62.203,10.4144638671875Q63.19,8.6294638671875,64.051,6.4139638671875Q64.912,4.1984638671875,65.28999999999999,2.3504638671875L67.348,2.8334628671875Q67.15899999999999,3.7784638671875,66.80199999999999,4.9754638671875L72.619,4.9754638671875L72.619,6.9704638671875L66.13,6.9704638671875Q65.143,9.7004638671875,63.778,12.0524638671875L62.203,10.9814638671875ZM56.113,3.3794638671875L58.045,3.4634638671875L58.045,12.1784638671875L56.113,12.1784638671875L56.113,3.3794638671875ZM67.495,7.3064638671875Q68.251,7.8944638671875,69.469,9.1229638671875Q70.687,10.3514638671875,71.40100000000001,11.2334638671875L69.84700000000001,12.7454638671875Q69.238,11.9684638671875,68.083,10.7714638671875Q66.928,9.5744638671875,66.025,8.7134638671875L67.495,7.3064638671875ZM70.834,13.3754638671875L70.834,18.9194638671875L73.06,18.9194638671875L73.06,20.8094638671875L54.307,20.8094638671875L54.307,18.9194638671875L56.491,18.9194638671875L56.491,13.3754638671875L70.834,13.3754638671875ZM60.733000000000004,15.2444638671875L58.465,15.2444638671875L58.465,18.9194638671875L60.733000000000004,18.9194638671875L60.733000000000004,15.2444638671875ZM62.581,18.9194638671875L64.765,18.9194638671875L64.765,15.2444638671875L62.581,15.2444638671875L62.581,18.9194638671875ZM66.592,18.9194638671875L68.881,18.9194638671875L68.881,15.2444638671875L66.592,15.2444638671875L66.592,18.9194638671875ZM80.578,11.0444638671875L80.893,12.4514638671875L79.48599999999999,13.0814638671875L79.48599999999999,19.0874638671875Q79.48599999999999,20.0114638671875,79.2655,20.4629638671875Q79.045,20.9144638671875,78.52000000000001,21.1034638671875Q77.995,21.2924638671875,76.90299999999999,21.3974638671875L76.021,21.4814638671875L75.43299999999999,19.4864638671875L76.462,19.4024638671875Q76.987,19.3604638671875,77.197,19.2974638671875Q77.407,19.2344638671875,77.4805,19.0559638671875Q77.554,18.8774638671875,77.554,18.4364638671875L77.554,13.9004638671875Q76.189,14.4464638671875,75.202,14.7824638671875L74.74000000000001,12.7244638671875Q75.916,12.3464638671875,77.554,11.6744638671875L77.554,8.1464638671875L75.34899999999999,8.1464638671875L75.34899999999999,6.1094638671875L77.554,6.1094638671875L77.554,2.4974628671875L79.48599999999999,2.5814638671875L79.48599999999999,6.1094638671875L81.03999999999999,6.1094638671875L81.03999999999999,8.1464638671875L79.4859999999999
9,8.1464638671875L79.48599999999999,10.8344638671875L80.431,10.3934638671875L80.578,11.0444638671875ZM83.56,6.6764638671875L83.56,9.0074638671875L81.565,9.0074638671875L81.565,4.7444638671875L86.24799999999999,4.7444638671875Q85.84899999999999,3.3794638671875,85.618,2.7494638671875L87.655,2.4974628671875Q87.991,3.2744638671875,88.432,4.7444638671875L93.094,4.7444638671875L93.094,9.0074638671875L91.162,9.0074638671875L91.162,6.6764638671875L83.56,6.6764638671875ZM86.731,9.3434638671875Q85.807,10.2674638671875,84.7465,11.1284638671875Q83.686,11.9894638671875,82.15299999999999,13.1234638671875L81.082,11.5064638671875Q83.455,9.9524638671875,85.408,7.9154638671875L86.731,9.3434638671875ZM88.852,7.9154638671875Q89.755,8.5244638671875,91.3615,9.731963867187499Q92.968,10.9394638671875,93.703,11.5694638671875L92.632,13.3334638671875Q91.771,12.5354638671875,90.217,11.3384638671875Q88.663,10.1414638671875,87.718,9.5114638671875L88.852,7.9154638671875ZM92.107,15.2444638671875L88.285,15.2444638671875L88.285,18.7094638671875L93.577,18.7094638671875L93.577,20.5994638671875L80.935,20.5994638671875L80.935,18.7094638671875L86.164,18.7094638671875L86.164,15.2444638671875L82.3,15.2444638671875L82.3,13.3334638671875L92.107,13.3334638671875L92.107,15.2444638671875Z",
                  "fill": "#FF6A00",
                  "fill-opacity": "1"
                }
              }
            ]
          }
        ]
      }
    ]
  },
  "name": "AliyunBigIcon"
}
@ -0,0 +1,16 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import * as React from 'react'
import data from './AliyunIconBig.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'

const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
  props,
  ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)

Icon.displayName = 'AliyunIconBig'

export default Icon
@ -0,0 +1,83 @@
import { type ReadonlyURLSearchParams, usePathname, useRouter, useSearchParams } from 'next/navigation'
import { useCallback, useMemo } from 'react'

export type DocumentListQuery = {
  page: number
  limit: number
  keyword: string
}

const DEFAULT_QUERY: DocumentListQuery = {
  page: 1,
  limit: 10,
  keyword: '',
}

// Parse the query parameters from the URL search string.
function parseParams(params: ReadonlyURLSearchParams): DocumentListQuery {
  const page = Number.parseInt(params.get('page') || '1', 10)
  const limit = Number.parseInt(params.get('limit') || '10', 10)
  const keyword = params.get('keyword') || ''

  return {
    page: page > 0 ? page : 1,
    limit: (limit > 0 && limit <= 100) ? limit : 10,
    keyword: keyword ? decodeURIComponent(keyword) : '',
  }
}

// Update the URL search string with the given query parameters.
function updateSearchParams(query: DocumentListQuery, searchParams: URLSearchParams) {
  const { page, limit, keyword } = query || {}

  const hasNonDefaultParams = (page && page > 1) || (limit && limit !== 10) || (keyword && keyword.trim())

  if (hasNonDefaultParams) {
    searchParams.set('page', (page || 1).toString())
    searchParams.set('limit', (limit || 10).toString())
  }
  else {
    searchParams.delete('page')
    searchParams.delete('limit')
  }

  if (keyword && keyword.trim())
    searchParams.set('keyword', encodeURIComponent(keyword))
  else
    searchParams.delete('keyword')
}

function useDocumentListQueryState() {
  const searchParams = useSearchParams()
  const query = useMemo(() => parseParams(searchParams), [searchParams])

  const router = useRouter()
  const pathname = usePathname()

  // Helper function to update specific query parameters
  const updateQuery = useCallback((updates: Partial<DocumentListQuery>) => {
    const newQuery = { ...query, ...updates }
    const params = new URLSearchParams()
    updateSearchParams(newQuery, params)
    const search = params.toString()
    const queryString = search ? `?${search}` : ''
    router.push(`${pathname}${queryString}`, { scroll: false })
  }, [query, router, pathname])

  // Helper function to reset query to defaults
  const resetQuery = useCallback(() => {
    const params = new URLSearchParams()
    updateSearchParams(DEFAULT_QUERY, params)
    const search = params.toString()
    const queryString = search ? `?${search}` : ''
    router.push(`${pathname}${queryString}`, { scroll: false })
  }, [router, pathname])

  return useMemo(() => ({
    query,
    updateQuery,
    resetQuery,
  }), [query, updateQuery, resetQuery])
}

export default useDocumentListQueryState
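A minimal sketch of how the hook above might be consumed from a document list page; the component name, import path and markup are assumptions:

// Hypothetical consumer of useDocumentListQueryState.
import useDocumentListQueryState from './use-document-list-query-state'

const DocumentListHeader = () => {
  const { query, updateQuery, resetQuery } = useDocumentListQueryState()

  return (
    <div>
      {/* Changing the keyword resets pagination to page 1; the state lives in the URL. */}
      <input
        value={query.keyword}
        onChange={e => updateQuery({ keyword: e.target.value, page: 1 })}
      />
      <button onClick={() => updateQuery({ page: query.page + 1 })}>Next page</button>
      <button onClick={resetQuery}>Reset filters</button>
    </div>
  )
}

export default DocumentListHeader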
@ -0,0 +1,75 @@
import {
  useIsChatMode,
  useWorkflow,
  useWorkflowVariables,
} from '@/app/components/workflow/hooks'
import { BlockEnum, type Node, type NodeOutPutVar, type ValueSelector, type Var } from '@/app/components/workflow/types'
type Params = {
  onlyLeafNodeVar?: boolean
  hideEnv?: boolean
  hideChatVar?: boolean
  filterVar: (payload: Var, selector: ValueSelector) => boolean
  passedInAvailableNodes?: Node[]
}

const getNodeInfo = (nodeId: string, nodes: Node[]) => {
  const allNodes = nodes
  const node = allNodes.find(n => n.id === nodeId)
  const isInIteration = !!node?.data.isInIteration
  const isInLoop = !!node?.data.isInLoop
  const parentNodeId = node?.parentId
  const parentNode = allNodes.find(n => n.id === parentNodeId)
  return {
    node,
    isInIteration,
    isInLoop,
    parentNode,
  }
}

// TODO: loop type?
const useNodesAvailableVarList = (nodes: Node[], {
  onlyLeafNodeVar,
  filterVar,
  hideEnv = false,
  hideChatVar = false,
  passedInAvailableNodes,
}: Params = {
  onlyLeafNodeVar: false,
  filterVar: () => true,
}) => {
  const { getTreeLeafNodes, getBeforeNodesInSameBranchIncludeParent } = useWorkflow()
  const { getNodeAvailableVars } = useWorkflowVariables()
  const isChatMode = useIsChatMode()

  const nodeAvailabilityMap: { [key: string]: { availableVars: NodeOutPutVar[], availableNodes: Node[] } } = {}

  nodes.forEach((node) => {
    const nodeId = node.id
    const availableNodes = passedInAvailableNodes || (onlyLeafNodeVar ? getTreeLeafNodes(nodeId) : getBeforeNodesInSameBranchIncludeParent(nodeId))
    if (node.data.type === BlockEnum.Loop)
      availableNodes.push(node)

    const {
      parentNode: iterationNode,
    } = getNodeInfo(nodeId, nodes)

    const availableVars = getNodeAvailableVars({
      parentNode: iterationNode,
      beforeNodes: availableNodes,
      isChatMode,
      filterVar,
      hideEnv,
      hideChatVar,
    })
    const result = {
      node,
      availableVars,
      availableNodes,
    }
    nodeAvailabilityMap[nodeId] = result
  })
  return nodeAvailabilityMap
}

export default useNodesAvailableVarList
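A brief sketch of calling the hook above for a set of workflow nodes; the wrapper hook name, the relative import path and the permissive filter are assumptions:

// Hypothetical wrapper around useNodesAvailableVarList.
import type { Node } from '@/app/components/workflow/types'
import useNodesAvailableVarList from './use-nodes-available-var-list'

const useFirstNodeAvailableVars = (selectedNodes: Node[]) => {
  // Build the per-node availability map, accepting every variable.
  const availabilityMap = useNodesAvailableVarList(selectedNodes, {
    onlyLeafNodeVar: false,
    filterVar: () => true,
  })
  // Read back the vars and upstream nodes computed for the first selected node, if any.
  const first = selectedNodes[0] ? availabilityMap[selectedNodes[0].id] : undefined
  return first?.availableVars ?? []
}

export default useFirstNodeAvailableVars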