Merge branch 'main' into feat/rag-pipeline

pull/21398/head
twwu 12 months ago
commit cfb6d59513

@ -6,6 +6,7 @@ from typing import Optional
import click import click
from flask import current_app from flask import current_app
from sqlalchemy import select
from werkzeug.exceptions import NotFound from werkzeug.exceptions import NotFound
from configs import dify_config from configs import dify_config
@ -297,11 +298,11 @@ def migrate_knowledge_vector_database():
page = 1 page = 1
while True: while True:
try: try:
datasets = ( stmt = (
Dataset.query.filter(Dataset.indexing_technique == "high_quality") select(Dataset).filter(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
.order_by(Dataset.created_at.desc())
.paginate(page=page, per_page=50)
) )
datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound: except NotFound:
break break
@ -551,11 +552,12 @@ def old_metadata_migration():
page = 1 page = 1
while True: while True:
try: try:
documents = ( stmt = (
DatasetDocument.query.filter(DatasetDocument.doc_metadata is not None) select(DatasetDocument)
.filter(DatasetDocument.doc_metadata.is_not(None))
.order_by(DatasetDocument.created_at.desc()) .order_by(DatasetDocument.created_at.desc())
.paginate(page=page, per_page=50)
) )
documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound: except NotFound:
break break
if not documents: if not documents:
@ -592,11 +594,15 @@ def old_metadata_migration():
) )
db.session.add(dataset_metadata_binding) db.session.add(dataset_metadata_binding)
else: else:
dataset_metadata_binding = DatasetMetadataBinding.query.filter( dataset_metadata_binding = (
DatasetMetadataBinding.dataset_id == document.dataset_id, db.session.query(DatasetMetadataBinding) # type: ignore
DatasetMetadataBinding.document_id == document.id, .filter(
DatasetMetadataBinding.metadata_id == dataset_metadata.id, DatasetMetadataBinding.dataset_id == document.dataset_id,
).first() DatasetMetadataBinding.document_id == document.id,
DatasetMetadataBinding.metadata_id == dataset_metadata.id,
)
.first()
)
if not dataset_metadata_binding: if not dataset_metadata_binding:
dataset_metadata_binding = DatasetMetadataBinding( dataset_metadata_binding = DatasetMetadataBinding(
tenant_id=document.tenant_id, tenant_id=document.tenant_id,

@ -1,6 +1,6 @@
import os import os
from typing import Any, Literal, Optional from typing import Any, Literal, Optional
from urllib.parse import quote_plus from urllib.parse import parse_qsl, quote_plus
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
from pydantic_settings import BaseSettings from pydantic_settings import BaseSettings
@ -176,14 +176,28 @@ class DatabaseConfig(BaseSettings):
default=os.cpu_count() or 1, default=os.cpu_count() or 1,
) )
@computed_field @computed_field # type: ignore[misc]
@property
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]: def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
# Parse DB_EXTRAS for 'options'
db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
options = db_extras_dict.get("options", "")
# Always include timezone
timezone_opt = "-c timezone=UTC"
if options:
# Merge user options and timezone
merged_options = f"{options} {timezone_opt}"
else:
merged_options = timezone_opt
connect_args = {"options": merged_options}
return { return {
"pool_size": self.SQLALCHEMY_POOL_SIZE, "pool_size": self.SQLALCHEMY_POOL_SIZE,
"max_overflow": self.SQLALCHEMY_MAX_OVERFLOW, "max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
"pool_recycle": self.SQLALCHEMY_POOL_RECYCLE, "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING, "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
"connect_args": {"options": "-c timezone=UTC"}, "connect_args": connect_args,
} }

@ -83,3 +83,13 @@ class RedisConfig(BaseSettings):
description="Password for Redis Clusters authentication (if required)", description="Password for Redis Clusters authentication (if required)",
default=None, default=None,
) )
REDIS_SERIALIZATION_PROTOCOL: int = Field(
description="Redis serialization protocol (RESP) version",
default=3,
)
REDIS_ENABLE_CLIENT_SIDE_CACHE: bool = Field(
description="Enable client side cache in redis",
default=False,
)

@ -526,14 +526,20 @@ class DatasetIndexingStatusApi(Resource):
) )
documents_status = [] documents_status = []
for document in documents: for document in documents:
completed_segments = DocumentSegment.query.filter( completed_segments = (
DocumentSegment.completed_at.isnot(None), db.session.query(DocumentSegment)
DocumentSegment.document_id == str(document.id), .filter(
DocumentSegment.status != "re_segment", DocumentSegment.completed_at.isnot(None),
).count() DocumentSegment.document_id == str(document.id),
total_segments = DocumentSegment.query.filter( DocumentSegment.status != "re_segment",
DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment" )
).count() .count()
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments document.completed_segments = completed_segments
document.total_segments = total_segments document.total_segments = total_segments
documents_status.append(marshal(document, document_status_fields)) documents_status.append(marshal(document, document_status_fields))

@ -6,7 +6,7 @@ from typing import cast
from flask import request from flask import request
from flask_login import current_user from flask_login import current_user
from flask_restful import Resource, fields, marshal, marshal_with, reqparse from flask_restful import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound from werkzeug.exceptions import Forbidden, NotFound
import services import services
@ -112,7 +112,7 @@ class GetProcessRuleApi(Resource):
limits = DocumentService.DEFAULT_RULES["limits"] limits = DocumentService.DEFAULT_RULES["limits"]
if document_id: if document_id:
# get the latest process rule # get the latest process rule
document = Document.query.get_or_404(document_id) document = db.get_or_404(Document, document_id)
dataset = DatasetService.get_dataset(document.dataset_id) dataset = DatasetService.get_dataset(document.dataset_id)
@ -175,7 +175,7 @@ class DatasetDocumentListApi(Resource):
except services.errors.account.NoPermissionError as e: except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e)) raise Forbidden(str(e))
query = Document.query.filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id) query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id)
if search: if search:
search = f"%{search}%" search = f"%{search}%"
@ -209,18 +209,24 @@ class DatasetDocumentListApi(Resource):
desc(Document.position), desc(Document.position),
) )
paginated_documents = query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False) paginated_documents = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
documents = paginated_documents.items documents = paginated_documents.items
if fetch: if fetch:
for document in documents: for document in documents:
completed_segments = DocumentSegment.query.filter( completed_segments = (
DocumentSegment.completed_at.isnot(None), db.session.query(DocumentSegment)
DocumentSegment.document_id == str(document.id), .filter(
DocumentSegment.status != "re_segment", DocumentSegment.completed_at.isnot(None),
).count() DocumentSegment.document_id == str(document.id),
total_segments = DocumentSegment.query.filter( DocumentSegment.status != "re_segment",
DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment" )
).count() .count()
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments document.completed_segments = completed_segments
document.total_segments = total_segments document.total_segments = total_segments
data = marshal(documents, document_with_segments_fields) data = marshal(documents, document_with_segments_fields)
@ -563,14 +569,20 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
documents = self.get_batch_documents(dataset_id, batch) documents = self.get_batch_documents(dataset_id, batch)
documents_status = [] documents_status = []
for document in documents: for document in documents:
completed_segments = DocumentSegment.query.filter( completed_segments = (
DocumentSegment.completed_at.isnot(None), db.session.query(DocumentSegment)
DocumentSegment.document_id == str(document.id), .filter(
DocumentSegment.status != "re_segment", DocumentSegment.completed_at.isnot(None),
).count() DocumentSegment.document_id == str(document.id),
total_segments = DocumentSegment.query.filter( DocumentSegment.status != "re_segment",
DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment" )
).count() .count()
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments document.completed_segments = completed_segments
document.total_segments = total_segments document.total_segments = total_segments
if document.is_paused: if document.is_paused:
@ -589,14 +601,20 @@ class DocumentIndexingStatusApi(DocumentResource):
document_id = str(document_id) document_id = str(document_id)
document = self.get_document(dataset_id, document_id) document = self.get_document(dataset_id, document_id)
completed_segments = DocumentSegment.query.filter( completed_segments = (
DocumentSegment.completed_at.isnot(None), db.session.query(DocumentSegment)
DocumentSegment.document_id == str(document_id), .filter(
DocumentSegment.status != "re_segment", DocumentSegment.completed_at.isnot(None),
).count() DocumentSegment.document_id == str(document_id),
total_segments = DocumentSegment.query.filter( DocumentSegment.status != "re_segment",
DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment" )
).count() .count()
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments document.completed_segments = completed_segments
document.total_segments = total_segments document.total_segments = total_segments

@ -4,6 +4,7 @@ import pandas as pd
from flask import request from flask import request
from flask_login import current_user from flask_login import current_user
from flask_restful import Resource, marshal, reqparse from flask_restful import Resource, marshal, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound from werkzeug.exceptions import Forbidden, NotFound
import services import services
@ -26,6 +27,7 @@ from controllers.console.wraps import (
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from extensions.ext_redis import redis_client from extensions.ext_redis import redis_client
from fields.segment_fields import child_chunk_fields, segment_fields from fields.segment_fields import child_chunk_fields, segment_fields
from libs.login import login_required from libs.login import login_required
@ -74,9 +76,14 @@ class DatasetDocumentSegmentListApi(Resource):
hit_count_gte = args["hit_count_gte"] hit_count_gte = args["hit_count_gte"]
keyword = args["keyword"] keyword = args["keyword"]
query = DocumentSegment.query.filter( query = (
DocumentSegment.document_id == str(document_id), DocumentSegment.tenant_id == current_user.current_tenant_id select(DocumentSegment)
).order_by(DocumentSegment.position.asc()) .filter(
DocumentSegment.document_id == str(document_id),
DocumentSegment.tenant_id == current_user.current_tenant_id,
)
.order_by(DocumentSegment.position.asc())
)
if status_list: if status_list:
query = query.filter(DocumentSegment.status.in_(status_list)) query = query.filter(DocumentSegment.status.in_(status_list))
@ -93,7 +100,7 @@ class DatasetDocumentSegmentListApi(Resource):
elif args["enabled"].lower() == "false": elif args["enabled"].lower() == "false":
query = query.filter(DocumentSegment.enabled == False) query = query.filter(DocumentSegment.enabled == False)
segments = query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False) segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
response = { response = {
"data": marshal(segments.items, segment_fields), "data": marshal(segments.items, segment_fields),
@ -276,9 +283,11 @@ class DatasetDocumentSegmentUpdateApi(Resource):
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
@ -320,9 +329,11 @@ class DatasetDocumentSegmentUpdateApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
@ -423,9 +434,11 @@ class ChildChunkAddApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
if not current_user.is_dataset_editor: if not current_user.is_dataset_editor:
@ -478,9 +491,11 @@ class ChildChunkAddApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
parser = reqparse.RequestParser() parser = reqparse.RequestParser()
@ -523,9 +538,11 @@ class ChildChunkAddApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
@ -567,16 +584,20 @@ class ChildChunkUpdateApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
# check child chunk # check child chunk
child_chunk_id = str(child_chunk_id) child_chunk_id = str(child_chunk_id)
child_chunk = ChildChunk.query.filter( child_chunk = (
ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id db.session.query(ChildChunk)
).first() .filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.first()
)
if not child_chunk: if not child_chunk:
raise NotFound("Child chunk not found.") raise NotFound("Child chunk not found.")
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
@ -612,16 +633,20 @@ class ChildChunkUpdateApi(Resource):
raise NotFound("Document not found.") raise NotFound("Document not found.")
# check segment # check segment
segment_id = str(segment_id) segment_id = str(segment_id)
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
# check child chunk # check child chunk
child_chunk_id = str(child_chunk_id) child_chunk_id = str(child_chunk_id)
child_chunk = ChildChunk.query.filter( child_chunk = (
ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id db.session.query(ChildChunk)
).first() .filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.first()
)
if not child_chunk: if not child_chunk:
raise NotFound("Child chunk not found.") raise NotFound("Child chunk not found.")
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor

@ -66,7 +66,7 @@ class InstalledAppsListApi(Resource):
parser.add_argument("app_id", type=str, required=True, help="Invalid app_id") parser.add_argument("app_id", type=str, required=True, help="Invalid app_id")
args = parser.parse_args() args = parser.parse_args()
recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args["app_id"]).first() recommended_app = db.session.query(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]).first()
if recommended_app is None: if recommended_app is None:
raise NotFound("App not found") raise NotFound("App not found")
@ -79,9 +79,11 @@ class InstalledAppsListApi(Resource):
if not app.is_public: if not app.is_public:
raise Forbidden("You can't install a non-public app") raise Forbidden("You can't install a non-public app")
installed_app = InstalledApp.query.filter( installed_app = (
and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id) db.session.query(InstalledApp)
).first() .filter(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
.first()
)
if installed_app is None: if installed_app is None:
# todo: position # todo: position

@ -71,7 +71,6 @@ class MemberInviteEmailApi(Resource):
invitation_results.append( invitation_results.append(
{"status": "success", "email": invitee_email, "url": f"{console_web_url}/signin"} {"status": "success", "email": invitee_email, "url": f"{console_web_url}/signin"}
) )
break
except Exception as e: except Exception as e:
invitation_results.append({"status": "failed", "email": invitee_email, "message": str(e)}) invitation_results.append({"status": "failed", "email": invitee_email, "message": str(e)})

@ -3,6 +3,7 @@ import logging
from flask import request from flask import request
from flask_login import current_user from flask_login import current_user
from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Unauthorized from werkzeug.exceptions import Unauthorized
import services import services
@ -88,9 +89,8 @@ class WorkspaceListApi(Resource):
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args() args = parser.parse_args()
tenants = Tenant.query.order_by(Tenant.created_at.desc()).paginate( stmt = select(Tenant).order_by(Tenant.created_at.desc())
page=args["page"], per_page=args["limit"], error_out=False tenants = db.paginate(select=stmt, page=args["page"], per_page=args["limit"], error_out=False)
)
has_more = False has_more = False
if tenants.has_next: if tenants.has_next:
@ -162,7 +162,7 @@ class CustomConfigWorkspaceApi(Resource):
parser.add_argument("replace_webapp_logo", type=str, location="json") parser.add_argument("replace_webapp_logo", type=str, location="json")
args = parser.parse_args() args = parser.parse_args()
tenant = Tenant.query.filter(Tenant.id == current_user.current_tenant_id).one_or_404() tenant = db.get_or_404(Tenant, current_user.current_tenant_id)
custom_config_dict = { custom_config_dict = {
"remove_webapp_brand": args["remove_webapp_brand"], "remove_webapp_brand": args["remove_webapp_brand"],
@ -226,7 +226,7 @@ class WorkspaceInfoApi(Resource):
parser.add_argument("name", type=str, required=True, location="json") parser.add_argument("name", type=str, required=True, location="json")
args = parser.parse_args() args = parser.parse_args()
tenant = Tenant.query.filter(Tenant.id == current_user.current_tenant_id).one_or_404() tenant = db.get_or_404(Tenant, current_user.current_tenant_id)
tenant.name = args["name"] tenant.name = args["name"]
db.session.commit() db.session.commit()

@ -93,6 +93,18 @@ class MessageFeedbackApi(Resource):
return {"result": "success"} return {"result": "success"}
class AppGetFeedbacksApi(Resource):
@validate_app_token
def get(self, app_model: App):
"""Get All Feedbacks of an app"""
parser = reqparse.RequestParser()
parser.add_argument("page", type=int, default=1, location="args")
parser.add_argument("limit", type=int_range(1, 101), required=False, default=20, location="args")
args = parser.parse_args()
feedbacks = MessageService.get_all_messages_feedbacks(app_model, page=args["page"], limit=args["limit"])
return {"data": feedbacks}
class MessageSuggestedApi(Resource): class MessageSuggestedApi(Resource):
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY, required=True)) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY, required=True))
def get(self, app_model: App, end_user: EndUser, message_id): def get(self, app_model: App, end_user: EndUser, message_id):
@ -119,3 +131,4 @@ class MessageSuggestedApi(Resource):
api.add_resource(MessageListApi, "/messages") api.add_resource(MessageListApi, "/messages")
api.add_resource(MessageFeedbackApi, "/messages/<uuid:message_id>/feedbacks") api.add_resource(MessageFeedbackApi, "/messages/<uuid:message_id>/feedbacks")
api.add_resource(MessageSuggestedApi, "/messages/<uuid:message_id>/suggested") api.add_resource(MessageSuggestedApi, "/messages/<uuid:message_id>/suggested")
api.add_resource(AppGetFeedbacksApi, "/app/feedbacks")

@ -2,10 +2,10 @@ import json
from flask import request from flask import request
from flask_restful import marshal, reqparse from flask_restful import marshal, reqparse
from sqlalchemy import desc from sqlalchemy import desc, select
from werkzeug.exceptions import NotFound from werkzeug.exceptions import NotFound
import services.dataset_service import services
from controllers.common.errors import FilenameNotExistsError from controllers.common.errors import FilenameNotExistsError
from controllers.service_api import api from controllers.service_api import api
from controllers.service_api.app.error import ( from controllers.service_api.app.error import (
@ -337,7 +337,7 @@ class DocumentListApi(DatasetApiResource):
if not dataset: if not dataset:
raise NotFound("Dataset not found.") raise NotFound("Dataset not found.")
query = Document.query.filter_by(dataset_id=str(dataset_id), tenant_id=tenant_id) query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=tenant_id)
if search: if search:
search = f"%{search}%" search = f"%{search}%"
@ -345,7 +345,7 @@ class DocumentListApi(DatasetApiResource):
query = query.order_by(desc(Document.created_at), desc(Document.position)) query = query.order_by(desc(Document.created_at), desc(Document.position))
paginated_documents = query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False) paginated_documents = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
documents = paginated_documents.items documents = paginated_documents.items
response = { response = {
@ -374,14 +374,20 @@ class DocumentIndexingStatusApi(DatasetApiResource):
raise NotFound("Documents not found.") raise NotFound("Documents not found.")
documents_status = [] documents_status = []
for document in documents: for document in documents:
completed_segments = DocumentSegment.query.filter( completed_segments = (
DocumentSegment.completed_at.isnot(None), db.session.query(DocumentSegment)
DocumentSegment.document_id == str(document.id), .filter(
DocumentSegment.status != "re_segment", DocumentSegment.completed_at.isnot(None),
).count() DocumentSegment.document_id == str(document.id),
total_segments = DocumentSegment.query.filter( DocumentSegment.status != "re_segment",
DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment" )
).count() .count()
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments document.completed_segments = completed_segments
document.total_segments = total_segments document.total_segments = total_segments
if document.is_paused: if document.is_paused:

@ -1,3 +1,5 @@
import logging
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.entities.app_invoke_entities import InvokeFrom from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.entities.queue_entities import QueueRetrieverResourcesEvent from core.app.entities.queue_entities import QueueRetrieverResourcesEvent
@ -7,6 +9,8 @@ from extensions.ext_database import db
from models.dataset import ChildChunk, DatasetQuery, DocumentSegment from models.dataset import ChildChunk, DatasetQuery, DocumentSegment
from models.dataset import Document as DatasetDocument from models.dataset import Document as DatasetDocument
_logger = logging.getLogger(__name__)
class DatasetIndexToolCallbackHandler: class DatasetIndexToolCallbackHandler:
"""Callback handler for dataset tool.""" """Callback handler for dataset tool."""
@ -42,18 +46,31 @@ class DatasetIndexToolCallbackHandler:
"""Handle tool end.""" """Handle tool end."""
for document in documents: for document in documents:
if document.metadata is not None: if document.metadata is not None:
dataset_document = DatasetDocument.query.filter( document_id = document.metadata["document_id"]
DatasetDocument.id == document.metadata["document_id"] dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
).first() if not dataset_document:
_logger.warning(
"Expected DatasetDocument record to exist, but none was found, document_id=%s",
document_id,
)
continue
if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
child_chunk = ChildChunk.query.filter( child_chunk = (
ChildChunk.index_node_id == document.metadata["doc_id"], db.session.query(ChildChunk)
ChildChunk.dataset_id == dataset_document.dataset_id, .filter(
ChildChunk.document_id == dataset_document.id, ChildChunk.index_node_id == document.metadata["doc_id"],
).first() ChildChunk.dataset_id == dataset_document.dataset_id,
ChildChunk.document_id == dataset_document.id,
)
.first()
)
if child_chunk: if child_chunk:
segment = DocumentSegment.query.filter(DocumentSegment.id == child_chunk.segment_id).update( segment = (
{DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False db.session.query(DocumentSegment)
.filter(DocumentSegment.id == child_chunk.segment_id)
.update(
{DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False
)
) )
else: else:
query = db.session.query(DocumentSegment).filter( query = db.session.query(DocumentSegment).filter(

@ -51,7 +51,7 @@ class IndexingRunner:
for dataset_document in dataset_documents: for dataset_document in dataset_documents:
try: try:
# get dataset # get dataset
dataset = Dataset.query.filter_by(id=dataset_document.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first()
if not dataset: if not dataset:
raise ValueError("no dataset found") raise ValueError("no dataset found")
@ -103,15 +103,17 @@ class IndexingRunner:
"""Run the indexing process when the index_status is splitting.""" """Run the indexing process when the index_status is splitting."""
try: try:
# get dataset # get dataset
dataset = Dataset.query.filter_by(id=dataset_document.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first()
if not dataset: if not dataset:
raise ValueError("no dataset found") raise ValueError("no dataset found")
# get exist document_segment list and delete # get exist document_segment list and delete
document_segments = DocumentSegment.query.filter_by( document_segments = (
dataset_id=dataset.id, document_id=dataset_document.id db.session.query(DocumentSegment)
).all() .filter_by(dataset_id=dataset.id, document_id=dataset_document.id)
.all()
)
for document_segment in document_segments: for document_segment in document_segments:
db.session.delete(document_segment) db.session.delete(document_segment)
@ -162,15 +164,17 @@ class IndexingRunner:
"""Run the indexing process when the index_status is indexing.""" """Run the indexing process when the index_status is indexing."""
try: try:
# get dataset # get dataset
dataset = Dataset.query.filter_by(id=dataset_document.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_document.dataset_id).first()
if not dataset: if not dataset:
raise ValueError("no dataset found") raise ValueError("no dataset found")
# get exist document_segment list and delete # get exist document_segment list and delete
document_segments = DocumentSegment.query.filter_by( document_segments = (
dataset_id=dataset.id, document_id=dataset_document.id db.session.query(DocumentSegment)
).all() .filter_by(dataset_id=dataset.id, document_id=dataset_document.id)
.all()
)
documents = [] documents = []
if document_segments: if document_segments:
@ -254,7 +258,7 @@ class IndexingRunner:
embedding_model_instance = None embedding_model_instance = None
if dataset_id: if dataset_id:
dataset = Dataset.query.filter_by(id=dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
if not dataset: if not dataset:
raise ValueError("Dataset not found.") raise ValueError("Dataset not found.")
if dataset.indexing_technique == "high_quality" or indexing_technique == "high_quality": if dataset.indexing_technique == "high_quality" or indexing_technique == "high_quality":
@ -587,7 +591,7 @@ class IndexingRunner:
@staticmethod @staticmethod
def _process_keyword_index(flask_app, dataset_id, document_id, documents): def _process_keyword_index(flask_app, dataset_id, document_id, documents):
with flask_app.app_context(): with flask_app.app_context():
dataset = Dataset.query.filter_by(id=dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
if not dataset: if not dataset:
raise ValueError("no dataset found") raise ValueError("no dataset found")
keyword = Keyword(dataset) keyword = Keyword(dataset)
@ -656,10 +660,10 @@ class IndexingRunner:
""" """
Update the document indexing status. Update the document indexing status.
""" """
count = DatasetDocument.query.filter_by(id=document_id, is_paused=True).count() count = db.session.query(DatasetDocument).filter_by(id=document_id, is_paused=True).count()
if count > 0: if count > 0:
raise DocumentIsPausedError() raise DocumentIsPausedError()
document = DatasetDocument.query.filter_by(id=document_id).first() document = db.session.query(DatasetDocument).filter_by(id=document_id).first()
if not document: if not document:
raise DocumentIsDeletedPausedError() raise DocumentIsDeletedPausedError()
@ -668,7 +672,7 @@ class IndexingRunner:
if extra_update_params: if extra_update_params:
update_params.update(extra_update_params) update_params.update(extra_update_params)
DatasetDocument.query.filter_by(id=document_id).update(update_params) db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params)
db.session.commit() db.session.commit()
@staticmethod @staticmethod
@ -676,7 +680,7 @@ class IndexingRunner:
""" """
Update the document segment by document id. Update the document segment by document id.
""" """
DocumentSegment.query.filter_by(document_id=dataset_document_id).update(update_params) db.session.query(DocumentSegment).filter_by(document_id=dataset_document_id).update(update_params)
db.session.commit() db.session.commit()
def _transform( def _transform(

@ -1,9 +1,9 @@
from enum import Enum from enum import StrEnum
from pydantic import BaseModel, ValidationInfo, field_validator from pydantic import BaseModel, ValidationInfo, field_validator
class TracingProviderEnum(Enum): class TracingProviderEnum(StrEnum):
LANGFUSE = "langfuse" LANGFUSE = "langfuse"
LANGSMITH = "langsmith" LANGSMITH = "langsmith"
OPIK = "opik" OPIK = "opik"

@ -16,11 +16,7 @@ from sqlalchemy.orm import Session
from core.helper.encrypter import decrypt_token, encrypt_token, obfuscated_token from core.helper.encrypter import decrypt_token, encrypt_token, obfuscated_token
from core.ops.entities.config_entity import ( from core.ops.entities.config_entity import (
OPS_FILE_PATH, OPS_FILE_PATH,
LangfuseConfig,
LangSmithConfig,
OpikConfig,
TracingProviderEnum, TracingProviderEnum,
WeaveConfig,
) )
from core.ops.entities.trace_entity import ( from core.ops.entities.trace_entity import (
DatasetRetrievalTraceInfo, DatasetRetrievalTraceInfo,
@ -33,11 +29,7 @@ from core.ops.entities.trace_entity import (
TraceTaskName, TraceTaskName,
WorkflowTraceInfo, WorkflowTraceInfo,
) )
from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace
from core.ops.opik_trace.opik_trace import OpikDataTrace
from core.ops.utils import get_message_data from core.ops.utils import get_message_data
from core.ops.weave_trace.weave_trace import WeaveDataTrace
from extensions.ext_database import db from extensions.ext_database import db
from extensions.ext_storage import storage from extensions.ext_storage import storage
from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig
@ -45,36 +37,58 @@ from models.workflow import WorkflowAppLog, WorkflowRun
from tasks.ops_trace_task import process_trace_tasks from tasks.ops_trace_task import process_trace_tasks
def build_opik_trace_instance(config: OpikConfig): class OpsTraceProviderConfigMap(dict[str, dict[str, Any]]):
return OpikDataTrace(config) def __getitem__(self, provider: str) -> dict[str, Any]:
match provider:
case TracingProviderEnum.LANGFUSE:
provider_config_map: dict[str, dict[str, Any]] = { from core.ops.entities.config_entity import LangfuseConfig
TracingProviderEnum.LANGFUSE.value: { from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
"config_class": LangfuseConfig,
"secret_keys": ["public_key", "secret_key"], return {
"other_keys": ["host", "project_key"], "config_class": LangfuseConfig,
"trace_instance": LangFuseDataTrace, "secret_keys": ["public_key", "secret_key"],
}, "other_keys": ["host", "project_key"],
TracingProviderEnum.LANGSMITH.value: { "trace_instance": LangFuseDataTrace,
"config_class": LangSmithConfig, }
"secret_keys": ["api_key"],
"other_keys": ["project", "endpoint"], case TracingProviderEnum.LANGSMITH:
"trace_instance": LangSmithDataTrace, from core.ops.entities.config_entity import LangSmithConfig
}, from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace
TracingProviderEnum.OPIK.value: {
"config_class": OpikConfig, return {
"secret_keys": ["api_key"], "config_class": LangSmithConfig,
"other_keys": ["project", "url", "workspace"], "secret_keys": ["api_key"],
"trace_instance": lambda config: build_opik_trace_instance(config), "other_keys": ["project", "endpoint"],
}, "trace_instance": LangSmithDataTrace,
TracingProviderEnum.WEAVE.value: { }
"config_class": WeaveConfig,
"secret_keys": ["api_key"], case TracingProviderEnum.OPIK:
"other_keys": ["project", "entity", "endpoint"], from core.ops.entities.config_entity import OpikConfig
"trace_instance": WeaveDataTrace, from core.ops.opik_trace.opik_trace import OpikDataTrace
},
} return {
"config_class": OpikConfig,
"secret_keys": ["api_key"],
"other_keys": ["project", "url", "workspace"],
"trace_instance": OpikDataTrace,
}
case TracingProviderEnum.WEAVE:
from core.ops.entities.config_entity import WeaveConfig
from core.ops.weave_trace.weave_trace import WeaveDataTrace
return {
"config_class": WeaveConfig,
"secret_keys": ["api_key"],
"other_keys": ["project", "entity", "endpoint"],
"trace_instance": WeaveDataTrace,
}
case _:
raise KeyError(f"Unsupported tracing provider: {provider}")
provider_config_map: dict[str, dict[str, Any]] = OpsTraceProviderConfigMap()
class OpsTraceManager: class OpsTraceManager:

@ -317,7 +317,7 @@ class NotionExtractor(BaseExtractor):
data_source_info["last_edited_time"] = last_edited_time data_source_info["last_edited_time"] = last_edited_time
update_params = {DocumentModel.data_source_info: json.dumps(data_source_info)} update_params = {DocumentModel.data_source_info: json.dumps(data_source_info)}
DocumentModel.query.filter_by(id=document_model.id).update(update_params) db.session.query(DocumentModel).filter_by(id=document_model.id).update(update_params)
db.session.commit() db.session.commit()
def get_notion_last_edited_time(self) -> str: def get_notion_last_edited_time(self) -> str:
@ -347,14 +347,18 @@ class NotionExtractor(BaseExtractor):
@classmethod @classmethod
def _get_access_token(cls, tenant_id: str, notion_workspace_id: str) -> str: def _get_access_token(cls, tenant_id: str, notion_workspace_id: str) -> str:
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.disabled == False, DataSourceOauthBinding.tenant_id == tenant_id,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{notion_workspace_id}"', DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{notion_workspace_id}"',
)
) )
).first() .first()
)
if not data_source_binding: if not data_source_binding:
raise Exception( raise Exception(

@ -237,12 +237,16 @@ class DatasetRetrieval:
if show_retrieve_source: if show_retrieve_source:
for record in records: for record in records:
segment = record.segment segment = record.segment
dataset = Dataset.query.filter_by(id=segment.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first()
document = DatasetDocument.query.filter( document = (
DatasetDocument.id == segment.document_id, db.session.query(DatasetDocument)
DatasetDocument.enabled == True, .filter(
DatasetDocument.archived == False, DatasetDocument.id == segment.document_id,
).first() DatasetDocument.enabled == True,
DatasetDocument.archived == False,
)
.first()
)
if dataset and document: if dataset and document:
source = { source = {
"dataset_id": dataset.id, "dataset_id": dataset.id,
@ -506,19 +510,30 @@ class DatasetRetrieval:
dify_documents = [document for document in documents if document.provider == "dify"] dify_documents = [document for document in documents if document.provider == "dify"]
for document in dify_documents: for document in dify_documents:
if document.metadata is not None: if document.metadata is not None:
dataset_document = DatasetDocument.query.filter( dataset_document = (
DatasetDocument.id == document.metadata["document_id"] db.session.query(DatasetDocument)
).first() .filter(DatasetDocument.id == document.metadata["document_id"])
.first()
)
if dataset_document: if dataset_document:
if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
child_chunk = ChildChunk.query.filter( child_chunk = (
ChildChunk.index_node_id == document.metadata["doc_id"], db.session.query(ChildChunk)
ChildChunk.dataset_id == dataset_document.dataset_id, .filter(
ChildChunk.document_id == dataset_document.id, ChildChunk.index_node_id == document.metadata["doc_id"],
).first() ChildChunk.dataset_id == dataset_document.dataset_id,
ChildChunk.document_id == dataset_document.id,
)
.first()
)
if child_chunk: if child_chunk:
segment = DocumentSegment.query.filter(DocumentSegment.id == child_chunk.segment_id).update( segment = (
{DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False db.session.query(DocumentSegment)
.filter(DocumentSegment.id == child_chunk.segment_id)
.update(
{DocumentSegment.hit_count: DocumentSegment.hit_count + 1},
synchronize_session=False,
)
) )
db.session.commit() db.session.commit()
else: else:

@ -84,13 +84,17 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool):
document_context_list = [] document_context_list = []
index_node_ids = [document.metadata["doc_id"] for document in all_documents if document.metadata] index_node_ids = [document.metadata["doc_id"] for document in all_documents if document.metadata]
segments = DocumentSegment.query.filter( segments = (
DocumentSegment.dataset_id.in_(self.dataset_ids), db.session.query(DocumentSegment)
DocumentSegment.completed_at.isnot(None), .filter(
DocumentSegment.status == "completed", DocumentSegment.dataset_id.in_(self.dataset_ids),
DocumentSegment.enabled == True, DocumentSegment.completed_at.isnot(None),
DocumentSegment.index_node_id.in_(index_node_ids), DocumentSegment.status == "completed",
).all() DocumentSegment.enabled == True,
DocumentSegment.index_node_id.in_(index_node_ids),
)
.all()
)
if segments: if segments:
index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)}
@ -106,12 +110,16 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool):
context_list = [] context_list = []
resource_number = 1 resource_number = 1
for segment in sorted_segments: for segment in sorted_segments:
dataset = Dataset.query.filter_by(id=segment.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first()
document = Document.query.filter( document = (
Document.id == segment.document_id, db.session.query(Document)
Document.enabled == True, .filter(
Document.archived == False, Document.id == segment.document_id,
).first() Document.enabled == True,
Document.archived == False,
)
.first()
)
if dataset and document: if dataset and document:
source = { source = {
"position": resource_number, "position": resource_number,

@ -185,12 +185,16 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
if self.return_resource: if self.return_resource:
for record in records: for record in records:
segment = record.segment segment = record.segment
dataset = Dataset.query.filter_by(id=segment.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first()
document = DatasetDocument.query.filter( document = (
DatasetDocument.id == segment.document_id, db.session.query(DatasetDocument) # type: ignore
DatasetDocument.enabled == True, .filter(
DatasetDocument.archived == False, DatasetDocument.id == segment.document_id,
).first() DatasetDocument.enabled == True,
DatasetDocument.archived == False,
)
.first()
)
if dataset and document: if dataset and document:
source = { source = {
"dataset_id": dataset.id, "dataset_id": dataset.id,

@ -95,7 +95,12 @@ class StreamProcessor(ABC):
if node_id not in self.rest_node_ids: if node_id not in self.rest_node_ids:
return return
if node_id in reachable_node_ids:
return
self.rest_node_ids.remove(node_id) self.rest_node_ids.remove(node_id)
self.rest_node_ids.extend(set(reachable_node_ids) - set(self.rest_node_ids))
for edge in self.graph.edge_mapping.get(node_id, []): for edge in self.graph.edge_mapping.get(node_id, []):
if edge.target_node_id in reachable_node_ids: if edge.target_node_id in reachable_node_ids:
continue continue

@ -127,7 +127,7 @@ class CodeNode(BaseNode[CodeNodeData]):
depth: int = 1, depth: int = 1,
): ):
if depth > dify_config.CODE_MAX_DEPTH: if depth > dify_config.CODE_MAX_DEPTH:
raise DepthLimitError(f"Depth limit ${dify_config.CODE_MAX_DEPTH} reached, object too deep.") raise DepthLimitError(f"Depth limit {dify_config.CODE_MAX_DEPTH} reached, object too deep.")
transformed_result: dict[str, Any] = {} transformed_result: dict[str, Any] = {}
if output_schema is None: if output_schema is None:

@ -353,27 +353,26 @@ class IterationNode(BaseNode[IterationNodeData]):
) -> NodeRunStartedEvent | BaseNodeEvent | InNodeEvent: ) -> NodeRunStartedEvent | BaseNodeEvent | InNodeEvent:
""" """
add iteration metadata to event. add iteration metadata to event.
ensures iteration context (ID, index/parallel_run_id) is added to metadata,
""" """
if not isinstance(event, BaseNodeEvent): if not isinstance(event, BaseNodeEvent):
return event return event
if self.node_data.is_parallel and isinstance(event, NodeRunStartedEvent): if self.node_data.is_parallel and isinstance(event, NodeRunStartedEvent):
event.parallel_mode_run_id = parallel_mode_run_id event.parallel_mode_run_id = parallel_mode_run_id
return event
iter_metadata = {
NodeRunMetadataKey.ITERATION_ID: self.node_id,
NodeRunMetadataKey.ITERATION_INDEX: iter_run_index,
}
if parallel_mode_run_id:
# for parallel, the specific branch ID is more important than the sequential index
iter_metadata[NodeRunMetadataKey.PARALLEL_MODE_RUN_ID] = parallel_mode_run_id
if event.route_node_state.node_run_result: if event.route_node_state.node_run_result:
metadata = event.route_node_state.node_run_result.metadata current_metadata = event.route_node_state.node_run_result.metadata or {}
if not metadata: if NodeRunMetadataKey.ITERATION_ID not in current_metadata:
metadata = {} event.route_node_state.node_run_result.metadata = {**current_metadata, **iter_metadata}
if NodeRunMetadataKey.ITERATION_ID not in metadata:
metadata = {
**metadata,
NodeRunMetadataKey.ITERATION_ID: self.node_id,
NodeRunMetadataKey.PARALLEL_MODE_RUN_ID
if self.node_data.is_parallel
else NodeRunMetadataKey.ITERATION_INDEX: parallel_mode_run_id
if self.node_data.is_parallel
else iter_run_index,
}
event.route_node_state.node_run_result.metadata = metadata
return event return event
def _run_single_iter( def _run_single_iter(

@ -275,12 +275,16 @@ class KnowledgeRetrievalNode(LLMNode):
if records: if records:
for record in records: for record in records:
segment = record.segment segment = record.segment
dataset = Dataset.query.filter_by(id=segment.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=segment.dataset_id).first() # type: ignore
document = Document.query.filter( document = (
Document.id == segment.document_id, db.session.query(Document)
Document.enabled == True, .filter(
Document.archived == False, Document.id == segment.document_id,
).first() Document.enabled == True,
Document.archived == False,
)
.first()
)
if dataset and document: if dataset and document:
source = { source = {
"metadata": { "metadata": {

@ -337,7 +337,7 @@ class LoopNode(BaseNode[LoopNodeData]):
return {"check_break_result": True} return {"check_break_result": True}
elif isinstance(event, NodeRunFailedEvent): elif isinstance(event, NodeRunFailedEvent):
# Loop run failed # Loop run failed
yield event yield self._handle_event_metadata(event=event, iter_run_index=current_index)
yield LoopRunFailedEvent( yield LoopRunFailedEvent(
loop_id=self.id, loop_id=self.id,
loop_node_id=self.node_id, loop_node_id=self.node_id,

@ -26,7 +26,7 @@ class Mail:
match mail_type: match mail_type:
case "resend": case "resend":
import resend # type: ignore import resend
api_key = dify_config.RESEND_API_KEY api_key = dify_config.RESEND_API_KEY
if not api_key: if not api_key:

@ -1,6 +1,7 @@
from typing import Any, Union from typing import Any, Union
import redis import redis
from redis.cache import CacheConfig
from redis.cluster import ClusterNode, RedisCluster from redis.cluster import ClusterNode, RedisCluster
from redis.connection import Connection, SSLConnection from redis.connection import Connection, SSLConnection
from redis.sentinel import Sentinel from redis.sentinel import Sentinel
@ -51,6 +52,14 @@ def init_app(app: DifyApp):
connection_class: type[Union[Connection, SSLConnection]] = Connection connection_class: type[Union[Connection, SSLConnection]] = Connection
if dify_config.REDIS_USE_SSL: if dify_config.REDIS_USE_SSL:
connection_class = SSLConnection connection_class = SSLConnection
resp_protocol = dify_config.REDIS_SERIALIZATION_PROTOCOL
if dify_config.REDIS_ENABLE_CLIENT_SIDE_CACHE:
if resp_protocol >= 3:
clientside_cache_config = CacheConfig()
else:
raise ValueError("Client side cache is only supported in RESP3")
else:
clientside_cache_config = None
redis_params: dict[str, Any] = { redis_params: dict[str, Any] = {
"username": dify_config.REDIS_USERNAME, "username": dify_config.REDIS_USERNAME,
@ -59,6 +68,8 @@ def init_app(app: DifyApp):
"encoding": "utf-8", "encoding": "utf-8",
"encoding_errors": "strict", "encoding_errors": "strict",
"decode_responses": False, "decode_responses": False,
"protocol": resp_protocol,
"cache_config": clientside_cache_config,
} }
if dify_config.REDIS_USE_SENTINEL: if dify_config.REDIS_USE_SENTINEL:
@ -82,14 +93,22 @@ def init_app(app: DifyApp):
ClusterNode(host=node.split(":")[0], port=int(node.split(":")[1])) ClusterNode(host=node.split(":")[0], port=int(node.split(":")[1]))
for node in dify_config.REDIS_CLUSTERS.split(",") for node in dify_config.REDIS_CLUSTERS.split(",")
] ]
# FIXME: mypy error here, try to figure out how to fix it redis_client.initialize(
redis_client.initialize(RedisCluster(startup_nodes=nodes, password=dify_config.REDIS_CLUSTERS_PASSWORD)) # type: ignore RedisCluster(
startup_nodes=nodes,
password=dify_config.REDIS_CLUSTERS_PASSWORD,
protocol=resp_protocol,
cache_config=clientside_cache_config,
)
)
else: else:
redis_params.update( redis_params.update(
{ {
"host": dify_config.REDIS_HOST, "host": dify_config.REDIS_HOST,
"port": dify_config.REDIS_PORT, "port": dify_config.REDIS_PORT,
"connection_class": connection_class, "connection_class": connection_class,
"protocol": resp_protocol,
"cache_config": clientside_cache_config,
} }
) )
pool = redis.ConnectionPool(**redis_params) pool = redis.ConnectionPool(**redis_params)

@ -61,13 +61,17 @@ class NotionOAuth(OAuthDataSource):
"total": len(pages), "total": len(pages),
} }
# save data source binding # save data source binding
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.access_token == access_token, DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.access_token == access_token,
)
) )
).first() .first()
)
if data_source_binding: if data_source_binding:
data_source_binding.source_info = source_info data_source_binding.source_info = source_info
data_source_binding.disabled = False data_source_binding.disabled = False
@ -97,13 +101,17 @@ class NotionOAuth(OAuthDataSource):
"total": len(pages), "total": len(pages),
} }
# save data source binding # save data source binding
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.access_token == access_token, DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.access_token == access_token,
)
) )
).first() .first()
)
if data_source_binding: if data_source_binding:
data_source_binding.source_info = source_info data_source_binding.source_info = source_info
data_source_binding.disabled = False data_source_binding.disabled = False
@ -121,14 +129,18 @@ class NotionOAuth(OAuthDataSource):
def sync_data_source(self, binding_id: str): def sync_data_source(self, binding_id: str):
# save data source binding # save data source binding
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.id == binding_id, DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.disabled == False, DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.id == binding_id,
DataSourceOauthBinding.disabled == False,
)
) )
).first() .first()
)
if data_source_binding: if data_source_binding:
# get all authorized pages # get all authorized pages
pages = self.get_authorized_pages(data_source_binding.access_token) pages = self.get_authorized_pages(data_source_binding.access_token)

@ -52,7 +52,7 @@ class Account(UserMixin, Base):
@current_tenant.setter @current_tenant.setter
def current_tenant(self, value: "Tenant"): def current_tenant(self, value: "Tenant"):
tenant = value tenant = value
ta = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=self.id).first() ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first()
if ta: if ta:
tenant.current_role = ta.role tenant.current_role = ta.role
else: else:

@ -93,7 +93,8 @@ class Dataset(Base):
@property @property
def latest_process_rule(self): def latest_process_rule(self):
return ( return (
DatasetProcessRule.query.filter(DatasetProcessRule.dataset_id == self.id) db.session.query(DatasetProcessRule)
.filter(DatasetProcessRule.dataset_id == self.id)
.order_by(DatasetProcessRule.created_at.desc()) .order_by(DatasetProcessRule.created_at.desc())
.first() .first()
) )
@ -138,7 +139,8 @@ class Dataset(Base):
@property @property
def word_count(self): def word_count(self):
return ( return (
Document.query.with_entities(func.coalesce(func.sum(Document.word_count))) db.session.query(Document)
.with_entities(func.coalesce(func.sum(Document.word_count)))
.filter(Document.dataset_id == self.id) .filter(Document.dataset_id == self.id)
.scalar() .scalar()
) )
@ -440,12 +442,13 @@ class Document(Base):
@property @property
def segment_count(self): def segment_count(self):
return DocumentSegment.query.filter(DocumentSegment.document_id == self.id).count() return db.session.query(DocumentSegment).filter(DocumentSegment.document_id == self.id).count()
@property @property
def hit_count(self): def hit_count(self):
return ( return (
DocumentSegment.query.with_entities(func.coalesce(func.sum(DocumentSegment.hit_count))) db.session.query(DocumentSegment)
.with_entities(func.coalesce(func.sum(DocumentSegment.hit_count)))
.filter(DocumentSegment.document_id == self.id) .filter(DocumentSegment.document_id == self.id)
.scalar() .scalar()
) )
@ -892,7 +895,7 @@ class DatasetKeywordTable(Base):
return dct return dct
# get dataset # get dataset
dataset = Dataset.query.filter_by(id=self.dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=self.dataset_id).first()
if not dataset: if not dataset:
return None return None
if self.data_source_type == "database": if self.data_source_type == "database":

@ -1237,6 +1237,21 @@ class MessageFeedback(Base):
account = db.session.query(Account).filter(Account.id == self.from_account_id).first() account = db.session.query(Account).filter(Account.id == self.from_account_id).first()
return account return account
def to_dict(self):
return {
"id": str(self.id),
"app_id": str(self.app_id),
"conversation_id": str(self.conversation_id),
"message_id": str(self.message_id),
"rating": self.rating,
"content": self.content,
"from_source": self.from_source,
"from_end_user_id": str(self.from_end_user_id) if self.from_end_user_id else None,
"from_account_id": str(self.from_account_id) if self.from_account_id else None,
"created_at": self.created_at.isoformat(),
"updated_at": self.updated_at.isoformat(),
}
class MessageFile(Base): class MessageFile(Base):
__tablename__ = "message_files" __tablename__ = "message_files"

@ -14,7 +14,7 @@ dependencies = [
"chardet~=5.1.0", "chardet~=5.1.0",
"flask~=3.1.0", "flask~=3.1.0",
"flask-compress~=1.17", "flask-compress~=1.17",
"flask-cors~=4.0.0", "flask-cors~=5.0.0",
"flask-login~=0.6.3", "flask-login~=0.6.3",
"flask-migrate~=4.0.7", "flask-migrate~=4.0.7",
"flask-restful~=0.3.10", "flask-restful~=0.3.10",
@ -71,17 +71,16 @@ dependencies = [
"python-docx~=1.1.0", "python-docx~=1.1.0",
"python-dotenv==1.0.1", "python-dotenv==1.0.1",
"pyyaml~=6.0.1", "pyyaml~=6.0.1",
"readabilipy==0.2.0", "readabilipy~=0.3.0",
"redis[hiredis]~=5.0.3", "redis[hiredis]~=6.0.0",
"resend~=0.7.0", "resend~=2.9.0",
"sentry-sdk[flask]~=1.44.1", "sentry-sdk[flask]~=2.28.0",
"sqlalchemy~=2.0.29", "sqlalchemy~=2.0.29",
"starlette==0.41.0", "starlette==0.41.0",
"tiktoken~=0.9.0", "tiktoken~=0.9.0",
"tokenizers~=0.15.0", "transformers~=4.51.0",
"transformers~=4.35.0",
"unstructured[docx,epub,md,ppt,pptx]~=0.16.1", "unstructured[docx,epub,md,ppt,pptx]~=0.16.1",
"weave~=0.51.34", "weave~=0.51.0",
"yarl~=1.18.3", "yarl~=1.18.3",
"webvtt-py~=0.5.1", "webvtt-py~=0.5.1",
] ]
@ -195,7 +194,7 @@ vdb = [
"tcvectordb~=1.6.4", "tcvectordb~=1.6.4",
"tidb-vector==0.0.9", "tidb-vector==0.0.9",
"upstash-vector==0.6.0", "upstash-vector==0.6.0",
"volcengine-compat~=1.0.156", "volcengine-compat~=1.0.0",
"weaviate-client~=3.24.0", "weaviate-client~=3.24.0",
"xinference-client~=1.2.2", "xinference-client~=1.2.2",
] ]

@ -1,4 +1,5 @@
import datetime import datetime
import logging
import time import time
import click import click
@ -20,6 +21,8 @@ from models.model import (
from models.web import SavedMessage from models.web import SavedMessage
from services.feature_service import FeatureService from services.feature_service import FeatureService
_logger = logging.getLogger(__name__)
@app.celery.task(queue="dataset") @app.celery.task(queue="dataset")
def clean_messages(): def clean_messages():
@ -46,7 +49,14 @@ def clean_messages():
break break
for message in messages: for message in messages:
plan_sandbox_clean_message_day = message.created_at plan_sandbox_clean_message_day = message.created_at
app = App.query.filter_by(id=message.app_id).first() app = db.session.query(App).filter_by(id=message.app_id).first()
if not app:
_logger.warning(
"Expected App record to exist, but none was found, app_id=%s, message_id=%s",
message.app_id,
message.id,
)
continue
features_cache_key = f"features:{app.tenant_id}" features_cache_key = f"features:{app.tenant_id}"
plan_cache = redis_client.get(features_cache_key) plan_cache = redis_client.get(features_cache_key)
if plan_cache is None: if plan_cache is None:

@ -2,7 +2,7 @@ import datetime
import time import time
import click import click
from sqlalchemy import func from sqlalchemy import func, select
from werkzeug.exceptions import NotFound from werkzeug.exceptions import NotFound
import app import app
@ -51,8 +51,9 @@ def clean_unused_datasets_task():
) )
# Main query with join and filter # Main query with join and filter
datasets = ( stmt = (
Dataset.query.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id) select(Dataset)
.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
.outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id) .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
.filter( .filter(
Dataset.created_at < plan_sandbox_clean_day, Dataset.created_at < plan_sandbox_clean_day,
@ -60,9 +61,10 @@ def clean_unused_datasets_task():
func.coalesce(document_subquery_old.c.document_count, 0) > 0, func.coalesce(document_subquery_old.c.document_count, 0) > 0,
) )
.order_by(Dataset.created_at.desc()) .order_by(Dataset.created_at.desc())
.paginate(page=1, per_page=50)
) )
datasets = db.paginate(stmt, page=1, per_page=50)
except NotFound: except NotFound:
break break
if datasets.items is None or len(datasets.items) == 0: if datasets.items is None or len(datasets.items) == 0:
@ -99,7 +101,7 @@ def clean_unused_datasets_task():
# update document # update document
update_params = {Document.enabled: False} update_params = {Document.enabled: False}
Document.query.filter_by(dataset_id=dataset.id).update(update_params) db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params)
db.session.commit() db.session.commit()
click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")) click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green"))
except Exception as e: except Exception as e:
@ -135,8 +137,9 @@ def clean_unused_datasets_task():
) )
# Main query with join and filter # Main query with join and filter
datasets = ( stmt = (
Dataset.query.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id) select(Dataset)
.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
.outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id) .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
.filter( .filter(
Dataset.created_at < plan_pro_clean_day, Dataset.created_at < plan_pro_clean_day,
@ -144,8 +147,8 @@ def clean_unused_datasets_task():
func.coalesce(document_subquery_old.c.document_count, 0) > 0, func.coalesce(document_subquery_old.c.document_count, 0) > 0,
) )
.order_by(Dataset.created_at.desc()) .order_by(Dataset.created_at.desc())
.paginate(page=1, per_page=50)
) )
datasets = db.paginate(stmt, page=1, per_page=50)
except NotFound: except NotFound:
break break
@ -175,7 +178,7 @@ def clean_unused_datasets_task():
# update document # update document
update_params = {Document.enabled: False} update_params = {Document.enabled: False}
Document.query.filter_by(dataset_id=dataset.id).update(update_params) db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params)
db.session.commit() db.session.commit()
click.echo( click.echo(
click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green") click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")

@ -19,7 +19,9 @@ def create_tidb_serverless_task():
while True: while True:
try: try:
# check the number of idle tidb serverless # check the number of idle tidb serverless
idle_tidb_serverless_number = TidbAuthBinding.query.filter(TidbAuthBinding.active == False).count() idle_tidb_serverless_number = (
db.session.query(TidbAuthBinding).filter(TidbAuthBinding.active == False).count()
)
if idle_tidb_serverless_number >= tidb_serverless_number: if idle_tidb_serverless_number >= tidb_serverless_number:
break break
# create tidb serverless # create tidb serverless

@ -29,7 +29,9 @@ def mail_clean_document_notify_task():
# send document clean notify mail # send document clean notify mail
try: try:
dataset_auto_disable_logs = DatasetAutoDisableLog.query.filter(DatasetAutoDisableLog.notified == False).all() dataset_auto_disable_logs = (
db.session.query(DatasetAutoDisableLog).filter(DatasetAutoDisableLog.notified == False).all()
)
# group by tenant_id # group by tenant_id
dataset_auto_disable_logs_map: dict[str, list[DatasetAutoDisableLog]] = defaultdict(list) dataset_auto_disable_logs_map: dict[str, list[DatasetAutoDisableLog]] = defaultdict(list)
for dataset_auto_disable_log in dataset_auto_disable_logs: for dataset_auto_disable_log in dataset_auto_disable_logs:
@ -43,14 +45,16 @@ def mail_clean_document_notify_task():
if plan != "sandbox": if plan != "sandbox":
knowledge_details = [] knowledge_details = []
# check tenant # check tenant
tenant = Tenant.query.filter(Tenant.id == tenant_id).first() tenant = db.session.query(Tenant).filter(Tenant.id == tenant_id).first()
if not tenant: if not tenant:
continue continue
# check current owner # check current owner
current_owner_join = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, role="owner").first() current_owner_join = (
db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, role="owner").first()
)
if not current_owner_join: if not current_owner_join:
continue continue
account = Account.query.filter(Account.id == current_owner_join.account_id).first() account = db.session.query(Account).filter(Account.id == current_owner_join.account_id).first()
if not account: if not account:
continue continue
@ -63,7 +67,7 @@ def mail_clean_document_notify_task():
) )
for dataset_id, document_ids in dataset_auto_dataset_map.items(): for dataset_id, document_ids in dataset_auto_dataset_map.items():
dataset = Dataset.query.filter(Dataset.id == dataset_id).first() dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if dataset: if dataset:
document_count = len(document_ids) document_count = len(document_ids)
knowledge_details.append(rf"Knowledge base {dataset.name}: {document_count} documents") knowledge_details.append(rf"Knowledge base {dataset.name}: {document_count} documents")

@ -5,6 +5,7 @@ import click
import app import app
from configs import dify_config from configs import dify_config
from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService
from extensions.ext_database import db
from models.dataset import TidbAuthBinding from models.dataset import TidbAuthBinding
@ -14,9 +15,11 @@ def update_tidb_serverless_status_task():
start_at = time.perf_counter() start_at = time.perf_counter()
try: try:
# check the number of idle tidb serverless # check the number of idle tidb serverless
tidb_serverless_list = TidbAuthBinding.query.filter( tidb_serverless_list = (
TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING" db.session.query(TidbAuthBinding)
).all() .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING")
.all()
)
if len(tidb_serverless_list) == 0: if len(tidb_serverless_list) == 0:
return return
# update tidb serverless status # update tidb serverless status

@ -300,9 +300,9 @@ class AccountService:
"""Link account integrate""" """Link account integrate"""
try: try:
# Query whether there is an existing binding record for the same provider # Query whether there is an existing binding record for the same provider
account_integrate: Optional[AccountIntegrate] = AccountIntegrate.query.filter_by( account_integrate: Optional[AccountIntegrate] = (
account_id=account.id, provider=provider db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider=provider).first()
).first() )
if account_integrate: if account_integrate:
# If it exists, update the record # If it exists, update the record
@ -615,7 +615,10 @@ class TenantService:
): ):
"""Check if user have a workspace or not""" """Check if user have a workspace or not"""
available_ta = ( available_ta = (
TenantAccountJoin.query.filter_by(account_id=account.id).order_by(TenantAccountJoin.id.asc()).first() db.session.query(TenantAccountJoin)
.filter_by(account_id=account.id)
.order_by(TenantAccountJoin.id.asc())
.first()
) )
if available_ta: if available_ta:
@ -669,7 +672,7 @@ class TenantService:
if not tenant: if not tenant:
raise TenantNotFoundError("Tenant not found.") raise TenantNotFoundError("Tenant not found.")
ta = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=account.id).first() ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
if ta: if ta:
tenant.role = ta.role tenant.role = ta.role
else: else:
@ -698,7 +701,7 @@ class TenantService:
if not tenant_account_join: if not tenant_account_join:
raise AccountNotLinkTenantError("Tenant not found or account is not a member of the tenant.") raise AccountNotLinkTenantError("Tenant not found or account is not a member of the tenant.")
else: else:
TenantAccountJoin.query.filter( db.session.query(TenantAccountJoin).filter(
TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id != tenant_id TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id != tenant_id
).update({"current": False}) ).update({"current": False})
tenant_account_join.current = True tenant_account_join.current = True
@ -790,7 +793,7 @@ class TenantService:
if operator.id == member.id: if operator.id == member.id:
raise CannotOperateSelfError("Cannot operate self.") raise CannotOperateSelfError("Cannot operate self.")
ta_operator = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=operator.id).first() ta_operator = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=operator.id).first()
if not ta_operator or ta_operator.role not in perms[action]: if not ta_operator or ta_operator.role not in perms[action]:
raise NoPermissionError(f"No permission to {action} member.") raise NoPermissionError(f"No permission to {action} member.")
@ -803,7 +806,7 @@ class TenantService:
TenantService.check_member_permission(tenant, operator, account, "remove") TenantService.check_member_permission(tenant, operator, account, "remove")
ta = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=account.id).first() ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
if not ta: if not ta:
raise MemberNotInTenantError("Member not in tenant.") raise MemberNotInTenantError("Member not in tenant.")
@ -815,15 +818,23 @@ class TenantService:
"""Update member role""" """Update member role"""
TenantService.check_member_permission(tenant, operator, member, "update") TenantService.check_member_permission(tenant, operator, member, "update")
target_member_join = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=member.id).first() target_member_join = (
db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=member.id).first()
)
if not target_member_join:
raise MemberNotInTenantError("Member not in tenant.")
if target_member_join.role == new_role: if target_member_join.role == new_role:
raise RoleAlreadyAssignedError("The provided role is already assigned to the member.") raise RoleAlreadyAssignedError("The provided role is already assigned to the member.")
if new_role == "owner": if new_role == "owner":
# Find the current owner and change their role to 'admin' # Find the current owner and change their role to 'admin'
current_owner_join = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, role="owner").first() current_owner_join = (
current_owner_join.role = "admin" db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, role="owner").first()
)
if current_owner_join:
current_owner_join.role = "admin"
# Update the role of the target member # Update the role of the target member
target_member_join.role = new_role target_member_join.role = new_role
@ -840,7 +851,7 @@ class TenantService:
@staticmethod @staticmethod
def get_custom_config(tenant_id: str) -> dict: def get_custom_config(tenant_id: str) -> dict:
tenant = Tenant.query.filter(Tenant.id == tenant_id).one_or_404() tenant = db.get_or_404(Tenant, tenant_id)
return cast(dict, tenant.custom_config_dict) return cast(dict, tenant.custom_config_dict)
@ -962,7 +973,7 @@ class RegisterService:
TenantService.switch_tenant(account, tenant.id) TenantService.switch_tenant(account, tenant.id)
else: else:
TenantService.check_member_permission(tenant, inviter, account, "add") TenantService.check_member_permission(tenant, inviter, account, "add")
ta = TenantAccountJoin.query.filter_by(tenant_id=tenant.id, account_id=account.id).first() ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
if not ta: if not ta:
TenantService.create_tenant_member(tenant, account, role) TenantService.create_tenant_member(tenant, account, role)

@ -4,7 +4,7 @@ from typing import cast
import pandas as pd import pandas as pd
from flask_login import current_user from flask_login import current_user
from sqlalchemy import or_ from sqlalchemy import or_, select
from werkzeug.datastructures import FileStorage from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import NotFound from werkzeug.exceptions import NotFound
@ -124,8 +124,9 @@ class AppAnnotationService:
if not app: if not app:
raise NotFound("App not found") raise NotFound("App not found")
if keyword: if keyword:
annotations = ( stmt = (
MessageAnnotation.query.filter(MessageAnnotation.app_id == app_id) select(MessageAnnotation)
.filter(MessageAnnotation.app_id == app_id)
.filter( .filter(
or_( or_(
MessageAnnotation.question.ilike("%{}%".format(keyword)), MessageAnnotation.question.ilike("%{}%".format(keyword)),
@ -133,14 +134,14 @@ class AppAnnotationService:
) )
) )
.order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc())
.paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
) )
else: else:
annotations = ( stmt = (
MessageAnnotation.query.filter(MessageAnnotation.app_id == app_id) select(MessageAnnotation)
.filter(MessageAnnotation.app_id == app_id)
.order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc())
.paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
) )
annotations = db.paginate(select=stmt, page=page, per_page=limit, max_per_page=100, error_out=False)
return annotations.items, annotations.total return annotations.items, annotations.total
@classmethod @classmethod
@ -325,13 +326,16 @@ class AppAnnotationService:
if not annotation: if not annotation:
raise NotFound("Annotation not found") raise NotFound("Annotation not found")
annotation_hit_histories = ( stmt = (
AppAnnotationHitHistory.query.filter( select(AppAnnotationHitHistory)
.filter(
AppAnnotationHitHistory.app_id == app_id, AppAnnotationHitHistory.app_id == app_id,
AppAnnotationHitHistory.annotation_id == annotation_id, AppAnnotationHitHistory.annotation_id == annotation_id,
) )
.order_by(AppAnnotationHitHistory.created_at.desc()) .order_by(AppAnnotationHitHistory.created_at.desc())
.paginate(page=page, per_page=limit, max_per_page=100, error_out=False) )
annotation_hit_histories = db.paginate(
select=stmt, page=page, per_page=limit, max_per_page=100, error_out=False
) )
return annotation_hit_histories.items, annotation_hit_histories.total return annotation_hit_histories.items, annotation_hit_histories.total

@ -9,7 +9,7 @@ from collections import Counter
from typing import Any, Optional from typing import Any, Optional
from flask_login import current_user from flask_login import current_user
from sqlalchemy import func from sqlalchemy import func, select
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound from werkzeug.exceptions import NotFound
@ -77,11 +77,13 @@ from tasks.sync_website_document_indexing_task import sync_website_document_inde
class DatasetService: class DatasetService:
@staticmethod @staticmethod
def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False): def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False):
query = Dataset.query.filter(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc()) query = select(Dataset).filter(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc())
if user: if user:
# get permitted dataset ids # get permitted dataset ids
dataset_permission = DatasetPermission.query.filter_by(account_id=user.id, tenant_id=tenant_id).all() dataset_permission = (
db.session.query(DatasetPermission).filter_by(account_id=user.id, tenant_id=tenant_id).all()
)
permitted_dataset_ids = {dp.dataset_id for dp in dataset_permission} if dataset_permission else None permitted_dataset_ids = {dp.dataset_id for dp in dataset_permission} if dataset_permission else None
if user.current_role == TenantAccountRole.DATASET_OPERATOR: if user.current_role == TenantAccountRole.DATASET_OPERATOR:
@ -129,7 +131,7 @@ class DatasetService:
else: else:
return [], 0 return [], 0
datasets = query.paginate(page=page, per_page=per_page, max_per_page=100, error_out=False) datasets = db.paginate(select=query, page=page, per_page=per_page, max_per_page=100, error_out=False)
return datasets.items, datasets.total return datasets.items, datasets.total
@ -153,9 +155,10 @@ class DatasetService:
@staticmethod @staticmethod
def get_datasets_by_ids(ids, tenant_id): def get_datasets_by_ids(ids, tenant_id):
datasets = Dataset.query.filter(Dataset.id.in_(ids), Dataset.tenant_id == tenant_id).paginate( stmt = select(Dataset).filter(Dataset.id.in_(ids), Dataset.tenant_id == tenant_id)
page=1, per_page=len(ids), max_per_page=len(ids), error_out=False
) datasets = db.paginate(select=stmt, page=1, per_page=len(ids), max_per_page=len(ids), error_out=False)
return datasets.items, datasets.total return datasets.items, datasets.total
@staticmethod @staticmethod
@ -174,7 +177,7 @@ class DatasetService:
retrieval_model: Optional[RetrievalModel] = None, retrieval_model: Optional[RetrievalModel] = None,
): ):
# check if dataset name already exists # check if dataset name already exists
if Dataset.query.filter_by(name=name, tenant_id=tenant_id).first(): if db.session.query(Dataset).filter_by(name=name, tenant_id=tenant_id).first():
raise DatasetNameDuplicateError(f"Dataset with name {name} already exists.") raise DatasetNameDuplicateError(f"Dataset with name {name} already exists.")
embedding_model = None embedding_model = None
if indexing_technique == "high_quality": if indexing_technique == "high_quality":
@ -235,7 +238,7 @@ class DatasetService:
@staticmethod @staticmethod
def get_dataset(dataset_id) -> Optional[Dataset]: def get_dataset(dataset_id) -> Optional[Dataset]:
dataset: Optional[Dataset] = Dataset.query.filter_by(id=dataset_id).first() dataset: Optional[Dataset] = db.session.query(Dataset).filter_by(id=dataset_id).first()
return dataset return dataset
@staticmethod @staticmethod
@ -436,7 +439,7 @@ class DatasetService:
# update Retrieval model # update Retrieval model
filtered_data["retrieval_model"] = data["retrieval_model"] filtered_data["retrieval_model"] = data["retrieval_model"]
dataset.query.filter_by(id=dataset_id).update(filtered_data) db.session.query(Dataset).filter_by(id=dataset_id).update(filtered_data)
db.session.commit() db.session.commit()
if action: if action:
@ -460,7 +463,7 @@ class DatasetService:
@staticmethod @staticmethod
def dataset_use_check(dataset_id) -> bool: def dataset_use_check(dataset_id) -> bool:
count = AppDatasetJoin.query.filter_by(dataset_id=dataset_id).count() count = db.session.query(AppDatasetJoin).filter_by(dataset_id=dataset_id).count()
if count > 0: if count > 0:
return True return True
return False return False
@ -475,7 +478,9 @@ class DatasetService:
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.") raise NoPermissionError("You do not have permission to access this dataset.")
if dataset.permission == "partial_members": if dataset.permission == "partial_members":
user_permission = DatasetPermission.query.filter_by(dataset_id=dataset.id, account_id=user.id).first() user_permission = (
db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
)
if ( if (
not user_permission not user_permission
and dataset.tenant_id != user.current_tenant_id and dataset.tenant_id != user.current_tenant_id
@ -499,23 +504,24 @@ class DatasetService:
elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM: elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
if not any( if not any(
dp.dataset_id == dataset.id for dp in DatasetPermission.query.filter_by(account_id=user.id).all() dp.dataset_id == dataset.id
for dp in db.session.query(DatasetPermission).filter_by(account_id=user.id).all()
): ):
raise NoPermissionError("You do not have permission to access this dataset.") raise NoPermissionError("You do not have permission to access this dataset.")
@staticmethod @staticmethod
def get_dataset_queries(dataset_id: str, page: int, per_page: int): def get_dataset_queries(dataset_id: str, page: int, per_page: int):
dataset_queries = ( stmt = select(DatasetQuery).filter_by(dataset_id=dataset_id).order_by(db.desc(DatasetQuery.created_at))
DatasetQuery.query.filter_by(dataset_id=dataset_id)
.order_by(db.desc(DatasetQuery.created_at)) dataset_queries = db.paginate(select=stmt, page=page, per_page=per_page, max_per_page=100, error_out=False)
.paginate(page=page, per_page=per_page, max_per_page=100, error_out=False)
)
return dataset_queries.items, dataset_queries.total return dataset_queries.items, dataset_queries.total
@staticmethod @staticmethod
def get_related_apps(dataset_id: str): def get_related_apps(dataset_id: str):
return ( return (
AppDatasetJoin.query.filter(AppDatasetJoin.dataset_id == dataset_id) db.session.query(AppDatasetJoin)
.filter(AppDatasetJoin.dataset_id == dataset_id)
.order_by(db.desc(AppDatasetJoin.created_at)) .order_by(db.desc(AppDatasetJoin.created_at))
.all() .all()
) )
@ -530,10 +536,14 @@ class DatasetService:
} }
# get recent 30 days auto disable logs # get recent 30 days auto disable logs
start_date = datetime.datetime.now() - datetime.timedelta(days=30) start_date = datetime.datetime.now() - datetime.timedelta(days=30)
dataset_auto_disable_logs = DatasetAutoDisableLog.query.filter( dataset_auto_disable_logs = (
DatasetAutoDisableLog.dataset_id == dataset_id, db.session.query(DatasetAutoDisableLog)
DatasetAutoDisableLog.created_at >= start_date, .filter(
).all() DatasetAutoDisableLog.dataset_id == dataset_id,
DatasetAutoDisableLog.created_at >= start_date,
)
.all()
)
if dataset_auto_disable_logs: if dataset_auto_disable_logs:
return { return {
"document_ids": [log.document_id for log in dataset_auto_disable_logs], "document_ids": [log.document_id for log in dataset_auto_disable_logs],
@ -873,7 +883,9 @@ class DocumentService:
@staticmethod @staticmethod
def get_documents_position(dataset_id): def get_documents_position(dataset_id):
document = Document.query.filter_by(dataset_id=dataset_id).order_by(Document.position.desc()).first() document = (
db.session.query(Document).filter_by(dataset_id=dataset_id).order_by(Document.position.desc()).first()
)
if document: if document:
return document.position + 1 return document.position + 1
else: else:
@ -1010,13 +1022,17 @@ class DocumentService:
} }
# check duplicate # check duplicate
if knowledge_config.duplicate: if knowledge_config.duplicate:
document = Document.query.filter_by( document = (
dataset_id=dataset.id, db.session.query(Document)
tenant_id=current_user.current_tenant_id, .filter_by(
data_source_type="upload_file", dataset_id=dataset.id,
enabled=True, tenant_id=current_user.current_tenant_id,
name=file_name, data_source_type="upload_file",
).first() enabled=True,
name=file_name,
)
.first()
)
if document: if document:
document.dataset_process_rule_id = dataset_process_rule.id # type: ignore document.dataset_process_rule_id = dataset_process_rule.id # type: ignore
document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
@ -1054,12 +1070,16 @@ class DocumentService:
raise ValueError("No notion info list found.") raise ValueError("No notion info list found.")
exist_page_ids = [] exist_page_ids = []
exist_document = {} exist_document = {}
documents = Document.query.filter_by( documents = (
dataset_id=dataset.id, db.session.query(Document)
tenant_id=current_user.current_tenant_id, .filter_by(
data_source_type="notion_import", dataset_id=dataset.id,
enabled=True, tenant_id=current_user.current_tenant_id,
).all() data_source_type="notion_import",
enabled=True,
)
.all()
)
if documents: if documents:
for document in documents: for document in documents:
data_source_info = json.loads(document.data_source_info) data_source_info = json.loads(document.data_source_info)
@ -1067,14 +1087,18 @@ class DocumentService:
exist_document[data_source_info["notion_page_id"]] = document.id exist_document[data_source_info["notion_page_id"]] = document.id
for notion_info in notion_info_list: for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id workspace_id = notion_info.workspace_id
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.disabled == False, DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
) )
).first() .first()
)
if not data_source_binding: if not data_source_binding:
raise ValueError("Data source binding not found.") raise ValueError("Data source binding not found.")
for page in notion_info.pages: for page in notion_info.pages:
@ -1206,12 +1230,16 @@ class DocumentService:
@staticmethod @staticmethod
def get_tenant_documents_count(): def get_tenant_documents_count():
documents_count = Document.query.filter( documents_count = (
Document.completed_at.isnot(None), db.session.query(Document)
Document.enabled == True, .filter(
Document.archived == False, Document.completed_at.isnot(None),
Document.tenant_id == current_user.current_tenant_id, Document.enabled == True,
).count() Document.archived == False,
Document.tenant_id == current_user.current_tenant_id,
)
.count()
)
return documents_count return documents_count
@staticmethod @staticmethod
@ -1278,14 +1306,18 @@ class DocumentService:
notion_info_list = document_data.data_source.info_list.notion_info_list notion_info_list = document_data.data_source.info_list.notion_info_list
for notion_info in notion_info_list: for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id workspace_id = notion_info.workspace_id
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.disabled == False, DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
) )
).first() .first()
)
if not data_source_binding: if not data_source_binding:
raise ValueError("Data source binding not found.") raise ValueError("Data source binding not found.")
for page in notion_info.pages: for page in notion_info.pages:
@ -1328,7 +1360,7 @@ class DocumentService:
db.session.commit() db.session.commit()
# update document segment # update document segment
update_params = {DocumentSegment.status: "re_segment"} update_params = {DocumentSegment.status: "re_segment"}
DocumentSegment.query.filter_by(document_id=document.id).update(update_params) db.session.query(DocumentSegment).filter_by(document_id=document.id).update(update_params)
db.session.commit() db.session.commit()
# trigger async task # trigger async task
document_indexing_update_task.delay(document.dataset_id, document.id) document_indexing_update_task.delay(document.dataset_id, document.id)
@ -1918,7 +1950,8 @@ class SegmentService:
@classmethod @classmethod
def delete_segments(cls, segment_ids: list, document: Document, dataset: Dataset): def delete_segments(cls, segment_ids: list, document: Document, dataset: Dataset):
index_node_ids = ( index_node_ids = (
DocumentSegment.query.with_entities(DocumentSegment.index_node_id) db.session.query(DocumentSegment)
.with_entities(DocumentSegment.index_node_id)
.filter( .filter(
DocumentSegment.id.in_(segment_ids), DocumentSegment.id.in_(segment_ids),
DocumentSegment.dataset_id == dataset.id, DocumentSegment.dataset_id == dataset.id,
@ -2157,20 +2190,28 @@ class SegmentService:
def get_child_chunks( def get_child_chunks(
cls, segment_id: str, document_id: str, dataset_id: str, page: int, limit: int, keyword: Optional[str] = None cls, segment_id: str, document_id: str, dataset_id: str, page: int, limit: int, keyword: Optional[str] = None
): ):
query = ChildChunk.query.filter_by( query = (
tenant_id=current_user.current_tenant_id, select(ChildChunk)
dataset_id=dataset_id, .filter_by(
document_id=document_id, tenant_id=current_user.current_tenant_id,
segment_id=segment_id, dataset_id=dataset_id,
).order_by(ChildChunk.position.asc()) document_id=document_id,
segment_id=segment_id,
)
.order_by(ChildChunk.position.asc())
)
if keyword: if keyword:
query = query.where(ChildChunk.content.ilike(f"%{keyword}%")) query = query.where(ChildChunk.content.ilike(f"%{keyword}%"))
return query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False) return db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
@classmethod @classmethod
def get_child_chunk_by_id(cls, child_chunk_id: str, tenant_id: str) -> Optional[ChildChunk]: def get_child_chunk_by_id(cls, child_chunk_id: str, tenant_id: str) -> Optional[ChildChunk]:
"""Get a child chunk by its ID.""" """Get a child chunk by its ID."""
result = ChildChunk.query.filter(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id).first() result = (
db.session.query(ChildChunk)
.filter(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id)
.first()
)
return result if isinstance(result, ChildChunk) else None return result if isinstance(result, ChildChunk) else None
@classmethod @classmethod
@ -2184,7 +2225,7 @@ class SegmentService:
limit: int = 20, limit: int = 20,
): ):
"""Get segments for a document with optional filtering.""" """Get segments for a document with optional filtering."""
query = DocumentSegment.query.filter( query = select(DocumentSegment).filter(
DocumentSegment.document_id == document_id, DocumentSegment.tenant_id == tenant_id DocumentSegment.document_id == document_id, DocumentSegment.tenant_id == tenant_id
) )
@ -2194,9 +2235,8 @@ class SegmentService:
if keyword: if keyword:
query = query.filter(DocumentSegment.content.ilike(f"%{keyword}%")) query = query.filter(DocumentSegment.content.ilike(f"%{keyword}%"))
paginated_segments = query.order_by(DocumentSegment.position.asc()).paginate( query = query.order_by(DocumentSegment.position.asc())
page=page, per_page=limit, max_per_page=100, error_out=False paginated_segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
)
return paginated_segments.items, paginated_segments.total return paginated_segments.items, paginated_segments.total
@ -2236,9 +2276,11 @@ class SegmentService:
raise ValueError(ex.description) raise ValueError(ex.description)
# check segment # check segment
segment = DocumentSegment.query.filter( segment = (
DocumentSegment.id == segment_id, DocumentSegment.tenant_id == user_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == user_id)
.first()
)
if not segment: if not segment:
raise NotFound("Segment not found.") raise NotFound("Segment not found.")
@ -2251,9 +2293,11 @@ class SegmentService:
@classmethod @classmethod
def get_segment_by_id(cls, segment_id: str, tenant_id: str) -> Optional[DocumentSegment]: def get_segment_by_id(cls, segment_id: str, tenant_id: str) -> Optional[DocumentSegment]:
"""Get a segment by its ID.""" """Get a segment by its ID."""
result = DocumentSegment.query.filter( result = (
DocumentSegment.id == segment_id, DocumentSegment.tenant_id == tenant_id db.session.query(DocumentSegment)
).first() .filter(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == tenant_id)
.first()
)
return result if isinstance(result, DocumentSegment) else None return result if isinstance(result, DocumentSegment) else None

@ -5,6 +5,7 @@ from typing import Any, Optional, Union, cast
from urllib.parse import urlparse from urllib.parse import urlparse
import httpx import httpx
from sqlalchemy import select
from constants import HIDDEN_VALUE from constants import HIDDEN_VALUE
from core.helper import ssrf_proxy from core.helper import ssrf_proxy
@ -24,14 +25,20 @@ from services.errors.dataset import DatasetNameDuplicateError
class ExternalDatasetService: class ExternalDatasetService:
@staticmethod @staticmethod
def get_external_knowledge_apis(page, per_page, tenant_id, search=None) -> tuple[list[ExternalKnowledgeApis], int]: def get_external_knowledge_apis(
query = ExternalKnowledgeApis.query.filter(ExternalKnowledgeApis.tenant_id == tenant_id).order_by( page, per_page, tenant_id, search=None
ExternalKnowledgeApis.created_at.desc() ) -> tuple[list[ExternalKnowledgeApis], int | None]:
query = (
select(ExternalKnowledgeApis)
.filter(ExternalKnowledgeApis.tenant_id == tenant_id)
.order_by(ExternalKnowledgeApis.created_at.desc())
) )
if search: if search:
query = query.filter(ExternalKnowledgeApis.name.ilike(f"%{search}%")) query = query.filter(ExternalKnowledgeApis.name.ilike(f"%{search}%"))
external_knowledge_apis = query.paginate(page=page, per_page=per_page, max_per_page=100, error_out=False) external_knowledge_apis = db.paginate(
select=query, page=page, per_page=per_page, max_per_page=100, error_out=False
)
return external_knowledge_apis.items, external_knowledge_apis.total return external_knowledge_apis.items, external_knowledge_apis.total
@ -92,18 +99,18 @@ class ExternalDatasetService:
@staticmethod @staticmethod
def get_external_knowledge_api(external_knowledge_api_id: str) -> ExternalKnowledgeApis: def get_external_knowledge_api(external_knowledge_api_id: str) -> ExternalKnowledgeApis:
external_knowledge_api: Optional[ExternalKnowledgeApis] = ExternalKnowledgeApis.query.filter_by( external_knowledge_api: Optional[ExternalKnowledgeApis] = (
id=external_knowledge_api_id db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id).first()
).first() )
if external_knowledge_api is None: if external_knowledge_api is None:
raise ValueError("api template not found") raise ValueError("api template not found")
return external_knowledge_api return external_knowledge_api
@staticmethod @staticmethod
def update_external_knowledge_api(tenant_id, user_id, external_knowledge_api_id, args) -> ExternalKnowledgeApis: def update_external_knowledge_api(tenant_id, user_id, external_knowledge_api_id, args) -> ExternalKnowledgeApis:
external_knowledge_api: Optional[ExternalKnowledgeApis] = ExternalKnowledgeApis.query.filter_by( external_knowledge_api: Optional[ExternalKnowledgeApis] = (
id=external_knowledge_api_id, tenant_id=tenant_id db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id, tenant_id=tenant_id).first()
).first() )
if external_knowledge_api is None: if external_knowledge_api is None:
raise ValueError("api template not found") raise ValueError("api template not found")
if args.get("settings") and args.get("settings").get("api_key") == HIDDEN_VALUE: if args.get("settings") and args.get("settings").get("api_key") == HIDDEN_VALUE:
@ -120,9 +127,9 @@ class ExternalDatasetService:
@staticmethod @staticmethod
def delete_external_knowledge_api(tenant_id: str, external_knowledge_api_id: str): def delete_external_knowledge_api(tenant_id: str, external_knowledge_api_id: str):
external_knowledge_api = ExternalKnowledgeApis.query.filter_by( external_knowledge_api = (
id=external_knowledge_api_id, tenant_id=tenant_id db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id, tenant_id=tenant_id).first()
).first() )
if external_knowledge_api is None: if external_knowledge_api is None:
raise ValueError("api template not found") raise ValueError("api template not found")
@ -131,25 +138,29 @@ class ExternalDatasetService:
@staticmethod @staticmethod
def external_knowledge_api_use_check(external_knowledge_api_id: str) -> tuple[bool, int]: def external_knowledge_api_use_check(external_knowledge_api_id: str) -> tuple[bool, int]:
count = ExternalKnowledgeBindings.query.filter_by(external_knowledge_api_id=external_knowledge_api_id).count() count = (
db.session.query(ExternalKnowledgeBindings)
.filter_by(external_knowledge_api_id=external_knowledge_api_id)
.count()
)
if count > 0: if count > 0:
return True, count return True, count
return False, 0 return False, 0
@staticmethod @staticmethod
def get_external_knowledge_binding_with_dataset_id(tenant_id: str, dataset_id: str) -> ExternalKnowledgeBindings: def get_external_knowledge_binding_with_dataset_id(tenant_id: str, dataset_id: str) -> ExternalKnowledgeBindings:
external_knowledge_binding: Optional[ExternalKnowledgeBindings] = ExternalKnowledgeBindings.query.filter_by( external_knowledge_binding: Optional[ExternalKnowledgeBindings] = (
dataset_id=dataset_id, tenant_id=tenant_id db.session.query(ExternalKnowledgeBindings).filter_by(dataset_id=dataset_id, tenant_id=tenant_id).first()
).first() )
if not external_knowledge_binding: if not external_knowledge_binding:
raise ValueError("external knowledge binding not found") raise ValueError("external knowledge binding not found")
return external_knowledge_binding return external_knowledge_binding
@staticmethod @staticmethod
def document_create_args_validate(tenant_id: str, external_knowledge_api_id: str, process_parameter: dict): def document_create_args_validate(tenant_id: str, external_knowledge_api_id: str, process_parameter: dict):
external_knowledge_api = ExternalKnowledgeApis.query.filter_by( external_knowledge_api = (
id=external_knowledge_api_id, tenant_id=tenant_id db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id, tenant_id=tenant_id).first()
).first() )
if external_knowledge_api is None: if external_knowledge_api is None:
raise ValueError("api template not found") raise ValueError("api template not found")
settings = json.loads(external_knowledge_api.settings) settings = json.loads(external_knowledge_api.settings)
@ -212,11 +223,13 @@ class ExternalDatasetService:
@staticmethod @staticmethod
def create_external_dataset(tenant_id: str, user_id: str, args: dict) -> Dataset: def create_external_dataset(tenant_id: str, user_id: str, args: dict) -> Dataset:
# check if dataset name already exists # check if dataset name already exists
if Dataset.query.filter_by(name=args.get("name"), tenant_id=tenant_id).first(): if db.session.query(Dataset).filter_by(name=args.get("name"), tenant_id=tenant_id).first():
raise DatasetNameDuplicateError(f"Dataset with name {args.get('name')} already exists.") raise DatasetNameDuplicateError(f"Dataset with name {args.get('name')} already exists.")
external_knowledge_api = ExternalKnowledgeApis.query.filter_by( external_knowledge_api = (
id=args.get("external_knowledge_api_id"), tenant_id=tenant_id db.session.query(ExternalKnowledgeApis)
).first() .filter_by(id=args.get("external_knowledge_api_id"), tenant_id=tenant_id)
.first()
)
if external_knowledge_api is None: if external_knowledge_api is None:
raise ValueError("api template not found") raise ValueError("api template not found")
@ -254,15 +267,17 @@ class ExternalDatasetService:
external_retrieval_parameters: dict, external_retrieval_parameters: dict,
metadata_condition: Optional[MetadataCondition] = None, metadata_condition: Optional[MetadataCondition] = None,
) -> list: ) -> list:
external_knowledge_binding = ExternalKnowledgeBindings.query.filter_by( external_knowledge_binding = (
dataset_id=dataset_id, tenant_id=tenant_id db.session.query(ExternalKnowledgeBindings).filter_by(dataset_id=dataset_id, tenant_id=tenant_id).first()
).first() )
if not external_knowledge_binding: if not external_knowledge_binding:
raise ValueError("external knowledge binding not found") raise ValueError("external knowledge binding not found")
external_knowledge_api = ExternalKnowledgeApis.query.filter_by( external_knowledge_api = (
id=external_knowledge_binding.external_knowledge_api_id db.session.query(ExternalKnowledgeApis)
).first() .filter_by(id=external_knowledge_binding.external_knowledge_api_id)
.first()
)
if not external_knowledge_api: if not external_knowledge_api:
raise ValueError("external api template not found") raise ValueError("external api template not found")

@ -177,6 +177,21 @@ class MessageService:
return feedback return feedback
@classmethod
def get_all_messages_feedbacks(cls, app_model: App, page: int, limit: int):
"""Get all feedbacks of an app"""
offset = (page - 1) * limit
feedbacks = (
db.session.query(MessageFeedback)
.filter(MessageFeedback.app_id == app_model.id)
.order_by(MessageFeedback.created_at.desc(), MessageFeedback.id.desc())
.limit(limit)
.offset(offset)
.all()
)
return [record.to_dict() for record in feedbacks]
@classmethod @classmethod
def get_message(cls, app_model: App, user: Optional[Union[Account, EndUser]], message_id: str): def get_message(cls, app_model: App, user: Optional[Union[Account, EndUser]], message_id: str):
message = ( message = (

@ -20,9 +20,11 @@ class MetadataService:
@staticmethod @staticmethod
def create_metadata(dataset_id: str, metadata_args: MetadataArgs) -> DatasetMetadata: def create_metadata(dataset_id: str, metadata_args: MetadataArgs) -> DatasetMetadata:
# check if metadata name already exists # check if metadata name already exists
if DatasetMetadata.query.filter_by( if (
tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name db.session.query(DatasetMetadata)
).first(): .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name)
.first()
):
raise ValueError("Metadata name already exists.") raise ValueError("Metadata name already exists.")
for field in BuiltInField: for field in BuiltInField:
if field.value == metadata_args.name: if field.value == metadata_args.name:
@ -42,16 +44,18 @@ class MetadataService:
def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata: # type: ignore def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata: # type: ignore
lock_key = f"dataset_metadata_lock_{dataset_id}" lock_key = f"dataset_metadata_lock_{dataset_id}"
# check if metadata name already exists # check if metadata name already exists
if DatasetMetadata.query.filter_by( if (
tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name db.session.query(DatasetMetadata)
).first(): .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name)
.first()
):
raise ValueError("Metadata name already exists.") raise ValueError("Metadata name already exists.")
for field in BuiltInField: for field in BuiltInField:
if field.value == name: if field.value == name:
raise ValueError("Metadata name already exists in Built-in fields.") raise ValueError("Metadata name already exists in Built-in fields.")
try: try:
MetadataService.knowledge_base_metadata_lock_check(dataset_id, None) MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
metadata = DatasetMetadata.query.filter_by(id=metadata_id).first() metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id).first()
if metadata is None: if metadata is None:
raise ValueError("Metadata not found.") raise ValueError("Metadata not found.")
old_name = metadata.name old_name = metadata.name
@ -60,7 +64,9 @@ class MetadataService:
metadata.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) metadata.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
# update related documents # update related documents
dataset_metadata_bindings = DatasetMetadataBinding.query.filter_by(metadata_id=metadata_id).all() dataset_metadata_bindings = (
db.session.query(DatasetMetadataBinding).filter_by(metadata_id=metadata_id).all()
)
if dataset_metadata_bindings: if dataset_metadata_bindings:
document_ids = [binding.document_id for binding in dataset_metadata_bindings] document_ids = [binding.document_id for binding in dataset_metadata_bindings]
documents = DocumentService.get_document_by_ids(document_ids) documents = DocumentService.get_document_by_ids(document_ids)
@ -82,13 +88,15 @@ class MetadataService:
lock_key = f"dataset_metadata_lock_{dataset_id}" lock_key = f"dataset_metadata_lock_{dataset_id}"
try: try:
MetadataService.knowledge_base_metadata_lock_check(dataset_id, None) MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
metadata = DatasetMetadata.query.filter_by(id=metadata_id).first() metadata = db.session.query(DatasetMetadata).filter_by(id=metadata_id).first()
if metadata is None: if metadata is None:
raise ValueError("Metadata not found.") raise ValueError("Metadata not found.")
db.session.delete(metadata) db.session.delete(metadata)
# deal related documents # deal related documents
dataset_metadata_bindings = DatasetMetadataBinding.query.filter_by(metadata_id=metadata_id).all() dataset_metadata_bindings = (
db.session.query(DatasetMetadataBinding).filter_by(metadata_id=metadata_id).all()
)
if dataset_metadata_bindings: if dataset_metadata_bindings:
document_ids = [binding.document_id for binding in dataset_metadata_bindings] document_ids = [binding.document_id for binding in dataset_metadata_bindings]
documents = DocumentService.get_document_by_ids(document_ids) documents = DocumentService.get_document_by_ids(document_ids)
@ -193,7 +201,7 @@ class MetadataService:
db.session.add(document) db.session.add(document)
db.session.commit() db.session.commit()
# deal metadata binding # deal metadata binding
DatasetMetadataBinding.query.filter_by(document_id=operation.document_id).delete() db.session.query(DatasetMetadataBinding).filter_by(document_id=operation.document_id).delete()
for metadata_value in operation.metadata_list: for metadata_value in operation.metadata_list:
dataset_metadata_binding = DatasetMetadataBinding( dataset_metadata_binding = DatasetMetadataBinding(
tenant_id=current_user.current_tenant_id, tenant_id=current_user.current_tenant_id,
@ -230,9 +238,9 @@ class MetadataService:
"id": item.get("id"), "id": item.get("id"),
"name": item.get("name"), "name": item.get("name"),
"type": item.get("type"), "type": item.get("type"),
"count": DatasetMetadataBinding.query.filter_by( "count": db.session.query(DatasetMetadataBinding)
metadata_id=item.get("id"), dataset_id=dataset.id .filter_by(metadata_id=item.get("id"), dataset_id=dataset.id)
).count(), .count(),
} }
for item in dataset.doc_metadata or [] for item in dataset.doc_metadata or []
if item.get("id") != "built-in" if item.get("id") != "built-in"

@ -1,3 +1,4 @@
import logging
from typing import Optional from typing import Optional
from core.model_manager import ModelInstance, ModelManager from core.model_manager import ModelInstance, ModelManager
@ -12,17 +13,27 @@ from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegm
from models.dataset import Document as DatasetDocument from models.dataset import Document as DatasetDocument
from services.entities.knowledge_entities.knowledge_entities import ParentMode from services.entities.knowledge_entities.knowledge_entities import ParentMode
_logger = logging.getLogger(__name__)
class VectorService: class VectorService:
@classmethod @classmethod
def create_segments_vector( def create_segments_vector(
cls, keywords_list: Optional[list[list[str]]], segments: list[DocumentSegment], dataset: Dataset, doc_form: str cls, keywords_list: Optional[list[list[str]]], segments: list[DocumentSegment], dataset: Dataset, doc_form: str
): ):
documents = [] documents: list[Document] = []
document: Document | None = None
for segment in segments: for segment in segments:
if doc_form == IndexType.PARENT_CHILD_INDEX: if doc_form == IndexType.PARENT_CHILD_INDEX:
document = DatasetDocument.query.filter_by(id=segment.document_id).first() document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
if not document:
_logger.warning(
"Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s",
segment.document_id,
segment.id,
)
continue
# get the process rule # get the process rule
processing_rule = ( processing_rule = (
db.session.query(DatasetProcessRule) db.session.query(DatasetProcessRule)

@ -41,7 +41,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
DocumentSegment.status: "indexing", DocumentSegment.status: "indexing",
DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
} }
DocumentSegment.query.filter_by(id=segment.id).update(update_params) db.session.query(DocumentSegment).filter_by(id=segment.id).update(update_params)
db.session.commit() db.session.commit()
document = Document( document = Document(
page_content=segment.content, page_content=segment.content,
@ -78,7 +78,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
DocumentSegment.status: "completed", DocumentSegment.status: "completed",
DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
} }
DocumentSegment.query.filter_by(id=segment.id).update(update_params) db.session.query(DocumentSegment).filter_by(id=segment.id).update(update_params)
db.session.commit() db.session.commit()
end_at = time.perf_counter() end_at = time.perf_counter()

@ -24,7 +24,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
start_at = time.perf_counter() start_at = time.perf_counter()
try: try:
dataset = Dataset.query.filter_by(id=dataset_id).first() dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
if not dataset: if not dataset:
raise Exception("Dataset not found") raise Exception("Dataset not found")

@ -44,14 +44,18 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
page_id = data_source_info["notion_page_id"] page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"] page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"] page_edited_time = data_source_info["last_edited_time"]
data_source_binding = DataSourceOauthBinding.query.filter( data_source_binding = (
db.and_( db.session.query(DataSourceOauthBinding)
DataSourceOauthBinding.tenant_id == document.tenant_id, .filter(
DataSourceOauthBinding.provider == "notion", db.and_(
DataSourceOauthBinding.disabled == False, DataSourceOauthBinding.tenant_id == document.tenant_id,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"', DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
) )
).first() .first()
)
if not data_source_binding: if not data_source_binding:
raise ValueError("Data source binding not found.") raise ValueError("Data source binding not found.")

@ -1,49 +1,28 @@
import os import os
from textwrap import dedent
import pytest
from flask import Flask from flask import Flask
from yarl import URL from yarl import URL
from configs.app_config import DifyConfig from configs.app_config import DifyConfig
EXAMPLE_ENV_FILENAME = ".env"
def test_dify_config(monkeypatch):
@pytest.fixture
def example_env_file(tmp_path, monkeypatch) -> str:
monkeypatch.chdir(tmp_path)
file_path = tmp_path.joinpath(EXAMPLE_ENV_FILENAME)
file_path.write_text(
dedent(
"""
CONSOLE_API_URL=https://example.com
CONSOLE_WEB_URL=https://example.com
HTTP_REQUEST_MAX_WRITE_TIMEOUT=30
"""
)
)
return str(file_path)
def test_dify_config_undefined_entry(example_env_file):
# NOTE: See https://github.com/microsoft/pylance-release/issues/6099 for more details about this type error.
# load dotenv file with pydantic-settings
config = DifyConfig(_env_file=example_env_file)
# entries not defined in app settings
with pytest.raises(TypeError):
# TypeError: 'AppSettings' object is not subscriptable
assert config["LOG_LEVEL"] == "INFO"
# NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected.
# This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`.
def test_dify_config(example_env_file):
# clear system environment variables # clear system environment variables
os.environ.clear() os.environ.clear()
# Set environment variables using monkeypatch
monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
monkeypatch.setenv("DB_USERNAME", "postgres")
monkeypatch.setenv("DB_PASSWORD", "postgres")
monkeypatch.setenv("DB_HOST", "localhost")
monkeypatch.setenv("DB_PORT", "5432")
monkeypatch.setenv("DB_DATABASE", "dify")
monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "600")
# load dotenv file with pydantic-settings # load dotenv file with pydantic-settings
config = DifyConfig(_env_file=example_env_file) config = DifyConfig()
# constant values # constant values
assert config.COMMIT_SHA == "" assert config.COMMIT_SHA == ""
@ -54,7 +33,7 @@ def test_dify_config(example_env_file):
assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0 assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0
# annotated field with default value # annotated field with default value
assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 60 assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600
# annotated field with configured value # annotated field with configured value
assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30 assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30
@ -64,11 +43,24 @@ def test_dify_config(example_env_file):
# NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected. # NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected.
# This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`. # This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`.
def test_flask_configs(example_env_file): def test_flask_configs(monkeypatch):
flask_app = Flask("app") flask_app = Flask("app")
# clear system environment variables # clear system environment variables
os.environ.clear() os.environ.clear()
flask_app.config.from_mapping(DifyConfig(_env_file=example_env_file).model_dump()) # pyright: ignore
# Set environment variables using monkeypatch
monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
monkeypatch.setenv("DB_USERNAME", "postgres")
monkeypatch.setenv("DB_PASSWORD", "postgres")
monkeypatch.setenv("DB_HOST", "localhost")
monkeypatch.setenv("DB_PORT", "5432")
monkeypatch.setenv("DB_DATABASE", "dify")
monkeypatch.setenv("WEB_API_CORS_ALLOW_ORIGINS", "http://127.0.0.1:3000,*")
monkeypatch.setenv("CODE_EXECUTION_ENDPOINT", "http://127.0.0.1:8194/")
flask_app.config.from_mapping(DifyConfig().model_dump()) # pyright: ignore
config = flask_app.config config = flask_app.config
# configs read from pydantic-settings # configs read from pydantic-settings
@ -83,7 +75,7 @@ def test_flask_configs(example_env_file):
# fallback to alias choices value as CONSOLE_API_URL # fallback to alias choices value as CONSOLE_API_URL
assert config["FILES_URL"] == "https://example.com" assert config["FILES_URL"] == "https://example.com"
assert config["SQLALCHEMY_DATABASE_URI"] == "postgresql://postgres:@localhost:5432/dify" assert config["SQLALCHEMY_DATABASE_URI"] == "postgresql://postgres:postgres@localhost:5432/dify"
assert config["SQLALCHEMY_ENGINE_OPTIONS"] == { assert config["SQLALCHEMY_ENGINE_OPTIONS"] == {
"connect_args": { "connect_args": {
"options": "-c timezone=UTC", "options": "-c timezone=UTC",
@ -96,13 +88,47 @@ def test_flask_configs(example_env_file):
assert config["CONSOLE_WEB_URL"] == "https://example.com" assert config["CONSOLE_WEB_URL"] == "https://example.com"
assert config["CONSOLE_CORS_ALLOW_ORIGINS"] == ["https://example.com"] assert config["CONSOLE_CORS_ALLOW_ORIGINS"] == ["https://example.com"]
assert config["WEB_API_CORS_ALLOW_ORIGINS"] == ["*"] assert config["WEB_API_CORS_ALLOW_ORIGINS"] == ["http://127.0.0.1:3000", "*"]
assert str(config["CODE_EXECUTION_ENDPOINT"]) == "http://127.0.0.1:8194/"
assert str(URL(str(config["CODE_EXECUTION_ENDPOINT"])) / "v1") == "http://127.0.0.1:8194/v1"
assert str(config["CODE_EXECUTION_ENDPOINT"]) == "http://sandbox:8194/"
assert str(URL(str(config["CODE_EXECUTION_ENDPOINT"])) / "v1") == "http://sandbox:8194/v1"
def test_inner_api_config_exist(monkeypatch):
# Set environment variables using monkeypatch
monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
monkeypatch.setenv("DB_USERNAME", "postgres")
monkeypatch.setenv("DB_PASSWORD", "postgres")
monkeypatch.setenv("DB_HOST", "localhost")
monkeypatch.setenv("DB_PORT", "5432")
monkeypatch.setenv("DB_DATABASE", "dify")
monkeypatch.setenv("INNER_API_KEY", "test-inner-api-key")
def test_inner_api_config_exist(example_env_file): config = DifyConfig()
config = DifyConfig(_env_file=example_env_file)
assert config.INNER_API is False assert config.INNER_API is False
assert config.INNER_API_KEY is None assert isinstance(config.INNER_API_KEY, str)
assert len(config.INNER_API_KEY) > 0
def test_db_extras_options_merging(monkeypatch):
"""Test that DB_EXTRAS options are properly merged with default timezone setting"""
# Set environment variables
monkeypatch.setenv("DB_USERNAME", "postgres")
monkeypatch.setenv("DB_PASSWORD", "postgres")
monkeypatch.setenv("DB_HOST", "localhost")
monkeypatch.setenv("DB_PORT", "5432")
monkeypatch.setenv("DB_DATABASE", "dify")
monkeypatch.setenv("DB_EXTRAS", "options=-c search_path=myschema")
# Create config
config = DifyConfig()
# Get engine options
engine_options = config.SQLALCHEMY_ENGINE_OPTIONS
# Verify options contains both search_path and timezone
options = engine_options["connect_args"]["options"]
assert "search_path=myschema" in options
assert "timezone=UTC" in options

File diff suppressed because it is too large Load Diff

@ -6,10 +6,12 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain. # different from api or web app domain.
# example: http://cloud.dify.ai/console/api # example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_WEB_PREFIX=http://localhost:3000
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from # The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain. # console or api domain.
# example: http://udify.app/api # example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_PUBLIC_WEB_PREFIX=http://localhost:3000
# The API PREFIX for MARKETPLACE # The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1 NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1
# The URL for MARKETPLACE # The URL for MARKETPLACE

@ -31,10 +31,12 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain. # different from api or web app domain.
# example: http://cloud.dify.ai/console/api # example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_WEB_PREFIX=http://localhost:3000
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from # The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain. # console or api domain.
# example: http://udify.app/api # example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_PUBLIC_WEB_PREFIX=http://localhost:3000
# SENTRY # SENTRY
NEXT_PUBLIC_SENTRY_DSN= NEXT_PUBLIC_SENTRY_DSN=

@ -16,7 +16,7 @@ import AppsContext, { useAppContext } from '@/context/app-context'
import type { HtmlContentProps } from '@/app/components/base/popover' import type { HtmlContentProps } from '@/app/components/base/popover'
import CustomPopover from '@/app/components/base/popover' import CustomPopover from '@/app/components/base/popover'
import Divider from '@/app/components/base/divider' import Divider from '@/app/components/base/divider'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import { getRedirection } from '@/utils/app-redirection' import { getRedirection } from '@/utils/app-redirection'
import { useProviderContext } from '@/context/provider-context' import { useProviderContext } from '@/context/provider-context'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config' import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
@ -217,7 +217,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
try { try {
const { installed_apps }: any = await fetchInstalledAppList(app.id) || {} const { installed_apps }: any = await fetchInstalledAppList(app.id) || {}
if (installed_apps?.length > 0) if (installed_apps?.length > 0)
window.open(`${basePath}/explore/installed/${installed_apps[0].id}`, '_blank') window.open(`${WEB_PREFIX}/explore/installed/${installed_apps[0].id}`, '_blank')
else else
throw new Error('No app found in Explore') throw new Error('No app found in Explore')
} }

@ -24,7 +24,7 @@ import {
PortalToFollowElemContent, PortalToFollowElemContent,
PortalToFollowElemTrigger, PortalToFollowElemTrigger,
} from '@/app/components/base/portal-to-follow-elem' } from '@/app/components/base/portal-to-follow-elem'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import { fetchInstalledAppList } from '@/service/explore' import { fetchInstalledAppList } from '@/service/explore'
import EmbeddedModal from '@/app/components/app/overview/embedded' import EmbeddedModal from '@/app/components/app/overview/embedded'
import { useStore as useAppStore } from '@/app/components/app/store' import { useStore as useAppStore } from '@/app/components/app/store'
@ -76,7 +76,7 @@ const AppPublisher = ({
const appDetail = useAppStore(state => state.appDetail) const appDetail = useAppStore(state => state.appDetail)
const { app_base_url: appBaseURL = '', access_token: accessToken = '' } = appDetail?.site ?? {} const { app_base_url: appBaseURL = '', access_token: accessToken = '' } = appDetail?.site ?? {}
const appMode = (appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow') ? 'chat' : appDetail.mode const appMode = (appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow') ? 'chat' : appDetail.mode
const appURL = `${appBaseURL}${basePath}/${appMode}/${accessToken}` const appURL = `${appBaseURL}/${appMode}/${accessToken}`
const isChatApp = ['chat', 'agent-chat', 'completion'].includes(appDetail?.mode || '') const isChatApp = ['chat', 'agent-chat', 'completion'].includes(appDetail?.mode || '')
const language = useGetLanguage() const language = useGetLanguage()
@ -121,7 +121,7 @@ const AppPublisher = ({
try { try {
const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {} const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
if (installed_apps?.length > 0) if (installed_apps?.length > 0)
window.open(`${basePath}/explore/installed/${installed_apps[0].id}`, '_blank') window.open(`${WEB_PREFIX}/explore/installed/${installed_apps[0].id}`, '_blank')
else else
throw new Error('No app found in Explore') throw new Error('No app found in Explore')
} }

@ -14,7 +14,6 @@ import Loading from '@/app/components/base/loading'
import Badge from '@/app/components/base/badge' import Badge from '@/app/components/base/badge'
import { useKnowledge } from '@/hooks/use-knowledge' import { useKnowledge } from '@/hooks/use-knowledge'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
import { basePath } from '@/utils/var'
export type ISelectDataSetProps = { export type ISelectDataSetProps = {
isShow: boolean isShow: boolean
@ -112,7 +111,7 @@ const SelectDataSet: FC<ISelectDataSetProps> = ({
}} }}
> >
<span className='text-text-tertiary'>{t('appDebug.feature.dataSet.noDataSet')}</span> <span className='text-text-tertiary'>{t('appDebug.feature.dataSet.noDataSet')}</span>
<Link href={`${basePath}/datasets/create`} className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link> <Link href={'/datasets/create'} className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link>
</div> </div>
)} )}

@ -14,7 +14,7 @@ import type { AppIconSelection } from '../../base/app-icon-picker'
import Button from '@/app/components/base/button' import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider' import Divider from '@/app/components/base/divider'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import AppsContext, { useAppContext } from '@/context/app-context' import AppsContext, { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context' import { useProviderContext } from '@/context/provider-context'
import { ToastContext } from '@/app/components/base/toast' import { ToastContext } from '@/app/components/base/toast'
@ -353,11 +353,11 @@ function AppScreenShot({ mode, show }: { mode: AppMode; show: boolean }) {
'workflow': 'Workflow', 'workflow': 'Workflow',
} }
return <picture> return <picture>
<source media="(resolution: 1x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}.png`} /> <source media="(resolution: 1x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}.png`} />
<source media="(resolution: 2x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}@2x.png`} /> <source media="(resolution: 2x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}@2x.png`} />
<source media="(resolution: 3x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}@3x.png`} /> <source media="(resolution: 3x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}@3x.png`} />
<Image className={show ? '' : 'hidden'} <Image className={show ? '' : 'hidden'}
src={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}.png`} src={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}.png`}
alt='App Screen Shot' alt='App Screen Shot'
width={664} height={448} /> width={664} height={448} />
</picture> </picture>

@ -7,7 +7,6 @@ import { usePathname } from 'next/navigation'
import { useDebounce } from 'ahooks' import { useDebounce } from 'ahooks'
import { omit } from 'lodash-es' import { omit } from 'lodash-es'
import dayjs from 'dayjs' import dayjs from 'dayjs'
import { basePath } from '@/utils/var'
import { Trans, useTranslation } from 'react-i18next' import { Trans, useTranslation } from 'react-i18next'
import List from './list' import List from './list'
import Filter, { TIME_PERIOD_MAPPING } from './filter' import Filter, { TIME_PERIOD_MAPPING } from './filter'
@ -110,7 +109,7 @@ const Logs: FC<ILogsProps> = ({ appDetail }) => {
? <Loading type='app' /> ? <Loading type='app' />
: total > 0 : total > 0
? <List logs={isChatMode ? chatConversations : completionConversations} appDetail={appDetail} onRefresh={isChatMode ? mutateChatList : mutateCompletionList} /> ? <List logs={isChatMode ? chatConversations : completionConversations} appDetail={appDetail} onRefresh={isChatMode ? mutateChatList : mutateCompletionList} />
: <EmptyElement appUrl={`${appDetail.site.app_base_url}${basePath}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} /> : <EmptyElement appUrl={`${appDetail.site.app_base_url}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
} }
{/* Show Pagination only if the total is more than the limit */} {/* Show Pagination only if the total is more than the limit */}
{(total && total > APP_PAGE_LIMIT) {(total && total > APP_PAGE_LIMIT)

@ -17,7 +17,6 @@ import type { ConfigParams } from './settings'
import Tooltip from '@/app/components/base/tooltip' import Tooltip from '@/app/components/base/tooltip'
import AppBasic from '@/app/components/app-sidebar/basic' import AppBasic from '@/app/components/app-sidebar/basic'
import { asyncRunSafe, randomString } from '@/utils' import { asyncRunSafe, randomString } from '@/utils'
import { basePath } from '@/utils/var'
import Button from '@/app/components/base/button' import Button from '@/app/components/base/button'
import Switch from '@/app/components/base/switch' import Switch from '@/app/components/base/switch'
import Divider from '@/app/components/base/divider' import Divider from '@/app/components/base/divider'
@ -89,7 +88,7 @@ function AppCard({
const runningStatus = isApp ? appInfo.enable_site : appInfo.enable_api const runningStatus = isApp ? appInfo.enable_site : appInfo.enable_api
const { app_base_url, access_token } = appInfo.site ?? {} const { app_base_url, access_token } = appInfo.site ?? {}
const appMode = (appInfo.mode !== 'completion' && appInfo.mode !== 'workflow') ? 'chat' : appInfo.mode const appMode = (appInfo.mode !== 'completion' && appInfo.mode !== 'workflow') ? 'chat' : appInfo.mode
const appUrl = `${app_base_url}${basePath}/${appMode}/${access_token}` const appUrl = `${app_base_url}/${appMode}/${access_token}`
const apiUrl = appInfo?.api_base_url const apiUrl = appInfo?.api_base_url
const genClickFuncByName = (opName: string) => { const genClickFuncByName = (opName: string) => {

@ -13,7 +13,6 @@ import { IS_CE_EDITION } from '@/config'
import type { SiteInfo } from '@/models/share' import type { SiteInfo } from '@/models/share'
import { useThemeContext } from '@/app/components/base/chat/embedded-chatbot/theme/theme-context' import { useThemeContext } from '@/app/components/base/chat/embedded-chatbot/theme/theme-context'
import ActionButton from '@/app/components/base/action-button' import ActionButton from '@/app/components/base/action-button'
import { basePath } from '@/utils/var'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
type Props = { type Props = {
@ -29,7 +28,7 @@ const OPTION_MAP = {
iframe: { iframe: {
getContent: (url: string, token: string) => getContent: (url: string, token: string) =>
`<iframe `<iframe
src="${url}${basePath}/chatbot/${token}" src="${url}/chatbot/${token}"
style="width: 100%; height: 100%; min-height: 700px" style="width: 100%; height: 100%; min-height: 700px"
frameborder="0" frameborder="0"
allow="microphone"> allow="microphone">
@ -44,7 +43,7 @@ const OPTION_MAP = {
isDev: true` isDev: true`
: ''}${IS_CE_EDITION : ''}${IS_CE_EDITION
? `, ? `,
baseUrl: '${url}${basePath}'` baseUrl: '${url}'`
: ''}, : ''},
systemVariables: { systemVariables: {
// user_id: 'YOU CAN DEFINE USER ID HERE', // user_id: 'YOU CAN DEFINE USER ID HERE',
@ -53,7 +52,7 @@ const OPTION_MAP = {
} }
</script> </script>
<script <script
src="${url}${basePath}/embed.min.js" src="${url}/embed.min.js"
id="${token}" id="${token}"
defer> defer>
</script> </script>
@ -68,7 +67,7 @@ const OPTION_MAP = {
</style>`, </style>`,
}, },
chromePlugin: { chromePlugin: {
getContent: (url: string, token: string) => `ChatBot URL: ${url}${basePath}/chatbot/${token}`, getContent: (url: string, token: string) => `ChatBot URL: ${url}/chatbot/${token}`,
}, },
} }
const prefixEmbedded = 'appOverview.overview.appInfo.embedded' const prefixEmbedded = 'appOverview.overview.appInfo.embedded'

@ -11,7 +11,6 @@ import timezone from 'dayjs/plugin/timezone'
import { Trans, useTranslation } from 'react-i18next' import { Trans, useTranslation } from 'react-i18next'
import Link from 'next/link' import Link from 'next/link'
import List from './list' import List from './list'
import { basePath } from '@/utils/var'
import Filter, { TIME_PERIOD_MAPPING } from './filter' import Filter, { TIME_PERIOD_MAPPING } from './filter'
import Pagination from '@/app/components/base/pagination' import Pagination from '@/app/components/base/pagination'
import Loading from '@/app/components/base/loading' import Loading from '@/app/components/base/loading'
@ -101,7 +100,7 @@ const Logs: FC<ILogsProps> = ({ appDetail }) => {
? <Loading type='app' /> ? <Loading type='app' />
: total > 0 : total > 0
? <List logs={workflowLogs} appDetail={appDetail} onRefresh={mutate} /> ? <List logs={workflowLogs} appDetail={appDetail} onRefresh={mutate} />
: <EmptyElement appUrl={`${appDetail.site.app_base_url}${basePath}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} /> : <EmptyElement appUrl={`${appDetail.site.app_base_url}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
} }
{/* Show Pagination only if the total is more than the limit */} {/* Show Pagination only if the total is more than the limit */}
{(total && total > APP_PAGE_LIMIT) {(total && total > APP_PAGE_LIMIT)

@ -1,4 +1,4 @@
import React, { useCallback } from 'react' import React, { memo, useCallback } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import { useChatWithHistoryContext } from '../context' import { useChatWithHistoryContext } from '../context'
import Input from '@/app/components/base/input' import Input from '@/app/components/base/input'
@ -112,4 +112,4 @@ const InputsFormContent = ({ showTip }: Props) => {
) )
} }
export default InputsFormContent export default memo(InputsFormContent)

@ -424,6 +424,8 @@ export const useChat = (
const response = responseItem as any const response = responseItem as any
if (thought.message_id && !hasSetResponseId) if (thought.message_id && !hasSetResponseId)
response.id = thought.message_id response.id = thought.message_id
if (thought.conversation_id)
response.conversationId = thought.conversation_id
if (response.agent_thoughts.length === 0) { if (response.agent_thoughts.length === 0) {
response.agent_thoughts.push(thought) response.agent_thoughts.push(thought)

@ -41,6 +41,7 @@ export type ThoughtItem = {
tool_input: string tool_input: string
tool_labels?: { [key: string]: TypeWithI18N } tool_labels?: { [key: string]: TypeWithI18N }
message_id: string message_id: string
conversation_id: string
observation: string observation: string
position: number position: number
files?: string[] files?: string[]

@ -1,4 +1,4 @@
import React, { useCallback } from 'react' import React, { memo, useCallback } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import { useEmbeddedChatbotContext } from '../context' import { useEmbeddedChatbotContext } from '../context'
import Input from '@/app/components/base/input' import Input from '@/app/components/base/input'
@ -112,4 +112,4 @@ const InputsFormContent = ({ showTip }: Props) => {
) )
} }
export default InputsFormContent export default memo(InputsFormContent)

@ -1,5 +1,5 @@
import type { FC } from 'react' import type { FC } from 'react'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
type LogoEmbeddedChatAvatarProps = { type LogoEmbeddedChatAvatarProps = {
className?: string className?: string
@ -9,7 +9,7 @@ const LogoEmbeddedChatAvatar: FC<LogoEmbeddedChatAvatarProps> = ({
}) => { }) => {
return ( return (
<img <img
src={`${basePath}/logo/logo-embedded-chat-avatar.png`} src={`${WEB_PREFIX}/logo/logo-embedded-chat-avatar.png`}
className={`block h-10 w-10 ${className}`} className={`block h-10 w-10 ${className}`}
alt='logo' alt='logo'
/> />

@ -1,6 +1,6 @@
import classNames from '@/utils/classnames' import classNames from '@/utils/classnames'
import type { FC } from 'react' import type { FC } from 'react'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
type LogoEmbeddedChatHeaderProps = { type LogoEmbeddedChatHeaderProps = {
className?: string className?: string
@ -14,7 +14,7 @@ const LogoEmbeddedChatHeader: FC<LogoEmbeddedChatHeaderProps> = ({
<source media="(resolution: 2x)" srcSet='/logo/logo-embedded-chat-header@2x.png' /> <source media="(resolution: 2x)" srcSet='/logo/logo-embedded-chat-header@2x.png' />
<source media="(resolution: 3x)" srcSet='/logo/logo-embedded-chat-header@3x.png' /> <source media="(resolution: 3x)" srcSet='/logo/logo-embedded-chat-header@3x.png' />
<img <img
src={`${basePath}/logo/logo-embedded-chat-header.png`} src={`${WEB_PREFIX}/logo/logo-embedded-chat-header.png`}
alt='logo' alt='logo'
className={classNames('block h-6 w-auto', className)} className={classNames('block h-6 w-auto', className)}
/> />

@ -1,6 +1,6 @@
'use client' 'use client'
import type { FC } from 'react' import type { FC } from 'react'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import classNames from '@/utils/classnames' import classNames from '@/utils/classnames'
type LogoSiteProps = { type LogoSiteProps = {
@ -12,7 +12,7 @@ const LogoSite: FC<LogoSiteProps> = ({
}) => { }) => {
return ( return (
<img <img
src={`${basePath}/logo/logo.png`} src={`${WEB_PREFIX}/logo/logo.png`}
className={classNames('block w-[22.651px] h-[24.5px]', className)} className={classNames('block w-[22.651px] h-[24.5px]', className)}
alt='logo' alt='logo'
/> />

@ -41,9 +41,10 @@ const useThinkTimer = (children: any) => {
const timerRef = useRef<NodeJS.Timeout>() const timerRef = useRef<NodeJS.Timeout>()
useEffect(() => { useEffect(() => {
if (isComplete) return
timerRef.current = setInterval(() => { timerRef.current = setInterval(() => {
if (!isComplete) setElapsedTime(Math.floor((Date.now() - startTime) / 100) / 10)
setElapsedTime(Math.floor((Date.now() - startTime) / 100) / 10)
}, 100) }, 100)
return () => { return () => {
@ -53,11 +54,8 @@ const useThinkTimer = (children: any) => {
}, [startTime, isComplete]) }, [startTime, isComplete])
useEffect(() => { useEffect(() => {
if (hasEndThink(children)) { if (hasEndThink(children))
setIsComplete(true) setIsComplete(true)
if (timerRef.current)
clearInterval(timerRef.current)
}
}, [children]) }, [children])
return { elapsedTime, isComplete } return { elapsedTime, isComplete }

@ -2,47 +2,55 @@
@layer components { @layer components {
.premium-badge { .premium-badge {
@apply inline-flex justify-center items-center rounded-md border box-border border-white/95 text-white @apply shrink-0 relative inline-flex justify-center items-center rounded-md box-border border border-transparent text-white shadow-xs hover:shadow-lg bg-origin-border overflow-hidden;
background-clip: padding-box, border-box;
}
.allowHover {
@apply cursor-pointer;
} }
/* m is for the regular button */ /* m is for the regular button */
.premium-badge-m { .premium-badge-m {
@apply border shadow-lg !p-1 h-6 w-auto @apply !p-1 h-6 w-auto
} }
.premium-badge-s { .premium-badge-s {
@apply border-[0.5px] shadow-xs !px-1 !py-[3px] h-[18px] w-auto @apply border-[0.5px] !px-1 !py-[3px] h-[18px] w-auto
} }
.premium-badge-blue { .premium-badge-blue {
@apply bg-gradient-to-r from-[#5289ffe6] to-[#155aefe6] bg-util-colors-blue-blue-200 @apply bg-util-colors-blue-blue-200;
background-image: linear-gradient(90deg, #5289ffe6 0%, #155aefe6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #155aef 100%);
} }
.premium-badge-indigo {
@apply bg-gradient-to-r from-[#8098f9e6] to-[#444ce7e6] bg-util-colors-indigo-indigo-200
}
.premium-badge-gray {
@apply bg-gradient-to-r from-[#98a2b2e6] to-[#676f83e6] bg-util-colors-gray-gray-200
}
.premium-badge-orange {
@apply bg-gradient-to-r from-[#ff692ee6] to-[#e04f16e6] bg-util-colors-orange-orange-200
}
.premium-badge-blue.allowHover:hover { .premium-badge-blue.allowHover:hover {
@apply bg-gradient-to-r from-[#296dffe6] to-[#004aebe6] bg-util-colors-blue-blue-300 cursor-pointer @apply bg-util-colors-blue-blue-300;
background-image: linear-gradient(90deg, #296dffe6 0%, #004aebe6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #00329e 100%);
} }
.premium-badge-indigo {
@apply bg-util-colors-indigo-indigo-200;
background-image: linear-gradient(90deg, #8098f9e6 0%, #444ce7e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #6172f3 100%);
}
.premium-badge-indigo.allowHover:hover { .premium-badge-indigo.allowHover:hover {
@apply bg-gradient-to-r from-[#6172f3e6] to-[#2d31a6e6] bg-util-colors-indigo-indigo-300 cursor-pointer @apply bg-util-colors-indigo-indigo-300;
background-image: linear-gradient(90deg, #6172f3e6 0%, #2d31a6e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #2d31a6 100%);
} }
.premium-badge-gray {
@apply bg-util-colors-gray-gray-200;
background-image: linear-gradient(90deg, #98a2b2e6 0%, #676f83e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #676f83 100%);
}
.premium-badge-gray.allowHover:hover { .premium-badge-gray.allowHover:hover {
@apply bg-gradient-to-r from-[#676f83e6] to-[#354052e6] bg-util-colors-gray-gray-300 cursor-pointer @apply bg-util-colors-gray-gray-300;
background-image: linear-gradient(90deg, #676f83e6 0%, #354052e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #354052 100%);
} }
.premium-badge-orange {
@apply bg-util-colors-orange-orange-200;
background-image: linear-gradient(90deg, #ff692ee6 0%, #e04f16e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #e62e05 100%);
}
.premium-badge-orange.allowHover:hover { .premium-badge-orange.allowHover:hover {
@apply bg-gradient-to-r from-[#ff4405e6] to-[#b93815e6] bg-util-colors-orange-orange-300 cursor-pointer @apply bg-util-colors-orange-orange-300;
background-image: linear-gradient(90deg, #ff4405e6 0%, #b93815e6 100%), linear-gradient(135deg, var(--color-premium-badge-border-highlight-color) 0%, #e62e05 100%);
} }
} }

@ -61,13 +61,9 @@ const PremiumBadge: React.FC<PremiumBadgeProps> = ({
{children} {children}
<Highlight <Highlight
className={classNames( className={classNames(
'absolute top-0 opacity-50 hover:opacity-80', 'absolute top-0 opacity-50 right-1/2 translate-x-[20%] transition-all duration-100 ease-out hover:opacity-80 hover:translate-x-[30%]',
size === 's' ? 'h-[18px] w-12' : 'h-6 w-12', size === 's' ? 'h-[18px] w-12' : 'h-6 w-12',
)} )}
style={{
right: '50%',
transform: 'translateX(10%)',
}}
/> />
</div> </div>
) )

@ -314,7 +314,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
</Property> </Property>
<Property name='indexing_technique' type='string' key='indexing_technique'> <Property name='indexing_technique' type='string' key='indexing_technique'>
Index technique (optional) Index technique (optional)
If this is not set, embedding_model, embedding_provider_name and retrieval_model will be set to null If this is not set, embedding_model, embedding_model_provider and retrieval_model will be set to null
- <code>high_quality</code> High quality - <code>high_quality</code> High quality
- <code>economy</code> Economy - <code>economy</code> Economy
</Property> </Property>
@ -338,7 +338,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
<Property name='embedding_model' type='str' key='embedding_model'> <Property name='embedding_model' type='str' key='embedding_model'>
Embedding model name (optional) Embedding model name (optional)
</Property> </Property>
<Property name='embedding_provider_name' type='str' key='embedding_provider_name'> <Property name='embedding_model_provider' type='str' key='embedding_model_provider'>
Embedding model provider name (optional) Embedding model provider name (optional)
</Property> </Property>
<Property name='retrieval_model' type='object' key='retrieval_model'> <Property name='retrieval_model' type='object' key='retrieval_model'>
@ -1040,10 +1040,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1335,10 +1333,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1620,10 +1616,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -337,7 +337,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
<Property name='embedding_model' type='str' key='embedding_model'> <Property name='embedding_model' type='str' key='embedding_model'>
埋め込みモデル名(任意) 埋め込みモデル名(任意)
</Property> </Property>
<Property name='embedding_provider_name' type='str' key='embedding_provider_name'> <Property name='embedding_model_provider' type='str' key='embedding_model_provider'>
埋め込みモデルのプロバイダ名(任意) 埋め込みモデルのプロバイダ名(任意)
</Property> </Property>
<Property name='retrieval_model' type='object' key='retrieval_model'> <Property name='retrieval_model' type='object' key='retrieval_model'>
@ -501,7 +501,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="レスポンス"> <CodeGroup title="レスポンス">
```text {{ title: 'Response' }} ```text {{ title: 'レスポンス' }}
204 No Content 204 No Content
``` ```
</CodeGroup> </CodeGroup>
@ -797,10 +797,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="レスポンス"> <CodeGroup title="レスポンス">
```json {{ title: 'Response' }} ```text {{ title: 'レスポンス' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1092,10 +1090,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="レスポンス"> <CodeGroup title="レスポンス">
```json {{ title: 'Response' }} ```text {{ title: 'レスポンス' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1377,10 +1373,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="レスポンス"> <CodeGroup title="レスポンス">
```json {{ title: 'Response' }} ```text {{ title: 'レスポンス' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -341,7 +341,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
<Property name='embedding_model' type='str' key='embedding_model'> <Property name='embedding_model' type='str' key='embedding_model'>
Embedding 模型名称 Embedding 模型名称
</Property> </Property>
<Property name='embedding_provider_name' type='str' key='embedding_provider_name'> <Property name='embedding_model_provider' type='str' key='embedding_model_provider'>
Embedding 模型供应商 Embedding 模型供应商
</Property> </Property>
<Property name='retrieval_model' type='object' key='retrieval_model'> <Property name='retrieval_model' type='object' key='retrieval_model'>
@ -1047,10 +1047,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1342,10 +1340,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1628,10 +1624,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
``` ```
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -36,7 +36,7 @@ const CreateMetadataModal: FC<Props> = ({
{trigger} {trigger}
</PortalToFollowElemTrigger> </PortalToFollowElemTrigger>
<PortalToFollowElemContent className='z-[1000]'> <PortalToFollowElemContent className='z-[1000]'>
<CreateContent {...createContentProps} onClose={() => setOpen(false)} /> <CreateContent {...createContentProps} onClose={() => setOpen(false)} onBack={() => setOpen(false)} />
</PortalToFollowElemContent> </PortalToFollowElemContent>
</PortalToFollowElem > </PortalToFollowElem >

@ -71,6 +71,7 @@ const SelectMetadataModal: FC<Props> = ({
onSave={handleSave} onSave={handleSave}
hasBack hasBack
onBack={() => setStep(Step.select)} onBack={() => setStep(Step.select)}
onClose={() => setStep(Step.select)}
/> />
)} )}
</PortalToFollowElemContent> </PortalToFollowElemContent>

@ -383,6 +383,69 @@ The text generation application offers non-session support and is ideal for tran
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='Get feedbacks of application'
name='#app-feedbacks'
/>
<Row>
<Col>
Get application's feedbacks.
### Query
<Properties>
<Property name='page' type='string' key='page'>
optionalpaginationdefault1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
optional records per page default20
</Property>
</Properties>
### Response
- `data` (List) return apps feedback list.
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/text-to-audio' url='/text-to-audio'
method='POST' method='POST'

@ -381,6 +381,69 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</Row> </Row>
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='アプリのメッセージの「いいね」とフィードバックを取得'
name='#app-feedbacks'
/>
<Row>
<Col>
アプリのエンドユーザーからのフィードバックや「いいね」を取得します。
### クエリ
<Properties>
<Property name='page' type='string' key='page'>
任意ページ番号。デフォルト値1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
任意1ページあたりの件数。デフォルト値20
</Property>
</Properties>
### レスポンス
- `data` (リスト) このアプリの「いいね」とフィードバックの一覧を返します。
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/text-to-audio' url='/text-to-audio'
method='POST' method='POST'

@ -355,6 +355,68 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</Col> </Col>
</Row> </Row>
---
<Heading
url='/app/feedbacks'
method='GET'
title='Get feedbacks of application'
name='#app-feedbacks'
/>
<Row>
<Col>
Get application's feedbacks.
### Query
<Properties>
<Property name='page' type='string' key='page'>
optionalpaginationdefault1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
optional records per page default20
</Property>
</Properties>
### Response
- `data` (List) return apps feedback list.
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
--- ---
<Heading <Heading
@ -738,8 +800,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{"result": "success"} 204 No Content
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -487,6 +487,69 @@ Chat applications support session persistence, allowing previous chat history to
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='Get feedbacks of application'
name='#app-feedbacks'
/>
<Row>
<Col>
Get application's feedbacks.
### Query
<Properties>
<Property name='page' type='string' key='page'>
optionalpaginationdefault1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
optional records per page default20
</Property>
</Properties>
### Response
- `data` (List) return apps feedback list.
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -765,10 +828,8 @@ Chat applications support session persistence, allowing previous chat history to
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1432,8 +1493,8 @@ Chat applications support session persistence, allowing previous chat history to
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{"result": "success"} 204 No Content
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -487,6 +487,70 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='アプリのメッセージの「いいね」とフィードバックを取得'
name='#app-feedbacks'
/>
<Row>
<Col>
アプリのエンドユーザーからのフィードバックや「いいね」を取得します。
### クエリ
<Properties>
<Property name='page' type='string' key='page'>
任意ページ番号。デフォルト値1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
任意1ページあたりの件数。デフォルト値20
</Property>
</Properties>
### レスポンス
- `data` (リスト) このアプリの「いいね」とフィードバックの一覧を返します。
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -764,10 +828,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</CodeGroup> </CodeGroup>
<CodeGroup title="応答"> <CodeGroup title="応答">
```json {{ title: '応答' }} ```text {{ title: '応答' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -493,6 +493,71 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='获取APP的消息点赞和反馈'
name='#app-feedbacks'
/>
<Row>
<Col>
获取应用的终端用户反馈、点赞。
### Query
<Properties>
<Property name='page' type='string' key='page'>
选填分页默认值1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
选填每页数量默认值20
</Property>
</Properties>
### Response
- `data` (List) 返回该APP的点赞、反馈列表。
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -799,10 +864,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1456,8 +1519,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{"result": "success"} 204 No Content
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -450,6 +450,69 @@ Chat applications support session persistence, allowing previous chat history to
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='Get feedbacks of application'
name='#app-feedbacks'
/>
<Row>
<Col>
  Get the application's end-user feedback and likes.
### Query
<Properties>
<Property name='page' type='string' key='page'>
      Optional. Page number for pagination. Default: 1.
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
      Optional. Number of records per page. Default: 20.
</Property>
</Properties>
### Response
  - `data` (List) Returns the list of feedback entries for this app.
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -798,10 +861,8 @@ Chat applications support session persistence, allowing previous chat history to
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>
@ -1472,8 +1533,8 @@ Chat applications support session persistence, allowing previous chat history to
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{"result": "success"} 204 No Content
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -450,6 +450,70 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='アプリのメッセージの「いいね」とフィードバックを取得'
name='#app-feedbacks'
/>
<Row>
<Col>
アプリのエンドユーザーからのフィードバックや「いいね」を取得します。
### クエリ
<Properties>
<Property name='page' type='string' key='page'>
任意ページ番号。デフォルト値1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
任意1ページあたりの件数。デフォルト値20
</Property>
</Properties>
### レスポンス
- `data` (リスト) このアプリの「いいね」とフィードバックの一覧を返します。
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -797,10 +861,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</CodeGroup> </CodeGroup>
<CodeGroup title="応答"> <CodeGroup title="応答">
```json {{ title: '応答' }} ```text {{ title: '応答' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -464,6 +464,69 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
--- ---
<Heading
url='/app/feedbacks'
method='GET'
title='获取APP的消息点赞和反馈'
name='#app-feedbacks'
/>
<Row>
<Col>
获取应用的终端用户反馈、点赞。
### Query
<Properties>
<Property name='page' type='string' key='page'>
选填分页默认值1
</Property>
</Properties>
<Properties>
<Property name='limit' type='string' key='limit'>
选填每页数量默认值20
</Property>
</Properties>
### Response
- `data` (List) 返回该APP的点赞、反馈列表。
</Col>
<Col sticky>
<CodeGroup title="Request" tag="GET" label="/app/feedbacks" targetCode={`curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20'`}>
```bash {{ title: 'cURL' }}
curl -X GET '${props.appDetail.api_base_url}/app/feedbacks?page=1&limit=20' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"data": [
{
"id": "8c0fbed8-e2f9-49ff-9f0e-15a35bdd0e25",
"app_id": "f252d396-fe48-450e-94ec-e184218e7346",
"conversation_id": "2397604b-9deb-430e-b285-4726e51fd62d",
"message_id": "709c0b0f-0a96-4a4e-91a4-ec0889937b11",
"rating": "like",
"content": "message feedback information-3",
"from_source": "user",
"from_end_user_id": "74286412-9a1a-42c1-929c-01edb1d381d5",
"from_account_id": null,
"created_at": "2025-04-24T09:24:38",
"updated_at": "2025-04-24T09:24:38"
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Heading <Heading
url='/messages/{message_id}/suggested' url='/messages/{message_id}/suggested'
method='GET' method='GET'
@ -811,10 +874,8 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</CodeGroup> </CodeGroup>
<CodeGroup title="Response"> <CodeGroup title="Response">
```json {{ title: 'Response' }} ```text {{ title: 'Response' }}
{ 204 No Content
"result": "success"
}
``` ```
</CodeGroup> </CodeGroup>
</Col> </Col>

@ -4,7 +4,7 @@ import { useTranslation } from 'react-i18next'
import { Menu, MenuButton, MenuItems, Transition } from '@headlessui/react' import { Menu, MenuButton, MenuItems, Transition } from '@headlessui/react'
import { RiArrowDownSLine } from '@remixicon/react' import { RiArrowDownSLine } from '@remixicon/react'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import PlanBadge from '@/app/components/header/plan-badge' import PlanBadge from '@/app/components/header/plan-badge'
import { switchWorkspace } from '@/service/common' import { switchWorkspace } from '@/service/common'
import { useWorkspacesContext } from '@/context/workspace-context' import { useWorkspacesContext } from '@/context/workspace-context'
@ -23,7 +23,7 @@ const WorkplaceSelector = () => {
return return
await switchWorkspace({ url: '/workspaces/switch', body: { tenant_id } }) await switchWorkspace({ url: '/workspaces/switch', body: { tenant_id } })
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
location.assign(`${location.origin}${basePath}`) location.assign(WEB_PREFIX)
} }
catch { catch {
notify({ type: 'error', message: t('common.provider.saveFailed') }) notify({ type: 'error', message: t('common.provider.saveFailed') })

@ -2,6 +2,7 @@
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
import Modal from '@/app/components/base/modal' import Modal from '@/app/components/base/modal'
import Input from '@/app/components/base/input' import Input from '@/app/components/base/input'
import { WEB_PREFIX } from '@/config'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import { useState } from 'react' import { useState } from 'react'
import { useContext } from 'use-context-selector' import { useContext } from 'use-context-selector'
@ -33,7 +34,7 @@ const EditWorkspaceModal = ({
}, },
}) })
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
location.assign(`${location.origin}`) location.assign(WEB_PREFIX)
} }
catch { catch {
notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') }) notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') })

@ -1,6 +1,5 @@
'use client' 'use client'
import React, { useCallback, useEffect, useRef, useState } from 'react' import React, { useCallback, useEffect, useRef, useState } from 'react'
import { basePath } from '@/utils/var'
import { t } from 'i18next' import { t } from 'i18next'
import copy from 'copy-to-clipboard' import copy from 'copy-to-clipboard'
import s from './index.module.css' import s from './index.module.css'
@ -19,7 +18,8 @@ const InvitationLink = ({
const selector = useRef(`invite-link-${randomString(4)}`) const selector = useRef(`invite-link-${randomString(4)}`)
const copyHandle = useCallback(() => { const copyHandle = useCallback(() => {
copy(`${!value.url.startsWith('http') ? window.location.origin : ''}${basePath}${value.url}`) // No prefix is needed here because the backend has already processed it
copy(`${!value.url.startsWith('http') ? window.location.origin : ''}${value.url}`)
setIsCopied(true) setIsCopied(true)
}, [value]) }, [value])
@ -42,7 +42,7 @@ const InvitationLink = ({
<Tooltip <Tooltip
popupContent={isCopied ? `${t('appApi.copied')}` : `${t('appApi.copy')}`} popupContent={isCopied ? `${t('appApi.copied')}` : `${t('appApi.copy')}`}
> >
<div className='r-0 absolute left-0 top-0 w-full cursor-pointer truncate pl-2 pr-2' onClick={copyHandle}>{basePath + value.url}</div> <div className='r-0 absolute left-0 top-0 w-full cursor-pointer truncate pl-2 pr-2' onClick={copyHandle}>{value.url}</div>
</Tooltip> </Tooltip>
</div> </div>
<div className="h-4 shrink-0 border bg-divider-regular" /> <div className="h-4 shrink-0 border bg-divider-regular" />

@ -1,6 +1,5 @@
import type { FC } from 'react' import type { FC } from 'react'
import type { ModelProvider } from '../declarations' import type { ModelProvider } from '../declarations'
import { basePath } from '@/utils/var'
import { useLanguage } from '../hooks' import { useLanguage } from '../hooks'
import { Openai } from '@/app/components/base/icons/src/vender/other' import { Openai } from '@/app/components/base/icons/src/vender/other'
import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm' import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm'
@ -41,7 +40,7 @@ const ProviderIcon: FC<ProviderIconProps> = ({
<div className={cn('inline-flex items-center gap-2', className)}> <div className={cn('inline-flex items-center gap-2', className)}>
<img <img
alt='provider-icon' alt='provider-icon'
src={basePath + renderI18nObject(provider.icon_small, language)} src={renderI18nObject(provider.icon_small, language)}
className='h-6 w-6' className='h-6 w-6'
/> />
<div className='system-md-semibold text-text-primary'> <div className='system-md-semibold text-text-primary'>

@ -14,7 +14,6 @@ import Nav from '../nav'
import type { NavItem } from '../nav/nav-selector' import type { NavItem } from '../nav/nav-selector'
import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets' import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
import type { DataSetListResponse } from '@/models/datasets' import type { DataSetListResponse } from '@/models/datasets'
import { basePath } from '@/utils/var'
const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => { const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
if (!pageIndex || previousPageData.has_more) if (!pageIndex || previousPageData.has_more)
@ -57,7 +56,7 @@ const DatasetNav = () => {
icon_background: dataset.icon_background, icon_background: dataset.icon_background,
})) as NavItem[]} })) as NavItem[]}
createText={t('common.menus.newDataset')} createText={t('common.menus.newDataset')}
onCreate={() => router.push(`${basePath}/datasets/create`)} onCreate={() => router.push('/datasets/create')}
onLoadmore={handleLoadmore} onLoadmore={handleLoadmore}
/> />
) )

@ -2,7 +2,7 @@ import {
memo, memo,
useCallback, useCallback,
} from 'react' } from 'react'
import { basePath } from '@/utils/var' import Link from 'next/link'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import { import {
RiAddLine, RiAddLine,
@ -54,7 +54,7 @@ const Blocks = ({
> >
<div className='flex h-[22px] w-full items-center justify-between pl-3 pr-1 text-xs font-medium text-gray-500'> <div className='flex h-[22px] w-full items-center justify-between pl-3 pr-1 text-xs font-medium text-gray-500'>
{toolWithProvider.label[language]} {toolWithProvider.label[language]}
<a className='hidden cursor-pointer items-center group-hover:flex' href={`${basePath}/tools?category=${toolWithProvider.type}`} target='_blank'>{t('tools.addToolModal.manageInTools')}<ArrowUpRight className='ml-0.5 h-3 w-3' /></a> <Link className='hidden cursor-pointer items-center group-hover:flex' href={`/tools?category=${toolWithProvider.type}`} target='_blank'>{t('tools.addToolModal.manageInTools')}<ArrowUpRight className='ml-0.5 h-3 w-3' /></Link>
</div> </div>
{list.map((tool) => { {list.map((tool) => {
const labelContent = (() => { const labelContent = (() => {

@ -53,7 +53,10 @@ const ProviderList = () => {
}) })
}, [activeTab, tagFilterValue, keywords, collectionList]) }, [activeTab, tagFilterValue, keywords, collectionList])
const [currentProvider, setCurrentProvider] = useState<Collection | undefined>() const [currentProviderId, setCurrentProviderId] = useState<string | undefined>()
const currentProvider = useMemo<Collection | undefined>(() => {
return filteredCollectionList.find(collection => collection.id === currentProviderId)
}, [currentProviderId, filteredCollectionList])
const { data: pluginList } = useInstalledPluginList() const { data: pluginList } = useInstalledPluginList()
const invalidateInstalledPluginList = useInvalidateInstalledPluginList() const invalidateInstalledPluginList = useInvalidateInstalledPluginList()
const currentPluginDetail = useMemo(() => { const currentPluginDetail = useMemo(() => {
@ -70,14 +73,14 @@ const ProviderList = () => {
> >
<div className={cn( <div className={cn(
'sticky top-0 z-20 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]', 'sticky top-0 z-20 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]',
currentProvider && 'pr-6', currentProviderId && 'pr-6',
)}> )}>
<TabSliderNew <TabSliderNew
value={activeTab} value={activeTab}
onChange={(state) => { onChange={(state) => {
setActiveTab(state) setActiveTab(state)
if (state !== activeTab) if (state !== activeTab)
setCurrentProvider(undefined) setCurrentProviderId(undefined)
}} }}
options={options} options={options}
/> />
@ -102,12 +105,12 @@ const ProviderList = () => {
{filteredCollectionList.map(collection => ( {filteredCollectionList.map(collection => (
<div <div
key={collection.id} key={collection.id}
onClick={() => setCurrentProvider(collection)} onClick={() => setCurrentProviderId(collection.id)}
> >
<Card <Card
className={cn( className={cn(
'cursor-pointer border-[1.5px] border-transparent', 'cursor-pointer border-[1.5px] border-transparent',
currentProvider?.id === collection.id && 'border-components-option-card-option-selected-border', currentProviderId === collection.id && 'border-components-option-card-option-selected-border',
)} )}
hideCornerMark hideCornerMark
payload={{ payload={{
@ -146,14 +149,14 @@ const ProviderList = () => {
{currentProvider && !currentProvider.plugin_id && ( {currentProvider && !currentProvider.plugin_id && (
<ProviderDetail <ProviderDetail
collection={currentProvider} collection={currentProvider}
onHide={() => setCurrentProvider(undefined)} onHide={() => setCurrentProviderId(undefined)}
onRefreshData={refetch} onRefreshData={refetch}
/> />
)} )}
<PluginDetailPanel <PluginDetailPanel
detail={currentPluginDetail} detail={currentPluginDetail}
onUpdate={() => invalidateInstalledPluginList()} onUpdate={() => invalidateInstalledPluginList()}
onHide={() => setCurrentProvider(undefined)} onHide={() => setCurrentProviderId(undefined)}
/> />
</> </>
) )

@ -6,7 +6,7 @@ import {
RiCloseLine, RiCloseLine,
} from '@remixicon/react' } from '@remixicon/react'
import { AuthHeaderPrefix, AuthType, CollectionType } from '../types' import { AuthHeaderPrefix, AuthType, CollectionType } from '../types'
import { basePath } from '@/utils/var' import Link from 'next/link'
import type { Collection, CustomCollectionBackend, Tool, WorkflowToolProviderRequest, WorkflowToolProviderResponse } from '../types' import type { Collection, CustomCollectionBackend, Tool, WorkflowToolProviderRequest, WorkflowToolProviderResponse } from '../types'
import ToolItem from './tool-item' import ToolItem from './tool-item'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
@ -279,10 +279,10 @@ const ProviderDetail = ({
variant='primary' variant='primary'
className={cn('my-3 w-[183px] shrink-0')} className={cn('my-3 w-[183px] shrink-0')}
> >
<a className='flex items-center' href={`${basePath}/app/${(customCollection as WorkflowToolProviderResponse).workflow_app_id}/workflow`} rel='noreferrer' target='_blank'> <Link className='flex items-center' href={`/app/${(customCollection as WorkflowToolProviderResponse).workflow_app_id}/workflow`} rel='noreferrer' target='_blank'>
<div className='system-sm-medium'>{t('tools.openInStudio')}</div> <div className='system-sm-medium'>{t('tools.openInStudio')}</div>
<LinkExternal02 className='ml-1 h-4 w-4' /> <LinkExternal02 className='ml-1 h-4 w-4' />
</a> </Link>
</Button> </Button>
<Button <Button
className={cn('my-3 w-[183px] shrink-0')} className={cn('my-3 w-[183px] shrink-0')}

@ -3,6 +3,7 @@ import type { FC } from 'react'
import Editor, { loader } from '@monaco-editor/react' import Editor, { loader } from '@monaco-editor/react'
import React, { useEffect, useMemo, useRef, useState } from 'react' import React, { useEffect, useMemo, useRef, useState } from 'react'
import Base from '../base' import Base from '../base'
import { WEB_PREFIX } from '@/config'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types' import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
import { import {
@ -14,7 +15,7 @@ import './style.css'
import { noop } from 'lodash-es' import { noop } from 'lodash-es'
// load file from local instead of cdn https://github.com/suren-atoyan/monaco-react/issues/482 // load file from local instead of cdn https://github.com/suren-atoyan/monaco-react/issues/482
loader.config({ paths: { vs: '/vs' } }) loader.config({ paths: { vs: `${WEB_PREFIX}/vs` } })
const CODE_EDITOR_LINE_HEIGHT = 18 const CODE_EDITOR_LINE_HEIGHT = 18

@ -9,44 +9,90 @@ import { Iteration } from '@/app/components/base/icons/src/vender/workflow'
type IterationLogTriggerProps = { type IterationLogTriggerProps = {
nodeInfo: NodeTracing nodeInfo: NodeTracing
allExecutions?: NodeTracing[]
onShowIterationResultList: (iterationResultList: NodeTracing[][], iterationResultDurationMap: IterationDurationMap) => void onShowIterationResultList: (iterationResultList: NodeTracing[][], iterationResultDurationMap: IterationDurationMap) => void
} }
const IterationLogTrigger = ({ const IterationLogTrigger = ({
nodeInfo, nodeInfo,
allExecutions,
onShowIterationResultList, onShowIterationResultList,
}: IterationLogTriggerProps) => { }: IterationLogTriggerProps) => {
const { t } = useTranslation() const { t } = useTranslation()
const filterNodesForInstance = (key: string): NodeTracing[] => {
if (!allExecutions) return []
const parallelNodes = allExecutions.filter(exec =>
exec.execution_metadata?.parallel_mode_run_id === key,
)
if (parallelNodes.length > 0)
return parallelNodes
const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.iteration_id === nodeInfo.node_id
&& exec.execution_metadata?.iteration_index === serialIndex,
)
if (serialNodes.length > 0)
return serialNodes
}
return []
}
const handleOnShowIterationDetail = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation()
e.nativeEvent.stopImmediatePropagation()
const iterationNodeMeta = nodeInfo.execution_metadata
const iterDurationMap = nodeInfo?.iterDurationMap || iterationNodeMeta?.iteration_duration_map || {}
let structuredList: NodeTracing[][] = []
if (iterationNodeMeta?.iteration_duration_map) {
const instanceKeys = Object.keys(iterationNodeMeta.iteration_duration_map)
structuredList = instanceKeys
.map(key => filterNodesForInstance(key))
.filter(branchNodes => branchNodes.length > 0)
}
else if (nodeInfo.details?.length) {
structuredList = nodeInfo.details
}
onShowIterationResultList(structuredList, iterDurationMap)
}
let displayIterationCount = 0
const iterMap = nodeInfo.execution_metadata?.iteration_duration_map
if (iterMap)
displayIterationCount = Object.keys(iterMap).length
else if (nodeInfo.details?.length)
displayIterationCount = nodeInfo.details.length
else if (nodeInfo.metadata?.iterator_length)
displayIterationCount = nodeInfo.metadata.iterator_length
const getErrorCount = (details: NodeTracing[][] | undefined) => { const getErrorCount = (details: NodeTracing[][] | undefined) => {
if (!details || details.length === 0) if (!details || details.length === 0)
return 0 return 0
return details.reduce((acc, iteration) => { return details.reduce((acc, iteration) => {
if (iteration.some(item => item.status === 'failed')) if (iteration.some(item => item.status === 'failed'))
acc++ acc++
return acc return acc
}, 0) }, 0)
} }
const getCount = (iteration_curr_length: number | undefined, iteration_length: number) => { const errorCount = getErrorCount(nodeInfo.details)
if ((iteration_curr_length && iteration_curr_length < iteration_length) || !iteration_length)
return iteration_curr_length
return iteration_length
}
const handleOnShowIterationDetail = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation()
e.nativeEvent.stopImmediatePropagation()
onShowIterationResultList(nodeInfo.details || [], nodeInfo?.iterDurationMap || nodeInfo.execution_metadata?.iteration_duration_map || {})
}
return ( return (
<Button <Button
className='flex w-full cursor-pointer items-center gap-2 self-stretch rounded-lg border-none bg-components-button-tertiary-bg-hover px-3 py-2 hover:bg-components-button-tertiary-bg-hover' className='flex w-full cursor-pointer items-center gap-2 self-stretch rounded-lg border-none bg-components-button-tertiary-bg-hover px-3 py-2 hover:bg-components-button-tertiary-bg-hover'
onClick={handleOnShowIterationDetail} onClick={handleOnShowIterationDetail}
> >
<Iteration className='h-4 w-4 shrink-0 text-components-button-tertiary-text' /> <Iteration className='h-4 w-4 shrink-0 text-components-button-tertiary-text' />
<div className='system-sm-medium flex-1 text-left text-components-button-tertiary-text'>{t('workflow.nodes.iteration.iteration', { count: getCount(nodeInfo.details?.length, nodeInfo.metadata?.iterator_length) })}{getErrorCount(nodeInfo.details) > 0 && ( <div className='system-sm-medium flex-1 text-left text-components-button-tertiary-text'>{t('workflow.nodes.iteration.iteration', { count: displayIterationCount })}{errorCount > 0 && (
<> <>
{t('workflow.nodes.iteration.comma')} {t('workflow.nodes.iteration.comma')}
{t('workflow.nodes.iteration.error', { count: getErrorCount(nodeInfo.details) })} {t('workflow.nodes.iteration.error', { count: errorCount })}
</> </>
)}</div> )}</div>
<RiArrowRightSLine className='h-4 w-4 shrink-0 text-components-button-tertiary-text' /> <RiArrowRightSLine className='h-4 w-4 shrink-0 text-components-button-tertiary-text' />

@ -10,48 +10,95 @@ import { Loop } from '@/app/components/base/icons/src/vender/workflow'
type LoopLogTriggerProps = { type LoopLogTriggerProps = {
nodeInfo: NodeTracing nodeInfo: NodeTracing
allExecutions?: NodeTracing[]
onShowLoopResultList: (loopResultList: NodeTracing[][], loopResultDurationMap: LoopDurationMap, loopVariableMap: LoopVariableMap) => void onShowLoopResultList: (loopResultList: NodeTracing[][], loopResultDurationMap: LoopDurationMap, loopVariableMap: LoopVariableMap) => void
} }
const LoopLogTrigger = ({ const LoopLogTrigger = ({
nodeInfo, nodeInfo,
allExecutions,
onShowLoopResultList, onShowLoopResultList,
}: LoopLogTriggerProps) => { }: LoopLogTriggerProps) => {
const { t } = useTranslation() const { t } = useTranslation()
const filterNodesForInstance = (key: string): NodeTracing[] => {
if (!allExecutions) return []
const parallelNodes = allExecutions.filter(exec =>
exec.execution_metadata?.parallel_mode_run_id === key,
)
if (parallelNodes.length > 0)
return parallelNodes
const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.loop_id === nodeInfo.node_id
&& exec.execution_metadata?.loop_index === serialIndex,
)
if (serialNodes.length > 0)
return serialNodes
}
return []
}
const handleOnShowLoopDetail = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation()
e.nativeEvent.stopImmediatePropagation()
const loopNodeMeta = nodeInfo.execution_metadata
const loopDurMap = nodeInfo?.loopDurationMap || loopNodeMeta?.loop_duration_map || {}
const loopVarMap = loopNodeMeta?.loop_variable_map || {}
let structuredList: NodeTracing[][] = []
if (loopNodeMeta?.loop_duration_map) {
const instanceKeys = Object.keys(loopNodeMeta.loop_duration_map)
structuredList = instanceKeys
.map(key => filterNodesForInstance(key))
.filter(branchNodes => branchNodes.length > 0)
}
else if (nodeInfo.details?.length) {
structuredList = nodeInfo.details
}
onShowLoopResultList(
structuredList,
loopDurMap,
loopVarMap,
)
}
let displayLoopCount = 0
const loopMap = nodeInfo.execution_metadata?.loop_duration_map
if (loopMap)
displayLoopCount = Object.keys(loopMap).length
else if (nodeInfo.details?.length)
displayLoopCount = nodeInfo.details.length
else if (nodeInfo.metadata?.loop_length)
displayLoopCount = nodeInfo.metadata.loop_length
const getErrorCount = (details: NodeTracing[][] | undefined) => { const getErrorCount = (details: NodeTracing[][] | undefined) => {
if (!details || details.length === 0) if (!details || details.length === 0)
return 0 return 0
return details.reduce((acc, loop) => { return details.reduce((acc, loop) => {
if (loop.some(item => item.status === 'failed')) if (loop.some(item => item.status === 'failed'))
acc++ acc++
return acc return acc
}, 0) }, 0)
} }
const getCount = (loop_curr_length: number | undefined, loop_length: number) => { const errorCount = getErrorCount(nodeInfo.details)
if ((loop_curr_length && loop_curr_length < loop_length) || !loop_length)
return loop_curr_length
return loop_length
}
const handleOnShowLoopDetail = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation()
e.nativeEvent.stopImmediatePropagation()
onShowLoopResultList(
nodeInfo.details || [],
nodeInfo?.loopDurationMap || nodeInfo.execution_metadata?.loop_duration_map || {},
nodeInfo.execution_metadata?.loop_variable_map || {},
)
}
return ( return (
<Button <Button
className='flex w-full cursor-pointer items-center gap-2 self-stretch rounded-lg border-none bg-components-button-tertiary-bg-hover px-3 py-2 hover:bg-components-button-tertiary-bg-hover' className='flex w-full cursor-pointer items-center gap-2 self-stretch rounded-lg border-none bg-components-button-tertiary-bg-hover px-3 py-2 hover:bg-components-button-tertiary-bg-hover'
onClick={handleOnShowLoopDetail} onClick={handleOnShowLoopDetail}
> >
<Loop className='h-4 w-4 shrink-0 text-components-button-tertiary-text' /> <Loop className='h-4 w-4 shrink-0 text-components-button-tertiary-text' />
<div className='system-sm-medium flex-1 text-left text-components-button-tertiary-text'>{t('workflow.nodes.loop.loop', { count: getCount(nodeInfo.details?.length, nodeInfo.metadata?.loop_length) })}{getErrorCount(nodeInfo.details) > 0 && ( <div className='system-sm-medium flex-1 text-left text-components-button-tertiary-text'>{t('workflow.nodes.loop.loop', { count: displayLoopCount })}{errorCount > 0 && (
<> <>
{t('workflow.nodes.loop.comma')} {t('workflow.nodes.loop.comma')}
{t('workflow.nodes.loop.error', { count: getErrorCount(nodeInfo.details) })} {t('workflow.nodes.loop.error', { count: errorCount })}
</> </>
)}</div> )}</div>
<RiArrowRightSLine className='h-4 w-4 shrink-0 text-components-button-tertiary-text' /> <RiArrowRightSLine className='h-4 w-4 shrink-0 text-components-button-tertiary-text' />

@ -32,6 +32,7 @@ import { hasRetryNode } from '@/app/components/workflow/utils'
type Props = { type Props = {
className?: string className?: string
nodeInfo: NodeTracing nodeInfo: NodeTracing
allExecutions?: NodeTracing[]
inMessage?: boolean inMessage?: boolean
hideInfo?: boolean hideInfo?: boolean
hideProcessDetail?: boolean hideProcessDetail?: boolean
@ -46,6 +47,7 @@ type Props = {
const NodePanel: FC<Props> = ({ const NodePanel: FC<Props> = ({
className, className,
nodeInfo, nodeInfo,
allExecutions,
inMessage = false, inMessage = false,
hideInfo = false, hideInfo = false,
hideProcessDetail, hideProcessDetail,
@ -157,6 +159,7 @@ const NodePanel: FC<Props> = ({
{isIterationNode && !notShowIterationNav && onShowIterationDetail && ( {isIterationNode && !notShowIterationNav && onShowIterationDetail && (
<IterationLogTrigger <IterationLogTrigger
nodeInfo={nodeInfo} nodeInfo={nodeInfo}
allExecutions={allExecutions}
onShowIterationResultList={onShowIterationDetail} onShowIterationResultList={onShowIterationDetail}
/> />
)} )}
@ -164,6 +167,7 @@ const NodePanel: FC<Props> = ({
{isLoopNode && !notShowLoopNav && onShowLoopDetail && ( {isLoopNode && !notShowLoopNav && onShowLoopDetail && (
<LoopLogTrigger <LoopLogTrigger
nodeInfo={nodeInfo} nodeInfo={nodeInfo}
allExecutions={allExecutions}
onShowLoopResultList={onShowLoopDetail} onShowLoopResultList={onShowLoopDetail}
/> />
)} )}

@ -145,6 +145,7 @@ const TracingPanel: FC<TracingPanelProps> = ({
</div> </div>
<NodePanel <NodePanel
nodeInfo={node!} nodeInfo={node!}
allExecutions={list}
onShowIterationDetail={handleShowIterationResultList} onShowIterationDetail={handleShowIterationResultList}
onShowLoopDetail={handleShowLoopResultList} onShowLoopDetail={handleShowLoopResultList}
onShowRetryDetail={handleShowRetryResultList} onShowRetryDetail={handleShowRetryResultList}

@ -3,7 +3,7 @@ import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import useSWR from 'swr' import useSWR from 'swr'
import { useSearchParams } from 'next/navigation' import { useSearchParams } from 'next/navigation'
import { basePath } from '@/utils/var' import Link from 'next/link'
import cn from 'classnames' import cn from 'classnames'
import { CheckCircleIcon } from '@heroicons/react/24/solid' import { CheckCircleIcon } from '@heroicons/react/24/solid'
import Input from '../components/base/input' import Input from '../components/base/input'
@ -164,7 +164,7 @@ const ChangePasswordForm = () => {
</div> </div>
<div className="mx-auto mt-6 w-full"> <div className="mx-auto mt-6 w-full">
<Button variant='primary' className='w-full'> <Button variant='primary' className='w-full'>
<a href={`${basePath}/signin`}>{t('login.passwordChanged')}</a> <Link href={'/signin'}>{t('login.passwordChanged')}</Link>
</Button> </Button>
</div> </div>
</div> </div>

@ -10,7 +10,7 @@ import { zodResolver } from '@hookform/resolvers/zod'
import Loading from '../components/base/loading' import Loading from '../components/base/loading'
import Input from '../components/base/input' import Input from '../components/base/input'
import Button from '@/app/components/base/button' import Button from '@/app/components/base/button'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import { import {
fetchInitValidateStatus, fetchInitValidateStatus,
@ -71,7 +71,7 @@ const ForgotPasswordForm = () => {
fetchSetupStatus().then(() => { fetchSetupStatus().then(() => {
fetchInitValidateStatus().then((res: InitValidateStatusResponse) => { fetchInitValidateStatus().then((res: InitValidateStatusResponse) => {
if (res.status === 'not_started') if (res.status === 'not_started')
window.location.href = `${basePath}/init` window.location.href = `${WEB_PREFIX}/init`
}) })
setLoading(false) setLoading(false)

@ -5,7 +5,7 @@ import { useRouter } from 'next/navigation'
import Toast from '../components/base/toast' import Toast from '../components/base/toast'
import Loading from '../components/base/loading' import Loading from '../components/base/loading'
import Button from '@/app/components/base/button' import Button from '@/app/components/base/button'
import { basePath } from '@/utils/var' import { WEB_PREFIX } from '@/config'
import { fetchInitValidateStatus, initValidate } from '@/service/common' import { fetchInitValidateStatus, initValidate } from '@/service/common'
import type { InitValidateStatusResponse } from '@/models/common' import type { InitValidateStatusResponse } from '@/models/common'
@ -42,7 +42,7 @@ const InitPasswordPopup = () => {
useEffect(() => { useEffect(() => {
fetchInitValidateStatus().then((res: InitValidateStatusResponse) => { fetchInitValidateStatus().then((res: InitValidateStatusResponse) => {
if (res.status === 'finished') if (res.status === 'finished')
window.location.href = `${basePath}/install` window.location.href = `${WEB_PREFIX}/install`
else else
setLoading(false) setLoading(false)
}) })

@ -16,7 +16,6 @@ import Button from '@/app/components/base/button'
import { fetchInitValidateStatus, fetchSetupStatus, setup } from '@/service/common' import { fetchInitValidateStatus, fetchSetupStatus, setup } from '@/service/common'
import type { InitValidateStatusResponse, SetupStatusResponse } from '@/models/common' import type { InitValidateStatusResponse, SetupStatusResponse } from '@/models/common'
import { basePath } from '@/utils/var'
const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/
@ -81,12 +80,12 @@ const InstallForm = () => {
fetchSetupStatus().then((res: SetupStatusResponse) => { fetchSetupStatus().then((res: SetupStatusResponse) => {
if (res.step === 'finished') { if (res.step === 'finished') {
localStorage.setItem('setup_status', 'finished') localStorage.setItem('setup_status', 'finished')
router.push(`${basePath}/signin`) router.push('/signin')
} }
else { else {
fetchInitValidateStatus().then((res: InitValidateStatusResponse) => { fetchInitValidateStatus().then((res: InitValidateStatusResponse) => {
if (res.status === 'not_started') if (res.status === 'not_started')
router.push(`${basePath}/init`) router.push('/init')
}) })
} }
setLoading(false) setLoading(false)

@ -39,7 +39,9 @@ const LocaleLayout = async ({
<body <body
className="color-scheme h-full select-auto" className="color-scheme h-full select-auto"
data-api-prefix={process.env.NEXT_PUBLIC_API_PREFIX} data-api-prefix={process.env.NEXT_PUBLIC_API_PREFIX}
data-web-prefix={process.env.NEXT_PUBLIC_WEB_PREFIX}
data-pubic-api-prefix={process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX} data-pubic-api-prefix={process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX}
data-pubic-web-prefix={process.env.NEXT_PUBLIC_PUBLIC_WEB_PREFIX}
data-marketplace-api-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX} data-marketplace-api-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX}
data-marketplace-url-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX} data-marketplace-url-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX}
data-public-edition={process.env.NEXT_PUBLIC_EDITION} data-public-edition={process.env.NEXT_PUBLIC_EDITION}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save