Merge branch 'langgenius:main' into main

pull/22646/head
Aurelius Huang 9 months ago committed by GitHub
commit 39f9e867dd

@ -1,6 +1,6 @@
#!/bin/bash
npm add -g pnpm@10.11.1
npm add -g pnpm@10.13.1
cd web && pnpm install
pipx install uv
@ -12,3 +12,4 @@ echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f do
echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
source /home/vscode/.bashrc

@ -28,7 +28,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
api/**
@ -75,7 +75,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**
@ -113,7 +113,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
docker/generate_docker_compose
@ -144,7 +144,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
**.sh
@ -152,13 +152,15 @@ jobs:
**.yml
**Dockerfile
dev/**
.editorconfig
- name: Super-linter
uses: super-linter/super-linter/slim@v7
uses: super-linter/super-linter/slim@v8
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: main
DEFAULT_BRANCH: origin/main
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
FILTER_REGEX_INCLUDE: pnpm-lock.yaml
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
@ -168,16 +170,6 @@ jobs:
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_EDITORCONFIG: true
VALIDATE_XML: true
VALIDATE_YAML: true
- name: EditorConfig checks
uses: super-linter/super-linter/slim@v7
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true
# EditorConfig validation
VALIDATE_EDITORCONFIG: true
EDITORCONFIG_FILE_NAME: editorconfig-checker.json

@ -27,7 +27,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**

@ -144,6 +144,8 @@ CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
# Weaviate configuration
WEAVIATE_ENDPOINT=http://localhost:8080
@ -469,6 +471,16 @@ APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1
# Celery schedule tasks configuration
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
# Position configuration
POSITION_TOOL_PINS=
POSITION_TOOL_INCLUDES=

@ -47,6 +47,8 @@ RUN \
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install fonts to support the use of tools like pypdfium2
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support the use of python-magic guess MIMETYPE

@ -74,7 +74,12 @@
10. If you need to handle and debug async tasks (e.g., dataset importing and document indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin
```
In addition, if you want to debug the Celery scheduled tasks, you can run the following command in another terminal:
```bash
uv run celery -A app.celery beat
```
## Testing

@ -832,6 +832,41 @@ class CeleryBeatConfig(BaseSettings):
)
class CeleryScheduleTasksConfig(BaseSettings):
ENABLE_CLEAN_EMBEDDING_CACHE_TASK: bool = Field(
description="Enable clean embedding cache task",
default=False,
)
ENABLE_CLEAN_UNUSED_DATASETS_TASK: bool = Field(
description="Enable clean unused datasets task",
default=False,
)
ENABLE_CREATE_TIDB_SERVERLESS_TASK: bool = Field(
description="Enable create tidb service job task",
default=False,
)
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: bool = Field(
description="Enable update tidb service job status task",
default=False,
)
ENABLE_CLEAN_MESSAGES: bool = Field(
description="Enable clean messages task",
default=False,
)
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field(
description="Enable mail clean document notify task",
default=False,
)
ENABLE_DATASETS_QUEUE_MONITOR: bool = Field(
description="Enable queue monitor task",
default=False,
)
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: bool = Field(
description="Enable check upgradable plugin task",
default=True,
)
class PositionConfig(BaseSettings):
POSITION_PROVIDER_PINS: str = Field(
description="Comma-separated list of pinned model providers",
@ -961,5 +996,6 @@ class FeatureConfig(
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
CeleryScheduleTasksConfig,
):
pass
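For context on how flags like the new `CeleryScheduleTasksConfig` fields are typically consumed, here is a hypothetical sketch (not part of this diff) of gating a Celery beat entry on one of them; the task path is a placeholder, and `dify_config` is the settings object exposing the fields defined above:
```python
# Hypothetical sketch: conditionally register a beat schedule entry based on
# one of the new CeleryScheduleTasksConfig flags. The task path is a placeholder.
from celery.schedules import crontab

from configs import dify_config

beat_schedule = {}
if dify_config.ENABLE_CLEAN_EMBEDDING_CACHE_TASK:
    beat_schedule["clean_embedding_cache_task"] = {
        "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task",  # placeholder path
        "schedule": crontab(minute="0", hour="2"),
    }
```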

@ -85,6 +85,11 @@ class VectorStoreConfig(BaseSettings):
default=False,
)
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
class KeywordStoreConfig(BaseSettings):
KEYWORD_STORE: str = Field(
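A minimal sketch of how `VECTOR_INDEX_NAME_PREFIX` might feed into a collection name; the exact naming scheme used by each vector store is an assumption here, not taken from this diff:
```python
# Assumed naming scheme, for illustration only: prefix + dataset id, with
# characters normalized for stores that restrict collection-name characters.
from configs import dify_config

def build_collection_name(dataset_id: str) -> str:
    prefix = dify_config.VECTOR_INDEX_NAME_PREFIX or "Vector_index"
    return f"{prefix}_{dataset_id}".replace("-", "_")
```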

@ -1,4 +1,4 @@
from datetime import UTC, datetime
from datetime import datetime
import pytz # pip install pytz
from flask_login import current_user
@ -19,6 +19,7 @@ from fields.conversation_fields import (
conversation_pagination_fields,
conversation_with_summary_pagination_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import login_required
from models import Conversation, EndUser, Message, MessageAnnotation
@ -315,7 +316,7 @@ def _get_conversation(app_model, conversation_id):
raise NotFound("Conversation Not Exists.")
if not conversation.read_at:
conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
conversation.read_at = naive_utc_now()
conversation.read_account_id = current_user.id
db.session.commit()
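The new helper replaces the `datetime.now(UTC).replace(tzinfo=None)` pattern throughout these files; a sketch of an equivalent implementation, inferred from the code it replaces (the actual body of `libs/datetime_utils.py` is not shown in this diff):
```python
# Inferred equivalent of libs.datetime_utils.naive_utc_now.
from datetime import UTC, datetime

def naive_utc_now() -> datetime:
    """Current UTC time as a naive datetime (tzinfo stripped)."""
    return datetime.now(UTC).replace(tzinfo=None)
```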

@ -1,5 +1,3 @@
from datetime import UTC, datetime
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
@ -10,6 +8,7 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Site
@ -77,7 +76,7 @@ class AppSite(Resource):
setattr(site, attr_name, value)
site.updated_by = current_user.id
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
site.updated_at = naive_utc_now()
db.session.commit()
return site
@ -101,7 +100,7 @@ class AppSiteAccessTokenReset(Resource):
site.code = Site.generate_code(16)
site.updated_by = current_user.id
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
site.updated_at = naive_utc_now()
db.session.commit()
return site

@ -1,5 +1,3 @@
import datetime
from flask import request
from flask_restful import Resource, reqparse
@ -7,6 +5,7 @@ from constants.languages import supported_language
from controllers.console import api
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import StrLen, email, extract_remote_ip, timezone
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService
@ -65,7 +64,7 @@ class ActivateApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Optional
import requests
@ -13,6 +12,7 @@ from configs import dify_config
from constants.languages import languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_remote_ip
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from models import Account
@ -110,7 +110,7 @@ class OAuthCallback(Resource):
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
try:

@ -1,4 +1,3 @@
import datetime
import json
from flask import request
@ -15,6 +14,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
@ -88,7 +88,7 @@ class DataSourceApi(Resource):
if action == "enable":
if data_source_binding.disabled:
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:
@ -97,7 +97,7 @@ class DataSourceApi(Resource):
if action == "disable":
if not data_source_binding.disabled:
data_source_binding.disabled = True
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:

@ -1,6 +1,5 @@
import logging
from argparse import ArgumentTypeError
from datetime import UTC, datetime
from typing import cast
from flask import request
@ -49,6 +48,7 @@ from fields.document_fields import (
document_status_fields,
document_with_segments_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
from services.dataset_service import DatasetService, DocumentService
@ -750,7 +750,7 @@ class DocumentProcessingApi(DocumentResource):
raise InvalidActionError("Document not in indexing state.")
document.paused_by = current_user.id
document.paused_at = datetime.now(UTC).replace(tzinfo=None)
document.paused_at = naive_utc_now()
document.is_paused = True
db.session.commit()
@ -830,7 +830,7 @@ class DocumentMetadataApi(DocumentResource):
document.doc_metadata[key] = value
document.doc_type = doc_type
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
document.updated_at = naive_utc_now()
db.session.commit()
return {"result": "success", "message": "Document metadata updated."}, 200

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from flask_login import current_user
from flask_restful import reqparse
@ -27,6 +26,7 @@ from core.errors.error import (
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs import helper
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models.model import AppMode
from services.app_generate_service import AppGenerateService
@ -51,7 +51,7 @@ class CompletionApi(InstalledAppResource):
streaming = args["response_mode"] == "streaming"
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:
@ -111,7 +111,7 @@ class ChatApi(InstalledAppResource):
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Any
from flask import request
@ -13,6 +12,7 @@ from controllers.console.explore.wraps import InstalledAppResource
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from extensions.ext_database import db
from fields.installed_app_fields import installed_app_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import App, InstalledApp, RecommendedApp
from services.account_service import TenantService
@ -122,7 +122,7 @@ class InstalledAppsListApi(Resource):
tenant_id=current_tenant_id,
app_owner_tenant_id=app.tenant_id,
is_pinned=False,
last_used_at=datetime.now(UTC).replace(tzinfo=None),
last_used_at=naive_utc_now(),
)
db.session.add(new_installed_app)
db.session.commit()

@ -1,5 +1,3 @@
import datetime
import pytz
from flask import request
from flask_login import current_user
@ -35,6 +33,7 @@ from controllers.console.wraps import (
)
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
from libs.helper import TimestampField, email, extract_remote_ip, timezone
from libs.login import login_required
from models import AccountIntegrate, InvitationCode
@ -80,7 +79,7 @@ class AccountInitApi(Resource):
raise InvalidInvitationCodeError()
invitation_code.status = "used"
invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
invitation_code.used_at = naive_utc_now()
invitation_code.used_by_tenant_id = account.current_tenant_id
invitation_code.used_by_account_id = account.id
@ -88,7 +87,7 @@ class AccountInitApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = "active"
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
return {"result": "success"}

@ -12,7 +12,8 @@ from controllers.console.wraps import account_initialization_required, setup_req
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.impl.exc import PluginDaemonClientSideError
from libs.login import login_required
from models.account import TenantPluginPermission
from models.account import TenantPluginAutoUpgradeStrategy, TenantPluginPermission
from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService
from services.plugin.plugin_parameter_service import PluginParameterService
from services.plugin.plugin_permission_service import PluginPermissionService
from services.plugin.plugin_service import PluginService
@ -534,6 +535,114 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
return jsonable_encoder({"options": options})
class PluginChangePreferencesApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
user = current_user
if not user.is_admin_or_owner:
raise Forbidden()
req = reqparse.RequestParser()
req.add_argument("permission", type=dict, required=True, location="json")
req.add_argument("auto_upgrade", type=dict, required=True, location="json")
args = req.parse_args()
tenant_id = user.current_tenant_id
permission = args["permission"]
install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone"))
debug_permission = TenantPluginPermission.DebugPermission(permission.get("debug_permission", "everyone"))
auto_upgrade = args["auto_upgrade"]
strategy_setting = TenantPluginAutoUpgradeStrategy.StrategySetting(
auto_upgrade.get("strategy_setting", "fix_only")
)
upgrade_time_of_day = auto_upgrade.get("upgrade_time_of_day", 0)
upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode(auto_upgrade.get("upgrade_mode", "exclude"))
exclude_plugins = auto_upgrade.get("exclude_plugins", [])
include_plugins = auto_upgrade.get("include_plugins", [])
# set permission
set_permission_result = PluginPermissionService.change_permission(
tenant_id,
install_permission,
debug_permission,
)
if not set_permission_result:
return jsonable_encoder({"success": False, "message": "Failed to set permission"})
# set auto upgrade strategy
set_auto_upgrade_strategy_result = PluginAutoUpgradeService.change_strategy(
tenant_id,
strategy_setting,
upgrade_time_of_day,
upgrade_mode,
exclude_plugins,
include_plugins,
)
if not set_auto_upgrade_strategy_result:
return jsonable_encoder({"success": False, "message": "Failed to set auto upgrade strategy"})
return jsonable_encoder({"success": True})
class PluginFetchPreferencesApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
tenant_id = current_user.current_tenant_id
permission = PluginPermissionService.get_permission(tenant_id)
permission_dict = {
"install_permission": TenantPluginPermission.InstallPermission.EVERYONE,
"debug_permission": TenantPluginPermission.DebugPermission.EVERYONE,
}
if permission:
permission_dict["install_permission"] = permission.install_permission
permission_dict["debug_permission"] = permission.debug_permission
auto_upgrade = PluginAutoUpgradeService.get_strategy(tenant_id)
auto_upgrade_dict = {
"strategy_setting": TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED,
"upgrade_time_of_day": 0,
"upgrade_mode": TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
"exclude_plugins": [],
"include_plugins": [],
}
if auto_upgrade:
auto_upgrade_dict = {
"strategy_setting": auto_upgrade.strategy_setting,
"upgrade_time_of_day": auto_upgrade.upgrade_time_of_day,
"upgrade_mode": auto_upgrade.upgrade_mode,
"exclude_plugins": auto_upgrade.exclude_plugins,
"include_plugins": auto_upgrade.include_plugins,
}
return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict})
class PluginAutoUpgradeExcludePluginApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
# exclude one single plugin
tenant_id = current_user.current_tenant_id
req = reqparse.RequestParser()
req.add_argument("plugin_id", type=str, required=True, location="json")
args = req.parse_args()
return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])})
api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key")
api.add_resource(PluginListApi, "/workspaces/current/plugin/list")
api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions")
@ -560,3 +669,7 @@ api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permissi
api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch")
api.add_resource(PluginFetchDynamicSelectOptionsApi, "/workspaces/current/plugin/parameters/dynamic-options")
api.add_resource(PluginFetchPreferencesApi, "/workspaces/current/plugin/preferences/fetch")
api.add_resource(PluginChangePreferencesApi, "/workspaces/current/plugin/preferences/change")
api.add_resource(PluginAutoUpgradeExcludePluginApi, "/workspaces/current/plugin/preferences/autoupgrade/exclude")
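A hypothetical request against the new preferences endpoint, with the body shape inferred from the parsed arguments above; the host, port, and authentication handling are placeholders, not part of this diff:
```python
# Illustrative only: values mirror the defaults used by PluginChangePreferencesApi.
import requests

payload = {
    "permission": {
        "install_permission": "everyone",
        "debug_permission": "everyone",
    },
    "auto_upgrade": {
        "strategy_setting": "fix_only",
        "upgrade_time_of_day": 0,
        "upgrade_mode": "exclude",
        "exclude_plugins": [],
        "include_plugins": [],
    },
}
resp = requests.post(
    "http://localhost:5001/console/api/workspaces/current/plugin/preferences/change",  # host/port assumed
    json=payload,
    headers={"Authorization": "Bearer <console-token>"},  # auth handling assumed
    timeout=10,
)
print(resp.json())  # expected: {"success": true}
```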

@ -29,7 +29,7 @@ from libs.login import login_required
from services.plugin.oauth_service import OAuthProxyService
from services.tools.api_tools_manage_service import ApiToolManageService
from services.tools.builtin_tools_manage_service import BuiltinToolManageService
from services.tools.mcp_tools_mange_service import MCPToolManageService
from services.tools.mcp_tools_manage_service import MCPToolManageService
from services.tools.tool_labels_service import ToolLabelsService
from services.tools.tools_manage_service import ToolCommonService
from services.tools.tools_transform_service import ToolTransformService
@ -739,7 +739,7 @@ class ToolOAuthCallback(Resource):
raise Forbidden("no oauth available client config found for this tool provider")
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback"
credentials = oauth_handler.get_credentials(
credentials_response = oauth_handler.get_credentials(
tenant_id=tenant_id,
user_id=user_id,
plugin_id=plugin_id,
@ -747,7 +747,10 @@ class ToolOAuthCallback(Resource):
redirect_uri=redirect_uri,
system_credentials=oauth_client_params,
request=request,
).credentials
)
credentials = credentials_response.credentials
expires_at = credentials_response.expires_at
if not credentials:
raise Exception("the plugin credentials failed")
@ -758,6 +761,7 @@ class ToolOAuthCallback(Resource):
tenant_id=tenant_id,
provider=provider,
credentials=dict(credentials),
expires_at=expires_at,
api_type=CredentialType.OAUTH2,
)
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")

@ -1,5 +1,6 @@
import logging
from flask import request
from flask_restful import Resource, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
@ -23,6 +24,7 @@ from core.errors.error import (
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
@ -111,6 +113,10 @@ class ChatApi(Resource):
args = parser.parse_args()
external_trace_id = get_external_trace_id(request)
if external_trace_id:
args["external_trace_id"] = external_trace_id
streaming = args["response_mode"] == "streaming"
try:

@ -1,6 +1,7 @@
import logging
from dateutil.parser import isoparse
from flask import request
from flask_restful import Resource, fields, marshal_with, reqparse
from flask_restful.inputs import int_range
from sqlalchemy.orm import Session, sessionmaker
@ -23,6 +24,7 @@ from core.errors.error import (
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from extensions.ext_database import db
@ -90,7 +92,9 @@ class WorkflowRunApi(Resource):
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
args = parser.parse_args()
external_trace_id = get_external_trace_id(request)
if external_trace_id:
args["external_trace_id"] = external_trace_id
streaming = args.get("response_mode") == "streaming"
try:

@ -1,6 +1,6 @@
import time
from collections.abc import Callable
from datetime import UTC, datetime, timedelta
from datetime import timedelta
from enum import Enum
from functools import wraps
from typing import Optional
@ -15,6 +15,7 @@ from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import _get_user
from models.account import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
@ -256,7 +257,7 @@ def validate_and_get_api_token(scope: str | None = None):
if auth_scheme != "bearer":
raise Unauthorized("Authorization scheme must be 'Bearer'")
current_time = datetime.now(UTC).replace(tzinfo=None)
current_time = naive_utc_now()
cutoff_time = current_time - timedelta(minutes=1)
with Session(db.engine, expire_on_commit=False) as session:
update_stmt = (

@ -1,48 +0,0 @@
## Guidelines for Database Connection Management in App Runner and Task Pipeline
Because App Runner contains long-running tasks such as LLM generation and external requests, Flask-SQLAlchemy's connection-pooling strategy of allocating one connection (transaction) per request keeps a connection occupied even while no database work is being done; under high concurrency, multiple long-running tasks can therefore exhaust the pool and block new connections.
Therefore, database operations in App Runner and Task Pipeline must close connections immediately after use, and it is better to pass IDs rather than Model objects to avoid detach errors.
Examples:
1. Creating a new record:
```python
app = App(id=1)
db.session.add(app)
db.session.commit()
db.session.refresh(app) # load table defaults (e.g. created_at); they stay cached on the app object after the session is closed
# Handle non-long-running tasks or store the content of the App instance in memory (via variable assignment).
db.session.close()
return app.id
```
2. Fetching a record from the table:
```python
app = db.session.query(App).filter(App.id == app_id).first()
created_at = app.created_at
db.session.close()
# Handle tasks (including long-running ones).
```
3. Updating a table field:
```python
app = db.session.query(App).filter(App.id == app_id).first()
app.updated_at = datetime.utcnow()
db.session.commit()
db.session.close()
return app_id
```

@ -7,7 +7,8 @@ from typing import Any, Literal, Optional, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
import contexts
from configs import dify_config
@ -23,6 +24,7 @@ from core.app.apps.message_based_app_generator import MessageBasedAppGenerator
from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
from core.app.entities.task_entities import ChatbotAppBlockingResponse, ChatbotAppStreamResponse
from core.helper.trace_id_helper import extract_external_trace_id_from_args
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
@ -112,7 +114,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
query = query.replace("\x00", "")
inputs = args["inputs"]
extras = {"auto_generate_conversation_name": args.get("auto_generate_name", False)}
extras = {
"auto_generate_conversation_name": args.get("auto_generate_name", False),
**extract_external_trace_id_from_args(args),
}
# get conversation
conversation = None
@ -482,21 +487,52 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
"""
with preserve_flask_contexts(flask_app, context_vars=context):
try:
# get conversation and message
conversation = self._get_conversation(conversation_id)
message = self._get_message(message_id)
# chatbot app
runner = AdvancedChatAppRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
conversation=conversation,
message=message,
dialogue_count=self._dialogue_count,
variable_loader=variable_loader,
# get conversation and message
conversation = self._get_conversation(conversation_id)
message = self._get_message(message_id)
with Session(db.engine, expire_on_commit=False) as session:
workflow = session.scalar(
select(Workflow).where(
Workflow.tenant_id == application_generate_entity.app_config.tenant_id,
Workflow.app_id == application_generate_entity.app_config.app_id,
Workflow.id == application_generate_entity.app_config.workflow_id,
)
)
if workflow is None:
raise ValueError("Workflow not found")
# Determine system_user_id based on invocation source
is_external_api_call = application_generate_entity.invoke_from in {
InvokeFrom.WEB_APP,
InvokeFrom.SERVICE_API,
}
if is_external_api_call:
# For external API calls, use end user's session ID
end_user = session.scalar(select(EndUser).where(EndUser.id == application_generate_entity.user_id))
system_user_id = end_user.session_id if end_user else ""
else:
# For internal calls, use the original user ID
system_user_id = application_generate_entity.user_id
app = session.scalar(select(App).where(App.id == application_generate_entity.app_config.app_id))
if app is None:
raise ValueError("App not found")
runner = AdvancedChatAppRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
conversation=conversation,
message=message,
dialogue_count=self._dialogue_count,
variable_loader=variable_loader,
workflow=workflow,
system_user_id=system_user_id,
app=app,
)
try:
runner.run()
except GenerateTaskStoppedError:
pass

@ -1,6 +1,6 @@
import logging
from collections.abc import Mapping
from typing import Any, cast
from typing import Any, Optional, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
@ -9,13 +9,19 @@ from configs import dify_config
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
from core.app.entities.app_invoke_entities import (
AdvancedChatAppGenerateEntity,
AppGenerateEntity,
InvokeFrom,
)
from core.app.entities.queue_entities import (
QueueAnnotationReplyEvent,
QueueStopEvent,
QueueTextChunkEvent,
)
from core.app.features.annotation_reply.annotation_reply import AnnotationReplyFeature
from core.moderation.base import ModerationError
from core.moderation.input_moderation import InputModeration
from core.variables.variables import VariableUnion
from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
from core.workflow.entities.variable_pool import VariablePool
@ -23,8 +29,9 @@ from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models import Workflow
from models.enums import UserFrom
from models.model import App, Conversation, EndUser, Message
from models.model import App, Conversation, Message, MessageAnnotation
from models.workflow import ConversationVariable, WorkflowType
logger = logging.getLogger(__name__)
@ -37,21 +44,29 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
def __init__(
self,
*,
application_generate_entity: AdvancedChatAppGenerateEntity,
queue_manager: AppQueueManager,
conversation: Conversation,
message: Message,
dialogue_count: int,
variable_loader: VariableLoader,
workflow: Workflow,
system_user_id: str,
app: App,
) -> None:
super().__init__(queue_manager, variable_loader)
super().__init__(
queue_manager=queue_manager,
variable_loader=variable_loader,
app_id=application_generate_entity.app_config.app_id,
)
self.application_generate_entity = application_generate_entity
self.conversation = conversation
self.message = message
self._dialogue_count = dialogue_count
def _get_app_id(self) -> str:
return self.application_generate_entity.app_config.app_id
self._workflow = workflow
self.system_user_id = system_user_id
self._app = app
def run(self) -> None:
app_config = self.application_generate_entity.app_config
@ -61,18 +76,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
if not app_record:
raise ValueError("App not found")
workflow = self.get_workflow(app_model=app_record, workflow_id=app_config.workflow_id)
if not workflow:
raise ValueError("Workflow not initialized")
user_id: str | None = None
if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}:
end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first()
if end_user:
user_id = end_user.session_id
else:
user_id = self.application_generate_entity.user_id
workflow_callbacks: list[WorkflowCallback] = []
if dify_config.DEBUG:
workflow_callbacks.append(WorkflowLoggingCallback())
@ -80,14 +83,14 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
if self.application_generate_entity.single_iteration_run:
# if only single iteration run is requested
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=workflow,
workflow=self._workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=dict(self.application_generate_entity.single_iteration_run.inputs),
)
elif self.application_generate_entity.single_loop_run:
# if only single loop run is requested
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=workflow,
workflow=self._workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=dict(self.application_generate_entity.single_loop_run.inputs),
)
@ -98,7 +101,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
# moderation
if self.handle_input_moderation(
app_record=app_record,
app_record=self._app,
app_generate_entity=self.application_generate_entity,
inputs=inputs,
query=query,
@ -108,7 +111,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
# annotation reply
if self.handle_annotation_reply(
app_record=app_record,
app_record=self._app,
message=self.message,
query=query,
app_generate_entity=self.application_generate_entity,
@ -128,7 +131,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
ConversationVariable.from_variable(
app_id=self.conversation.app_id, conversation_id=self.conversation.id, variable=variable
)
for variable in workflow.conversation_variables
for variable in self._workflow.conversation_variables
]
session.add_all(db_conversation_variables)
# Convert database entities to variables.
@ -141,7 +144,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
query=query,
files=files,
conversation_id=self.conversation.id,
user_id=user_id,
user_id=self.system_user_id,
dialogue_count=self._dialogue_count,
app_id=app_config.app_id,
workflow_id=app_config.workflow_id,
@ -152,25 +155,25 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
variable_pool = VariablePool(
system_variables=system_inputs,
user_inputs=inputs,
environment_variables=workflow.environment_variables,
environment_variables=self._workflow.environment_variables,
# Based on the definition of `VariableUnion`,
# `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
conversation_variables=cast(list[VariableUnion], conversation_variables),
)
# init graph
graph = self._init_graph(graph_config=workflow.graph_dict)
graph = self._init_graph(graph_config=self._workflow.graph_dict)
db.session.close()
# RUN WORKFLOW
workflow_entry = WorkflowEntry(
tenant_id=workflow.tenant_id,
app_id=workflow.app_id,
workflow_id=workflow.id,
workflow_type=WorkflowType.value_of(workflow.type),
tenant_id=self._workflow.tenant_id,
app_id=self._workflow.app_id,
workflow_id=self._workflow.id,
workflow_type=WorkflowType.value_of(self._workflow.type),
graph=graph,
graph_config=workflow.graph_dict,
graph_config=self._workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
user_from=(
UserFrom.ACCOUNT
@ -241,3 +244,51 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
self._publish_event(QueueTextChunkEvent(text=text))
self._publish_event(QueueStopEvent(stopped_by=stopped_by))
def query_app_annotations_to_reply(
self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
) -> Optional[MessageAnnotation]:
"""
Query app annotations to reply
:param app_record: app record
:param message: message
:param query: query
:param user_id: user id
:param invoke_from: invoke from
:return:
"""
annotation_reply_feature = AnnotationReplyFeature()
return annotation_reply_feature.query(
app_record=app_record, message=message, query=query, user_id=user_id, invoke_from=invoke_from
)
def moderation_for_inputs(
self,
*,
app_id: str,
tenant_id: str,
app_generate_entity: AppGenerateEntity,
inputs: Mapping[str, Any],
query: str | None = None,
message_id: str,
) -> tuple[bool, Mapping[str, Any], str]:
"""
Process sensitive_word_avoidance.
:param app_id: app id
:param tenant_id: tenant id
:param app_generate_entity: app generate entity
:param inputs: inputs
:param query: query
:param message_id: message id
:return:
"""
moderation_feature = InputModeration()
return moderation_feature.check(
app_id=app_id,
tenant_id=tenant_id,
app_config=app_generate_entity.app_config,
inputs=dict(inputs),
query=query or "",
message_id=message_id,
trace_manager=app_generate_entity.trace_manager,
)

@ -559,6 +559,7 @@ class AdvancedChatAppGenerateTaskPipeline:
outputs=event.outputs,
conversation_id=self._conversation_id,
trace_manager=trace_manager,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
session=session,
@ -590,6 +591,7 @@ class AdvancedChatAppGenerateTaskPipeline:
exceptions_count=event.exceptions_count,
conversation_id=None,
trace_manager=trace_manager,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
session=session,
@ -622,6 +624,7 @@ class AdvancedChatAppGenerateTaskPipeline:
conversation_id=self._conversation_id,
trace_manager=trace_manager,
exceptions_count=event.exceptions_count,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
session=session,
@ -653,6 +656,7 @@ class AdvancedChatAppGenerateTaskPipeline:
error_message=event.get_stop_reason(),
conversation_id=self._conversation_id,
trace_manager=trace_manager,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
session=session,

@ -1,7 +1,6 @@
import json
import logging
from collections.abc import Generator
from datetime import UTC, datetime
from typing import Optional, Union, cast
from core.app.app_config.entities import EasyUIBasedAppConfig, EasyUIBasedAppModelConfigFrom
@ -25,6 +24,7 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBasedGenerateTaskPipeline
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models import Account
from models.enums import CreatorUserRole
from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile
@ -184,7 +184,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
db.session.commit()
db.session.refresh(conversation)
else:
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
message = Message(

@ -7,7 +7,8 @@ from typing import Any, Literal, Optional, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
import contexts
from configs import dify_config
@ -22,6 +23,7 @@ from core.app.apps.workflow.generate_response_converter import WorkflowAppGenera
from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
from core.helper.trace_id_helper import extract_external_trace_id_from_args
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.repositories import DifyCoreRepositoryFactory
@ -123,6 +125,10 @@ class WorkflowAppGenerator(BaseAppGenerator):
)
inputs: Mapping[str, Any] = args["inputs"]
extras = {
**extract_external_trace_id_from_args(args),
}
workflow_run_id = str(uuid.uuid4())
# init application generate entity
application_generate_entity = WorkflowAppGenerateEntity(
@ -142,6 +148,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
call_depth=call_depth,
trace_manager=trace_manager,
workflow_execution_id=workflow_run_id,
extras=extras,
)
contexts.plugin_tool_providers.set({})
@ -439,17 +446,44 @@ class WorkflowAppGenerator(BaseAppGenerator):
"""
with preserve_flask_contexts(flask_app, context_vars=context):
try:
# workflow app
runner = WorkflowAppRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
workflow_thread_pool_id=workflow_thread_pool_id,
variable_loader=variable_loader,
with Session(db.engine, expire_on_commit=False) as session:
workflow = session.scalar(
select(Workflow).where(
Workflow.tenant_id == application_generate_entity.app_config.tenant_id,
Workflow.app_id == application_generate_entity.app_config.app_id,
Workflow.id == application_generate_entity.app_config.workflow_id,
)
)
if workflow is None:
raise ValueError("Workflow not found")
# Determine system_user_id based on invocation source
is_external_api_call = application_generate_entity.invoke_from in {
InvokeFrom.WEB_APP,
InvokeFrom.SERVICE_API,
}
if is_external_api_call:
# For external API calls, use end user's session ID
end_user = session.scalar(select(EndUser).where(EndUser.id == application_generate_entity.user_id))
system_user_id = end_user.session_id if end_user else ""
else:
# For internal calls, use the original user ID
system_user_id = application_generate_entity.user_id
runner = WorkflowAppRunner(
application_generate_entity=application_generate_entity,
queue_manager=queue_manager,
workflow_thread_pool_id=workflow_thread_pool_id,
variable_loader=variable_loader,
workflow=workflow,
system_user_id=system_user_id,
)
try:
runner.run()
except GenerateTaskStoppedError:
except GenerateTaskStoppedError as e:
logger.warning(f"Task stopped: {str(e)}")
pass
except InvokeAuthorizationError:
queue_manager.publish_error(
@ -465,8 +499,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
except Exception as e:
logger.exception("Unknown Error when generating")
queue_manager.publish_error(e, PublishFrom.APPLICATION_MANAGER)
finally:
db.session.close()
def _handle_response(
self,

@ -14,10 +14,8 @@ from core.workflow.entities.variable_pool import VariablePool
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models.enums import UserFrom
from models.model import App, EndUser
from models.workflow import WorkflowType
from models.workflow import Workflow, WorkflowType
logger = logging.getLogger(__name__)
@ -29,22 +27,23 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
def __init__(
self,
*,
application_generate_entity: WorkflowAppGenerateEntity,
queue_manager: AppQueueManager,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
workflow: Workflow,
system_user_id: str,
) -> None:
"""
:param application_generate_entity: application generate entity
:param queue_manager: application queue manager
:param workflow_thread_pool_id: workflow thread pool id
"""
super().__init__(queue_manager, variable_loader)
super().__init__(
queue_manager=queue_manager,
variable_loader=variable_loader,
app_id=application_generate_entity.app_config.app_id,
)
self.application_generate_entity = application_generate_entity
self.workflow_thread_pool_id = workflow_thread_pool_id
def _get_app_id(self) -> str:
return self.application_generate_entity.app_config.app_id
self._workflow = workflow
self._sys_user_id = system_user_id
def run(self) -> None:
"""
@ -53,24 +52,6 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
app_config = self.application_generate_entity.app_config
app_config = cast(WorkflowAppConfig, app_config)
user_id = None
if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}:
end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first()
if end_user:
user_id = end_user.session_id
else:
user_id = self.application_generate_entity.user_id
app_record = db.session.query(App).filter(App.id == app_config.app_id).first()
if not app_record:
raise ValueError("App not found")
workflow = self.get_workflow(app_model=app_record, workflow_id=app_config.workflow_id)
if not workflow:
raise ValueError("Workflow not initialized")
db.session.close()
workflow_callbacks: list[WorkflowCallback] = []
if dify_config.DEBUG:
workflow_callbacks.append(WorkflowLoggingCallback())
@ -79,14 +60,14 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
if self.application_generate_entity.single_iteration_run:
# if only single iteration run is requested
graph, variable_pool = self._get_graph_and_variable_pool_of_single_iteration(
workflow=workflow,
workflow=self._workflow,
node_id=self.application_generate_entity.single_iteration_run.node_id,
user_inputs=self.application_generate_entity.single_iteration_run.inputs,
)
elif self.application_generate_entity.single_loop_run:
# if only single loop run is requested
graph, variable_pool = self._get_graph_and_variable_pool_of_single_loop(
workflow=workflow,
workflow=self._workflow,
node_id=self.application_generate_entity.single_loop_run.node_id,
user_inputs=self.application_generate_entity.single_loop_run.inputs,
)
@ -98,7 +79,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
system_inputs = SystemVariable(
files=files,
user_id=user_id,
user_id=self._sys_user_id,
app_id=app_config.app_id,
workflow_id=app_config.workflow_id,
workflow_execution_id=self.application_generate_entity.workflow_execution_id,
@ -107,21 +88,21 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
variable_pool = VariablePool(
system_variables=system_inputs,
user_inputs=inputs,
environment_variables=workflow.environment_variables,
environment_variables=self._workflow.environment_variables,
conversation_variables=[],
)
# init graph
graph = self._init_graph(graph_config=workflow.graph_dict)
graph = self._init_graph(graph_config=self._workflow.graph_dict)
# RUN WORKFLOW
workflow_entry = WorkflowEntry(
tenant_id=workflow.tenant_id,
app_id=workflow.app_id,
workflow_id=workflow.id,
workflow_type=WorkflowType.value_of(workflow.type),
tenant_id=self._workflow.tenant_id,
app_id=self._workflow.app_id,
workflow_id=self._workflow.id,
workflow_type=WorkflowType.value_of(self._workflow.type),
graph=graph,
graph_config=workflow.graph_dict,
graph_config=self._workflow.graph_dict,
user_id=self.application_generate_entity.user_id,
user_from=(
UserFrom.ACCOUNT

@ -490,6 +490,7 @@ class WorkflowAppGenerateTaskPipeline:
outputs=event.outputs,
conversation_id=None,
trace_manager=trace_manager,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
# save workflow app log
@ -524,6 +525,7 @@ class WorkflowAppGenerateTaskPipeline:
exceptions_count=event.exceptions_count,
conversation_id=None,
trace_manager=trace_manager,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
# save workflow app log
@ -561,6 +563,7 @@ class WorkflowAppGenerateTaskPipeline:
conversation_id=None,
trace_manager=trace_manager,
exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0,
external_trace_id=self._application_generate_entity.extras.get("external_trace_id"),
)
# save workflow app log

@ -1,8 +1,7 @@
from collections.abc import Mapping
from typing import Any, Optional, cast
from typing import Any, cast
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.apps.base_app_runner import AppRunner
from core.app.entities.queue_entities import (
AppQueueEvent,
QueueAgentLogEvent,
@ -65,18 +64,20 @@ from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models.model import App
from models.workflow import Workflow
class WorkflowBasedAppRunner(AppRunner):
def __init__(self, queue_manager: AppQueueManager, variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER) -> None:
self.queue_manager = queue_manager
class WorkflowBasedAppRunner:
def __init__(
self,
*,
queue_manager: AppQueueManager,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
app_id: str,
) -> None:
self._queue_manager = queue_manager
self._variable_loader = variable_loader
def _get_app_id(self) -> str:
raise NotImplementedError("not implemented")
self._app_id = app_id
def _init_graph(self, graph_config: Mapping[str, Any]) -> Graph:
"""
@ -693,21 +694,5 @@ class WorkflowBasedAppRunner(AppRunner):
)
)
def get_workflow(self, app_model: App, workflow_id: str) -> Optional[Workflow]:
"""
Get workflow
"""
# fetch workflow by workflow_id
workflow = (
db.session.query(Workflow)
.filter(
Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.id == workflow_id
)
.first()
)
# return workflow
return workflow
def _publish_event(self, event: AppQueueEvent) -> None:
self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER)
self._queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER)

@ -7,6 +7,7 @@ from core.model_runtime.entities import (
AudioPromptMessageContent,
DocumentPromptMessageContent,
ImagePromptMessageContent,
TextPromptMessageContent,
VideoPromptMessageContent,
)
from core.model_runtime.entities.message_entities import PromptMessageContentUnionTypes
@ -44,11 +45,44 @@ def to_prompt_message_content(
*,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> PromptMessageContentUnionTypes:
"""
Convert a file to prompt message content.
This function converts files to their appropriate prompt message content types.
For supported file types (IMAGE, AUDIO, VIDEO, DOCUMENT), it creates the
corresponding message content with proper encoding/URL.
For unsupported file types, instead of raising an error, it returns a
TextPromptMessageContent with a descriptive message about the file.
Args:
f: The file to convert
image_detail_config: Optional detail configuration for image files
Returns:
PromptMessageContentUnionTypes: The appropriate message content type
Raises:
ValueError: If file extension or mime_type is missing
"""
if f.extension is None:
raise ValueError("Missing file extension")
if f.mime_type is None:
raise ValueError("Missing file mime_type")
prompt_class_map: Mapping[FileType, type[PromptMessageContentUnionTypes]] = {
FileType.IMAGE: ImagePromptMessageContent,
FileType.AUDIO: AudioPromptMessageContent,
FileType.VIDEO: VideoPromptMessageContent,
FileType.DOCUMENT: DocumentPromptMessageContent,
}
# Check if file type is supported
if f.type not in prompt_class_map:
# For unsupported file types, return a text description
return TextPromptMessageContent(data=f"[Unsupported file type: {f.filename} ({f.type.value})]")
# Process supported file types
params = {
"base64_data": _get_encoded_string(f) if dify_config.MULTIMODAL_SEND_FORMAT == "base64" else "",
"url": _to_url(f) if dify_config.MULTIMODAL_SEND_FORMAT == "url" else "",
@ -58,17 +92,7 @@ def to_prompt_message_content(
if f.type == FileType.IMAGE:
params["detail"] = image_detail_config or ImagePromptMessageContent.DETAIL.LOW
prompt_class_map: Mapping[FileType, type[PromptMessageContentUnionTypes]] = {
FileType.IMAGE: ImagePromptMessageContent,
FileType.AUDIO: AudioPromptMessageContent,
FileType.VIDEO: VideoPromptMessageContent,
FileType.DOCUMENT: DocumentPromptMessageContent,
}
try:
return prompt_class_map[f.type].model_validate(params)
except KeyError:
raise ValueError(f"file type {f.type} is not supported")
return prompt_class_map[f.type].model_validate(params)
def download(f: File, /):

@ -25,9 +25,29 @@ def batch_fetch_plugin_manifests(plugin_ids: list[str]) -> Sequence[MarketplaceP
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = requests.post(url, json={"plugin_ids": plugin_ids})
response.raise_for_status()
return [MarketplacePluginDeclaration(**plugin) for plugin in response.json()["data"]["plugins"]]
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = requests.post(url, json={"plugin_ids": plugin_ids})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration(**plugin))
        except Exception:
            # Skip plugins whose manifest fails to deserialize.
            pass
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = requests.post(url, json={"unique_identifier": plugin_unique_identifier})

@ -0,0 +1,42 @@
import re
from collections.abc import Mapping
from typing import Any, Optional
def is_valid_trace_id(trace_id: str) -> bool:
"""
Check if the trace_id is valid.
Requirements: 1-128 characters, only letters, numbers, '-', and '_'.
"""
return bool(re.match(r"^[a-zA-Z0-9\-_]{1,128}$", trace_id))
def get_external_trace_id(request: Any) -> Optional[str]:
"""
Retrieve the trace_id from the request.
Priority: header ('X-Trace-Id'), then parameters, then JSON body. Returns None if not provided or invalid.
"""
trace_id = request.headers.get("X-Trace-Id")
if not trace_id:
trace_id = request.args.get("trace_id")
if not trace_id and getattr(request, "is_json", False):
json_data = getattr(request, "json", None)
if json_data:
trace_id = json_data.get("trace_id")
if isinstance(trace_id, str) and is_valid_trace_id(trace_id):
return trace_id
return None
def extract_external_trace_id_from_args(args: Mapping[str, Any]) -> dict:
"""
Extract 'external_trace_id' from args.
Returns a dict suitable for use in extras. Returns an empty dict if not found.
"""
trace_id = args.get("external_trace_id")
if trace_id:
return {"external_trace_id": trace_id}
return {}
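A sketch of supplying an external trace id from a client, using the `X-Trace-Id` header checked above; the service-API route, API key, and request body are assumptions for illustration:
```python
# Illustrative client call; only the X-Trace-Id header semantics come from this diff.
import requests

resp = requests.post(
    "http://localhost:5001/v1/workflows/run",  # assumed service API route
    headers={
        "Authorization": "Bearer <service-api-key>",  # placeholder
        "X-Trace-Id": "my-trace-001",  # 1-128 chars of letters, digits, '-', '_'
    },
    json={"inputs": {}, "response_mode": "blocking", "user": "user-1"},  # assumed body
    timeout=60,
)
print(resp.status_code)
```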

@ -672,8 +672,7 @@ class IndexingRunner:
if extra_update_params:
update_params.update(extra_update_params)
db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params)
db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params) # type: ignore
db.session.commit()
@staticmethod

@ -114,7 +114,8 @@ class LLMGenerator:
),
)
questions = output_parser.parse(cast(str, response.message.content))
text_content = response.message.get_text_content()
questions = output_parser.parse(text_content) if text_content else []
except InvokeError:
questions = []
except Exception:

@ -15,5 +15,4 @@ class SuggestedQuestionsAfterAnswerOutputParser:
json_obj = json.loads(action_match.group(0).strip())
else:
json_obj = []
return json_obj

@ -8,7 +8,7 @@ from core.mcp.types import (
OAuthTokens,
)
from models.tools import MCPToolProvider
from services.tools.mcp_tools_mange_service import MCPToolManageService
from services.tools.mcp_tools_manage_service import MCPToolManageService
LATEST_PROTOCOL_VERSION = "1.0"

@ -68,15 +68,17 @@ class MCPClient:
}
parsed_url = urlparse(self.server_url)
path = parsed_url.path
path = parsed_url.path or ""
method_name = path.rstrip("/").split("/")[-1] if path else ""
try:
if method_name in connection_methods:
client_factory = connection_methods[method_name]
self.connect_server(client_factory, method_name)
except KeyError:
else:
try:
logger.debug(f"Not supported method {method_name} found in URL path, trying default 'mcp' method.")
self.connect_server(sse_client, "sse")
except MCPConnectionError:
logger.debug("MCP connection failed with 'sse', falling back to 'mcp' method.")
self.connect_server(streamablehttp_client, "mcp")
def connect_server(
@ -91,7 +93,7 @@ class MCPClient:
else {}
)
self._streams_context = client_factory(url=self.server_url, headers=headers)
if self._streams_context is None:
if not self._streams_context:
raise MCPConnectionError("Failed to create connection context")
# Use exit_stack to manage context managers properly
@ -141,10 +143,11 @@ class MCPClient:
try:
# ExitStack will handle proper cleanup of all managed context managers
self.exit_stack.close()
except Exception as e:
logging.exception("Error during cleanup")
raise ValueError(f"Error during cleanup: {e}")
finally:
self._session = None
self._session_context = None
self._streams_context = None
self._initialized = False
except Exception as e:
logging.exception("Error during cleanup")
raise ValueError(f"Error during cleanup: {e}")

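As a side note, the connection-method choice in this client keys off the last path segment of the server URL; a standard-library-only sketch of that extraction (URLs are illustrative):

from urllib.parse import urlparse

def method_name_from_url(server_url: str) -> str:
    path = urlparse(server_url).path or ""
    return path.rstrip("/").split("/")[-1] if path else ""

print(method_name_from_url("https://example.com/servers/demo/sse"))  # sse
print(method_name_from_url("https://example.com/servers/demo/mcp"))  # mcp
print(method_name_from_url("https://example.com/"))                  # empty: the client then tries 'sse', falling back to 'mcp'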
@ -156,6 +156,23 @@ class PromptMessage(ABC, BaseModel):
"""
return not self.content
def get_text_content(self) -> str:
"""
Get text content from prompt message.
:return: Text content as string, empty string if no text content
"""
if isinstance(self.content, str):
return self.content
elif isinstance(self.content, list):
text_parts = []
for item in self.content:
if isinstance(item, TextPromptMessageContent):
text_parts.append(item.data)
return "".join(text_parts)
else:
return ""
@field_validator("content", mode="before")
@classmethod
def validate_content(cls, v):

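A minimal standalone sketch of the concatenation behaviour get_text_content implements; the classes below are simplified stand-ins for the real PromptMessage content types, not the actual models:

from dataclasses import dataclass
from typing import Union

@dataclass
class TextPart:   # stand-in for TextPromptMessageContent
    data: str

@dataclass
class ImagePart:  # stand-in for any non-text content item
    url: str

def get_text_content(content: Union[str, list, None]) -> str:
    if isinstance(content, str):
        return content
    elif isinstance(content, list):
        return "".join(item.data for item in content if isinstance(item, TextPart))
    return ""

print(get_text_content("plain string"))                                      # plain string
print(get_text_content([TextPart("a"), ImagePart("x.png"), TextPart("b")]))  # ab
print(get_text_content(None))                                                # empty string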
@ -101,7 +101,8 @@ class AliyunDataTrace(BaseTraceInstance):
raise ValueError(f"Aliyun get run url failed: {str(e)}")
def workflow_trace(self, trace_info: WorkflowTraceInfo):
trace_id = convert_to_trace_id(trace_info.workflow_run_id)
external_trace_id = trace_info.metadata.get("external_trace_id")
trace_id = external_trace_id or convert_to_trace_id(trace_info.workflow_run_id)
workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow")
self.add_workflow_span(trace_id, workflow_span_id, trace_info)

@ -153,7 +153,8 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
}
workflow_metadata.update(trace_info.metadata)
trace_id = uuid_to_trace_id(trace_info.workflow_run_id)
external_trace_id = trace_info.metadata.get("external_trace_id")
trace_id = external_trace_id or uuid_to_trace_id(trace_info.workflow_run_id)
span_id = RandomIdGenerator().generate_span_id()
context = SpanContext(
trace_id=trace_id,

@ -67,13 +67,14 @@ class LangFuseDataTrace(BaseTraceInstance):
self.generate_name_trace(trace_info)
def workflow_trace(self, trace_info: WorkflowTraceInfo):
trace_id = trace_info.workflow_run_id
external_trace_id = trace_info.metadata.get("external_trace_id")
trace_id = external_trace_id or trace_info.workflow_run_id
user_id = trace_info.metadata.get("user_id")
metadata = trace_info.metadata
metadata["workflow_app_log_id"] = trace_info.workflow_app_log_id
if trace_info.message_id:
trace_id = trace_info.message_id
trace_id = external_trace_id or trace_info.message_id
name = TraceTaskName.MESSAGE_TRACE.value
trace_data = LangfuseTrace(
id=trace_id,

@ -65,7 +65,8 @@ class LangSmithDataTrace(BaseTraceInstance):
self.generate_name_trace(trace_info)
def workflow_trace(self, trace_info: WorkflowTraceInfo):
trace_id = trace_info.message_id or trace_info.workflow_run_id
external_trace_id = trace_info.metadata.get("external_trace_id")
trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id
if trace_info.start_time is None:
trace_info.start_time = datetime.now()
message_dotted_order = (

@ -96,7 +96,8 @@ class OpikDataTrace(BaseTraceInstance):
self.generate_name_trace(trace_info)
def workflow_trace(self, trace_info: WorkflowTraceInfo):
dify_trace_id = trace_info.workflow_run_id
external_trace_id = trace_info.metadata.get("external_trace_id")
dify_trace_id = external_trace_id or trace_info.workflow_run_id
opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id)
workflow_metadata = wrap_metadata(
trace_info.metadata, message_id=trace_info.message_id, workflow_app_log_id=trace_info.workflow_app_log_id
@ -104,7 +105,7 @@ class OpikDataTrace(BaseTraceInstance):
root_span_id = None
if trace_info.message_id:
dify_trace_id = trace_info.message_id
dify_trace_id = external_trace_id or trace_info.message_id
opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id)
trace_data = {

@ -520,6 +520,10 @@ class TraceTask:
"app_id": workflow_run.app_id,
}
external_trace_id = self.kwargs.get("external_trace_id")
if external_trace_id:
metadata["external_trace_id"] = external_trace_id
workflow_trace_info = WorkflowTraceInfo(
workflow_data=workflow_run.to_dict(),
conversation_id=conversation_id,

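Across the tracing providers touched here the pattern is the same: an externally supplied trace id in the trace metadata takes precedence over the id derived from the workflow run. A reduced sketch of that precedence (convert_to_trace_id is stubbed; the real implementations are provider specific):

import uuid

def convert_to_trace_id(workflow_run_id: str) -> str:
    # stub: providers derive their own id format from the run id
    return uuid.uuid5(uuid.NAMESPACE_URL, workflow_run_id).hex

metadata = {"external_trace_id": "client-supplied-trace-0001"}
workflow_run_id = "8f14e45f-ceea-467f-9b5a-000000000000"

external_trace_id = metadata.get("external_trace_id")
trace_id = external_trace_id or convert_to_trace_id(workflow_run_id)
print(trace_id)  # client-supplied-trace-0001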
@ -87,7 +87,8 @@ class WeaveDataTrace(BaseTraceInstance):
self.generate_name_trace(trace_info)
def workflow_trace(self, trace_info: WorkflowTraceInfo):
trace_id = trace_info.message_id or trace_info.workflow_run_id
external_trace_id = trace_info.metadata.get("external_trace_id")
trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id
if trace_info.start_time is None:
trace_info.start_time = datetime.now()

@ -32,6 +32,13 @@ class MarketplacePluginDeclaration(BaseModel):
latest_package_identifier: str = Field(
..., description="Unique identifier for the latest package release of the plugin"
)
status: str = Field(..., description="Status of the marketplace plugin; one of `active` or `deleted`")
deprecated_reason: str = Field(
..., description="Non-empty when status='deleted'; the reason why this plugin was deleted (deprecated)"
)
alternative_plugin_id: str = Field(
..., description="Optional; the alternative plugin the user can switch to"
)
@model_validator(mode="before")
@classmethod

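A hedged sketch of how a caller might react to the new status fields; the dataclass and values below are illustrative stand-ins, not the real declaration model:

from dataclasses import dataclass
from typing import Optional

@dataclass
class PluginStatus:  # simplified stand-in for the fields added above
    status: str
    deprecated_reason: str
    alternative_plugin_id: str

def pick_installable(plugin_id: str, decl: PluginStatus) -> Optional[str]:
    if decl.status == "deleted":
        # the plugin was deprecated; surface the reason and prefer the suggested replacement
        print(f"{plugin_id} is deprecated: {decl.deprecated_reason or 'no reason given'}")
        return decl.alternative_plugin_id or None
    return plugin_id

print(pick_installable("org/old-plugin", PluginStatus("deleted", "superseded", "org/new-plugin")))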
@ -182,6 +182,10 @@ class PluginOAuthAuthorizationUrlResponse(BaseModel):
class PluginOAuthCredentialsResponse(BaseModel):
metadata: Mapping[str, Any] = Field(
default_factory=dict, description="The metadata of the OAuth, like avatar url, name, etc."
)
expires_at: int = Field(default=-1, description="The expires at time of the credentials. UTC timestamp.")
credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.")

@ -84,6 +84,41 @@ class OAuthHandler(BasePluginClient):
except Exception as e:
raise ValueError(f"Error getting credentials: {e}")
def refresh_credentials(
self,
tenant_id: str,
user_id: str,
plugin_id: str,
provider: str,
redirect_uri: str,
system_credentials: Mapping[str, Any],
credentials: Mapping[str, Any],
) -> PluginOAuthCredentialsResponse:
try:
response = self._request_with_plugin_daemon_response_stream(
"POST",
f"plugin/{tenant_id}/dispatch/oauth/refresh_credentials",
PluginOAuthCredentialsResponse,
data={
"user_id": user_id,
"data": {
"provider": provider,
"redirect_uri": redirect_uri,
"system_credentials": system_credentials,
"credentials": credentials,
},
},
headers={
"X-Plugin-ID": plugin_id,
"Content-Type": "application/json",
},
)
for resp in response:
return resp
raise ValueError("No response received from plugin daemon for refresh credentials request.")
except Exception as e:
raise ValueError(f"Error refreshing credentials: {e}")
def _convert_request_to_raw_data(self, request: Request) -> bytes:
"""
Convert a Request object to raw HTTP data.

@ -233,6 +233,12 @@ class AnalyticdbVectorOpenAPI:
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
document_ids_filter = kwargs.get("document_ids_filter")
where_clause = ""
if document_ids_filter:
document_ids = ", ".join(f"'{id}'" for id in document_ids_filter)
where_clause += f"metadata_->>'document_id' IN ({document_ids})"
score_threshold = kwargs.get("score_threshold") or 0.0
request = gpdb_20160503_models.QueryCollectionDataRequest(
dbinstance_id=self.config.instance_id,
@ -245,7 +251,7 @@ class AnalyticdbVectorOpenAPI:
vector=query_vector,
content=None,
top_k=kwargs.get("top_k", 4),
filter=None,
filter=where_clause,
)
response = self._client.query_collection_data(request)
documents = []
@ -265,6 +271,11 @@ class AnalyticdbVectorOpenAPI:
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
document_ids_filter = kwargs.get("document_ids_filter")
where_clause = ""
if document_ids_filter:
document_ids = ", ".join(f"'{id}'" for id in document_ids_filter)
where_clause += f"metadata_->>'document_id' IN ({document_ids})"
score_threshold = float(kwargs.get("score_threshold") or 0.0)
request = gpdb_20160503_models.QueryCollectionDataRequest(
dbinstance_id=self.config.instance_id,
@ -277,7 +288,7 @@ class AnalyticdbVectorOpenAPI:
vector=None,
content=query,
top_k=kwargs.get("top_k", 4),
filter=None,
filter=where_clause,
)
response = self._client.query_collection_data(request)
documents = []

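The filter construction above turns a document-id list into a SQL-style predicate over the JSON metadata column; run in isolation it produces (ids are illustrative):

document_ids_filter = ["doc-111", "doc-222"]

where_clause = ""
if document_ids_filter:
    document_ids = ", ".join(f"'{id}'" for id in document_ids_filter)
    where_clause += f"metadata_->>'document_id' IN ({document_ids})"

print(where_clause)  # metadata_->>'document_id' IN ('doc-111', 'doc-222')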
@ -147,10 +147,17 @@ class ElasticSearchVector(BaseVector):
return docs
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
query_str = {"match": {Field.CONTENT_KEY.value: query}}
query_str: dict[str, Any] = {"match": {Field.CONTENT_KEY.value: query}}
document_ids_filter = kwargs.get("document_ids_filter")
if document_ids_filter:
query_str["filter"] = {"terms": {"metadata.document_id": document_ids_filter}} # type: ignore
query_str = {
"bool": {
"must": {"match": {Field.CONTENT_KEY.value: query}},
"filter": {"terms": {"metadata.document_id": document_ids_filter}},
}
}
results = self._client.search(index=self._collection_name, query=query_str, size=kwargs.get("top_k", 4))
docs = []
for hit in results["hits"]["hits"]:

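For reference, the full-text query body built above when a document-id filter is present has this shape; Field.CONTENT_KEY is assumed to resolve to "page_content", and the values are illustrative:

query = "refund policy"
document_ids_filter = ["doc-111", "doc-222"]

query_str = {
    "bool": {
        "must": {"match": {"page_content": query}},
        "filter": {"terms": {"metadata.document_id": document_ids_filter}},
    }
}
print(query_str)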
@ -6,7 +6,7 @@ from uuid import UUID, uuid4
from numpy import ndarray
from pgvecto_rs.sqlalchemy import VECTOR # type: ignore
from pydantic import BaseModel, model_validator
from sqlalchemy import Float, String, create_engine, insert, select, text
from sqlalchemy import Float, create_engine, insert, select, text
from sqlalchemy import text as sql_text
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import Mapped, Session, mapped_column
@ -67,7 +67,7 @@ class PGVectoRS(BaseVector):
postgresql.UUID(as_uuid=True),
primary_key=True,
)
text: Mapped[str] = mapped_column(String)
text: Mapped[str]
meta: Mapped[dict] = mapped_column(postgresql.JSONB)
vector: Mapped[ndarray] = mapped_column(VECTOR(dim))

@ -118,10 +118,21 @@ class TableStoreVector(BaseVector):
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
top_k = kwargs.get("top_k", 4)
return self._search_by_vector(query_vector, top_k)
document_ids_filter = kwargs.get("document_ids_filter")
filtered_list = None
if document_ids_filter:
filtered_list = ["document_id=" + item for item in document_ids_filter]
score_threshold = float(kwargs.get("score_threshold") or 0.0)
return self._search_by_vector(query_vector, filtered_list, top_k, score_threshold)
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
return self._search_by_full_text(query)
top_k = kwargs.get("top_k", 4)
document_ids_filter = kwargs.get("document_ids_filter")
filtered_list = None
if document_ids_filter:
filtered_list = ["document_id=" + item for item in document_ids_filter]
return self._search_by_full_text(query, filtered_list, top_k)
def delete(self) -> None:
self._delete_table_if_exist()
@ -230,32 +241,51 @@ class TableStoreVector(BaseVector):
primary_key = [("id", id)]
row = tablestore.Row(primary_key)
self._tablestore_client.delete_row(self._table_name, row, None)
logging.info("Tablestore delete row successfully. id:%s", id)
def _search_by_metadata(self, key: str, value: str) -> list[str]:
query = tablestore.SearchQuery(
tablestore.TermQuery(self._tags_field, str(key) + "=" + str(value)),
limit=100,
limit=1000,
get_total_count=False,
)
rows: list[str] = []
next_token = None
while True:
if next_token is not None:
query.next_token = next_token
search_response = self._tablestore_client.search(
table_name=self._table_name,
index_name=self._index_name,
search_query=query,
columns_to_get=tablestore.ColumnsToGet(
column_names=[Field.PRIMARY_KEY.value], return_type=tablestore.ColumnReturnType.SPECIFIED
),
)
search_response = self._tablestore_client.search(
table_name=self._table_name,
index_name=self._index_name,
search_query=query,
columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX),
)
if search_response is not None:
rows.extend([row[0][0][1] for row in search_response.rows])
return [row[0][0][1] for row in search_response.rows]
if search_response is None or search_response.next_token == b"":
break
else:
next_token = search_response.next_token
def _search_by_vector(self, query_vector: list[float], top_k: int) -> list[Document]:
ots_query = tablestore.KnnVectorQuery(
return rows
def _search_by_vector(
self, query_vector: list[float], document_ids_filter: list[str] | None, top_k: int, score_threshold: float
) -> list[Document]:
knn_vector_query = tablestore.KnnVectorQuery(
field_name=Field.VECTOR.value,
top_k=top_k,
float32_query_vector=query_vector,
)
if document_ids_filter:
knn_vector_query.filter = tablestore.TermsQuery(self._tags_field, document_ids_filter)
sort = tablestore.Sort(sorters=[tablestore.ScoreSort(sort_order=tablestore.SortOrder.DESC)])
search_query = tablestore.SearchQuery(ots_query, limit=top_k, get_total_count=False, sort=sort)
search_query = tablestore.SearchQuery(knn_vector_query, limit=top_k, get_total_count=False, sort=sort)
search_response = self._tablestore_client.search(
table_name=self._table_name,
@ -263,30 +293,32 @@ class TableStoreVector(BaseVector):
search_query=search_query,
columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX),
)
logging.info(
"Tablestore search successfully. request_id:%s",
search_response.request_id,
)
return self._to_query_result(search_response)
def _to_query_result(self, search_response: tablestore.SearchResponse) -> list[Document]:
documents = []
for row in search_response.rows:
documents.append(
Document(
page_content=row[1][2][1],
vector=json.loads(row[1][3][1]),
metadata=json.loads(row[1][0][1]),
for search_hit in search_response.search_hits:
if search_hit.score > score_threshold:
metadata = json.loads(search_hit.row[1][0][1])
metadata["score"] = search_hit.score
documents.append(
Document(
page_content=search_hit.row[1][2][1],
vector=json.loads(search_hit.row[1][3][1]),
metadata=metadata,
)
)
)
documents = sorted(documents, key=lambda x: x.metadata["score"] if x.metadata else 0, reverse=True)
return documents
def _search_by_full_text(self, query: str) -> list[Document]:
def _search_by_full_text(self, query: str, document_ids_filter: list[str] | None, top_k: int) -> list[Document]:
bool_query = tablestore.BoolQuery()
bool_query.must_queries.append(tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value))
if document_ids_filter:
bool_query.filter_queries.append(tablestore.TermsQuery(self._tags_field, document_ids_filter))
search_query = tablestore.SearchQuery(
query=tablestore.MatchQuery(text=query, field_name=Field.CONTENT_KEY.value),
query=bool_query,
sort=tablestore.Sort(sorters=[tablestore.ScoreSort(sort_order=tablestore.SortOrder.DESC)]),
limit=100,
limit=top_k,
)
search_response = self._tablestore_client.search(
table_name=self._table_name,
@ -295,7 +327,16 @@ class TableStoreVector(BaseVector):
columns_to_get=tablestore.ColumnsToGet(return_type=tablestore.ColumnReturnType.ALL_FROM_INDEX),
)
return self._to_query_result(search_response)
documents = []
for search_hit in search_response.search_hits:
documents.append(
Document(
page_content=search_hit.row[1][2][1],
vector=json.loads(search_hit.row[1][3][1]),
metadata=json.loads(search_hit.row[1][0][1]),
)
)
return documents
class TableStoreVectorFactory(AbstractVectorFactory):

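A reduced sketch of the two plain-Python pieces the TableStore change adds: the tag-style document filter and the post-search score cut-off (the tablestore client objects themselves are left out):

document_ids_filter = ["doc-111", "doc-222"]
score_threshold = 0.35

# filter values are stored as "key=value" tags, so the terms filter is built the same way
filtered_list = ["document_id=" + item for item in document_ids_filter]
print(filtered_list)  # ['document_id=doc-111', 'document_id=doc-222']

# hits at or below the threshold are dropped, the rest sorted by score descending
hits = [{"score": 0.9, "id": "a"}, {"score": 0.2, "id": "b"}, {"score": 0.5, "id": "c"}]
kept = sorted((h for h in hits if h["score"] > score_threshold), key=lambda h: h["score"], reverse=True)
print([h["id"] for h in kept])  # ['a', 'c']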
@ -206,9 +206,19 @@ class TencentVector(BaseVector):
def delete_by_ids(self, ids: list[str]) -> None:
if not ids:
return
self._client.delete(
database_name=self._client_config.database, collection_name=self.collection_name, document_ids=ids
)
total_count = len(ids)
batch_size = self._client_config.max_upsert_batch_size
batch = math.ceil(total_count / batch_size)
for j in range(batch):
start_idx = j * batch_size
end_idx = min(total_count, (j + 1) * batch_size)
batch_ids = ids[start_idx:end_idx]
self._client.delete(
database_name=self._client_config.database, collection_name=self.collection_name, document_ids=batch_ids
)
def delete_by_metadata_field(self, key: str, value: str) -> None:
self._client.delete(
@ -274,7 +284,8 @@ class TencentVector(BaseVector):
# Compatible with version 1.1.3 and below.
meta = json.loads(meta)
score = 1 - result.get("score", 0.0)
score = result.get("score", 0.0)
else:
score = result.get("score", 0.0)
if score > score_threshold:
meta["score"] = score
doc = Document(page_content=result.get(self.field_text), metadata=meta)

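The delete-by-ids change above splits the id list into chunks of max_upsert_batch_size before calling the client; the batch arithmetic on its own (batch size is illustrative):

import math

ids = [f"id-{i}" for i in range(7)]
batch_size = 3  # stand-in for self._client_config.max_upsert_batch_size

for j in range(math.ceil(len(ids) / batch_size)):
    batch_ids = ids[j * batch_size : min(len(ids), (j + 1) * batch_size)]
    print(batch_ids)
# ['id-0', 'id-1', 'id-2'], then ['id-3', 'id-4', 'id-5'], then ['id-6']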
@ -334,9 +334,10 @@ class NotionExtractor(BaseExtractor):
last_edited_time = self.get_notion_last_edited_time()
data_source_info = document_model.data_source_info_dict
data_source_info["last_edited_time"] = last_edited_time
update_params = {DocumentModel.data_source_info: json.dumps(data_source_info)}
db.session.query(DocumentModel).filter_by(id=document_model.id).update(update_params)
db.session.query(DocumentModel).filter_by(id=document_model.id).update(
{DocumentModel.data_source_info: json.dumps(data_source_info)}
) # type: ignore
db.session.commit()
def get_notion_last_edited_time(self) -> str:

@ -1,16 +1,19 @@
import json
import logging
import mimetypes
from collections.abc import Generator
import time
from collections.abc import Generator, Mapping
from os import listdir, path
from threading import Lock
from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast
from pydantic import TypeAdapter
from yarl import URL
import contexts
from core.helper.provider_cache import ToolProviderCredentialsCache
from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.oauth import OAuthHandler
from core.plugin.impl.tool import PluginToolManager
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
@ -21,7 +24,7 @@ from core.tools.plugin_tool.tool import PluginTool
from core.tools.utils.uuid_utils import is_valid_uuid
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.workflow.entities.variable_pool import VariablePool
from services.tools.mcp_tools_mange_service import MCPToolManageService
from services.tools.mcp_tools_manage_service import MCPToolManageService
if TYPE_CHECKING:
from core.workflow.nodes.tool.entities import ToolEntity
@ -244,12 +247,47 @@ class ToolManager:
tenant_id=tenant_id, provider=provider_id, credential_id=builtin_provider.id
),
)
# decrypt the credentials
decrypted_credentials: Mapping[str, Any] = encrypter.decrypt(builtin_provider.credentials)
# check if the credentials are expired
if builtin_provider.expires_at != -1 and (builtin_provider.expires_at - 60) < int(time.time()):
# TODO: resolve the circular import; imported locally here as a workaround
from services.tools.builtin_tools_manage_service import BuiltinToolManageService
# refresh the credentials
tool_provider = ToolProviderID(provider_id)
provider_name = tool_provider.provider_name
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/tool/callback"
system_credentials = BuiltinToolManageService.get_oauth_client(tenant_id, provider_id)
oauth_handler = OAuthHandler()
# refresh the credentials
refreshed_credentials = oauth_handler.refresh_credentials(
tenant_id=tenant_id,
user_id=builtin_provider.user_id,
plugin_id=tool_provider.plugin_id,
provider=provider_name,
redirect_uri=redirect_uri,
system_credentials=system_credentials or {},
credentials=decrypted_credentials,
)
# update the credentials
builtin_provider.encrypted_credentials = (
TypeAdapter(dict[str, Any])
.dump_json(encrypter.encrypt(dict(refreshed_credentials.credentials)))
.decode("utf-8")
)
builtin_provider.expires_at = refreshed_credentials.expires_at
db.session.commit()
decrypted_credentials = refreshed_credentials.credentials
return cast(
BuiltinTool,
builtin_tool.fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
credentials=encrypter.decrypt(builtin_provider.credentials),
credentials=dict(decrypted_credentials),
credential_type=CredentialType.of(builtin_provider.credential_type),
runtime_parameters={},
invoke_from=invoke_from,

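The refresh trigger above reduces to a timestamp comparison with a 60-second safety margin, where -1 marks credentials that never expire; in isolation:

import time

def needs_refresh(expires_at: int, leeway: int = 60) -> bool:
    # expires_at is a UTC epoch timestamp; -1 means the credentials never expire
    return expires_at != -1 and (expires_at - leeway) < int(time.time())

print(needs_refresh(-1))                      # False: never expires
print(needs_refresh(int(time.time()) + 30))   # True: inside the 60s leeway window
print(needs_refresh(int(time.time()) + 600))  # False: still comfortably valid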
@ -1,5 +1,5 @@
from abc import abstractmethod
from typing import Any, Optional
from typing import Optional
from msal_extensions.persistence import ABC # type: ignore
from pydantic import BaseModel, ConfigDict
@ -21,11 +21,7 @@ class DatasetRetrieverBaseTool(BaseModel, ABC):
model_config = ConfigDict(arbitrary_types_allowed=True)
@abstractmethod
def _run(
self,
*args: Any,
**kwargs: Any,
) -> Any:
def _run(self, query: str) -> str:
"""Use the tool.
Add run_manager: Optional[CallbackManagerForToolRun] = None

@ -270,7 +270,14 @@ class AgentNode(BaseNode):
)
extra = tool.get("extra", {})
runtime_variable_pool = variable_pool if self._node_data.version != "1" else None
# This check caused problems in the past.
# Logically, we shouldn't rely on the node_data.version field for this decision,
# but the check is kept for backward compatibility with historical data.
runtime_variable_pool: VariablePool | None = None
if node_data.version != "1" or node_data.tool_node_version != "1":
runtime_variable_pool = variable_pool
tool_runtime = ToolManager.get_agent_tool_runtime(
self.tenant_id, self.app_id, entity, self.invoke_from, runtime_variable_pool
)
@ -479,7 +486,7 @@ class AgentNode(BaseNode):
text = ""
files: list[File] = []
json: list[dict] = []
json_list: list[dict] = []
agent_logs: list[AgentLogEvent] = []
agent_execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] = {}
@ -557,7 +564,7 @@ class AgentNode(BaseNode):
if key in WorkflowNodeExecutionMetadataKey.__members__.values()
}
if message.message.json_object is not None:
json.append(message.message.json_object)
json_list.append(message.message.json_object)
elif message.type == ToolInvokeMessage.MessageType.LINK:
assert isinstance(message.message, ToolInvokeMessage.TextMessage)
stream_text = f"Link: {message.message.text}\n"
@ -669,8 +676,8 @@ class AgentNode(BaseNode):
}
)
# Step 2: normalize JSON into {"data": [...]}; json_list is collected as list[dict]
if json:
json_output.extend(json)
if json_list:
json_output.extend(json_list)
else:
json_output.append({"data": []})

@ -13,6 +13,10 @@ class AgentNodeData(BaseNodeData):
agent_strategy_name: str
agent_strategy_label: str # redundancy
memory: MemoryConfig | None = None
# The version of the tool parameter.
# If this value is None, it indicates this is a previous version
# and requires using the legacy parameter parsing rules.
tool_node_version: str | None = None
class AgentInput(BaseModel):
value: Union[list[str], list[ToolSelector], Any]

@ -118,7 +118,7 @@ class KnowledgeRetrievalNodeData(BaseNodeData):
multiple_retrieval_config: Optional[MultipleRetrievalConfig] = None
single_retrieval_config: Optional[SingleRetrievalConfig] = None
metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled"
metadata_model_config: ModelConfig
metadata_model_config: Optional[ModelConfig] = None
metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None
vision: VisionConfig = Field(default_factory=VisionConfig)

@ -462,7 +462,7 @@ class KnowledgeRetrievalNode(BaseNode):
expected_value = self.graph_runtime_state.variable_pool.convert_template(
expected_value
).value[0]
if expected_value.value_type == "number": # type: ignore
if expected_value.value_type in {"number", "integer", "float"}: # type: ignore
expected_value = expected_value.value # type: ignore
elif expected_value.value_type == "string": # type: ignore
expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip() # type: ignore
@ -509,6 +509,8 @@ class KnowledgeRetrievalNode(BaseNode):
# get all metadata field
metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all()
all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields]
if node_data.metadata_model_config is None:
raise ValueError("metadata_model_config is required")
# get metadata model instance and fetch model config
model_instance, model_config = self.get_model_config(node_data.metadata_model_config)
# fetch prompt messages
@ -701,7 +703,7 @@ class KnowledgeRetrievalNode(BaseNode):
)
def _get_prompt_template(self, node_data: KnowledgeRetrievalNodeData, metadata_fields: list, query: str):
model_mode = ModelMode(node_data.metadata_model_config.mode)
model_mode = ModelMode(node_data.metadata_model_config.mode) # type: ignore
input_text = query
prompt_messages: list[LLMNodeChatModelMessage] = []

@ -565,7 +565,7 @@ class LLMNode(BaseNode):
retriever_resources=original_retriever_resource, context=context_str.strip()
)
def _convert_to_original_retriever_resource(self, context_dict: dict):
def _convert_to_original_retriever_resource(self, context_dict: dict) -> RetrievalSourceMetadata | None:
if (
"metadata" in context_dict
and "_source" in context_dict["metadata"]

@ -73,6 +73,9 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = {
},
NodeType.TOOL: {
LATEST_VERSION: ToolNode,
# This mapping caused problems in the past.
# Logically, two different versions shouldn't point to the same class here,
# but the duplication is kept to stay compatible with historical data.
"2": ToolNode,
"1": ToolNode,
},
@ -123,6 +126,9 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = {
},
NodeType.AGENT: {
LATEST_VERSION: AgentNode,
# This mapping caused problems in the past.
# Logically, two different versions shouldn't point to the same class here,
# but the duplication is kept to stay compatible with historical data.
"2": AgentNode,
"1": AgentNode,
},

@ -59,6 +59,10 @@ class ToolNodeData(BaseNodeData, ToolEntity):
return typ
tool_parameters: dict[str, ToolInput]
# The version of the tool parameter.
# If this value is None, it indicates this is a previous version
# and requires using the legacy parameter parsing rules.
tool_node_version: str | None = None
@field_validator("tool_parameters", mode="before")
@classmethod

@ -70,7 +70,13 @@ class ToolNode(BaseNode):
try:
from core.tools.tool_manager import ToolManager
variable_pool = self.graph_runtime_state.variable_pool if self._node_data.version != "1" else None
# This check caused problems in the past.
# Logically, we shouldn't rely on the node_data.version field for this decision,
# but the check is kept for backward compatibility with historical data.
variable_pool: VariablePool | None = None
if node_data.version != "1" or node_data.tool_node_version != "1":
variable_pool = self.graph_runtime_state.variable_pool
tool_runtime = ToolManager.get_workflow_tool_runtime(
self.tenant_id, self.app_id, self.node_id, self._node_data, self.invoke_from, variable_pool
)
@ -310,7 +316,14 @@ class ToolNode(BaseNode):
variables[variable_name] = variable_value
elif message.type == ToolInvokeMessage.MessageType.FILE:
assert message.meta is not None
assert isinstance(message.meta, File)
assert isinstance(message.meta, dict)
# Validate that meta contains a 'file' key
if "file" not in message.meta:
raise ToolNodeError("File message is missing 'file' key in meta")
# Validate that the file is an instance of File
if not isinstance(message.meta["file"], File):
raise ToolNodeError(f"Expected File object but got {type(message.meta['file']).__name__}")
files.append(message.meta["file"])
elif message.type == ToolInvokeMessage.MessageType.LOG:
assert isinstance(message.message, ToolInvokeMessage.LogMessage)

@ -1,6 +1,6 @@
from collections.abc import Mapping
from dataclasses import dataclass
from datetime import UTC, datetime
from datetime import datetime
from typing import Any, Optional, Union
from uuid import uuid4
@ -71,7 +71,7 @@ class WorkflowCycleManager:
workflow_version=self._workflow_info.version,
graph=self._workflow_info.graph_data,
inputs=inputs,
started_at=datetime.now(UTC).replace(tzinfo=None),
started_at=naive_utc_now(),
)
return self._save_and_cache_workflow_execution(execution)
@ -85,6 +85,7 @@ class WorkflowCycleManager:
outputs: Mapping[str, Any] | None = None,
conversation_id: Optional[str] = None,
trace_manager: Optional[TraceQueueManager] = None,
external_trace_id: Optional[str] = None,
) -> WorkflowExecution:
workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
@ -96,7 +97,7 @@ class WorkflowCycleManager:
total_steps=total_steps,
)
self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id)
self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id)
self._workflow_execution_repository.save(workflow_execution)
return workflow_execution
@ -111,6 +112,7 @@ class WorkflowCycleManager:
exceptions_count: int = 0,
conversation_id: Optional[str] = None,
trace_manager: Optional[TraceQueueManager] = None,
external_trace_id: Optional[str] = None,
) -> WorkflowExecution:
execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
@ -123,7 +125,7 @@ class WorkflowCycleManager:
exceptions_count=exceptions_count,
)
self._add_trace_task_if_needed(trace_manager, execution, conversation_id)
self._add_trace_task_if_needed(trace_manager, execution, conversation_id, external_trace_id)
self._workflow_execution_repository.save(execution)
return execution
@ -139,6 +141,7 @@ class WorkflowCycleManager:
conversation_id: Optional[str] = None,
trace_manager: Optional[TraceQueueManager] = None,
exceptions_count: int = 0,
external_trace_id: Optional[str] = None,
) -> WorkflowExecution:
workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
now = naive_utc_now()
@ -154,7 +157,7 @@ class WorkflowCycleManager:
)
self._fail_running_node_executions(workflow_execution.id_, error_message, now)
self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id)
self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id)
self._workflow_execution_repository.save(workflow_execution)
return workflow_execution
@ -312,6 +315,7 @@ class WorkflowCycleManager:
trace_manager: Optional[TraceQueueManager],
workflow_execution: WorkflowExecution,
conversation_id: Optional[str],
external_trace_id: Optional[str],
) -> None:
"""Add trace task if trace manager is provided."""
if trace_manager:
@ -321,6 +325,7 @@ class WorkflowCycleManager:
workflow_execution=workflow_execution,
conversation_id=conversation_id,
user_id=trace_manager.user_id,
external_trace_id=external_trace_id,
)
)
@ -356,7 +361,7 @@ class WorkflowCycleManager:
created_at: Optional[datetime] = None,
) -> WorkflowNodeExecution:
"""Create a node execution from an event."""
now = datetime.now(UTC).replace(tzinfo=None)
now = naive_utc_now()
created_at = created_at or now
metadata = {
@ -403,7 +408,7 @@ class WorkflowCycleManager:
handle_special_values: bool = False,
) -> None:
"""Update node execution with completion data."""
finished_at = datetime.now(UTC).replace(tzinfo=None)
finished_at = naive_utc_now()
elapsed_time = (finished_at - event.start_at).total_seconds()
# Process data

@ -163,6 +163,7 @@ class WorkflowEntry:
graph=graph,
graph_runtime_state=GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()),
)
node.init_node_data(node_config_data)
try:
# variable selector to variable mapping
@ -273,6 +274,7 @@ class WorkflowEntry:
graph=graph,
graph_runtime_state=GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()),
)
node.init_node_data(node_data)
try:
# variable selector to variable mapping

@ -22,7 +22,7 @@ if [[ "${MODE}" == "worker" ]]; then
exec celery -A app.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \
--max-tasks-per-child ${MAX_TASK_PRE_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \
-Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion}
-Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion,plugin}
elif [[ "${MODE}" == "beat" ]]; then
exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO}

@ -1,4 +1,3 @@
import datetime
import logging
import time
@ -8,6 +7,7 @@ from werkzeug.exceptions import NotFound
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from events.event_handlers.document_index_event import document_index_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Document
@ -33,7 +33,7 @@ def handle(sender, **kwargs):
raise NotFound("Document not found")
document.indexing_status = "parsing"
document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.processing_started_at = naive_utc_now()
documents.append(document)
db.session.add(document)
db.session.commit()

@ -64,49 +64,62 @@ def init_app(app: DifyApp) -> Celery:
celery_app.set_default()
app.extensions["celery"] = celery_app
imports = [
"schedule.clean_embedding_cache_task",
"schedule.clean_unused_datasets_task",
"schedule.create_tidb_serverless_task",
"schedule.update_tidb_serverless_status_task",
"schedule.clean_messages",
"schedule.mail_clean_document_notify_task",
"schedule.queue_monitor_task",
]
imports = []
day = dify_config.CELERY_BEAT_SCHEDULER_TIME
beat_schedule = {
"clean_embedding_cache_task": {
# If you add a new task, please also add its switch to CeleryScheduleTasksConfig
beat_schedule = {}
if dify_config.ENABLE_CLEAN_EMBEDDING_CACHE_TASK:
imports.append("schedule.clean_embedding_cache_task")
beat_schedule["clean_embedding_cache_task"] = {
"task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task",
"schedule": timedelta(days=day),
},
"clean_unused_datasets_task": {
}
if dify_config.ENABLE_CLEAN_UNUSED_DATASETS_TASK:
imports.append("schedule.clean_unused_datasets_task")
beat_schedule["clean_unused_datasets_task"] = {
"task": "schedule.clean_unused_datasets_task.clean_unused_datasets_task",
"schedule": timedelta(days=day),
},
"create_tidb_serverless_task": {
}
if dify_config.ENABLE_CREATE_TIDB_SERVERLESS_TASK:
imports.append("schedule.create_tidb_serverless_task")
beat_schedule["create_tidb_serverless_task"] = {
"task": "schedule.create_tidb_serverless_task.create_tidb_serverless_task",
"schedule": crontab(minute="0", hour="*"),
},
"update_tidb_serverless_status_task": {
}
if dify_config.ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:
imports.append("schedule.update_tidb_serverless_status_task")
beat_schedule["update_tidb_serverless_status_task"] = {
"task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task",
"schedule": timedelta(minutes=10),
},
"clean_messages": {
}
if dify_config.ENABLE_CLEAN_MESSAGES:
imports.append("schedule.clean_messages")
beat_schedule["clean_messages"] = {
"task": "schedule.clean_messages.clean_messages",
"schedule": timedelta(days=day),
},
# every Monday
"mail_clean_document_notify_task": {
}
if dify_config.ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:
imports.append("schedule.mail_clean_document_notify_task")
beat_schedule["mail_clean_document_notify_task"] = {
"task": "schedule.mail_clean_document_notify_task.mail_clean_document_notify_task",
"schedule": crontab(minute="0", hour="10", day_of_week="1"),
},
"datasets-queue-monitor": {
}
if dify_config.ENABLE_DATASETS_QUEUE_MONITOR:
imports.append("schedule.queue_monitor_task")
beat_schedule["datasets-queue-monitor"] = {
"task": "schedule.queue_monitor_task.queue_monitor_task",
"schedule": timedelta(
minutes=dify_config.QUEUE_MONITOR_INTERVAL if dify_config.QUEUE_MONITOR_INTERVAL else 30
),
},
}
}
if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:
imports.append("schedule.check_upgradable_plugin_task")
beat_schedule["check_upgradable_plugin_task"] = {
"task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task",
"schedule": crontab(minute="*/15"),
}
celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
return celery_app

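Following the comment above, wiring in another scheduled task means registering it behind a new switch and declaring that switch in CeleryScheduleTasksConfig. A hedged sketch with a made-up flag and module name (neither exists in the codebase):

from datetime import timedelta

imports: list[str] = []
beat_schedule: dict = {}

ENABLE_MY_NEW_TASK = True  # would live on dify_config via CeleryScheduleTasksConfig
if ENABLE_MY_NEW_TASK:
    imports.append("schedule.my_new_task")           # hypothetical module
    beat_schedule["my_new_task"] = {
        "task": "schedule.my_new_task.my_new_task",   # hypothetical task path
        "schedule": timedelta(hours=24),
    }
print(beat_schedule)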
@ -1,5 +1,5 @@
from collections.abc import Generator
from datetime import UTC, datetime, timedelta
from datetime import timedelta
from typing import Optional
from azure.identity import ChainedTokenCredential, DefaultAzureCredential
@ -8,6 +8,7 @@ from azure.storage.blob import AccountSasPermissions, BlobServiceClient, Resourc
from configs import dify_config
from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage
from libs.datetime_utils import naive_utc_now
class AzureBlobStorage(BaseStorage):
@ -78,7 +79,7 @@ class AzureBlobStorage(BaseStorage):
account_key=self.account_key or "",
resource_types=ResourceTypes(service=True, container=True, object=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
expiry=naive_utc_now() + timedelta(hours=1),
)
redis_client.set(cache_key, sas_token, ex=3000)
return BlobServiceClient(account_url=self.account_url or "", credential=sas_token)

@ -148,9 +148,7 @@ def _build_from_local_file(
if strict_type_validation and detected_file_type.value != specified_type:
raise ValueError("Detected file type does not match the specified type. Please verify the file.")
file_type = (
FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM.value else detected_file_type
)
file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type
return File(
id=mapping.get("id"),
@ -199,9 +197,7 @@ def _build_from_remote_url(
raise ValueError("Detected file type does not match the specified type. Please verify the file.")
file_type = (
FileType(specified_type)
if specified_type and specified_type != FileType.CUSTOM.value
else detected_file_type
FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type
)
return File(
@ -286,9 +282,7 @@ def _build_from_tool_file(
if strict_type_validation and specified_type and detected_file_type.value != specified_type:
raise ValueError("Detected file type does not match the specified type. Please verify the file.")
file_type = (
FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM.value else detected_file_type
)
file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type
return File(
id=mapping.get("id"),

@ -0,0 +1,461 @@
"""
Email Internationalization Module
This module provides a centralized, elegant way to handle email internationalization
in Dify. It follows Domain-Driven Design principles with proper type hints and
eliminates the need for repetitive language switching logic.
"""
from dataclasses import dataclass
from enum import Enum
from typing import Any, Optional, Protocol
from flask import render_template
from pydantic import BaseModel, Field
from extensions.ext_mail import mail
from services.feature_service import BrandingModel, FeatureService
class EmailType(Enum):
"""Enumeration of supported email types."""
RESET_PASSWORD = "reset_password"
INVITE_MEMBER = "invite_member"
EMAIL_CODE_LOGIN = "email_code_login"
CHANGE_EMAIL_OLD = "change_email_old"
CHANGE_EMAIL_NEW = "change_email_new"
OWNER_TRANSFER_CONFIRM = "owner_transfer_confirm"
OWNER_TRANSFER_OLD_NOTIFY = "owner_transfer_old_notify"
OWNER_TRANSFER_NEW_NOTIFY = "owner_transfer_new_notify"
ACCOUNT_DELETION_SUCCESS = "account_deletion_success"
ACCOUNT_DELETION_VERIFICATION = "account_deletion_verification"
ENTERPRISE_CUSTOM = "enterprise_custom"
QUEUE_MONITOR_ALERT = "queue_monitor_alert"
DOCUMENT_CLEAN_NOTIFY = "document_clean_notify"
class EmailLanguage(Enum):
"""Supported email languages with fallback handling."""
EN_US = "en-US"
ZH_HANS = "zh-Hans"
@classmethod
def from_language_code(cls, language_code: str) -> "EmailLanguage":
"""Convert a language code to EmailLanguage with fallback to English."""
if language_code == "zh-Hans":
return cls.ZH_HANS
return cls.EN_US
@dataclass(frozen=True)
class EmailTemplate:
"""Immutable value object representing an email template configuration."""
subject: str
template_path: str
branded_template_path: str
@dataclass(frozen=True)
class EmailContent:
"""Immutable value object containing rendered email content."""
subject: str
html_content: str
template_context: dict[str, Any]
class EmailI18nConfig(BaseModel):
"""Configuration for email internationalization."""
model_config = {"frozen": True, "extra": "forbid"}
templates: dict[EmailType, dict[EmailLanguage, EmailTemplate]] = Field(
default_factory=dict, description="Mapping of email types to language-specific templates"
)
def get_template(self, email_type: EmailType, language: EmailLanguage) -> EmailTemplate:
"""Get template configuration for specific email type and language."""
type_templates = self.templates.get(email_type)
if not type_templates:
raise ValueError(f"No templates configured for email type: {email_type}")
template = type_templates.get(language)
if not template:
# Fallback to English if specific language not found
template = type_templates.get(EmailLanguage.EN_US)
if not template:
raise ValueError(f"No template found for {email_type} in {language} or English")
return template
class EmailRenderer(Protocol):
"""Protocol for email template renderers."""
def render_template(self, template_path: str, **context: Any) -> str:
"""Render email template with given context."""
...
class FlaskEmailRenderer:
"""Flask-based email template renderer."""
def render_template(self, template_path: str, **context: Any) -> str:
"""Render email template using Flask's render_template."""
return render_template(template_path, **context)
class BrandingService(Protocol):
"""Protocol for branding service abstraction."""
def get_branding_config(self) -> BrandingModel:
"""Get current branding configuration."""
...
class FeatureBrandingService:
"""Feature service based branding implementation."""
def get_branding_config(self) -> BrandingModel:
"""Get branding configuration from feature service."""
return FeatureService.get_system_features().branding
class EmailSender(Protocol):
"""Protocol for email sending abstraction."""
def send_email(self, to: str, subject: str, html_content: str) -> None:
"""Send email with given parameters."""
...
class FlaskMailSender:
"""Flask-Mail based email sender."""
def send_email(self, to: str, subject: str, html_content: str) -> None:
"""Send email using Flask-Mail."""
if mail.is_inited():
mail.send(to=to, subject=subject, html=html_content)
class EmailI18nService:
"""
Main service for internationalized email handling.
This service provides a clean API for sending internationalized emails
with proper branding support and template management.
"""
def __init__(
self,
config: EmailI18nConfig,
renderer: EmailRenderer,
branding_service: BrandingService,
sender: EmailSender,
) -> None:
self._config = config
self._renderer = renderer
self._branding_service = branding_service
self._sender = sender
def send_email(
self,
email_type: EmailType,
language_code: str,
to: str,
template_context: Optional[dict[str, Any]] = None,
) -> None:
"""
Send internationalized email with branding support.
Args:
email_type: Type of email to send
language_code: Target language code
to: Recipient email address
template_context: Additional context for template rendering
"""
if template_context is None:
template_context = {}
language = EmailLanguage.from_language_code(language_code)
email_content = self._render_email_content(email_type, language, template_context)
self._sender.send_email(to=to, subject=email_content.subject, html_content=email_content.html_content)
def send_change_email(
self,
language_code: str,
to: str,
code: str,
phase: str,
) -> None:
"""
Send change email notification with phase-specific handling.
Args:
language_code: Target language code
to: Recipient email address
code: Verification code
phase: Either 'old_email' or 'new_email'
"""
if phase == "old_email":
email_type = EmailType.CHANGE_EMAIL_OLD
elif phase == "new_email":
email_type = EmailType.CHANGE_EMAIL_NEW
else:
raise ValueError(f"Invalid phase: {phase}. Must be 'old_email' or 'new_email'")
self.send_email(
email_type=email_type,
language_code=language_code,
to=to,
template_context={
"to": to,
"code": code,
},
)
def send_raw_email(
self,
to: str | list[str],
subject: str,
html_content: str,
) -> None:
"""
Send a raw email directly without template processing.
This method is provided for backward compatibility with legacy email
sending that uses pre-rendered HTML content (e.g., enterprise emails
with custom templates).
Args:
to: Recipient email address(es)
subject: Email subject
html_content: Pre-rendered HTML content
"""
if isinstance(to, list):
for recipient in to:
self._sender.send_email(to=recipient, subject=subject, html_content=html_content)
else:
self._sender.send_email(to=to, subject=subject, html_content=html_content)
def _render_email_content(
self,
email_type: EmailType,
language: EmailLanguage,
template_context: dict[str, Any],
) -> EmailContent:
"""Render email content with branding and internationalization."""
template_config = self._config.get_template(email_type, language)
branding = self._branding_service.get_branding_config()
# Determine template path based on branding
template_path = template_config.branded_template_path if branding.enabled else template_config.template_path
# Prepare template context with branding information
full_context = {
**template_context,
"branding_enabled": branding.enabled,
"application_title": branding.application_title if branding.enabled else "Dify",
}
# Render template
html_content = self._renderer.render_template(template_path, **full_context)
# Apply templating to subject with all context variables
subject = template_config.subject
try:
subject = subject.format(**full_context)
except KeyError:
# If template variables are missing, fall back to basic formatting
if branding.enabled and "{application_title}" in subject:
subject = subject.format(application_title=branding.application_title)
return EmailContent(
subject=subject,
html_content=html_content,
template_context=full_context,
)
def create_default_email_config() -> EmailI18nConfig:
"""Create default email i18n configuration with all supported templates."""
templates: dict[EmailType, dict[EmailLanguage, EmailTemplate]] = {
EmailType.RESET_PASSWORD: {
EmailLanguage.EN_US: EmailTemplate(
subject="Set Your {application_title} Password",
template_path="reset_password_mail_template_en-US.html",
branded_template_path="without-brand/reset_password_mail_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="设置您的 {application_title} 密码",
template_path="reset_password_mail_template_zh-CN.html",
branded_template_path="without-brand/reset_password_mail_template_zh-CN.html",
),
},
EmailType.INVITE_MEMBER: {
EmailLanguage.EN_US: EmailTemplate(
subject="Join {application_title} Workspace Now",
template_path="invite_member_mail_template_en-US.html",
branded_template_path="without-brand/invite_member_mail_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="立即加入 {application_title} 工作空间",
template_path="invite_member_mail_template_zh-CN.html",
branded_template_path="without-brand/invite_member_mail_template_zh-CN.html",
),
},
EmailType.EMAIL_CODE_LOGIN: {
EmailLanguage.EN_US: EmailTemplate(
subject="{application_title} Login Code",
template_path="email_code_login_mail_template_en-US.html",
branded_template_path="without-brand/email_code_login_mail_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="{application_title} 登录验证码",
template_path="email_code_login_mail_template_zh-CN.html",
branded_template_path="without-brand/email_code_login_mail_template_zh-CN.html",
),
},
EmailType.CHANGE_EMAIL_OLD: {
EmailLanguage.EN_US: EmailTemplate(
subject="Check your current email",
template_path="change_mail_confirm_old_template_en-US.html",
branded_template_path="without-brand/change_mail_confirm_old_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="检测您现在的邮箱",
template_path="change_mail_confirm_old_template_zh-CN.html",
branded_template_path="without-brand/change_mail_confirm_old_template_zh-CN.html",
),
},
EmailType.CHANGE_EMAIL_NEW: {
EmailLanguage.EN_US: EmailTemplate(
subject="Confirm your new email address",
template_path="change_mail_confirm_new_template_en-US.html",
branded_template_path="without-brand/change_mail_confirm_new_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="确认您的邮箱地址变更",
template_path="change_mail_confirm_new_template_zh-CN.html",
branded_template_path="without-brand/change_mail_confirm_new_template_zh-CN.html",
),
},
EmailType.OWNER_TRANSFER_CONFIRM: {
EmailLanguage.EN_US: EmailTemplate(
subject="Verify Your Request to Transfer Workspace Ownership",
template_path="transfer_workspace_owner_confirm_template_en-US.html",
branded_template_path="without-brand/transfer_workspace_owner_confirm_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="验证您转移工作空间所有权的请求",
template_path="transfer_workspace_owner_confirm_template_zh-CN.html",
branded_template_path="without-brand/transfer_workspace_owner_confirm_template_zh-CN.html",
),
},
EmailType.OWNER_TRANSFER_OLD_NOTIFY: {
EmailLanguage.EN_US: EmailTemplate(
subject="Workspace ownership has been transferred",
template_path="transfer_workspace_old_owner_notify_template_en-US.html",
branded_template_path="without-brand/transfer_workspace_old_owner_notify_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="工作区所有权已转移",
template_path="transfer_workspace_old_owner_notify_template_zh-CN.html",
branded_template_path="without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html",
),
},
EmailType.OWNER_TRANSFER_NEW_NOTIFY: {
EmailLanguage.EN_US: EmailTemplate(
subject="You are now the owner of {WorkspaceName}",
template_path="transfer_workspace_new_owner_notify_template_en-US.html",
branded_template_path="without-brand/transfer_workspace_new_owner_notify_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您现在是 {WorkspaceName} 的所有者",
template_path="transfer_workspace_new_owner_notify_template_zh-CN.html",
branded_template_path="without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html",
),
},
EmailType.ACCOUNT_DELETION_SUCCESS: {
EmailLanguage.EN_US: EmailTemplate(
subject="Your Dify.AI Account Has Been Successfully Deleted",
template_path="delete_account_success_template_en-US.html",
branded_template_path="delete_account_success_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的 Dify.AI 账户已成功删除",
template_path="delete_account_success_template_zh-CN.html",
branded_template_path="delete_account_success_template_zh-CN.html",
),
},
EmailType.ACCOUNT_DELETION_VERIFICATION: {
EmailLanguage.EN_US: EmailTemplate(
subject="Dify.AI Account Deletion and Verification",
template_path="delete_account_code_email_template_en-US.html",
branded_template_path="delete_account_code_email_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="Dify.AI 账户删除和验证",
template_path="delete_account_code_email_template_zh-CN.html",
branded_template_path="delete_account_code_email_template_zh-CN.html",
),
},
EmailType.QUEUE_MONITOR_ALERT: {
EmailLanguage.EN_US: EmailTemplate(
subject="Alert: Dataset Queue pending tasks exceeded the limit",
template_path="queue_monitor_alert_email_template_en-US.html",
branded_template_path="queue_monitor_alert_email_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="警报:数据集队列待处理任务超过限制",
template_path="queue_monitor_alert_email_template_zh-CN.html",
branded_template_path="queue_monitor_alert_email_template_zh-CN.html",
),
},
EmailType.DOCUMENT_CLEAN_NOTIFY: {
EmailLanguage.EN_US: EmailTemplate(
subject="Dify Knowledge base auto disable notification",
template_path="clean_document_job_mail_template-US.html",
branded_template_path="clean_document_job_mail_template-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="Dify 知识库自动禁用通知",
template_path="clean_document_job_mail_template_zh-CN.html",
branded_template_path="clean_document_job_mail_template_zh-CN.html",
),
},
}
return EmailI18nConfig(templates=templates)
# Singleton instance for application-wide use
def get_default_email_i18n_service() -> EmailI18nService:
"""Get configured email i18n service with default dependencies."""
config = create_default_email_config()
renderer = FlaskEmailRenderer()
branding_service = FeatureBrandingService()
sender = FlaskMailSender()
return EmailI18nService(
config=config,
renderer=renderer,
branding_service=branding_service,
sender=sender,
)
# Global instance
_email_i18n_service: Optional[EmailI18nService] = None
def get_email_i18n_service() -> EmailI18nService:
"""Get global email i18n service instance."""
global _email_i18n_service
if _email_i18n_service is None:
_email_i18n_service = get_default_email_i18n_service()
return _email_i18n_service
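A usage sketch for the service defined above; the import path, recipient, and code are placeholders, and the call assumes an active Flask application context so render_template and mail are available:

from libs.email_i18n import EmailType, get_email_i18n_service  # module path assumed for illustration

email_service = get_email_i18n_service()
email_service.send_email(
    email_type=EmailType.EMAIL_CODE_LOGIN,
    language_code="zh-Hans",  # unknown codes fall back to en-US
    to="user@example.com",
    template_context={"to": "user@example.com", "code": "123456"},
)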

@ -1,4 +1,3 @@
import datetime
import urllib.parse
from typing import Any
@ -6,6 +5,7 @@ import requests
from flask_login import current_user
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.source import DataSourceOauthBinding
@ -75,7 +75,7 @@ class NotionOAuth(OAuthDataSource):
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
new_data_source_binding = DataSourceOauthBinding(
@ -115,7 +115,7 @@ class NotionOAuth(OAuthDataSource):
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
new_data_source_binding = DataSourceOauthBinding(
@ -154,7 +154,7 @@ class NotionOAuth(OAuthDataSource):
}
data_source_binding.source_info = new_source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
raise ValueError("Data source binding not found")

@ -12,7 +12,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4474872b0ee6'
down_revision = '16081485540c'
down_revision = '2adcbe1f5dfb'
branch_labels = None
depends_on = None

@ -0,0 +1,51 @@
"""update models
Revision ID: 1a83934ad6d1
Revises: 71f5020c6470
Create Date: 2025-07-21 09:35:48.774794
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1a83934ad6d1'
down_revision = '71f5020c6470'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.alter_column('server_identifier',
existing_type=sa.VARCHAR(length=24),
type_=sa.String(length=64),
existing_nullable=False)
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('tool_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=128),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('tool_name',
existing_type=sa.String(length=128),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.alter_column('server_identifier',
existing_type=sa.String(length=64),
type_=sa.VARCHAR(length=24),
existing_nullable=False)
# ### end Alembic commands ###

@ -0,0 +1,34 @@
"""oauth_refresh_token
Revision ID: 375fe79ead14
Revises: 1a83934ad6d1
Create Date: 2025-07-22 00:19:45.599636
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '375fe79ead14'
down_revision = '1a83934ad6d1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('expires_at', sa.BigInteger(), server_default=sa.text('-1'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.drop_column('expires_at')
# ### end Alembic commands ###

@ -1,18 +1,18 @@
"""empty message
"""add_tenant_plugin_autoupgrade_table
Revision ID: 16081485540c
Revises: d28f2004b072
Create Date: 2025-05-15 16:35:39.113777
Revision ID: 8bcc02c9bd07
Revises: 375fe79ead14
Create Date: 2025-07-23 15:08:50.161441
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '16081485540c'
down_revision = '2adcbe1f5dfb'
revision = '8bcc02c9bd07'
down_revision = '375fe79ead14'
branch_labels = None
depends_on = None
@ -37,5 +37,6 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tenant_plugin_auto_upgrade_strategies')
# ### end Alembic commands ###

@ -1,5 +1,6 @@
import enum
import json
from datetime import datetime
from typing import Optional, cast
from flask_login import UserMixin # type: ignore
@ -85,21 +86,23 @@ class Account(UserMixin, Base):
__table_args__ = (db.PrimaryKeyConstraint("id", name="account_pkey"), db.Index("account_email_idx", "email"))
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
name = db.Column(db.String(255), nullable=False)
email = db.Column(db.String(255), nullable=False)
password = db.Column(db.String(255), nullable=True)
password_salt = db.Column(db.String(255), nullable=True)
avatar = db.Column(db.String(255))
interface_language = db.Column(db.String(255))
interface_theme = db.Column(db.String(255))
timezone = db.Column(db.String(255))
last_login_at = db.Column(db.DateTime)
last_login_ip = db.Column(db.String(255))
last_active_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
status = db.Column(db.String(16), nullable=False, server_default=db.text("'active'::character varying"))
initialized_at = db.Column(db.DateTime)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
name: Mapped[str] = mapped_column(db.String(255))
email: Mapped[str] = mapped_column(db.String(255))
password: Mapped[Optional[str]] = mapped_column(db.String(255))
password_salt: Mapped[Optional[str]] = mapped_column(db.String(255))
avatar: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True)
interface_language: Mapped[Optional[str]] = mapped_column(db.String(255))
interface_theme: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True)
timezone: Mapped[Optional[str]] = mapped_column(db.String(255))
last_login_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True)
last_login_ip: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True)
last_active_at: Mapped[datetime] = mapped_column(
db.DateTime, server_default=func.current_timestamp(), nullable=False
)
status: Mapped[str] = mapped_column(db.String(16), server_default=db.text("'active'::character varying"))
initialized_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp(), nullable=False)
updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp(), nullable=False)
@reconstructor
def init_on_load(self):
@ -143,7 +146,7 @@ class Account(UserMixin, Base):
return
tenant, join = tenant_account_join
self.role = join.role
self.role = TenantAccountRole(join.role)
self._current_tenant = tenant
@property
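In the hunk above, the raw role string from the tenant-account join is wrapped in `TenantAccountRole` before being stored on the account. Assuming it is a string-valued enum (consistent with the other StrEnum definitions in this file), construction validates the value and enables member comparisons; a hypothetical illustration (member names assumed, not taken from this diff):

    # TenantAccountRole("admin") -> TenantAccountRole.ADMIN, assuming such a member exists;
    # an unknown string like TenantAccountRole("guest") would raise ValueError.
    role = TenantAccountRole(join.role)
    if role == TenantAccountRole.ADMIN:
        ...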
@ -196,14 +199,14 @@ class Tenant(Base):
__tablename__ = "tenants"
__table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),)
id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
name = db.Column(db.String(255), nullable=False)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
name: Mapped[str] = mapped_column(db.String(255))
encrypt_public_key = db.Column(db.Text)
plan = db.Column(db.String(255), nullable=False, server_default=db.text("'basic'::character varying"))
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
custom_config = db.Column(db.Text)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
plan: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'basic'::character varying"))
status: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'normal'::character varying"))
custom_config: Mapped[Optional[str]] = mapped_column(db.Text)
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp(), nullable=False)
updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
def get_accounts(self) -> list[Account]:
return (
@ -230,14 +233,14 @@ class TenantAccountJoin(Base):
db.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
account_id = db.Column(StringUUID, nullable=False)
current = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
role = db.Column(db.String(16), nullable=False, server_default="normal")
invited_by = db.Column(StringUUID, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID)
account_id: Mapped[str] = mapped_column(StringUUID)
current: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false"))
role: Mapped[str] = mapped_column(db.String(16), server_default="normal")
invited_by: Mapped[Optional[str]] = mapped_column(StringUUID)
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
class AccountIntegrate(Base):
@ -248,13 +251,13 @@ class AccountIntegrate(Base):
db.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
account_id = db.Column(StringUUID, nullable=False)
provider = db.Column(db.String(16), nullable=False)
open_id = db.Column(db.String(255), nullable=False)
encrypted_token = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
account_id: Mapped[str] = mapped_column(StringUUID)
provider: Mapped[str] = mapped_column(db.String(16))
open_id: Mapped[str] = mapped_column(db.String(255))
encrypted_token: Mapped[str] = mapped_column(db.String(255))
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
class InvitationCode(Base):
@ -265,15 +268,15 @@ class InvitationCode(Base):
db.Index("invitation_codes_code_idx", "code", "status"),
)
id = db.Column(db.Integer, nullable=False)
batch = db.Column(db.String(255), nullable=False)
code = db.Column(db.String(32), nullable=False)
status = db.Column(db.String(16), nullable=False, server_default=db.text("'unused'::character varying"))
used_at = db.Column(db.DateTime)
used_by_tenant_id = db.Column(StringUUID)
used_by_account_id = db.Column(StringUUID)
deprecated_at = db.Column(db.DateTime)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
id: Mapped[int] = mapped_column(db.Integer)
batch: Mapped[str] = mapped_column(db.String(255))
code: Mapped[str] = mapped_column(db.String(32))
status: Mapped[str] = mapped_column(db.String(16), server_default=db.text("'unused'::character varying"))
used_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime)
used_by_tenant_id: Mapped[Optional[str]] = mapped_column(StringUUID)
used_by_account_id: Mapped[Optional[str]] = mapped_column(StringUUID)
deprecated_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=db.text("CURRENT_TIMESTAMP(0)"))
class TenantPluginPermission(Base):
@ -299,3 +302,35 @@ class TenantPluginPermission(Base):
db.String(16), nullable=False, server_default="everyone"
)
debug_permission: Mapped[DebugPermission] = mapped_column(db.String(16), nullable=False, server_default="noone")
class TenantPluginAutoUpgradeStrategy(Base):
class StrategySetting(enum.StrEnum):
DISABLED = "disabled"
FIX_ONLY = "fix_only"
LATEST = "latest"
class UpgradeMode(enum.StrEnum):
ALL = "all"
PARTIAL = "partial"
EXCLUDE = "exclude"
__tablename__ = "tenant_plugin_auto_upgrade_strategies"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="tenant_plugin_auto_upgrade_strategy_pkey"),
db.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
strategy_setting: Mapped[StrategySetting] = mapped_column(db.String(16), nullable=False, server_default="fix_only")
    upgrade_time_of_day: Mapped[int] = mapped_column(db.Integer, nullable=False, default=0) # time of day expressed as seconds since midnight
upgrade_mode: Mapped[UpgradeMode] = mapped_column(db.String(16), nullable=False, server_default="exclude")
exclude_plugins: Mapped[list[str]] = mapped_column(
db.ARRAY(db.String(255)), nullable=False
) # plugin_id (author/name)
include_plugins: Mapped[list[str]] = mapped_column(
db.ARRAY(db.String(255)), nullable=False
) # plugin_id (author/name)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
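The column rewrites in this file lean on SQLAlchemy 2.0 typed mappings: with `mapped_column`, nullability is inferred from the `Mapped[...]` annotation, so `Mapped[Optional[str]]` yields a nullable column and a plain `Mapped[str]` a NOT NULL one, which is why most explicit `nullable=` flags drop out. A minimal self-contained sketch of the pattern, independent of the Dify models:

    from datetime import datetime
    from typing import Optional

    from sqlalchemy import DateTime, String, func
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Example(Base):
        __tablename__ = "example"

        id: Mapped[int] = mapped_column(primary_key=True)
        # Non-Optional annotation -> NOT NULL without an explicit nullable=False.
        name: Mapped[str] = mapped_column(String(255))
        # Optional[...] annotation -> nullable column.
        avatar: Mapped[Optional[str]] = mapped_column(String(255))
        created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())

Where an explicit `nullable=` argument is still passed, it simply overrides the inferred default.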

@ -1,6 +1,7 @@
import enum
from sqlalchemy import func
from sqlalchemy.orm import mapped_column
from .base import Base
from .engine import db
@ -21,9 +22,9 @@ class APIBasedExtension(Base):
db.Index("api_based_extension_tenant_idx", "tenant_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
name = db.Column(db.String(255), nullable=False)
api_endpoint = db.Column(db.String(255), nullable=False)
api_key = db.Column(db.Text, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
name = mapped_column(db.String(255), nullable=False)
api_endpoint = mapped_column(db.String(255), nullable=False)
api_key = mapped_column(db.Text, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())

@ -8,12 +8,13 @@ import os
import pickle
import re
import time
from datetime import datetime
from json import JSONDecodeError
from typing import Any, cast
from typing import Any, Optional, cast
from sqlalchemy import func
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import Mapped, mapped_column
from configs import dify_config
from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource
@ -45,24 +46,24 @@ class Dataset(Base):
INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None]
PROVIDER_LIST = ["vendor", "external", None]
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.Text, nullable=True)
provider = db.Column(db.String(255), nullable=False, server_default=db.text("'vendor'::character varying"))
permission = db.Column(db.String(255), nullable=False, server_default=db.text("'only_me'::character varying"))
data_source_type = db.Column(db.String(255))
indexing_technique = db.Column(db.String(255), nullable=True)
index_struct = db.Column(db.Text, nullable=True)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
embedding_model = db.Column(db.String(255), nullable=True)
embedding_model_provider = db.Column(db.String(255), nullable=True)
collection_binding_id = db.Column(StringUUID, nullable=True)
retrieval_model = db.Column(JSONB, nullable=True)
built_in_field_enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID)
name: Mapped[str] = mapped_column(db.String(255))
description = mapped_column(db.Text, nullable=True)
provider: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'vendor'::character varying"))
permission: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'only_me'::character varying"))
data_source_type = mapped_column(db.String(255))
indexing_technique: Mapped[Optional[str]] = mapped_column(db.String(255))
index_struct = mapped_column(db.Text, nullable=True)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
embedding_model = db.Column(db.String(255), nullable=True) # TODO: mapped_column
embedding_model_provider = db.Column(db.String(255), nullable=True) # TODO: mapped_column
collection_binding_id = mapped_column(StringUUID, nullable=True)
retrieval_model = mapped_column(JSONB, nullable=True)
built_in_field_enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
@property
def dataset_keyword_table(self):
@ -255,7 +256,7 @@ class Dataset(Base):
@staticmethod
def gen_collection_name_by_id(dataset_id: str) -> str:
normalized_dataset_id = dataset_id.replace("-", "_")
return f"Vector_index_{normalized_dataset_id}_Node"
return f"{dify_config.VECTOR_INDEX_NAME_PREFIX}_{normalized_dataset_id}_Node"
class DatasetProcessRule(Base):
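The collection name is now assembled from the configurable `VECTOR_INDEX_NAME_PREFIX` setting rather than a hard-coded `Vector_index` literal, so the default configuration keeps the existing naming. A small illustration of the resulting format (the dataset id below is made up):

    def gen_collection_name_by_id(dataset_id: str, prefix: str = "Vector_index") -> str:
        # Same construction as above, with the prefix parameterized for illustration.
        return f"{prefix}_{dataset_id.replace('-', '_')}_Node"

    # gen_collection_name_by_id("1b2c3d4e-aaaa-bbbb-cccc-000000000000")
    # -> "Vector_index_1b2c3d4e_aaaa_bbbb_cccc_000000000000_Node"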
@ -265,12 +266,12 @@ class DatasetProcessRule(Base):
db.Index("dataset_process_rule_dataset_id_idx", "dataset_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
dataset_id = db.Column(StringUUID, nullable=False)
mode = db.Column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying"))
rules = db.Column(db.Text, nullable=True)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
dataset_id = mapped_column(StringUUID, nullable=False)
mode = mapped_column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying"))
rules = mapped_column(db.Text, nullable=True)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
MODES = ["automatic", "custom", "hierarchical"]
PRE_PROCESSING_RULES = ["remove_stopwords", "remove_extra_spaces", "remove_urls_emails"]
@ -309,62 +310,64 @@ class Document(Base):
)
# initial fields
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
position = db.Column(db.Integer, nullable=False)
data_source_type = db.Column(db.String(255), nullable=False)
data_source_info = db.Column(db.Text, nullable=True)
dataset_process_rule_id = db.Column(StringUUID, nullable=True)
batch = db.Column(db.String(255), nullable=False)
name = db.Column(db.String(255), nullable=False)
created_from = db.Column(db.String(255), nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_api_request_id = db.Column(StringUUID, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
position = mapped_column(db.Integer, nullable=False)
data_source_type = mapped_column(db.String(255), nullable=False)
data_source_info = mapped_column(db.Text, nullable=True)
dataset_process_rule_id = mapped_column(StringUUID, nullable=True)
batch = mapped_column(db.String(255), nullable=False)
name = mapped_column(db.String(255), nullable=False)
created_from = mapped_column(db.String(255), nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_api_request_id = mapped_column(StringUUID, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
# start processing
processing_started_at = db.Column(db.DateTime, nullable=True)
processing_started_at = mapped_column(db.DateTime, nullable=True)
# parsing
file_id = db.Column(db.Text, nullable=True)
word_count = db.Column(db.Integer, nullable=True)
parsing_completed_at = db.Column(db.DateTime, nullable=True)
file_id = mapped_column(db.Text, nullable=True)
word_count = mapped_column(db.Integer, nullable=True)
parsing_completed_at = mapped_column(db.DateTime, nullable=True)
# cleaning
cleaning_completed_at = db.Column(db.DateTime, nullable=True)
cleaning_completed_at = mapped_column(db.DateTime, nullable=True)
# split
splitting_completed_at = db.Column(db.DateTime, nullable=True)
splitting_completed_at = mapped_column(db.DateTime, nullable=True)
# indexing
tokens = db.Column(db.Integer, nullable=True)
indexing_latency = db.Column(db.Float, nullable=True)
completed_at = db.Column(db.DateTime, nullable=True)
tokens = mapped_column(db.Integer, nullable=True)
indexing_latency = mapped_column(db.Float, nullable=True)
completed_at = mapped_column(db.DateTime, nullable=True)
# pause
is_paused = db.Column(db.Boolean, nullable=True, server_default=db.text("false"))
paused_by = db.Column(StringUUID, nullable=True)
paused_at = db.Column(db.DateTime, nullable=True)
is_paused = mapped_column(db.Boolean, nullable=True, server_default=db.text("false"))
paused_by = mapped_column(StringUUID, nullable=True)
paused_at = mapped_column(db.DateTime, nullable=True)
# error
error = db.Column(db.Text, nullable=True)
stopped_at = db.Column(db.DateTime, nullable=True)
error = mapped_column(db.Text, nullable=True)
stopped_at = mapped_column(db.DateTime, nullable=True)
# basic fields
indexing_status = db.Column(db.String(255), nullable=False, server_default=db.text("'waiting'::character varying"))
enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
disabled_at = db.Column(db.DateTime, nullable=True)
disabled_by = db.Column(StringUUID, nullable=True)
archived = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
archived_reason = db.Column(db.String(255), nullable=True)
archived_by = db.Column(StringUUID, nullable=True)
archived_at = db.Column(db.DateTime, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
doc_type = db.Column(db.String(40), nullable=True)
doc_metadata = db.Column(JSONB, nullable=True)
doc_form = db.Column(db.String(255), nullable=False, server_default=db.text("'text_model'::character varying"))
doc_language = db.Column(db.String(255), nullable=True)
indexing_status = mapped_column(
db.String(255), nullable=False, server_default=db.text("'waiting'::character varying")
)
enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
disabled_at = mapped_column(db.DateTime, nullable=True)
disabled_by = mapped_column(StringUUID, nullable=True)
archived = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
archived_reason = mapped_column(db.String(255), nullable=True)
archived_by = mapped_column(StringUUID, nullable=True)
archived_at = mapped_column(db.DateTime, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
doc_type = mapped_column(db.String(40), nullable=True)
doc_metadata = mapped_column(JSONB, nullable=True)
doc_form = mapped_column(db.String(255), nullable=False, server_default=db.text("'text_model'::character varying"))
doc_language = mapped_column(db.String(255), nullable=True)
DATA_SOURCES = ["upload_file", "notion_import", "website_crawl"]
@ -652,35 +655,35 @@ class DocumentSegment(Base):
)
# initial fields
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
document_id = db.Column(StringUUID, nullable=False)
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
document_id = mapped_column(StringUUID, nullable=False)
position: Mapped[int]
content = db.Column(db.Text, nullable=False)
answer = db.Column(db.Text, nullable=True)
word_count = db.Column(db.Integer, nullable=False)
tokens = db.Column(db.Integer, nullable=False)
content = mapped_column(db.Text, nullable=False)
answer = mapped_column(db.Text, nullable=True)
word_count: Mapped[int]
tokens: Mapped[int]
# indexing fields
keywords = db.Column(db.JSON, nullable=True)
index_node_id = db.Column(db.String(255), nullable=True)
index_node_hash = db.Column(db.String(255), nullable=True)
keywords = mapped_column(db.JSON, nullable=True)
index_node_id = mapped_column(db.String(255), nullable=True)
index_node_hash = mapped_column(db.String(255), nullable=True)
# basic fields
hit_count = db.Column(db.Integer, nullable=False, default=0)
enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
disabled_at = db.Column(db.DateTime, nullable=True)
disabled_by = db.Column(StringUUID, nullable=True)
status = db.Column(db.String(255), nullable=False, server_default=db.text("'waiting'::character varying"))
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
indexing_at = db.Column(db.DateTime, nullable=True)
completed_at = db.Column(db.DateTime, nullable=True)
error = db.Column(db.Text, nullable=True)
stopped_at = db.Column(db.DateTime, nullable=True)
hit_count = mapped_column(db.Integer, nullable=False, default=0)
enabled = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
disabled_at = mapped_column(db.DateTime, nullable=True)
disabled_by = mapped_column(StringUUID, nullable=True)
status: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'waiting'::character varying"))
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
indexing_at = mapped_column(db.DateTime, nullable=True)
completed_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime, nullable=True)
error = mapped_column(db.Text, nullable=True)
stopped_at = mapped_column(db.DateTime, nullable=True)
@property
def dataset(self):
@ -800,25 +803,25 @@ class ChildChunk(Base):
)
# initial fields
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
document_id = db.Column(StringUUID, nullable=False)
segment_id = db.Column(StringUUID, nullable=False)
position = db.Column(db.Integer, nullable=False)
content = db.Column(db.Text, nullable=False)
word_count = db.Column(db.Integer, nullable=False)
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
document_id = mapped_column(StringUUID, nullable=False)
segment_id = mapped_column(StringUUID, nullable=False)
position = mapped_column(db.Integer, nullable=False)
content = mapped_column(db.Text, nullable=False)
word_count = mapped_column(db.Integer, nullable=False)
# indexing fields
index_node_id = db.Column(db.String(255), nullable=True)
index_node_hash = db.Column(db.String(255), nullable=True)
type = db.Column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying"))
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
indexing_at = db.Column(db.DateTime, nullable=True)
completed_at = db.Column(db.DateTime, nullable=True)
error = db.Column(db.Text, nullable=True)
index_node_id = mapped_column(db.String(255), nullable=True)
index_node_hash = mapped_column(db.String(255), nullable=True)
type = mapped_column(db.String(255), nullable=False, server_default=db.text("'automatic'::character varying"))
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
indexing_at = mapped_column(db.DateTime, nullable=True)
completed_at = mapped_column(db.DateTime, nullable=True)
error = mapped_column(db.Text, nullable=True)
@property
def dataset(self):
@ -840,10 +843,10 @@ class AppDatasetJoin(Base):
db.Index("app_dataset_join_app_dataset_idx", "dataset_id", "app_id"),
)
id = db.Column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
@property
def app(self):
@ -857,14 +860,14 @@ class DatasetQuery(Base):
db.Index("dataset_query_dataset_id_idx", "dataset_id"),
)
id = db.Column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()"))
dataset_id = db.Column(StringUUID, nullable=False)
content = db.Column(db.Text, nullable=False)
source = db.Column(db.String(255), nullable=False)
source_app_id = db.Column(StringUUID, nullable=True)
created_by_role = db.Column(db.String, nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=db.text("uuid_generate_v4()"))
dataset_id = mapped_column(StringUUID, nullable=False)
content = mapped_column(db.Text, nullable=False)
source = mapped_column(db.String(255), nullable=False)
source_app_id = mapped_column(StringUUID, nullable=True)
created_by_role = mapped_column(db.String, nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
class DatasetKeywordTable(Base):
@ -874,10 +877,10 @@ class DatasetKeywordTable(Base):
db.Index("dataset_keyword_table_dataset_id_idx", "dataset_id"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
dataset_id = db.Column(StringUUID, nullable=False, unique=True)
keyword_table = db.Column(db.Text, nullable=False)
data_source_type = db.Column(
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
dataset_id = mapped_column(StringUUID, nullable=False, unique=True)
keyword_table = mapped_column(db.Text, nullable=False)
data_source_type = mapped_column(
db.String(255), nullable=False, server_default=db.text("'database'::character varying")
)
@ -920,14 +923,14 @@ class Embedding(Base):
db.Index("created_at_idx", "created_at"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
model_name = db.Column(
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
model_name = mapped_column(
db.String(255), nullable=False, server_default=db.text("'text-embedding-ada-002'::character varying")
)
hash = db.Column(db.String(64), nullable=False)
embedding = db.Column(db.LargeBinary, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
provider_name = db.Column(db.String(255), nullable=False, server_default=db.text("''::character varying"))
hash = mapped_column(db.String(64), nullable=False)
embedding = mapped_column(db.LargeBinary, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
provider_name = mapped_column(db.String(255), nullable=False, server_default=db.text("''::character varying"))
def set_embedding(self, embedding_data: list[float]):
self.embedding = pickle.dumps(embedding_data, protocol=pickle.HIGHEST_PROTOCOL)
@ -943,12 +946,12 @@ class DatasetCollectionBinding(Base):
db.Index("provider_model_name_idx", "provider_name", "model_name"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
provider_name = db.Column(db.String(255), nullable=False)
model_name = db.Column(db.String(255), nullable=False)
type = db.Column(db.String(40), server_default=db.text("'dataset'::character varying"), nullable=False)
collection_name = db.Column(db.String(64), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
provider_name = mapped_column(db.String(255), nullable=False)
model_name = mapped_column(db.String(255), nullable=False)
type = mapped_column(db.String(40), server_default=db.text("'dataset'::character varying"), nullable=False)
collection_name = mapped_column(db.String(64), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class TidbAuthBinding(Base):
@ -960,15 +963,15 @@ class TidbAuthBinding(Base):
db.Index("tidb_auth_bindings_created_at_idx", "created_at"),
db.Index("tidb_auth_bindings_status_idx", "status"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=True)
cluster_id = db.Column(db.String(255), nullable=False)
cluster_name = db.Column(db.String(255), nullable=False)
active = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
status = db.Column(db.String(255), nullable=False, server_default=db.text("CREATING"))
account = db.Column(db.String(255), nullable=False)
password = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=True)
cluster_id = mapped_column(db.String(255), nullable=False)
cluster_name = mapped_column(db.String(255), nullable=False)
active = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
status = mapped_column(db.String(255), nullable=False, server_default=db.text("CREATING"))
account = mapped_column(db.String(255), nullable=False)
password = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class Whitelist(Base):
@ -977,10 +980,10 @@ class Whitelist(Base):
db.PrimaryKeyConstraint("id", name="whitelists_pkey"),
db.Index("whitelists_tenant_idx", "tenant_id"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=True)
category = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=True)
category = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class DatasetPermission(Base):
@ -992,12 +995,12 @@ class DatasetPermission(Base):
db.Index("idx_dataset_permissions_tenant_id", "tenant_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"), primary_key=True)
dataset_id = db.Column(StringUUID, nullable=False)
account_id = db.Column(StringUUID, nullable=False)
tenant_id = db.Column(StringUUID, nullable=False)
has_permission = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"), primary_key=True)
dataset_id = mapped_column(StringUUID, nullable=False)
account_id = mapped_column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
has_permission = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class ExternalKnowledgeApis(Base):
@ -1008,15 +1011,15 @@ class ExternalKnowledgeApis(Base):
db.Index("external_knowledge_apis_name_idx", "name"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.String(255), nullable=False)
tenant_id = db.Column(StringUUID, nullable=False)
settings = db.Column(db.Text, nullable=True)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
name = mapped_column(db.String(255), nullable=False)
description = mapped_column(db.String(255), nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
settings = mapped_column(db.Text, nullable=True)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
def to_dict(self):
return {
@ -1063,15 +1066,15 @@ class ExternalKnowledgeBindings(Base):
db.Index("external_knowledge_bindings_external_knowledge_api_idx", "external_knowledge_api_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
external_knowledge_api_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
external_knowledge_id = db.Column(db.Text, nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
external_knowledge_api_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
external_knowledge_id = mapped_column(db.Text, nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class DatasetAutoDisableLog(Base):
@ -1083,12 +1086,12 @@ class DatasetAutoDisableLog(Base):
db.Index("dataset_auto_disable_log_created_atx", "created_at"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
document_id = db.Column(StringUUID, nullable=False)
notified = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
document_id = mapped_column(StringUUID, nullable=False)
notified = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
class RateLimitLog(Base):
@ -1099,11 +1102,11 @@ class RateLimitLog(Base):
db.Index("rate_limit_log_operation_idx", "operation"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
subscription_plan = db.Column(db.String(255), nullable=False)
operation = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
subscription_plan = mapped_column(db.String(255), nullable=False)
operation = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
class DatasetMetadata(Base):
@ -1114,15 +1117,15 @@ class DatasetMetadata(Base):
db.Index("dataset_metadata_dataset_idx", "dataset_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
type = db.Column(db.String(255), nullable=False)
name = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
created_by = db.Column(StringUUID, nullable=False)
updated_by = db.Column(StringUUID, nullable=True)
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
type = mapped_column(db.String(255), nullable=False)
name = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
created_by = mapped_column(StringUUID, nullable=False)
updated_by = mapped_column(StringUUID, nullable=True)
class DatasetMetadataBinding(Base):
@ -1135,10 +1138,10 @@ class DatasetMetadataBinding(Base):
db.Index("dataset_metadata_binding_document_idx", "document_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
metadata_id = db.Column(StringUUID, nullable=False)
document_id = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_by = db.Column(StringUUID, nullable=False)
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
metadata_id = mapped_column(StringUUID, nullable=False)
document_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_by = mapped_column(StringUUID, nullable=False)

@ -40,8 +40,8 @@ class DifySetup(Base):
__tablename__ = "dify_setups"
__table_args__ = (db.PrimaryKeyConstraint("version", name="dify_setup_pkey"),)
version = db.Column(db.String(255), nullable=False)
setup_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
version = mapped_column(db.String(255), nullable=False)
setup_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class AppMode(StrEnum):
@ -74,31 +74,31 @@ class App(Base):
__tablename__ = "apps"
__table_args__ = (db.PrimaryKeyConstraint("id", name="app_pkey"), db.Index("app_tenant_id_idx", "tenant_id"))
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.Text, nullable=False, server_default=db.text("''::character varying"))
mode: Mapped[str] = mapped_column(db.String(255), nullable=False)
icon_type = db.Column(db.String(255), nullable=True) # image, emoji
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID)
name: Mapped[str] = mapped_column(db.String(255))
description: Mapped[str] = mapped_column(db.Text, server_default=db.text("''::character varying"))
mode: Mapped[str] = mapped_column(db.String(255))
icon_type: Mapped[Optional[str]] = mapped_column(db.String(255)) # image, emoji
icon = db.Column(db.String(255))
icon_background = db.Column(db.String(255))
app_model_config_id = db.Column(StringUUID, nullable=True)
workflow_id = db.Column(StringUUID, nullable=True)
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
enable_site = db.Column(db.Boolean, nullable=False)
enable_api = db.Column(db.Boolean, nullable=False)
api_rpm = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
api_rph = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
is_demo = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
is_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
is_universal = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
tracing = db.Column(db.Text, nullable=True)
max_active_requests: Mapped[Optional[int]] = mapped_column(nullable=True)
created_by = db.Column(StringUUID, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
icon_background: Mapped[Optional[str]] = mapped_column(db.String(255))
app_model_config_id = mapped_column(StringUUID, nullable=True)
workflow_id = mapped_column(StringUUID, nullable=True)
status: Mapped[str] = mapped_column(db.String(255), server_default=db.text("'normal'::character varying"))
enable_site: Mapped[bool] = mapped_column(db.Boolean)
enable_api: Mapped[bool] = mapped_column(db.Boolean)
api_rpm: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))
api_rph: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))
is_demo: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false"))
is_public: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false"))
is_universal: Mapped[bool] = mapped_column(db.Boolean, server_default=db.text("false"))
tracing = mapped_column(db.Text, nullable=True)
max_active_requests: Mapped[Optional[int]]
created_by = mapped_column(StringUUID, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
use_icon_as_answer_icon: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
@property
def desc_or_prompt(self):
@ -307,34 +307,34 @@ class AppModelConfig(Base):
__tablename__ = "app_model_configs"
__table_args__ = (db.PrimaryKeyConstraint("id", name="app_model_config_pkey"), db.Index("app_app_id_idx", "app_id"))
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
provider = db.Column(db.String(255), nullable=True)
model_id = db.Column(db.String(255), nullable=True)
configs = db.Column(db.JSON, nullable=True)
created_by = db.Column(StringUUID, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
opening_statement = db.Column(db.Text)
suggested_questions = db.Column(db.Text)
suggested_questions_after_answer = db.Column(db.Text)
speech_to_text = db.Column(db.Text)
text_to_speech = db.Column(db.Text)
more_like_this = db.Column(db.Text)
model = db.Column(db.Text)
user_input_form = db.Column(db.Text)
dataset_query_variable = db.Column(db.String(255))
pre_prompt = db.Column(db.Text)
agent_mode = db.Column(db.Text)
sensitive_word_avoidance = db.Column(db.Text)
retriever_resource = db.Column(db.Text)
prompt_type = db.Column(db.String(255), nullable=False, server_default=db.text("'simple'::character varying"))
chat_prompt_config = db.Column(db.Text)
completion_prompt_config = db.Column(db.Text)
dataset_configs = db.Column(db.Text)
external_data_tools = db.Column(db.Text)
file_upload = db.Column(db.Text)
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
provider = mapped_column(db.String(255), nullable=True)
model_id = mapped_column(db.String(255), nullable=True)
configs = mapped_column(db.JSON, nullable=True)
created_by = mapped_column(StringUUID, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
opening_statement = mapped_column(db.Text)
suggested_questions = mapped_column(db.Text)
suggested_questions_after_answer = mapped_column(db.Text)
speech_to_text = mapped_column(db.Text)
text_to_speech = mapped_column(db.Text)
more_like_this = mapped_column(db.Text)
model = mapped_column(db.Text)
user_input_form = mapped_column(db.Text)
dataset_query_variable = mapped_column(db.String(255))
pre_prompt = mapped_column(db.Text)
agent_mode = mapped_column(db.Text)
sensitive_word_avoidance = mapped_column(db.Text)
retriever_resource = mapped_column(db.Text)
prompt_type = mapped_column(db.String(255), nullable=False, server_default=db.text("'simple'::character varying"))
chat_prompt_config = mapped_column(db.Text)
completion_prompt_config = mapped_column(db.Text)
dataset_configs = mapped_column(db.Text)
external_data_tools = mapped_column(db.Text)
file_upload = mapped_column(db.Text)
@property
def app(self):
@ -561,19 +561,19 @@ class RecommendedApp(Base):
db.Index("recommended_app_is_listed_idx", "is_listed", "language"),
)
id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
description = db.Column(db.JSON, nullable=False)
copyright = db.Column(db.String(255), nullable=False)
privacy_policy = db.Column(db.String(255), nullable=False)
id = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
description = mapped_column(db.JSON, nullable=False)
copyright = mapped_column(db.String(255), nullable=False)
privacy_policy = mapped_column(db.String(255), nullable=False)
custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="")
category = db.Column(db.String(255), nullable=False)
position = db.Column(db.Integer, nullable=False, default=0)
is_listed = db.Column(db.Boolean, nullable=False, default=True)
install_count = db.Column(db.Integer, nullable=False, default=0)
language = db.Column(db.String(255), nullable=False, server_default=db.text("'en-US'::character varying"))
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
category = mapped_column(db.String(255), nullable=False)
position = mapped_column(db.Integer, nullable=False, default=0)
is_listed = mapped_column(db.Boolean, nullable=False, default=True)
install_count = mapped_column(db.Integer, nullable=False, default=0)
language = mapped_column(db.String(255), nullable=False, server_default=db.text("'en-US'::character varying"))
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def app(self):
@ -590,14 +590,14 @@ class InstalledApp(Base):
db.UniqueConstraint("tenant_id", "app_id", name="unique_tenant_app"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
app_id = db.Column(StringUUID, nullable=False)
app_owner_tenant_id = db.Column(StringUUID, nullable=False)
position = db.Column(db.Integer, nullable=False, default=0)
is_pinned = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
last_used_at = db.Column(db.DateTime, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
app_id = mapped_column(StringUUID, nullable=False)
app_owner_tenant_id = mapped_column(StringUUID, nullable=False)
position = mapped_column(db.Integer, nullable=False, default=0)
is_pinned = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
last_used_at = mapped_column(db.DateTime, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def app(self):
@ -618,42 +618,42 @@ class Conversation(Base):
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
app_model_config_id = db.Column(StringUUID, nullable=True)
model_provider = db.Column(db.String(255), nullable=True)
override_model_configs = db.Column(db.Text)
model_id = db.Column(db.String(255), nullable=True)
app_id = mapped_column(StringUUID, nullable=False)
app_model_config_id = mapped_column(StringUUID, nullable=True)
model_provider = mapped_column(db.String(255), nullable=True)
override_model_configs = mapped_column(db.Text)
model_id = mapped_column(db.String(255), nullable=True)
mode: Mapped[str] = mapped_column(db.String(255))
name = db.Column(db.String(255), nullable=False)
summary = db.Column(db.Text)
name = mapped_column(db.String(255), nullable=False)
summary = mapped_column(db.Text)
_inputs: Mapped[dict] = mapped_column("inputs", db.JSON)
introduction = db.Column(db.Text)
system_instruction = db.Column(db.Text)
system_instruction_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
status = db.Column(db.String(255), nullable=False)
introduction = mapped_column(db.Text)
system_instruction = mapped_column(db.Text)
system_instruction_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
status = mapped_column(db.String(255), nullable=False)
# The `invoke_from` records how the conversation is created.
#
# Its value corresponds to the members of `InvokeFrom`.
# (api/core/app/entities/app_invoke_entities.py)
invoke_from = db.Column(db.String(255), nullable=True)
invoke_from = mapped_column(db.String(255), nullable=True)
# ref: ConversationSource.
from_source = db.Column(db.String(255), nullable=False)
from_end_user_id = db.Column(StringUUID)
from_account_id = db.Column(StringUUID)
read_at = db.Column(db.DateTime)
read_account_id = db.Column(StringUUID)
from_source = mapped_column(db.String(255), nullable=False)
from_end_user_id = mapped_column(StringUUID)
from_account_id = mapped_column(StringUUID)
read_at = mapped_column(db.DateTime)
read_account_id = mapped_column(StringUUID)
dialogue_count: Mapped[int] = mapped_column(default=0)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
messages = db.relationship("Message", backref="conversation", lazy="select", passive_deletes="all")
message_annotations = db.relationship(
"MessageAnnotation", backref="conversation", lazy="select", passive_deletes="all"
)
is_deleted = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
is_deleted = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
@property
def inputs(self):
@ -896,36 +896,36 @@ class Message(Base):
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
model_provider = db.Column(db.String(255), nullable=True)
model_id = db.Column(db.String(255), nullable=True)
override_model_configs = db.Column(db.Text)
conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=False)
app_id = mapped_column(StringUUID, nullable=False)
model_provider = mapped_column(db.String(255), nullable=True)
model_id = mapped_column(db.String(255), nullable=True)
override_model_configs = mapped_column(db.Text)
conversation_id = mapped_column(StringUUID, db.ForeignKey("conversations.id"), nullable=False)
_inputs: Mapped[dict] = mapped_column("inputs", db.JSON)
query: Mapped[str] = db.Column(db.Text, nullable=False)
message = db.Column(db.JSON, nullable=False)
message_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
message_unit_price = db.Column(db.Numeric(10, 4), nullable=False)
message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
answer: Mapped[str] = db.Column(db.Text, nullable=False)
answer_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False)
answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
parent_message_id = db.Column(StringUUID, nullable=True)
provider_response_latency = db.Column(db.Float, nullable=False, server_default=db.text("0"))
total_price = db.Column(db.Numeric(10, 7))
currency = db.Column(db.String(255), nullable=False)
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
error = db.Column(db.Text)
message_metadata = db.Column(db.Text)
invoke_from: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True)
from_source = db.Column(db.String(255), nullable=False)
from_end_user_id: Mapped[Optional[str]] = db.Column(StringUUID)
from_account_id: Mapped[Optional[str]] = db.Column(StringUUID)
query: Mapped[str] = mapped_column(db.Text, nullable=False)
message = mapped_column(db.JSON, nullable=False)
message_tokens: Mapped[int] = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
message_unit_price = mapped_column(db.Numeric(10, 4), nullable=False)
message_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
answer: Mapped[str] = db.Column(db.Text, nullable=False) # TODO make it mapped_column
answer_tokens: Mapped[int] = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
answer_unit_price = mapped_column(db.Numeric(10, 4), nullable=False)
answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
parent_message_id = mapped_column(StringUUID, nullable=True)
provider_response_latency = mapped_column(db.Float, nullable=False, server_default=db.text("0"))
total_price = mapped_column(db.Numeric(10, 7))
currency = mapped_column(db.String(255), nullable=False)
status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
error = mapped_column(db.Text)
message_metadata = mapped_column(db.Text)
invoke_from: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True)
from_source = mapped_column(db.String(255), nullable=False)
from_end_user_id: Mapped[Optional[str]] = mapped_column(StringUUID)
from_account_id: Mapped[Optional[str]] = mapped_column(StringUUID)
created_at: Mapped[datetime] = mapped_column(db.DateTime, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
agent_based = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
workflow_run_id: Mapped[str] = db.Column(StringUUID)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
agent_based = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
workflow_run_id: Mapped[Optional[str]] = mapped_column(StringUUID)
@property
def inputs(self):
@ -1239,17 +1239,17 @@ class MessageFeedback(Base):
db.Index("message_feedback_conversation_idx", "conversation_id", "from_source", "rating"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
conversation_id = db.Column(StringUUID, nullable=False)
message_id = db.Column(StringUUID, nullable=False)
rating = db.Column(db.String(255), nullable=False)
content = db.Column(db.Text)
from_source = db.Column(db.String(255), nullable=False)
from_end_user_id = db.Column(StringUUID)
from_account_id = db.Column(StringUUID)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
conversation_id = mapped_column(StringUUID, nullable=False)
message_id = mapped_column(StringUUID, nullable=False)
rating = mapped_column(db.String(255), nullable=False)
content = mapped_column(db.Text)
from_source = mapped_column(db.String(255), nullable=False)
from_end_user_id = mapped_column(StringUUID)
from_account_id = mapped_column(StringUUID)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def from_account(self):
@ -1301,16 +1301,16 @@ class MessageFile(Base):
self.created_by_role = created_by_role.value
self.created_by = created_by
id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
message_id: Mapped[str] = db.Column(StringUUID, nullable=False)
type: Mapped[str] = db.Column(db.String(255), nullable=False)
transfer_method: Mapped[str] = db.Column(db.String(255), nullable=False)
url: Mapped[Optional[str]] = db.Column(db.Text, nullable=True)
belongs_to: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True)
upload_file_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True)
created_by_role: Mapped[str] = db.Column(db.String(255), nullable=False)
created_by: Mapped[str] = db.Column(StringUUID, nullable=False)
created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
message_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
type: Mapped[str] = mapped_column(db.String(255), nullable=False)
transfer_method: Mapped[str] = mapped_column(db.String(255), nullable=False)
url: Mapped[Optional[str]] = mapped_column(db.Text, nullable=True)
belongs_to: Mapped[Optional[str]] = mapped_column(db.String(255), nullable=True)
upload_file_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True)
created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class MessageAnnotation(Base):
@ -1322,16 +1322,16 @@ class MessageAnnotation(Base):
db.Index("message_annotation_message_idx", "message_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=True)
message_id = db.Column(StringUUID, nullable=True)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id: Mapped[str] = mapped_column(StringUUID)
conversation_id: Mapped[Optional[str]] = mapped_column(StringUUID, db.ForeignKey("conversations.id"))
message_id: Mapped[Optional[str]] = mapped_column(StringUUID)
question = db.Column(db.Text, nullable=True)
content = db.Column(db.Text, nullable=False)
hit_count = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
account_id = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
content = mapped_column(db.Text, nullable=False)
hit_count = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
account_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def account(self):
@ -1354,17 +1354,17 @@ class AppAnnotationHitHistory(Base):
db.Index("app_annotation_hit_histories_message_idx", "message_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
annotation_id: Mapped[str] = db.Column(StringUUID, nullable=False)
source = db.Column(db.Text, nullable=False)
question = db.Column(db.Text, nullable=False)
account_id = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
score = db.Column(Float, nullable=False, server_default=db.text("0"))
message_id = db.Column(StringUUID, nullable=False)
annotation_question = db.Column(db.Text, nullable=False)
annotation_content = db.Column(db.Text, nullable=False)
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
annotation_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
source = mapped_column(db.Text, nullable=False)
question = mapped_column(db.Text, nullable=False)
account_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
score = mapped_column(Float, nullable=False, server_default=db.text("0"))
message_id = mapped_column(StringUUID, nullable=False)
annotation_question = mapped_column(db.Text, nullable=False)
annotation_content = mapped_column(db.Text, nullable=False)
@property
def account(self):
@ -1389,14 +1389,14 @@ class AppAnnotationSetting(Base):
db.Index("app_annotation_settings_app_idx", "app_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
score_threshold = db.Column(Float, nullable=False, server_default=db.text("0"))
collection_binding_id = db.Column(StringUUID, nullable=False)
created_user_id = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_user_id = db.Column(StringUUID, nullable=False)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
score_threshold = mapped_column(Float, nullable=False, server_default=db.text("0"))
collection_binding_id = mapped_column(StringUUID, nullable=False)
created_user_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_user_id = mapped_column(StringUUID, nullable=False)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def collection_binding_detail(self):
@ -1417,14 +1417,14 @@ class OperationLog(Base):
db.Index("operation_log_account_action_idx", "tenant_id", "account_id", "action"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
account_id = db.Column(StringUUID, nullable=False)
action = db.Column(db.String(255), nullable=False)
content = db.Column(db.JSON)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_ip = db.Column(db.String(255), nullable=False)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
account_id = mapped_column(StringUUID, nullable=False)
action = mapped_column(db.String(255), nullable=False)
content = mapped_column(db.JSON)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_ip = mapped_column(db.String(255), nullable=False)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class EndUser(Base, UserMixin):
@ -1435,16 +1435,16 @@ class EndUser(Base, UserMixin):
db.Index("end_user_tenant_session_id_idx", "tenant_id", "session_id", "type"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
app_id = db.Column(StringUUID, nullable=True)
type = db.Column(db.String(255), nullable=False)
external_user_id = db.Column(db.String(255), nullable=True)
name = db.Column(db.String(255))
is_anonymous = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
app_id = mapped_column(StringUUID, nullable=True)
type = mapped_column(db.String(255), nullable=False)
external_user_id = mapped_column(db.String(255), nullable=True)
name = mapped_column(db.String(255))
is_anonymous = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
session_id: Mapped[str] = mapped_column()
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class AppMCPServer(Base):
@ -1454,17 +1454,17 @@ class AppMCPServer(Base):
db.UniqueConstraint("tenant_id", "app_id", name="unique_app_mcp_server_tenant_app_id"),
db.UniqueConstraint("server_code", name="unique_app_mcp_server_server_code"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
app_id = db.Column(StringUUID, nullable=False)
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.String(255), nullable=False)
server_code = db.Column(db.String(255), nullable=False)
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
parameters = db.Column(db.Text, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
app_id = mapped_column(StringUUID, nullable=False)
name = mapped_column(db.String(255), nullable=False)
description = mapped_column(db.String(255), nullable=False)
server_code = mapped_column(db.String(255), nullable=False)
status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
parameters = mapped_column(db.Text, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@staticmethod
def generate_server_code(n):
@ -1488,30 +1488,30 @@ class Site(Base):
db.Index("site_code_idx", "code", "status"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
title = db.Column(db.String(255), nullable=False)
icon_type = db.Column(db.String(255), nullable=True)
icon = db.Column(db.String(255))
icon_background = db.Column(db.String(255))
description = db.Column(db.Text)
default_language = db.Column(db.String(255), nullable=False)
chat_color_theme = db.Column(db.String(255))
chat_color_theme_inverted = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
copyright = db.Column(db.String(255))
privacy_policy = db.Column(db.String(255))
show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
title = mapped_column(db.String(255), nullable=False)
icon_type = mapped_column(db.String(255), nullable=True)
icon = mapped_column(db.String(255))
icon_background = mapped_column(db.String(255))
description = mapped_column(db.Text)
default_language = mapped_column(db.String(255), nullable=False)
chat_color_theme = mapped_column(db.String(255))
chat_color_theme_inverted = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
copyright = mapped_column(db.String(255))
privacy_policy = mapped_column(db.String(255))
show_workflow_steps = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
use_icon_as_answer_icon = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
_custom_disclaimer: Mapped[str] = mapped_column("custom_disclaimer", sa.TEXT, default="")
customize_domain = db.Column(db.String(255))
customize_token_strategy = db.Column(db.String(255), nullable=False)
prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
created_by = db.Column(StringUUID, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = db.Column(StringUUID, nullable=True)
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
code = db.Column(db.String(255))
customize_domain = mapped_column(db.String(255))
customize_token_strategy = mapped_column(db.String(255), nullable=False)
prompt_public = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
status = mapped_column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
created_by = mapped_column(StringUUID, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
code = mapped_column(db.String(255))
@property
def custom_disclaimer(self):
@ -1546,13 +1546,13 @@ class ApiToken(Base):
db.Index("api_token_tenant_idx", "tenant_id", "type"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=True)
tenant_id = db.Column(StringUUID, nullable=True)
type = db.Column(db.String(16), nullable=False)
token = db.Column(db.String(255), nullable=False)
last_used_at = db.Column(db.DateTime, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=True)
tenant_id = mapped_column(StringUUID, nullable=True)
type = mapped_column(db.String(16), nullable=False)
token = mapped_column(db.String(255), nullable=False)
last_used_at = mapped_column(db.DateTime, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@staticmethod
def generate_api_key(prefix, n):
@ -1570,23 +1570,23 @@ class UploadFile(Base):
db.Index("upload_file_tenant_idx", "tenant_id"),
)
id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
storage_type: Mapped[str] = db.Column(db.String(255), nullable=False)
key: Mapped[str] = db.Column(db.String(255), nullable=False)
name: Mapped[str] = db.Column(db.String(255), nullable=False)
size: Mapped[int] = db.Column(db.Integer, nullable=False)
extension: Mapped[str] = db.Column(db.String(255), nullable=False)
mime_type: Mapped[str] = db.Column(db.String(255), nullable=True)
created_by_role: Mapped[str] = db.Column(
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
storage_type: Mapped[str] = mapped_column(db.String(255), nullable=False)
key: Mapped[str] = mapped_column(db.String(255), nullable=False)
name: Mapped[str] = mapped_column(db.String(255), nullable=False)
size: Mapped[int] = mapped_column(db.Integer, nullable=False)
extension: Mapped[str] = mapped_column(db.String(255), nullable=False)
mime_type: Mapped[str] = mapped_column(db.String(255), nullable=True)
created_by_role: Mapped[str] = mapped_column(
db.String(255), nullable=False, server_default=db.text("'account'::character varying")
)
created_by: Mapped[str] = db.Column(StringUUID, nullable=False)
created_at: Mapped[datetime] = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
used: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True)
used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True)
hash: Mapped[str | None] = db.Column(db.String(255), nullable=True)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
used: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
used_at: Mapped[datetime | None] = mapped_column(db.DateTime, nullable=True)
hash: Mapped[str | None] = mapped_column(db.String(255), nullable=True)
source_url: Mapped[str] = mapped_column(sa.TEXT, default="")
def __init__(
@ -1632,14 +1632,14 @@ class ApiRequest(Base):
db.Index("api_request_token_idx", "tenant_id", "api_token_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
api_token_id = db.Column(StringUUID, nullable=False)
path = db.Column(db.String(255), nullable=False)
request = db.Column(db.Text, nullable=True)
response = db.Column(db.Text, nullable=True)
ip = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
api_token_id = mapped_column(StringUUID, nullable=False)
path = mapped_column(db.String(255), nullable=False)
request = mapped_column(db.Text, nullable=True)
response = mapped_column(db.Text, nullable=True)
ip = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class MessageChain(Base):
@ -1649,12 +1649,12 @@ class MessageChain(Base):
db.Index("message_chain_message_id_idx", "message_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = db.Column(StringUUID, nullable=False)
type = db.Column(db.String(255), nullable=False)
input = db.Column(db.Text, nullable=True)
output = db.Column(db.Text, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = mapped_column(StringUUID, nullable=False)
type = mapped_column(db.String(255), nullable=False)
input = mapped_column(db.Text, nullable=True)
output = mapped_column(db.Text, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
class MessageAgentThought(Base):
@ -1665,34 +1665,34 @@ class MessageAgentThought(Base):
db.Index("message_agent_thought_message_chain_id_idx", "message_chain_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = db.Column(StringUUID, nullable=False)
message_chain_id = db.Column(StringUUID, nullable=True)
position = db.Column(db.Integer, nullable=False)
thought = db.Column(db.Text, nullable=True)
tool = db.Column(db.Text, nullable=True)
tool_labels_str = db.Column(db.Text, nullable=False, server_default=db.text("'{}'::text"))
tool_meta_str = db.Column(db.Text, nullable=False, server_default=db.text("'{}'::text"))
tool_input = db.Column(db.Text, nullable=True)
observation = db.Column(db.Text, nullable=True)
# plugin_id = db.Column(StringUUID, nullable=True) ## for future design
tool_process_data = db.Column(db.Text, nullable=True)
message = db.Column(db.Text, nullable=True)
message_token = db.Column(db.Integer, nullable=True)
message_unit_price = db.Column(db.Numeric, nullable=True)
message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
message_files = db.Column(db.Text, nullable=True)
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = mapped_column(StringUUID, nullable=False)
message_chain_id = mapped_column(StringUUID, nullable=True)
position = mapped_column(db.Integer, nullable=False)
thought = mapped_column(db.Text, nullable=True)
tool = mapped_column(db.Text, nullable=True)
tool_labels_str = mapped_column(db.Text, nullable=False, server_default=db.text("'{}'::text"))
tool_meta_str = mapped_column(db.Text, nullable=False, server_default=db.text("'{}'::text"))
tool_input = mapped_column(db.Text, nullable=True)
observation = mapped_column(db.Text, nullable=True)
# plugin_id = mapped_column(StringUUID, nullable=True) ## for future design
tool_process_data = mapped_column(db.Text, nullable=True)
message = mapped_column(db.Text, nullable=True)
message_token = mapped_column(db.Integer, nullable=True)
message_unit_price = mapped_column(db.Numeric, nullable=True)
message_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
message_files = mapped_column(db.Text, nullable=True)
answer = db.Column(db.Text, nullable=True)
answer_token = db.Column(db.Integer, nullable=True)
answer_unit_price = db.Column(db.Numeric, nullable=True)
answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
tokens = db.Column(db.Integer, nullable=True)
total_price = db.Column(db.Numeric, nullable=True)
currency = db.Column(db.String, nullable=True)
latency = db.Column(db.Float, nullable=True)
created_by_role = db.Column(db.String, nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
answer_token = mapped_column(db.Integer, nullable=True)
answer_unit_price = mapped_column(db.Numeric, nullable=True)
answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
tokens = mapped_column(db.Integer, nullable=True)
total_price = mapped_column(db.Numeric, nullable=True)
currency = mapped_column(db.String, nullable=True)
latency = mapped_column(db.Float, nullable=True)
created_by_role = mapped_column(db.String, nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
@property
def files(self) -> list:
@ -1778,24 +1778,24 @@ class DatasetRetrieverResource(Base):
db.Index("dataset_retriever_resource_message_id_idx", "message_id"),
)
id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = db.Column(StringUUID, nullable=False)
position = db.Column(db.Integer, nullable=False)
dataset_id = db.Column(StringUUID, nullable=False)
dataset_name = db.Column(db.Text, nullable=False)
document_id = db.Column(StringUUID, nullable=True)
document_name = db.Column(db.Text, nullable=False)
data_source_type = db.Column(db.Text, nullable=True)
segment_id = db.Column(StringUUID, nullable=True)
score = db.Column(db.Float, nullable=True)
content = db.Column(db.Text, nullable=False)
hit_count = db.Column(db.Integer, nullable=True)
word_count = db.Column(db.Integer, nullable=True)
segment_position = db.Column(db.Integer, nullable=True)
index_node_hash = db.Column(db.Text, nullable=True)
retriever_from = db.Column(db.Text, nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
id = mapped_column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()"))
message_id = mapped_column(StringUUID, nullable=False)
position = mapped_column(db.Integer, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
dataset_name = mapped_column(db.Text, nullable=False)
document_id = mapped_column(StringUUID, nullable=True)
document_name = mapped_column(db.Text, nullable=False)
data_source_type = mapped_column(db.Text, nullable=True)
segment_id = mapped_column(StringUUID, nullable=True)
score = mapped_column(db.Float, nullable=True)
content = mapped_column(db.Text, nullable=False)
hit_count = mapped_column(db.Integer, nullable=True)
word_count = mapped_column(db.Integer, nullable=True)
segment_position = mapped_column(db.Integer, nullable=True)
index_node_hash = mapped_column(db.Text, nullable=True)
retriever_from = mapped_column(db.Text, nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
class Tag(Base):
@ -1808,12 +1808,12 @@ class Tag(Base):
TAG_TYPE_LIST = ["knowledge", "app"]
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=True)
type = db.Column(db.String(16), nullable=False)
name = db.Column(db.String(255), nullable=False)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=True)
type = mapped_column(db.String(16), nullable=False)
name = mapped_column(db.String(255), nullable=False)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class TagBinding(Base):
@ -1824,12 +1824,12 @@ class TagBinding(Base):
db.Index("tag_bind_tag_id_idx", "tag_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=True)
tag_id = db.Column(StringUUID, nullable=True)
target_id = db.Column(StringUUID, nullable=True)
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=True)
tag_id = mapped_column(StringUUID, nullable=True)
target_id = mapped_column(StringUUID, nullable=True)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
class TraceAppConfig(Base):
@ -1839,15 +1839,15 @@ class TraceAppConfig(Base):
db.Index("trace_app_config_app_id_idx", "app_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
tracing_provider = db.Column(db.String(255), nullable=True)
tracing_config = db.Column(db.JSON, nullable=True)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
tracing_provider = mapped_column(db.String(255), nullable=True)
tracing_config = mapped_column(db.JSON, nullable=True)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(
db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
)
is_active = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
is_active = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
@property
def tracing_config_dict(self):

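The model edits above all follow the same SQLAlchemy 2.0-style pattern: db.Column(...) attributes become mapped_column(...), and where a Mapped[...] annotation is present the Python type carries the nullability. A minimal sketch of that pattern, using a hypothetical Example model rather than any table from this change:

# Minimal sketch of the 2.0-style declarative mapping used above.
# "Example" is a hypothetical model, not one of the tables in this diff.
from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, String, func
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Example(Base):
    __tablename__ = "examples"

    # Mapped[str] implies NOT NULL; Mapped[Optional[str]] implies nullable.
    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    name: Mapped[str] = mapped_column(String(255))
    description: Mapped[Optional[str]] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
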
@ -2,6 +2,7 @@ import json
from sqlalchemy import func
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import mapped_column
from models.base import Base
@ -17,14 +18,14 @@ class DataSourceOauthBinding(Base):
db.Index("source_info_idx", "source_info", postgresql_using="gin"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
access_token = db.Column(db.String(255), nullable=False)
provider = db.Column(db.String(255), nullable=False)
source_info = db.Column(JSONB, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
disabled = db.Column(db.Boolean, nullable=True, server_default=db.text("false"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
access_token = mapped_column(db.String(255), nullable=False)
provider = mapped_column(db.String(255), nullable=False)
source_info = mapped_column(JSONB, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
disabled = mapped_column(db.Boolean, nullable=True, server_default=db.text("false"))
class DataSourceApiKeyAuthBinding(Base):
@ -35,14 +36,14 @@ class DataSourceApiKeyAuthBinding(Base):
db.Index("data_source_api_key_auth_binding_provider_idx", "provider"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
category = db.Column(db.String(255), nullable=False)
provider = db.Column(db.String(255), nullable=False)
credentials = db.Column(db.Text, nullable=True) # JSON
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
disabled = db.Column(db.Boolean, nullable=True, server_default=db.text("false"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
category = mapped_column(db.String(255), nullable=False)
provider = mapped_column(db.String(255), nullable=False)
credentials = mapped_column(db.Text, nullable=True) # JSON
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
disabled = mapped_column(db.Boolean, nullable=True, server_default=db.text("false"))
def to_dict(self):
return {

@ -1,7 +1,10 @@
from datetime import UTC, datetime
from datetime import datetime
from typing import Optional
from celery import states # type: ignore
from sqlalchemy.orm import Mapped, mapped_column
from libs.datetime_utils import naive_utc_now
from models.base import Base
from .engine import db
@ -12,23 +15,23 @@ class CeleryTask(Base):
__tablename__ = "celery_taskmeta"
id = db.Column(db.Integer, db.Sequence("task_id_sequence"), primary_key=True, autoincrement=True)
task_id = db.Column(db.String(155), unique=True)
status = db.Column(db.String(50), default=states.PENDING)
result = db.Column(db.PickleType, nullable=True)
date_done = db.Column(
id = mapped_column(db.Integer, db.Sequence("task_id_sequence"), primary_key=True, autoincrement=True)
task_id = mapped_column(db.String(155), unique=True)
status = mapped_column(db.String(50), default=states.PENDING)
result = mapped_column(db.PickleType, nullable=True)
date_done = mapped_column(
db.DateTime,
default=lambda: datetime.now(UTC).replace(tzinfo=None),
onupdate=lambda: datetime.now(UTC).replace(tzinfo=None),
default=lambda: naive_utc_now(),
onupdate=lambda: naive_utc_now(),
nullable=True,
)
traceback = db.Column(db.Text, nullable=True)
name = db.Column(db.String(155), nullable=True)
args = db.Column(db.LargeBinary, nullable=True)
kwargs = db.Column(db.LargeBinary, nullable=True)
worker = db.Column(db.String(155), nullable=True)
retries = db.Column(db.Integer, nullable=True)
queue = db.Column(db.String(155), nullable=True)
traceback = mapped_column(db.Text, nullable=True)
name = mapped_column(db.String(155), nullable=True)
args = mapped_column(db.LargeBinary, nullable=True)
kwargs = mapped_column(db.LargeBinary, nullable=True)
worker = mapped_column(db.String(155), nullable=True)
retries = mapped_column(db.Integer, nullable=True)
queue = mapped_column(db.String(155), nullable=True)
class CeleryTaskSet(Base):
@ -36,7 +39,9 @@ class CeleryTaskSet(Base):
__tablename__ = "celery_tasksetmeta"
id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
taskset_id = db.Column(db.String(155), unique=True)
result = db.Column(db.PickleType, nullable=True)
date_done = db.Column(db.DateTime, default=lambda: datetime.now(UTC).replace(tzinfo=None), nullable=True)
id: Mapped[int] = mapped_column(
db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True
)
taskset_id = mapped_column(db.String(155), unique=True)
result = mapped_column(db.PickleType, nullable=True)
date_done: Mapped[Optional[datetime]] = mapped_column(db.DateTime, default=lambda: naive_utc_now(), nullable=True)

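Several files in this change replace datetime.now(UTC).replace(tzinfo=None) with naive_utc_now() from libs.datetime_utils. The helper's body is not part of this diff; a minimal sketch of what it presumably does, so the substitutions above read as pure refactors:

# Hypothetical sketch of libs/datetime_utils.naive_utc_now(); the real
# implementation is not shown in this change.
from datetime import UTC, datetime


def naive_utc_now() -> datetime:
    # Current UTC time with tzinfo stripped, matching columns that store naive timestamps.
    return datetime.now(UTC).replace(tzinfo=None)
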
@ -93,6 +93,7 @@ class BuiltinToolProvider(Base):
credential_type: Mapped[str] = mapped_column(
db.String(32), nullable=False, server_default=db.text("'api-key'::character varying")
)
expires_at: Mapped[int] = mapped_column(db.BigInteger, nullable=False, server_default=db.text("-1"))
@property
def credentials(self) -> dict:
@ -110,26 +111,26 @@ class ApiToolProvider(Base):
db.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# name of the api provider
name = db.Column(db.String(255), nullable=False, server_default=db.text("'API KEY 1'::character varying"))
name = mapped_column(db.String(255), nullable=False, server_default=db.text("'API KEY 1'::character varying"))
# icon
icon = db.Column(db.String(255), nullable=False)
icon = mapped_column(db.String(255), nullable=False)
# original schema
schema = db.Column(db.Text, nullable=False)
schema_type_str: Mapped[str] = db.Column(db.String(40), nullable=False)
schema = mapped_column(db.Text, nullable=False)
schema_type_str: Mapped[str] = mapped_column(db.String(40), nullable=False)
# who created this tool
user_id = db.Column(StringUUID, nullable=False)
user_id = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = db.Column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
# description of the provider
description = db.Column(db.Text, nullable=False)
description = mapped_column(db.Text, nullable=False)
# json format tools
tools_str = db.Column(db.Text, nullable=False)
tools_str = mapped_column(db.Text, nullable=False)
# json format credentials
credentials_str = db.Column(db.Text, nullable=False)
credentials_str = mapped_column(db.Text, nullable=False)
# privacy policy
privacy_policy = db.Column(db.String(255), nullable=True)
privacy_policy = mapped_column(db.String(255), nullable=True)
# custom_disclaimer
custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="")
@ -254,7 +255,7 @@ class MCPToolProvider(Base):
# name of the mcp provider
name: Mapped[str] = mapped_column(db.String(40), nullable=False)
# server identifier of the mcp provider
server_identifier: Mapped[str] = mapped_column(db.String(24), nullable=False)
server_identifier: Mapped[str] = mapped_column(db.String(64), nullable=False)
# encrypted url of the mcp provider
server_url: Mapped[str] = mapped_column(db.Text, nullable=False)
# hash of server_url for uniqueness check
@ -348,33 +349,33 @@ class ToolModelInvoke(Base):
__tablename__ = "tool_model_invokes"
__table_args__ = (db.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# who invoked this tool
user_id = db.Column(StringUUID, nullable=False)
user_id = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = db.Column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
# provider
provider = db.Column(db.String(255), nullable=False)
provider = mapped_column(db.String(255), nullable=False)
# type
tool_type = db.Column(db.String(40), nullable=False)
tool_type = mapped_column(db.String(40), nullable=False)
# tool name
tool_name = db.Column(db.String(40), nullable=False)
tool_name = mapped_column(db.String(128), nullable=False)
# invoke parameters
model_parameters = db.Column(db.Text, nullable=False)
model_parameters = mapped_column(db.Text, nullable=False)
# prompt messages
prompt_messages = db.Column(db.Text, nullable=False)
prompt_messages = mapped_column(db.Text, nullable=False)
# invoke response
model_response = db.Column(db.Text, nullable=False)
model_response = mapped_column(db.Text, nullable=False)
prompt_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
answer_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False)
answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
provider_response_latency = db.Column(db.Float, nullable=False, server_default=db.text("0"))
total_price = db.Column(db.Numeric(10, 7))
currency = db.Column(db.String(255), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
prompt_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
answer_tokens = mapped_column(db.Integer, nullable=False, server_default=db.text("0"))
answer_unit_price = mapped_column(db.Numeric(10, 4), nullable=False)
answer_price_unit = mapped_column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001"))
provider_response_latency = mapped_column(db.Float, nullable=False, server_default=db.text("0"))
total_price = mapped_column(db.Numeric(10, 7))
currency = mapped_column(db.String(255), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@deprecated
@ -391,18 +392,18 @@ class ToolConversationVariables(Base):
db.Index("conversation_id_idx", "conversation_id"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# conversation user id
user_id = db.Column(StringUUID, nullable=False)
user_id = mapped_column(StringUUID, nullable=False)
# tenant id
tenant_id = db.Column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
# conversation id
conversation_id = db.Column(StringUUID, nullable=False)
conversation_id = mapped_column(StringUUID, nullable=False)
# variables pool
variables_str = db.Column(db.Text, nullable=False)
variables_str = mapped_column(db.Text, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def variables(self) -> Any:
@ -451,26 +452,26 @@ class DeprecatedPublishedAppTool(Base):
db.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# id of the app
app_id = db.Column(StringUUID, ForeignKey("apps.id"), nullable=False)
app_id = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False)
user_id: Mapped[str] = db.Column(StringUUID, nullable=False)
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# who published this tool
description = db.Column(db.Text, nullable=False)
description = mapped_column(db.Text, nullable=False)
# llm_description of the tool, for LLM
llm_description = db.Column(db.Text, nullable=False)
llm_description = mapped_column(db.Text, nullable=False)
# query description, query will be seen as a parameter of the tool,
# to describe this parameter to llm, we need this field
query_description = db.Column(db.Text, nullable=False)
query_description = mapped_column(db.Text, nullable=False)
# query name, the name of the query parameter
query_name = db.Column(db.String(40), nullable=False)
query_name = mapped_column(db.String(40), nullable=False)
# name of the tool provider
tool_name = db.Column(db.String(40), nullable=False)
tool_name = mapped_column(db.String(40), nullable=False)
# author
author = db.Column(db.String(40), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
author = mapped_column(db.String(40), nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
updated_at = mapped_column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
@property
def description_i18n(self) -> I18nObject:

@ -15,12 +15,14 @@ class SavedMessage(Base):
db.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
message_id = db.Column(StringUUID, nullable=False)
created_by_role = db.Column(db.String(255), nullable=False, server_default=db.text("'end_user'::character varying"))
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
message_id = mapped_column(StringUUID, nullable=False)
created_by_role = mapped_column(
db.String(255), nullable=False, server_default=db.text("'end_user'::character varying")
)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
@property
def message(self):
@ -34,9 +36,11 @@ class PinnedConversation(Base):
db.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = db.Column(StringUUID, nullable=False)
id = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
conversation_id: Mapped[str] = mapped_column(StringUUID)
created_by_role = db.Column(db.String(255), nullable=False, server_default=db.text("'end_user'::character varying"))
created_by = db.Column(StringUUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
created_by_role = mapped_column(
db.String(255), nullable=False, server_default=db.text("'end_user'::character varying")
)
created_by = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())

@ -1,7 +1,7 @@
import json
import logging
from collections.abc import Mapping, Sequence
from datetime import UTC, datetime
from datetime import datetime
from enum import Enum, StrEnum
from typing import TYPE_CHECKING, Any, Optional, Union
from uuid import uuid4
@ -16,6 +16,7 @@ from core.variables.variables import FloatVariable, IntegerVariable, StringVaria
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from core.workflow.nodes.enums import NodeType
from factories.variable_factory import TypeMismatchError, build_segment_with_type
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_tenant_id
from ._workflow_exc import NodeNotFoundError, WorkflowDataError
@ -138,7 +139,7 @@ class Workflow(Base):
updated_at: Mapped[datetime] = mapped_column(
db.DateTime,
nullable=False,
default=datetime.now(UTC).replace(tzinfo=None),
default=naive_utc_now(),
server_onupdate=func.current_timestamp(),
)
_environment_variables: Mapped[str] = mapped_column(
@ -179,7 +180,7 @@ class Workflow(Base):
workflow.conversation_variables = conversation_variables or []
workflow.marked_name = marked_name
workflow.marked_comment = marked_comment
workflow.created_at = datetime.now(UTC).replace(tzinfo=None)
workflow.created_at = naive_utc_now()
workflow.updated_at = workflow.created_at
return workflow
@ -907,7 +908,7 @@ _EDITABLE_SYSTEM_VARIABLE = frozenset(["query", "files"])
def _naive_utc_datetime():
return datetime.now(UTC).replace(tzinfo=None)
return naive_utc_now()
class WorkflowDraftVariable(Base):

@ -0,0 +1,49 @@
import time
import click
import app
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
click.echo(click.style("Start check upgradable plugin.", fg="green"))
start_at = time.perf_counter()
now_seconds_of_day = time.time() % 86400 - 30 # we assume the tz is UTC
click.echo(click.style("Now seconds of day: {}".format(now_seconds_of_day), fg="green"))
strategies = (
db.session.query(TenantPluginAutoUpgradeStrategy)
.filter(
TenantPluginAutoUpgradeStrategy.upgrade_time_of_day >= now_seconds_of_day,
TenantPluginAutoUpgradeStrategy.upgrade_time_of_day
< now_seconds_of_day + AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL,
TenantPluginAutoUpgradeStrategy.strategy_setting
!= TenantPluginAutoUpgradeStrategy.StrategySetting.DISABLED,
)
.all()
)
for strategy in strategies:
process_tenant_plugin_autoupgrade_check_task.delay(
strategy.tenant_id,
strategy.strategy_setting,
strategy.upgrade_time_of_day,
strategy.upgrade_mode,
strategy.exclude_plugins,
strategy.include_plugins,
)
end_at = time.perf_counter()
click.echo(
click.style(
"Checked upgradable plugin success latency: {}".format(end_at - start_at),
fg="green",
)
)
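
The filter above selects strategies whose upgrade_time_of_day (seconds since UTC midnight) falls in a window that starts 30 seconds before the current time of day and spans AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL. A small worked sketch of that arithmetic, assuming the beat schedule fires this task roughly every 15 minutes:

# Worked sketch of the dispatch window; the 13:00 firing time is illustrative.
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60  # 15 minutes, as defined above

now_seconds_of_day = 13 * 3600 - 30  # 46770: task fires at 13:00:00 UTC, with 30 s of slack
window_end = now_seconds_of_day + AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL  # 47670

# A strategy set for 13:10:00 UTC (upgrade_time_of_day == 47400) is dispatched
# by this run; one set for 13:30:00 UTC (48600) waits for a later run.
assert now_seconds_of_day <= 47400 < window_end
assert not (now_seconds_of_day <= 48600 < window_end)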

@ -99,9 +99,7 @@ def clean_unused_datasets_task():
index_processor.clean(dataset, None)
# update document
update_params = {Document.enabled: False}
db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params)
db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False})
db.session.commit()
click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green"))
except Exception as e:
@ -176,9 +174,7 @@ def clean_unused_datasets_task():
index_processor.clean(dataset, None)
# update document
update_params = {Document.enabled: False}
db.session.query(Document).filter_by(dataset_id=dataset.id).update(update_params)
db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False})
db.session.commit()
click.echo(
click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")

@ -3,12 +3,12 @@ import time
from collections import defaultdict
import click
from flask import render_template # type: ignore
import app
from configs import dify_config
from extensions.ext_database import db
from extensions.ext_mail import mail
from libs.email_i18n import EmailType, get_email_i18n_service
from models.account import Account, Tenant, TenantAccountJoin
from models.dataset import Dataset, DatasetAutoDisableLog
from services.feature_service import FeatureService
@ -72,14 +72,16 @@ def mail_clean_document_notify_task():
document_count = len(document_ids)
knowledge_details.append(rf"Knowledge base {dataset.name}: {document_count} documents")
if knowledge_details:
html_content = render_template(
"clean_document_job_mail_template-US.html",
userName=account.email,
knowledge_details=knowledge_details,
url=url,
)
mail.send(
to=account.email, subject="Dify Knowledge base auto disable notification", html=html_content
email_service = get_email_i18n_service()
email_service.send_email(
email_type=EmailType.DOCUMENT_CLEAN_NOTIFY,
language_code="en-US",
to=account.email,
template_context={
"userName": account.email,
"knowledge_details": knowledge_details,
"url": url,
},
)
# update notified to True

@ -3,13 +3,12 @@ from datetime import datetime
from urllib.parse import urlparse
import click
from flask import render_template
from redis import Redis
import app
from configs import dify_config
from extensions.ext_database import db
from extensions.ext_mail import mail
from libs.email_i18n import EmailType, get_email_i18n_service
# Create a dedicated Redis connection (using the same configuration as Celery)
celery_broker_url = dify_config.CELERY_BROKER_URL
@ -39,18 +38,20 @@ def queue_monitor_task():
alter_emails = dify_config.QUEUE_MONITOR_ALERT_EMAILS
if alter_emails:
to_list = alter_emails.split(",")
email_service = get_email_i18n_service()
for to in to_list:
try:
current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
html_content = render_template(
"queue_monitor_alert_email_template_en-US.html",
queue_name=queue_name,
queue_length=queue_length,
threshold=threshold,
alert_time=current_time,
)
mail.send(
to=to, subject="Alert: Dataset Queue pending tasks exceeded the limit", html=html_content
email_service.send_email(
email_type=EmailType.QUEUE_MONITOR_ALERT,
language_code="en-US",
to=to,
template_context={
"queue_name": queue_name,
"queue_length": queue_length,
"threshold": threshold,
"alert_time": current_time,
},
)
except Exception as e:
logging.exception(click.style("Exception occurred during sending email", fg="red"))

@ -17,6 +17,7 @@ from constants.languages import language_timezone_mapping, languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from libs.helper import RateLimiter, TokenManager
from libs.passport import PassportService
from libs.password import compare_password, hash_password, valid_password
@ -28,6 +29,7 @@ from models.account import (
Tenant,
TenantAccountJoin,
TenantAccountRole,
TenantPluginAutoUpgradeStrategy,
TenantStatus,
)
from models.model import DifySetup
@ -135,8 +137,8 @@ class AccountService:
available_ta.current = True
db.session.commit()
if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
if naive_utc_now() - account.last_active_at > timedelta(minutes=10):
account.last_active_at = naive_utc_now()
db.session.commit()
return cast(Account, account)
@ -180,7 +182,7 @@ class AccountService:
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
@ -318,7 +320,7 @@ class AccountService:
# If it exists, update the record
account_integrate.open_id = open_id
account_integrate.encrypted_token = "" # todo
account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
account_integrate.updated_at = naive_utc_now()
else:
# If it does not exist, create a new record
account_integrate = AccountIntegrate(
@ -353,7 +355,7 @@ class AccountService:
@staticmethod
def update_login_info(account: Account, *, ip_address: str) -> None:
"""Update last login time and ip"""
account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
account.last_login_at = naive_utc_now()
account.last_login_ip = ip_address
db.session.add(account)
db.session.commit()
@ -827,6 +829,17 @@ class TenantService:
db.session.add(tenant)
db.session.commit()
plugin_upgrade_strategy = TenantPluginAutoUpgradeStrategy(
tenant_id=tenant.id,
strategy_setting=TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
upgrade_time_of_day=0,
upgrade_mode=TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
exclude_plugins=[],
include_plugins=[],
)
db.session.add(plugin_upgrade_strategy)
db.session.commit()
tenant.encrypt_public_key = generate_key_pair(tenant.id)
db.session.commit()
return tenant
@ -997,7 +1010,7 @@ class TenantService:
.filter(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id)
.first()
)
return join.role if join else None
return TenantAccountRole(join.role) if join else None
@staticmethod
def get_tenant_count() -> int:
@ -1066,15 +1079,6 @@ class TenantService:
target_member_join.role = new_role
db.session.commit()
@staticmethod
def dissolve_tenant(tenant: Tenant, operator: Account) -> None:
"""Dissolve tenant"""
if not TenantService.check_member_permission(tenant, operator, operator, "remove"):
raise NoPermissionError("No permission to dissolve tenant.")
db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id).delete()
db.session.delete(tenant)
db.session.commit()
@staticmethod
def get_custom_config(tenant_id: str) -> dict:
tenant = db.get_or_404(Tenant, tenant_id)
@ -1117,7 +1121,7 @@ class RegisterService:
)
account.last_login_ip = ip_address
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)
@ -1158,7 +1162,7 @@ class RegisterService:
is_setup=is_setup,
)
account.status = AccountStatus.ACTIVE.value if not status else status.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
if open_id is not None and provider is not None:
AccountService.link_account_integrate(provider, open_id, account)
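
Most of the account-service edits above replace the repeated datetime.now(UTC).replace(tzinfo=None) expression with naive_utc_now() from libs.datetime_utils. The helper itself is not shown in this diff; a minimal sketch consistent with the call sites it replaces:

from datetime import UTC, datetime

def naive_utc_now() -> datetime:
    # Current UTC time with tzinfo stripped, matching columns that store
    # naive UTC timestamps.
    return datetime.now(UTC).replace(tzinfo=None)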

@ -1,7 +1,6 @@
import json
import logging
from datetime import UTC, datetime
from typing import Optional, cast
from typing import Optional, TypedDict, cast
from flask_login import current_user
from flask_sqlalchemy.pagination import Pagination
@ -17,6 +16,7 @@ from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from events.app_event import app_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode, AppModelConfig, Site
from models.tools import ApiToolProvider
@ -220,22 +220,31 @@ class AppService:
return app
def update_app(self, app: App, args: dict) -> App:
class ArgsDict(TypedDict):
name: str
description: str
icon_type: str
icon: str
icon_background: str
use_icon_as_answer_icon: bool
max_active_requests: int
def update_app(self, app: App, args: ArgsDict) -> App:
"""
Update app
:param app: App instance
:param args: request args
:return: App instance
"""
app.name = args.get("name")
app.description = args.get("description", "")
app.icon_type = args.get("icon_type", "emoji")
app.icon = args.get("icon")
app.icon_background = args.get("icon_background")
app.name = args["name"]
app.description = args["description"]
app.icon_type = args["icon_type"]
app.icon = args["icon"]
app.icon_background = args["icon_background"]
app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
app.max_active_requests = args.get("max_active_requests")
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -249,7 +258,7 @@ class AppService:
"""
app.name = name
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -265,7 +274,7 @@ class AppService:
app.icon = icon
app.icon_background = icon_background
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -282,7 +291,7 @@ class AppService:
app.enable_site = enable_site
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -299,7 +308,7 @@ class AppService:
app.enable_api = enable_api
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
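
With update_app typed against the new ArgsDict, the required fields are enforced by the type checker instead of being papered over with args.get(...) defaults at runtime. An illustrative call, assuming ArgsDict is nested on AppService as the diff suggests (all field values here are made up):

# app is an existing App instance loaded elsewhere
args: AppService.ArgsDict = {
    "name": "Support Bot",
    "description": "Internal helpdesk assistant",
    "icon_type": "emoji",
    "icon": "🤖",
    "icon_background": "#FFEAD5",
    "use_icon_as_answer_icon": False,
    "max_active_requests": 10,
}
updated_app = AppService().update_app(app, args)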

@ -82,7 +82,7 @@ class BillingService:
if not join:
raise ValueError("Tenant account join not found")
if not TenantAccountRole.is_privileged_role(join.role):
if not TenantAccountRole.is_privileged_role(TenantAccountRole(join.role)):
raise ValueError("Only team owner or team admin can perform this action")
@classmethod
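
join.role comes back from the ORM as a plain string, so it is coerced into the TenantAccountRole enum before the privilege check. A compact illustration (the member name in the comment is an assumption; the check itself is taken from the diff):

role = TenantAccountRole(join.role)  # e.g. "admin" -> TenantAccountRole.ADMIN (member name assumed)
if not TenantAccountRole.is_privileged_role(role):
    raise ValueError("Only team owner or team admin can perform this action")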

@ -1,5 +1,4 @@
from collections.abc import Callable, Sequence
from datetime import UTC, datetime
from typing import Optional, Union
from sqlalchemy import asc, desc, func, or_, select
@ -8,6 +7,7 @@ from sqlalchemy.orm import Session
from core.app.entities.app_invoke_entities import InvokeFrom
from core.llm_generator.llm_generator import LLMGenerator
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from models import ConversationVariable
from models.account import Account
@ -113,7 +113,7 @@ class ConversationService:
return cls.auto_generate_name(app_model, conversation)
else:
conversation.name = name
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
return conversation
@ -169,7 +169,7 @@ class ConversationService:
conversation = cls.get_conversation(app_model, conversation_id, user)
conversation.is_deleted = True
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
@classmethod

@ -26,6 +26,7 @@ from events.document_event import document_was_deleted
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs import helper
from libs.datetime_utils import naive_utc_now
from models.account import Account, TenantAccountRole
from models.dataset import (
AppDatasetJoin,
@ -214,9 +215,9 @@ class DatasetService:
dataset.created_by = account.id
dataset.updated_by = account.id
dataset.tenant_id = tenant_id
dataset.embedding_model_provider = embedding_model.provider if embedding_model else None
dataset.embedding_model = embedding_model.model if embedding_model else None
dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None
dataset.embedding_model_provider = embedding_model.provider if embedding_model else None # type: ignore
dataset.embedding_model = embedding_model.model if embedding_model else None # type: ignore
dataset.retrieval_model = retrieval_model.model_dump() if retrieval_model else None # type: ignore
dataset.permission = permission or DatasetPermissionEnum.ONLY_ME
dataset.provider = provider
db.session.add(dataset)
@ -428,7 +429,7 @@ class DatasetService:
# Add metadata fields
filtered_data["updated_by"] = user.id
filtered_data["updated_at"] = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
filtered_data["updated_at"] = naive_utc_now()
# update Retrieval model
filtered_data["retrieval_model"] = data["retrieval_model"]
@ -994,7 +995,7 @@ class DocumentService:
# update document to be paused
document.is_paused = True
document.paused_by = current_user.id
document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.paused_at = naive_utc_now()
db.session.add(document)
db.session.commit()
@ -1539,8 +1540,10 @@ class DocumentService:
db.session.add(document)
db.session.commit()
# update document segment
update_params = {DocumentSegment.status: "re_segment"}
db.session.query(DocumentSegment).filter_by(document_id=document.id).update(update_params)
db.session.query(DocumentSegment).filter_by(document_id=document.id).update(
{DocumentSegment.status: "re_segment"}
) # type: ignore
db.session.commit()
# trigger async task
document_indexing_update_task.delay(document.dataset_id, document.id)
@ -2225,7 +2228,7 @@ class SegmentService:
# calc embedding use tokens
if document.doc_form == "qa_model":
segment.answer = args.answer
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0]
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] # type: ignore
else:
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
segment.content = content
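
The re-segment hunk above now issues the status change as a single query-level update instead of building a separate params dict first. The same SQLAlchemy pattern as a standalone sketch, reusing names already visible in this diff:

from extensions.ext_database import db
from models.dataset import DocumentSegment

def mark_segments_for_resegment(document_id: str) -> None:
    # Query.update() emits one UPDATE ... WHERE document_id = :id statement
    # instead of loading every DocumentSegment row into the session.
    db.session.query(DocumentSegment).filter_by(document_id=document_id).update(
        {DocumentSegment.status: "re_segment"}
    )
    db.session.commit()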

@ -1,6 +1,5 @@
import json
from copy import deepcopy
from datetime import UTC, datetime
from typing import Any, Optional, Union, cast
from urllib.parse import urlparse
@ -11,6 +10,7 @@ from constants import HIDDEN_VALUE
from core.helper import ssrf_proxy
from core.rag.entities.metadata_entities import MetadataCondition
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import (
Dataset,
ExternalKnowledgeApis,
@ -120,7 +120,7 @@ class ExternalDatasetService:
external_knowledge_api.description = args.get("description", "")
external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
external_knowledge_api.updated_by = user_id
external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
external_knowledge_api.updated_at = naive_utc_now()
db.session.commit()
return external_knowledge_api

@ -0,0 +1,87 @@
from sqlalchemy.orm import Session
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
class PluginAutoUpgradeService:
@staticmethod
def get_strategy(tenant_id: str) -> TenantPluginAutoUpgradeStrategy | None:
with Session(db.engine) as session:
return (
session.query(TenantPluginAutoUpgradeStrategy)
.filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
.first()
)
@staticmethod
def change_strategy(
tenant_id: str,
strategy_setting: TenantPluginAutoUpgradeStrategy.StrategySetting,
upgrade_time_of_day: int,
upgrade_mode: TenantPluginAutoUpgradeStrategy.UpgradeMode,
exclude_plugins: list[str],
include_plugins: list[str],
) -> bool:
with Session(db.engine) as session:
exist_strategy = (
session.query(TenantPluginAutoUpgradeStrategy)
.filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
.first()
)
if not exist_strategy:
strategy = TenantPluginAutoUpgradeStrategy(
tenant_id=tenant_id,
strategy_setting=strategy_setting,
upgrade_time_of_day=upgrade_time_of_day,
upgrade_mode=upgrade_mode,
exclude_plugins=exclude_plugins,
include_plugins=include_plugins,
)
session.add(strategy)
else:
exist_strategy.strategy_setting = strategy_setting
exist_strategy.upgrade_time_of_day = upgrade_time_of_day
exist_strategy.upgrade_mode = upgrade_mode
exist_strategy.exclude_plugins = exclude_plugins
exist_strategy.include_plugins = include_plugins
session.commit()
return True
@staticmethod
def exclude_plugin(tenant_id: str, plugin_id: str) -> bool:
with Session(db.engine) as session:
exist_strategy = (
session.query(TenantPluginAutoUpgradeStrategy)
.filter(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
.first()
)
if not exist_strategy:
# create for this tenant
PluginAutoUpgradeService.change_strategy(
tenant_id,
TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
0,
TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE,
[plugin_id],
[],
)
return True
else:
if exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE:
if plugin_id not in exist_strategy.exclude_plugins:
new_exclude_plugins = exist_strategy.exclude_plugins.copy()
new_exclude_plugins.append(plugin_id)
exist_strategy.exclude_plugins = new_exclude_plugins
elif exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL:
if plugin_id in exist_strategy.include_plugins:
new_include_plugins = exist_strategy.include_plugins.copy()
new_include_plugins.remove(plugin_id)
exist_strategy.include_plugins = new_include_plugins
elif exist_strategy.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.ALL:
exist_strategy.upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE
exist_strategy.exclude_plugins = [plugin_id]
session.commit()
return True
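
The new PluginAutoUpgradeService keeps a single strategy row per tenant and exposes three entry points. An illustrative call sequence; the module path, the tenant and plugin ids, and the comments about upgrade_time_of_day are assumptions, while the signatures follow the code above:

from models.account import TenantPluginAutoUpgradeStrategy
from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService  # module path assumed

tenant_id = "tenant-123"  # illustrative id

# Allow fix-only auto-upgrades for a single allow-listed plugin.
PluginAutoUpgradeService.change_strategy(
    tenant_id,
    TenantPluginAutoUpgradeStrategy.StrategySetting.FIX_ONLY,
    0,  # upgrade_time_of_day, same default the tenant-creation code above uses
    TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL,
    [],  # exclude_plugins
    ["langgenius/openai"],  # include_plugins, illustrative plugin id
)

# Opt that plugin back out: under PARTIAL it is dropped from include_plugins,
# under EXCLUDE it is appended to exclude_plugins, and under ALL the tenant is
# switched to EXCLUDE with just this plugin.
PluginAutoUpgradeService.exclude_plugin(tenant_id, "langgenius/openai")

strategy = PluginAutoUpgradeService.get_strategy(tenant_id)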

@ -38,6 +38,9 @@ class PluginService:
plugin_id: str
version: str
unique_identifier: str
status: str
deprecated_reason: str
alternative_plugin_id: str
REDIS_KEY_PREFIX = "plugin_service:latest_plugin:"
REDIS_TTL = 60 * 5 # 5 minutes
@ -71,6 +74,9 @@ class PluginService:
plugin_id=plugin_id,
version=manifest.latest_version,
unique_identifier=manifest.latest_package_identifier,
status=manifest.status,
deprecated_reason=manifest.deprecated_reason,
alternative_plugin_id=manifest.alternative_plugin_id,
)
# Store in Redis
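
The cached latest-plugin record now also carries status, deprecated_reason and alternative_plugin_id, still under the plugin_service:latest_plugin: prefix with a five-minute TTL. A sketch of the caching round-trip; the exact key layout and the JSON serialization are assumptions (the real code may persist a dataclass or Pydantic model instead):

import json

from extensions.ext_redis import redis_client

REDIS_KEY_PREFIX = "plugin_service:latest_plugin:"
REDIS_TTL = 60 * 5  # 5 minutes

def cache_latest_plugin(plugin_id: str, record: dict) -> None:
    # record carries plugin_id, version, unique_identifier, status,
    # deprecated_reason and alternative_plugin_id.
    redis_client.setex(REDIS_KEY_PREFIX + plugin_id, REDIS_TTL, json.dumps(record))

def get_cached_latest_plugin(plugin_id: str) -> dict | None:
    raw = redis_client.get(REDIS_KEY_PREFIX + plugin_id)
    return json.loads(raw) if raw else None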
