Merge branch 'main' into mapped_column

pull/22644/head
Asuka Minato 9 months ago committed by GitHub
commit d39776c0c9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -1,6 +1,6 @@
#!/bin/bash
npm add -g pnpm@10.11.1
npm add -g pnpm@10.13.1
cd web && pnpm install
pipx install uv
@ -12,3 +12,4 @@ echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f do
echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
source /home/vscode/.bashrc

@ -28,7 +28,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
api/**
@ -75,7 +75,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**
@ -113,7 +113,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
docker/generate_docker_compose
@ -144,7 +144,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
**.sh
@ -152,13 +152,15 @@ jobs:
**.yml
**Dockerfile
dev/**
.editorconfig
- name: Super-linter
uses: super-linter/super-linter/slim@v7
uses: super-linter/super-linter/slim@v8
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: main
DEFAULT_BRANCH: origin/main
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
FILTER_REGEX_INCLUDE: pnpm-lock.yaml
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
@ -168,16 +170,6 @@ jobs:
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_EDITORCONFIG: true
VALIDATE_XML: true
VALIDATE_YAML: true
- name: EditorConfig checks
uses: super-linter/super-linter/slim@v7
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true
# EditorConfig validation
VALIDATE_EDITORCONFIG: true
EDITORCONFIG_FILE_NAME: editorconfig-checker.json

@ -27,7 +27,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**

@ -144,6 +144,8 @@ CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
# Weaviate configuration
WEAVIATE_ENDPOINT=http://localhost:8080

@ -85,6 +85,11 @@ class VectorStoreConfig(BaseSettings):
default=False,
)
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
class KeywordStoreConfig(BaseSettings):
KEYWORD_STORE: str = Field(

@ -1,4 +1,4 @@
from datetime import UTC, datetime
from datetime import datetime
import pytz # pip install pytz
from flask_login import current_user
@ -19,6 +19,7 @@ from fields.conversation_fields import (
conversation_pagination_fields,
conversation_with_summary_pagination_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import login_required
from models import Conversation, EndUser, Message, MessageAnnotation
@ -315,7 +316,7 @@ def _get_conversation(app_model, conversation_id):
raise NotFound("Conversation Not Exists.")
if not conversation.read_at:
conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
conversation.read_at = naive_utc_now()
conversation.read_account_id = current_user.id
db.session.commit()

@ -1,5 +1,3 @@
from datetime import UTC, datetime
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
@ -10,6 +8,7 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Site
@ -77,7 +76,7 @@ class AppSite(Resource):
setattr(site, attr_name, value)
site.updated_by = current_user.id
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
site.updated_at = naive_utc_now()
db.session.commit()
return site
@ -101,7 +100,7 @@ class AppSiteAccessTokenReset(Resource):
site.code = Site.generate_code(16)
site.updated_by = current_user.id
site.updated_at = datetime.now(UTC).replace(tzinfo=None)
site.updated_at = naive_utc_now()
db.session.commit()
return site

@ -1,5 +1,3 @@
import datetime
from flask import request
from flask_restful import Resource, reqparse
@ -7,6 +5,7 @@ from constants.languages import supported_language
from controllers.console import api
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import StrLen, email, extract_remote_ip, timezone
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService
@ -65,7 +64,7 @@ class ActivateApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Optional
import requests
@ -13,6 +12,7 @@ from configs import dify_config
from constants.languages import languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_remote_ip
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from models import Account
@ -110,7 +110,7 @@ class OAuthCallback(Resource):
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
try:

@ -1,4 +1,3 @@
import datetime
import json
from flask import request
@ -15,6 +14,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
@ -88,7 +88,7 @@ class DataSourceApi(Resource):
if action == "enable":
if data_source_binding.disabled:
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:
@ -97,7 +97,7 @@ class DataSourceApi(Resource):
if action == "disable":
if not data_source_binding.disabled:
data_source_binding.disabled = True
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:

@ -1,6 +1,5 @@
import logging
from argparse import ArgumentTypeError
from datetime import UTC, datetime
from typing import cast
from flask import request
@ -49,6 +48,7 @@ from fields.document_fields import (
document_status_fields,
document_with_segments_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
from services.dataset_service import DatasetService, DocumentService
@ -750,7 +750,7 @@ class DocumentProcessingApi(DocumentResource):
raise InvalidActionError("Document not in indexing state.")
document.paused_by = current_user.id
document.paused_at = datetime.now(UTC).replace(tzinfo=None)
document.paused_at = naive_utc_now()
document.is_paused = True
db.session.commit()
@ -830,7 +830,7 @@ class DocumentMetadataApi(DocumentResource):
document.doc_metadata[key] = value
document.doc_type = doc_type
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
document.updated_at = naive_utc_now()
db.session.commit()
return {"result": "success", "message": "Document metadata updated."}, 200

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from flask_login import current_user
from flask_restful import reqparse
@ -27,6 +26,7 @@ from core.errors.error import (
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs import helper
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models.model import AppMode
from services.app_generate_service import AppGenerateService
@ -51,7 +51,7 @@ class CompletionApi(InstalledAppResource):
streaming = args["response_mode"] == "streaming"
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:
@ -111,7 +111,7 @@ class ChatApi(InstalledAppResource):
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Any
from flask import request
@ -13,6 +12,7 @@ from controllers.console.explore.wraps import InstalledAppResource
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from extensions.ext_database import db
from fields.installed_app_fields import installed_app_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import App, InstalledApp, RecommendedApp
from services.account_service import TenantService
@ -122,7 +122,7 @@ class InstalledAppsListApi(Resource):
tenant_id=current_tenant_id,
app_owner_tenant_id=app.tenant_id,
is_pinned=False,
last_used_at=datetime.now(UTC).replace(tzinfo=None),
last_used_at=naive_utc_now(),
)
db.session.add(new_installed_app)
db.session.commit()

@ -1,5 +1,3 @@
import datetime
import pytz
from flask import request
from flask_login import current_user
@ -35,6 +33,7 @@ from controllers.console.wraps import (
)
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
from libs.helper import TimestampField, email, extract_remote_ip, timezone
from libs.login import login_required
from models import AccountIntegrate, InvitationCode
@ -80,7 +79,7 @@ class AccountInitApi(Resource):
raise InvalidInvitationCodeError()
invitation_code.status = "used"
invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
invitation_code.used_at = naive_utc_now()
invitation_code.used_by_tenant_id = account.current_tenant_id
invitation_code.used_by_account_id = account.id
@ -88,7 +87,7 @@ class AccountInitApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = "active"
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
return {"result": "success"}

@ -1,6 +1,6 @@
import time
from collections.abc import Callable
from datetime import UTC, datetime, timedelta
from datetime import timedelta
from enum import Enum
from functools import wraps
from typing import Optional
@ -15,6 +15,7 @@ from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import _get_user
from models.account import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
@ -256,7 +257,7 @@ def validate_and_get_api_token(scope: str | None = None):
if auth_scheme != "bearer":
raise Unauthorized("Authorization scheme must be 'Bearer'")
current_time = datetime.now(UTC).replace(tzinfo=None)
current_time = naive_utc_now()
cutoff_time = current_time - timedelta(minutes=1)
with Session(db.engine, expire_on_commit=False) as session:
update_stmt = (

@ -1,7 +1,6 @@
import json
import logging
from collections.abc import Generator
from datetime import UTC, datetime
from typing import Optional, Union, cast
from core.app.app_config.entities import EasyUIBasedAppConfig, EasyUIBasedAppModelConfigFrom
@ -25,6 +24,7 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBasedGenerateTaskPipeline
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models import Account
from models.enums import CreatorUserRole
from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile
@ -184,7 +184,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
db.session.commit()
db.session.refresh(conversation)
else:
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
message = Message(

@ -1,6 +1,6 @@
from collections.abc import Mapping
from dataclasses import dataclass
from datetime import UTC, datetime
from datetime import datetime
from typing import Any, Optional, Union
from uuid import uuid4
@ -71,7 +71,7 @@ class WorkflowCycleManager:
workflow_version=self._workflow_info.version,
graph=self._workflow_info.graph_data,
inputs=inputs,
started_at=datetime.now(UTC).replace(tzinfo=None),
started_at=naive_utc_now(),
)
return self._save_and_cache_workflow_execution(execution)
@ -356,7 +356,7 @@ class WorkflowCycleManager:
created_at: Optional[datetime] = None,
) -> WorkflowNodeExecution:
"""Create a node execution from an event."""
now = datetime.now(UTC).replace(tzinfo=None)
now = naive_utc_now()
created_at = created_at or now
metadata = {
@ -403,7 +403,7 @@ class WorkflowCycleManager:
handle_special_values: bool = False,
) -> None:
"""Update node execution with completion data."""
finished_at = datetime.now(UTC).replace(tzinfo=None)
finished_at = naive_utc_now()
elapsed_time = (finished_at - event.start_at).total_seconds()
# Process data

@ -1,4 +1,3 @@
import datetime
import logging
import time
@ -8,6 +7,7 @@ from werkzeug.exceptions import NotFound
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from events.event_handlers.document_index_event import document_index_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Document
@ -33,7 +33,7 @@ def handle(sender, **kwargs):
raise NotFound("Document not found")
document.indexing_status = "parsing"
document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.processing_started_at = naive_utc_now()
documents.append(document)
db.session.add(document)
db.session.commit()

@ -1,5 +1,5 @@
from collections.abc import Generator
from datetime import UTC, datetime, timedelta
from datetime import timedelta
from typing import Optional
from azure.identity import ChainedTokenCredential, DefaultAzureCredential
@ -8,6 +8,7 @@ from azure.storage.blob import AccountSasPermissions, BlobServiceClient, Resourc
from configs import dify_config
from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage
from libs.datetime_utils import naive_utc_now
class AzureBlobStorage(BaseStorage):
@ -78,7 +79,7 @@ class AzureBlobStorage(BaseStorage):
account_key=self.account_key or "",
resource_types=ResourceTypes(service=True, container=True, object=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
expiry=naive_utc_now() + timedelta(hours=1),
)
redis_client.set(cache_key, sas_token, ex=3000)
return BlobServiceClient(account_url=self.account_url or "", credential=sas_token)

@ -1,4 +1,3 @@
import datetime
import urllib.parse
from typing import Any
@ -6,6 +5,7 @@ import requests
from flask_login import current_user
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.source import DataSourceOauthBinding
@ -75,7 +75,7 @@ class NotionOAuth(OAuthDataSource):
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
new_data_source_binding = DataSourceOauthBinding(
@ -115,7 +115,7 @@ class NotionOAuth(OAuthDataSource):
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
new_data_source_binding = DataSourceOauthBinding(
@ -154,7 +154,7 @@ class NotionOAuth(OAuthDataSource):
}
data_source_binding.source_info = new_source_info
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.commit()
else:
raise ValueError("Data source binding not found")

@ -256,7 +256,7 @@ class Dataset(Base):
@staticmethod
def gen_collection_name_by_id(dataset_id: str) -> str:
normalized_dataset_id = dataset_id.replace("-", "_")
return f"Vector_index_{normalized_dataset_id}_Node"
return f"{dify_config.VECTOR_INDEX_NAME_PREFIX}_{normalized_dataset_id}_Node"
class DatasetProcessRule(Base):

@ -1,8 +1,7 @@
from datetime import UTC, datetime
from celery import states # type: ignore
from sqlalchemy.orm import mapped_column
from libs.datetime_utils import naive_utc_now
from models.base import Base
from .engine import db
@ -19,8 +18,8 @@ class CeleryTask(Base):
result = mapped_column(db.PickleType, nullable=True)
date_done = mapped_column(
db.DateTime,
default=lambda: datetime.now(UTC).replace(tzinfo=None),
onupdate=lambda: datetime.now(UTC).replace(tzinfo=None),
default=lambda: naive_utc_now(),
onupdate=lambda: naive_utc_now(),
nullable=True,
)
traceback = mapped_column(db.Text, nullable=True)
@ -37,7 +36,8 @@ class CeleryTaskSet(Base):
__tablename__ = "celery_tasksetmeta"
id = mapped_column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
taskset_id = mapped_column(db.String(155), unique=True)
result = mapped_column(db.PickleType, nullable=True)
date_done = mapped_column(db.DateTime, default=lambda: datetime.now(UTC).replace(tzinfo=None), nullable=True)
id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
taskset_id = db.Column(db.String(155), unique=True)
result = db.Column(db.PickleType, nullable=True)
date_done = db.Column(db.DateTime, default=lambda: naive_utc_now(), nullable=True)

@ -1,7 +1,7 @@
import json
import logging
from collections.abc import Mapping, Sequence
from datetime import UTC, datetime
from datetime import datetime
from enum import Enum, StrEnum
from typing import TYPE_CHECKING, Any, Optional, Union
from uuid import uuid4
@ -16,6 +16,7 @@ from core.variables.variables import FloatVariable, IntegerVariable, StringVaria
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from core.workflow.nodes.enums import NodeType
from factories.variable_factory import TypeMismatchError, build_segment_with_type
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_tenant_id
from ._workflow_exc import NodeNotFoundError, WorkflowDataError
@ -138,7 +139,7 @@ class Workflow(Base):
updated_at: Mapped[datetime] = mapped_column(
db.DateTime,
nullable=False,
default=datetime.now(UTC).replace(tzinfo=None),
default=naive_utc_now(),
server_onupdate=func.current_timestamp(),
)
_environment_variables: Mapped[str] = mapped_column(
@ -179,7 +180,7 @@ class Workflow(Base):
workflow.conversation_variables = conversation_variables or []
workflow.marked_name = marked_name
workflow.marked_comment = marked_comment
workflow.created_at = datetime.now(UTC).replace(tzinfo=None)
workflow.created_at = naive_utc_now()
workflow.updated_at = workflow.created_at
return workflow
@ -907,7 +908,7 @@ _EDITABLE_SYSTEM_VARIABLE = frozenset(["query", "files"])
def _naive_utc_datetime():
return datetime.now(UTC).replace(tzinfo=None)
return naive_utc_now()
class WorkflowDraftVariable(Base):

@ -17,6 +17,7 @@ from constants.languages import language_timezone_mapping, languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from libs.helper import RateLimiter, TokenManager
from libs.passport import PassportService
from libs.password import compare_password, hash_password, valid_password
@ -135,8 +136,8 @@ class AccountService:
available_ta.current = True
db.session.commit()
if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
if naive_utc_now() - account.last_active_at > timedelta(minutes=10):
account.last_active_at = naive_utc_now()
db.session.commit()
return cast(Account, account)
@ -180,7 +181,7 @@ class AccountService:
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
@ -318,7 +319,7 @@ class AccountService:
# If it exists, update the record
account_integrate.open_id = open_id
account_integrate.encrypted_token = "" # todo
account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
account_integrate.updated_at = naive_utc_now()
else:
# If it does not exist, create a new record
account_integrate = AccountIntegrate(
@ -353,7 +354,7 @@ class AccountService:
@staticmethod
def update_login_info(account: Account, *, ip_address: str) -> None:
"""Update last login time and ip"""
account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
account.last_login_at = naive_utc_now()
account.last_login_ip = ip_address
db.session.add(account)
db.session.commit()
@ -1117,7 +1118,7 @@ class RegisterService:
)
account.last_login_ip = ip_address
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)
@ -1158,7 +1159,7 @@ class RegisterService:
is_setup=is_setup,
)
account.status = AccountStatus.ACTIVE.value if not status else status.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
if open_id is not None and provider is not None:
AccountService.link_account_integrate(provider, open_id, account)

@ -1,6 +1,5 @@
import json
import logging
from datetime import UTC, datetime
from typing import Optional, cast
from flask_login import current_user
@ -17,6 +16,7 @@ from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from events.app_event import app_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode, AppModelConfig, Site
from models.tools import ApiToolProvider
@ -235,7 +235,7 @@ class AppService:
app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
app.max_active_requests = args.get("max_active_requests")
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -249,7 +249,7 @@ class AppService:
"""
app.name = name
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -265,7 +265,7 @@ class AppService:
app.icon = icon
app.icon_background = icon_background
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -282,7 +282,7 @@ class AppService:
app.enable_site = enable_site
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app
@ -299,7 +299,7 @@ class AppService:
app.enable_api = enable_api
app.updated_by = current_user.id
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
app.updated_at = naive_utc_now()
db.session.commit()
return app

@ -1,5 +1,4 @@
from collections.abc import Callable, Sequence
from datetime import UTC, datetime
from typing import Optional, Union
from sqlalchemy import asc, desc, func, or_, select
@ -8,6 +7,7 @@ from sqlalchemy.orm import Session
from core.app.entities.app_invoke_entities import InvokeFrom
from core.llm_generator.llm_generator import LLMGenerator
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from models import ConversationVariable
from models.account import Account
@ -113,7 +113,7 @@ class ConversationService:
return cls.auto_generate_name(app_model, conversation)
else:
conversation.name = name
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
return conversation
@ -169,7 +169,7 @@ class ConversationService:
conversation = cls.get_conversation(app_model, conversation_id, user)
conversation.is_deleted = True
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
conversation.updated_at = naive_utc_now()
db.session.commit()
@classmethod

@ -26,6 +26,7 @@ from events.document_event import document_was_deleted
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs import helper
from libs.datetime_utils import naive_utc_now
from models.account import Account, TenantAccountRole
from models.dataset import (
AppDatasetJoin,
@ -428,7 +429,7 @@ class DatasetService:
# Add metadata fields
filtered_data["updated_by"] = user.id
filtered_data["updated_at"] = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
filtered_data["updated_at"] = naive_utc_now()
# update Retrieval model
filtered_data["retrieval_model"] = data["retrieval_model"]
@ -994,7 +995,7 @@ class DocumentService:
# update document to be paused
document.is_paused = True
document.paused_by = current_user.id
document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.paused_at = naive_utc_now()
db.session.add(document)
db.session.commit()

@ -1,6 +1,5 @@
import json
from copy import deepcopy
from datetime import UTC, datetime
from typing import Any, Optional, Union, cast
from urllib.parse import urlparse
@ -11,6 +10,7 @@ from constants import HIDDEN_VALUE
from core.helper import ssrf_proxy
from core.rag.entities.metadata_entities import MetadataCondition
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import (
Dataset,
ExternalKnowledgeApis,
@ -120,7 +120,7 @@ class ExternalDatasetService:
external_knowledge_api.description = args.get("description", "")
external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
external_knowledge_api.updated_by = user_id
external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
external_knowledge_api.updated_at = naive_utc_now()
db.session.commit()
return external_knowledge_api

@ -2,7 +2,6 @@ import json
import time
import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from datetime import UTC, datetime
from typing import Any, Optional, cast
from uuid import uuid4
@ -33,6 +32,7 @@ from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
@ -232,7 +232,7 @@ class WorkflowService:
workflow.graph = json.dumps(graph)
workflow.features = json.dumps(features)
workflow.updated_by = account.id
workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
workflow.updated_at = naive_utc_now()
workflow.environment_variables = environment_variables
workflow.conversation_variables = conversation_variables
@ -268,7 +268,7 @@ class WorkflowService:
tenant_id=app_model.tenant_id,
app_id=app_model.id,
type=draft_workflow.type,
version=Workflow.version_from_datetime(datetime.now(UTC).replace(tzinfo=None)),
version=Workflow.version_from_datetime(naive_utc_now()),
graph=draft_workflow.graph,
features=draft_workflow.features,
created_by=account.id,
@ -523,8 +523,8 @@ class WorkflowService:
node_type=node.type_,
title=node.title,
elapsed_time=time.perf_counter() - start_at,
created_at=datetime.now(UTC).replace(tzinfo=None),
finished_at=datetime.now(UTC).replace(tzinfo=None),
created_at=naive_utc_now(),
finished_at=naive_utc_now(),
)
if run_succeeded and node_run_result:
@ -621,7 +621,7 @@ class WorkflowService:
setattr(workflow, field, value)
workflow.updated_by = account_id
workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
workflow.updated_at = naive_utc_now()
return workflow

@ -1,4 +1,3 @@
import datetime
import logging
import time
@ -8,6 +7,7 @@ from celery import shared_task # type: ignore
from configs import dify_config
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document
from services.feature_service import FeatureService
@ -53,7 +53,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
if document:
document.indexing_status = "error"
document.error = str(e)
document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
db.session.close()
@ -68,7 +68,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
if document:
document.indexing_status = "parsing"
document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.processing_started_at = naive_utc_now()
documents.append(document)
db.session.add(document)
db.session.commit()

@ -26,8 +26,15 @@ redis_mock.hgetall = MagicMock(return_value={})
redis_mock.hdel = MagicMock()
redis_mock.incr = MagicMock(return_value=1)
# Add the API directory to Python path to ensure proper imports
import sys
sys.path.insert(0, PROJECT_DIR)
# apply the mock to the Redis client in the Flask app
redis_patcher = patch("extensions.ext_redis.redis_client", redis_mock)
from extensions import ext_redis
redis_patcher = patch.object(ext_redis, "redis_client", redis_mock)
redis_patcher.start()

@ -0,0 +1,49 @@
import pytest
from services.auth.api_key_auth_base import ApiKeyAuthBase
class ConcreteApiKeyAuth(ApiKeyAuthBase):
    """Concrete implementation for testing abstract base class"""
    def validate_credentials(self):
        # Minimal override: always reports success so tests can exercise
        # the base-class construction/storage behavior in isolation.
        return True
class TestApiKeyAuthBase:
    """Behavioral tests for the ApiKeyAuthBase abstract base class."""

    def test_should_store_credentials_on_init(self):
        """The constructor must keep a reference to the supplied credentials."""
        creds = {"api_key": "test_key", "auth_type": "bearer"}
        assert ConcreteApiKeyAuth(creds).credentials == creds

    def test_should_not_instantiate_abstract_class(self):
        """Direct instantiation is forbidden while validate_credentials is abstract."""
        with pytest.raises(TypeError) as exc_info:
            ApiKeyAuthBase({"api_key": "test_key"})
        message = str(exc_info.value)
        assert "Can't instantiate abstract class" in message
        assert "validate_credentials" in message

    def test_should_allow_subclass_implementation(self):
        """A subclass providing validate_credentials is fully usable."""
        auth = ConcreteApiKeyAuth({"api_key": "test_key", "auth_type": "bearer"})
        # The concrete override must run without raising.
        assert auth.validate_credentials() is True

    def test_should_handle_empty_credentials(self):
        """An empty credentials dict is stored untouched."""
        assert ConcreteApiKeyAuth({}).credentials == {}

    def test_should_handle_none_credentials(self):
        """None is accepted and stored as-is (no defensive copy or default)."""
        assert ConcreteApiKeyAuth(None).credentials is None

@ -0,0 +1,81 @@
from unittest.mock import MagicMock, patch
import pytest
from services.auth.api_key_auth_factory import ApiKeyAuthFactory
from services.auth.auth_type import AuthType
class TestApiKeyAuthFactory:
    """Tests for ApiKeyAuthFactory provider lookup and credential validation."""

    @pytest.mark.parametrize(
        ("provider", "auth_class_path"),
        [
            (AuthType.FIRECRAWL, "services.auth.firecrawl.firecrawl.FirecrawlAuth"),
            (AuthType.WATERCRAWL, "services.auth.watercrawl.watercrawl.WatercrawlAuth"),
            (AuthType.JINA, "services.auth.jina.jina.JinaAuth"),
        ],
    )
    def test_get_apikey_auth_factory_valid_providers(self, provider, auth_class_path):
        """Each supported provider resolves to its dedicated auth class."""
        with patch(auth_class_path) as patched_class:
            assert ApiKeyAuthFactory.get_apikey_auth_factory(provider) == patched_class

    @pytest.mark.parametrize(
        "invalid_provider",
        [
            "invalid_provider",
            "",
            None,
            123,
            "UNSUPPORTED",
        ],
    )
    def test_get_apikey_auth_factory_invalid_providers(self, invalid_provider):
        """Unknown, empty, or non-string providers raise ValueError."""
        with pytest.raises(ValueError) as exc_info:
            ApiKeyAuthFactory.get_apikey_auth_factory(invalid_provider)
        assert str(exc_info.value) == "Invalid provider"

    @pytest.mark.parametrize(
        ("credentials_return_value", "expected_result"),
        [
            (True, True),
            (False, False),
        ],
    )
    @patch("services.auth.api_key_auth_factory.ApiKeyAuthFactory.get_apikey_auth_factory")
    def test_validate_credentials_delegates_to_auth_instance(
        self, mock_get_factory, credentials_return_value, expected_result
    ):
        """validate_credentials forwards to the resolved auth instance and returns its result."""
        # Arrange: factory lookup yields a class whose instance reports the given result.
        auth_instance = MagicMock()
        auth_instance.validate_credentials.return_value = credentials_return_value
        mock_get_factory.return_value = MagicMock(return_value=auth_instance)

        # Act
        outcome = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"api_key": "test_key"}).validate_credentials()

        # Assert: result passed through unchanged, delegate called exactly once.
        assert outcome is expected_result
        auth_instance.validate_credentials.assert_called_once()

    @patch("services.auth.api_key_auth_factory.ApiKeyAuthFactory.get_apikey_auth_factory")
    def test_validate_credentials_propagates_exceptions(self, mock_get_factory):
        """Errors raised by the underlying auth instance bubble up unchanged."""
        auth_instance = MagicMock()
        auth_instance.validate_credentials.side_effect = Exception("Authentication error")
        mock_get_factory.return_value = MagicMock(return_value=auth_instance)

        factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"api_key": "test_key"})
        with pytest.raises(Exception) as exc_info:
            factory.validate_credentials()
        assert str(exc_info.value) == "Authentication error"

@ -0,0 +1,155 @@
from unittest.mock import MagicMock, patch
import pytest
import requests
from services.auth.jina.jina import JinaAuth
class TestJinaAuth:
    """Tests for JinaAuth construction and credential validation against r.jina.ai."""

    @staticmethod
    def _bearer_credentials():
        """Fresh, valid bearer credentials for each test (no shared mutable state)."""
        return {"auth_type": "bearer", "config": {"api_key": "test_api_key_123"}}

    @staticmethod
    def _response(status_code, json_body=None, text=""):
        """Build a mocked HTTP response; json_body=None simulates a non-JSON body."""
        resp = MagicMock()
        resp.status_code = status_code
        if json_body is None:
            resp.json.side_effect = Exception("Not JSON")
            resp.text = text
        else:
            resp.json.return_value = json_body
        return resp

    def test_should_initialize_with_valid_bearer_credentials(self):
        """Bearer credentials are accepted and the API key is extracted."""
        creds = self._bearer_credentials()
        auth = JinaAuth(creds)
        assert auth.api_key == "test_api_key_123"
        assert auth.credentials == creds

    def test_should_raise_error_for_invalid_auth_type(self):
        """Non-bearer auth types are rejected at construction time."""
        with pytest.raises(ValueError) as exc_info:
            JinaAuth({"auth_type": "basic", "config": {"api_key": "test_api_key_123"}})
        assert str(exc_info.value) == "Invalid auth type, Jina Reader auth type must be Bearer"

    def test_should_raise_error_for_missing_api_key(self):
        """A config without an api_key raises ValueError."""
        with pytest.raises(ValueError) as exc_info:
            JinaAuth({"auth_type": "bearer", "config": {}})
        assert str(exc_info.value) == "No API key provided"

    def test_should_raise_error_for_missing_config(self):
        """A missing config section behaves like a missing API key."""
        with pytest.raises(ValueError) as exc_info:
            JinaAuth({"auth_type": "bearer"})
        assert str(exc_info.value) == "No API key provided"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_validate_valid_credentials_successfully(self, mock_post):
        """A 200 response means the credentials are accepted."""
        mock_post.return_value = self._response(200, json_body={})
        auth = JinaAuth(self._bearer_credentials())
        assert auth.validate_credentials() is True
        # The probe request must target the reader endpoint with the bearer header.
        mock_post.assert_called_once_with(
            "https://r.jina.ai",
            headers={"Content-Type": "application/json", "Authorization": "Bearer test_api_key_123"},
            json={"url": "https://example.com"},
        )

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_http_402_error(self, mock_post):
        """A 402 response surfaces the server-provided error message."""
        mock_post.return_value = self._response(402, json_body={"error": "Payment required"})
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(Exception) as exc_info:
            auth.validate_credentials()
        assert str(exc_info.value) == "Failed to authorize. Status code: 402. Error: Payment required"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_http_409_error(self, mock_post):
        """A 409 response surfaces the server-provided error message."""
        mock_post.return_value = self._response(409, json_body={"error": "Conflict error"})
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(Exception) as exc_info:
            auth.validate_credentials()
        assert str(exc_info.value) == "Failed to authorize. Status code: 409. Error: Conflict error"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_http_500_error(self, mock_post):
        """A 500 response surfaces the server-provided error message."""
        mock_post.return_value = self._response(500, json_body={"error": "Internal server error"})
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(Exception) as exc_info:
            auth.validate_credentials()
        assert str(exc_info.value) == "Failed to authorize. Status code: 500. Error: Internal server error"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_unexpected_error_with_text_response(self, mock_post):
        """When the body is not JSON, the raw text is parsed for an error message."""
        mock_post.return_value = self._response(403, text='{"error": "Forbidden"}')
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(Exception) as exc_info:
            auth.validate_credentials()
        assert str(exc_info.value) == "Failed to authorize. Status code: 403. Error: Forbidden"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_unexpected_error_without_text(self, mock_post):
        """An empty, non-JSON body yields the generic unexpected-error message."""
        mock_post.return_value = self._response(404, text="")
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(Exception) as exc_info:
            auth.validate_credentials()
        assert str(exc_info.value) == "Unexpected error occurred while trying to authorize. Status code: 404"

    @patch("services.auth.jina.jina.requests.post")
    def test_should_handle_network_errors(self, mock_post):
        """Connection failures propagate as requests.ConnectionError."""
        mock_post.side_effect = requests.ConnectionError("Network error")
        auth = JinaAuth(self._bearer_credentials())
        with pytest.raises(requests.ConnectionError):
            auth.validate_credentials()

    def test_should_not_expose_api_key_in_error_messages(self):
        """Construction errors must never leak the API key value."""
        auth = JinaAuth({"auth_type": "bearer", "config": {"api_key": "super_secret_key_12345"}})
        # The key is stored on the instance...
        assert auth.api_key == "super_secret_key_12345"
        # ...but never echoed in validation error text.
        with pytest.raises(ValueError) as exc_info:
            JinaAuth({"auth_type": "basic", "config": {"api_key": "super_secret_key_12345"}})
        assert "super_secret_key_12345" not in str(exc_info.value)

@ -102,17 +102,16 @@ class TestDatasetServiceUpdateDataset:
patch("services.dataset_service.DatasetService.get_dataset") as mock_get_dataset,
patch("services.dataset_service.DatasetService.check_dataset_permission") as mock_check_perm,
patch("extensions.ext_database.db.session") as mock_db,
patch("services.dataset_service.datetime") as mock_datetime,
patch("services.dataset_service.naive_utc_now") as mock_naive_utc_now,
):
current_time = datetime.datetime(2023, 1, 1, 12, 0, 0)
mock_datetime.datetime.now.return_value = current_time
mock_datetime.UTC = datetime.UTC
mock_naive_utc_now.return_value = current_time
yield {
"get_dataset": mock_get_dataset,
"check_permission": mock_check_perm,
"db_session": mock_db,
"datetime": mock_datetime,
"naive_utc_now": mock_naive_utc_now,
"current_time": current_time,
}
@ -292,7 +291,7 @@ class TestDatasetServiceUpdateDataset:
"embedding_model_provider": "openai",
"embedding_model": "text-embedding-ada-002",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(
@ -327,7 +326,7 @@ class TestDatasetServiceUpdateDataset:
"indexing_technique": "high_quality",
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
actual_call_args = mock_dataset_service_dependencies[
@ -365,7 +364,7 @@ class TestDatasetServiceUpdateDataset:
"collection_binding_id": None,
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(
@ -422,7 +421,7 @@ class TestDatasetServiceUpdateDataset:
"collection_binding_id": "binding-456",
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(
@ -463,7 +462,7 @@ class TestDatasetServiceUpdateDataset:
"collection_binding_id": "binding-123",
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(
@ -525,7 +524,7 @@ class TestDatasetServiceUpdateDataset:
"collection_binding_id": "binding-789",
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(
@ -568,7 +567,7 @@ class TestDatasetServiceUpdateDataset:
"collection_binding_id": "binding-123",
"retrieval_model": "new_model",
"updated_by": user.id,
"updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None),
"updated_at": mock_dataset_service_dependencies["current_time"],
}
self._assert_database_update_called(

@ -412,6 +412,8 @@ SUPABASE_URL=your-server-url
# The type of vector store to use.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
WEAVIATE_ENDPOINT=http://weaviate:8080

@ -136,6 +136,7 @@ x-shared-env: &shared-api-worker-env
SUPABASE_API_KEY: ${SUPABASE_API_KEY:-your-access-key}
SUPABASE_URL: ${SUPABASE_URL:-your-server-url}
VECTOR_STORE: ${VECTOR_STORE:-weaviate}
VECTOR_INDEX_NAME_PREFIX: ${VECTOR_INDEX_NAME_PREFIX:-Vector_index}
WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080}
WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
QDRANT_URL: ${QDRANT_URL:-http://qdrant:6333}

@ -1 +1,7 @@
from dify_client.client import ChatClient, CompletionClient, WorkflowClient, KnowledgeBaseClient, DifyClient
from dify_client.client import (
ChatClient,
CompletionClient,
WorkflowClient,
KnowledgeBaseClient,
DifyClient,
)

@ -6,7 +6,7 @@ LABEL maintainer="takatost@gmail.com"
# RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories
RUN apk add --no-cache tzdata
RUN npm install -g pnpm@10.11.1
RUN npm install -g pnpm@10.13.1
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"

@ -1,6 +1,5 @@
'use client'
import { useTranslation } from 'react-i18next'
import { basePath } from '@/utils/var'
import {
RiAddLine,
RiArrowRightLine,
@ -18,7 +17,7 @@ const CreateAppCard = ({ ref }: CreateAppCardProps) => {
<div className='bg-background-default-dimm flex min-h-[160px] flex-col rounded-xl border-[0.5px]
border-components-panel-border transition-all duration-200 ease-in-out'
>
<Link ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
<Link ref={ref} className='group flex grow cursor-pointer items-start p-4' href='/datasets/create'>
<div className='flex items-center gap-3'>
<div className='flex h-10 w-10 items-center justify-center rounded-lg border border-dashed border-divider-regular bg-background-default-lighter
p-2 group-hover:border-solid group-hover:border-effects-highlight group-hover:bg-background-default-dodge'
@ -29,7 +28,7 @@ const CreateAppCard = ({ ref }: CreateAppCardProps) => {
</div>
</Link>
<div className='system-xs-regular p-4 pt-0 text-text-tertiary'>{t('dataset.createDatasetIntro')}</div>
<Link className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
<Link className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href='/datasets/connect'>
<div className='system-xs-medium text-text-tertiary group-hover:text-text-accent'>{t('dataset.connectDataset')}</div>
<RiArrowRightLine className='h-3.5 w-3.5 text-text-tertiary group-hover:text-text-accent' />
</Link>

@ -14,7 +14,6 @@ import Loading from '@/app/components/base/loading'
import Badge from '@/app/components/base/badge'
import { useKnowledge } from '@/hooks/use-knowledge'
import cn from '@/utils/classnames'
import { basePath } from '@/utils/var'
export type ISelectDataSetProps = {
isShow: boolean
@ -112,7 +111,7 @@ const SelectDataSet: FC<ISelectDataSetProps> = ({
}}
>
<span className='text-text-tertiary'>{t('appDebug.feature.dataSet.noDataSet')}</span>
<Link href={`${basePath}/datasets/create`} className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link>
<Link href='/datasets/create' className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link>
</div>
)}

@ -333,7 +333,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
<Col>
根据 workflow 执行 ID 获取 workflow 任务当前执行结果
### Path
- `workflow_run_id` (string) workflow_run_id,可在流式返回 Chunk 中获取
- `workflow_run_id` (string) workflow 执行 ID,可在流式返回 Chunk 中获取
### Response
- `id` (string) workflow 执行 ID
- `workflow_id` (string) 关联的 Workflow ID

@ -14,7 +14,6 @@ import Nav from '../nav'
import type { NavItem } from '../nav/nav-selector'
import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
import type { DataSetListResponse } from '@/models/datasets'
import { basePath } from '@/utils/var'
const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
if (!pageIndex || previousPageData.has_more)
@ -57,7 +56,7 @@ const DatasetNav = () => {
icon_background: dataset.icon_background,
})) as NavItem[]}
createText={t('common.menus.newDataset')}
onCreate={() => router.push(`${basePath}/datasets/create`)}
onCreate={() => router.push('/datasets/create')}
onLoadmore={handleLoadmore}
isApp={false}
/>

Loading…
Cancel
Save