Merge branch 'langgenius:main' into feat/rediscluster-support-custom-user
commit 3c9103bac8
@@ -0,0 +1,14 @@
from pydantic import BaseModel, Field


class MatrixoneConfig(BaseModel):
    """Matrixone vector database configuration."""

    MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
    MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server")
    MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone")
    MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone")
    MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to")
    MATRIXONE_METRIC: str = Field(
        default="l2", description="Distance metric type for vector similarity search (cosine or l2)"
    )
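For context, these fields behave as ordinary pydantic defaults; in Dify they are typically surfaced through dify_config (a pydantic-settings aggregate, see the factory below), where each field name doubles as an environment variable. A minimal sketch, assuming nothing beyond the class above; the host value is illustrative:

    config = MatrixoneConfig()  # every field falls back to its default
    assert config.MATRIXONE_PORT == 6001
    custom = MatrixoneConfig(MATRIXONE_HOST="matrixone.internal")  # explicit override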
@@ -0,0 +1,233 @@
import json
import logging
import uuid
from functools import wraps
from typing import Any, Optional

from mo_vector.client import MoVectorClient  # type: ignore
from pydantic import BaseModel, model_validator

from configs import dify_config
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.embedding_base import Embeddings
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset

logger = logging.getLogger(__name__)


class MatrixoneConfig(BaseModel):
    host: str = "localhost"
    port: int = 6001
    user: str = "dump"
    password: str = "111"
    database: str = "dify"
    metric: str = "l2"

    @model_validator(mode="before")
    @classmethod
    def validate_config(cls, values: dict) -> dict:
        # Use .get() so a missing key raises the intended ValueError
        # instead of an unhandled KeyError.
        if not values.get("host"):
            raise ValueError("config host is required")
        if not values.get("port"):
            raise ValueError("config port is required")
        if not values.get("user"):
            raise ValueError("config user is required")
        if not values.get("password"):
            raise ValueError("config password is required")
        if not values.get("database"):
            raise ValueError("config database is required")
        return values
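# Note: because validate_config runs with mode="before", it sees the raw input
# before defaults are applied, so every validated field must be passed
# explicitly and non-empty (the factory below always does). Illustrative:
#     MatrixoneConfig(host="localhost", port=6001, user="dump",
#                     password="111", database="dify")   # ok, metric defaults to "l2"
#     MatrixoneConfig(host="", port=6001, user="dump",
#                     password="111", database="dify")   # raises "config host is required"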
def ensure_client(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        # Lazily create the client on first use; the table is expected to
        # exist already, so no dimension is needed and create_table is False.
        if self.client is None:
            self.client = self._get_client(None, False)
        return func(self, *args, **kwargs)

    return wrapper

class MatrixoneVector(BaseVector):
    """
    Matrixone vector storage implementation.
    """

    def __init__(self, collection_name: str, config: MatrixoneConfig):
        super().__init__(collection_name)
        self.config = config
        self.collection_name = collection_name.lower()
        self.client = None

    @property
    def collection_name(self):
        return self._collection_name

    @collection_name.setter
    def collection_name(self, value):
        self._collection_name = value

    def get_type(self) -> str:
        return VectorType.MATRIXONE

    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        return self.add_texts(texts, embeddings)

    def _get_client(self, dimension: Optional[int] = None, create_table: bool = False) -> MoVectorClient:
        """
        Create a new client for the collection.

        The collection will be created if it doesn't exist.
        """
        lock_name = f"vector_indexing_lock_{self._collection_name}"
        with redis_client.lock(lock_name, timeout=20):
            client = MoVectorClient(
                connection_string=f"mysql+pymysql://{self.config.user}:{self.config.password}"
                f"@{self.config.host}:{self.config.port}/{self.config.database}",
                table_name=self.collection_name,
                vector_dimension=dimension,
                create_table=create_table,
            )
            # A short-lived cache key marks collections whose full-text index
            # has already been created, so concurrent workers skip the DDL.
            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(collection_exist_cache_key):
                return client
            try:
                client.create_full_text_index()
            except Exception:
                # Non-fatal: vector search still works without the index.
                logger.exception("Failed to create full text index")
            redis_client.set(collection_exist_cache_key, 1, ex=3600)
            return client

    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        assert self.client is not None
        # Build one id per document so ids stay aligned with texts and
        # embeddings; fall back to a fresh UUID when no doc_id is available.
        ids = []
        for doc in documents:
            if doc.metadata is not None:
                ids.append(doc.metadata.get("doc_id", str(uuid.uuid4())))
            else:
                ids.append(str(uuid.uuid4()))
        self.client.insert(
            texts=[doc.page_content for doc in documents],
            embeddings=embeddings,
            metadatas=[doc.metadata for doc in documents],
            ids=ids,
        )
        return ids

    @ensure_client
    def text_exists(self, id: str) -> bool:
        assert self.client is not None
        result = self.client.get(ids=[id])
        return len(result) > 0

    @ensure_client
    def delete_by_ids(self, ids: list[str]) -> None:
        assert self.client is not None
        if not ids:
            return
        self.client.delete(ids=ids)

    @ensure_client
    def get_ids_by_metadata_field(self, key: str, value: str):
        assert self.client is not None
        results = self.client.query_by_metadata(filter={key: value})
        return [result.id for result in results]

    @ensure_client
    def delete_by_metadata_field(self, key: str, value: str) -> None:
        assert self.client is not None
        self.client.delete(filter={key: value})

    @ensure_client
    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}

        results = self.client.query(
            query_vector=query_vector,
            k=top_k,
            filter=filter,
        )

        docs = []
        # TODO: add the score threshold to the query
        for result in results:
            docs.append(
                Document(
                    page_content=result.document,
                    metadata=result.metadata,
                )
            )
        return docs
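    # Example (illustrative; query_embedding is a placeholder list[float]):
    #     vector.search_by_vector(query_embedding, top_k=5,
    #                             document_ids_filter=["doc-1", "doc-2"])
    # builds filter={"document_id": {"$in": ["doc-1", "doc-2"]}} above, so only
    # chunks belonging to those documents are returned.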
    @ensure_client
    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}
        score_threshold = float(kwargs.get("score_threshold", 0.0))

        results = self.client.full_text_query(
            keywords=[query],
            k=top_k,
            filter=filter,
        )

        docs = []
        for result in results:
            metadata = result.metadata
            if isinstance(metadata, str):
                # json is already imported at module level.
                metadata = json.loads(metadata)
            # Convert distance to a similarity score and drop results below
            # the caller-supplied threshold.
            score = 1 - result.distance
            if score >= score_threshold:
                metadata["score"] = score
                docs.append(
                    Document(
                        page_content=result.document,
                        metadata=metadata,
                    )
                )
        return docs

    @ensure_client
    def delete(self) -> None:
        assert self.client is not None
        self.client.delete()


class MatrixoneVectorFactory(AbstractVectorFactory):
    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MatrixoneVector:
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            collection_name = class_prefix
        else:
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.MATRIXONE, collection_name))

        config = MatrixoneConfig(
            host=dify_config.MATRIXONE_HOST or "localhost",
            port=dify_config.MATRIXONE_PORT or 6001,
            user=dify_config.MATRIXONE_USER or "dump",
            password=dify_config.MATRIXONE_PASSWORD or "111",
            database=dify_config.MATRIXONE_DATABASE or "dify",
            metric=dify_config.MATRIXONE_METRIC or "l2",
        )
        return MatrixoneVector(collection_name=collection_name, config=config)
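A minimal end-to-end sketch of the class above, assuming a Matrixone instance is reachable with the default credentials and Dify's redis_client is configured (it guards table creation); the collection name and embedding values are illustrative:

    config = MatrixoneConfig(host="localhost", port=6001, user="dump", password="111", database="dify")
    vector = MatrixoneVector(collection_name="example_collection", config=config)

    doc = Document(page_content="hello matrixone", metadata={"doc_id": "doc-1", "document_id": "doc-1"})
    embedding = [0.1] * 768  # any fixed dimension works; real vectors come from an embedding model

    vector.create([doc], [embedding])                    # first call creates the table
    hits = vector.search_by_vector(embedding, top_k=1)   # returns a list[Document]
    vector.delete()                                      # removes the stored rows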
@@ -0,0 +1,42 @@
import logging

import sendgrid  # type: ignore
from python_http_client.exceptions import ForbiddenError, UnauthorizedError
from sendgrid.helpers.mail import Content, Email, Mail, To  # type: ignore


class SendGridClient:
    def __init__(self, sendgrid_api_key: str, _from: str):
        self.sendgrid_api_key = sendgrid_api_key
        self._from = _from

    def send(self, mail: dict):
        logging.debug("Sending email with SendGrid")
        # Read the recipient up front so the error handlers below can
        # reference it even when an exception is raised early.
        _to = mail.get("to", "")

        try:
            if not _to:
                raise ValueError("SendGridClient: Cannot send email: recipient address is missing.")

            sg = sendgrid.SendGridAPIClient(api_key=self.sendgrid_api_key)
            from_email = Email(self._from)
            to_email = To(_to)
            subject = mail["subject"]
            content = Content("text/html", mail["html"])
            # Use a distinct name so the Mail object does not shadow the
            # incoming mail dict.
            message = Mail(from_email, to_email, subject, content)
            mail_json = message.get()  # type: ignore
            response = sg.client.mail.send.post(request_body=mail_json)
            logging.debug(response.status_code)
            logging.debug(response.body)
            logging.debug(response.headers)

        except TimeoutError:
            logging.exception("SendGridClient Timeout occurred while sending email")
            raise
        except (UnauthorizedError, ForbiddenError):
            logging.exception(
                "SendGridClient Authentication failed. Verify that your credentials and the 'from' address are valid."
            )
            raise
        except Exception:
            logging.exception("SendGridClient Unexpected error occurred while sending email to %s", _to)
            raise
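A minimal usage sketch; the API key and addresses are placeholders, and the dict keys ("to", "subject", "html") match what send() reads above:

    client = SendGridClient(sendgrid_api_key="SG.xxxxx", _from="no-reply@example.com")
    client.send({
        "to": "user@example.com",
        "subject": "Welcome",
        "html": "<p>Hello from Dify</p>",
    })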
@@ -0,0 +1,66 @@
"""remove sequence_number from workflow_runs

Revision ID: 0ab65e1cc7fa
Revises: 4474872b0ee6
Create Date: 2025-06-19 16:33:13.377215

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0ab65e1cc7fa'
down_revision = '4474872b0ee6'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('workflow_run_tenant_app_sequence_idx'))
        batch_op.drop_column('sequence_number')

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###

    # WARNING: This downgrade CANNOT recover the original sequence_number values!
    # The original sequence numbers are permanently lost after the upgrade.
    # This downgrade will regenerate sequence numbers based on created_at order,
    # which may result in different values than the original sequence numbers.
    #
    # If you need to preserve original sequence numbers, use the alternative
    # migration approach that creates a backup table before removal.

    # Step 1: Add sequence_number column as nullable first
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('sequence_number', sa.INTEGER(), autoincrement=False, nullable=True))

    # Step 2: Populate sequence_number values based on created_at order within each app
    # NOTE: This recreates sequence numbering logic but values will be different
    # from the original sequence numbers that were removed in the upgrade
    connection = op.get_bind()
    connection.execute(sa.text("""
        UPDATE workflow_runs
        SET sequence_number = subquery.row_num
        FROM (
            SELECT id, ROW_NUMBER() OVER (
                PARTITION BY tenant_id, app_id
                ORDER BY created_at, id
            ) as row_num
            FROM workflow_runs
        ) subquery
        WHERE workflow_runs.id = subquery.id
    """))

    # Step 3: Make the column NOT NULL and add the index
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.alter_column('sequence_number', nullable=False)
        batch_op.create_index(batch_op.f('workflow_run_tenant_app_sequence_idx'), ['tenant_id', 'app_id', 'sequence_number'], unique=False)

    # ### end Alembic commands ###
@@ -0,0 +1,5 @@
from services.errors.base import BaseServiceError


class PluginInstallationForbiddenError(BaseServiceError):
    pass
@@ -0,0 +1,25 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
    AbstractVectorTest,
    get_example_text,
    setup_mock_redis,
)


class MatrixoneVectorTest(AbstractVectorTest):
    def __init__(self):
        super().__init__()
        self.vector = MatrixoneVector(
            collection_name=self.collection_name,
            config=MatrixoneConfig(
                host="localhost", port=6001, user="dump", password="111", database="dify", metric="l2"
            ),
        )

    def get_ids_by_metadata_field(self):
        ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id)
        assert len(ids) == 1


def test_matrixone_vector(setup_mock_redis):
    MatrixoneVectorTest().run_all_tests()
File diff suppressed because it is too large
@@ -0,0 +1,46 @@
import { useGlobalPublicStore } from '@/context/global-public-context'
import type { SystemFeatures } from '@/types/feature'
import { InstallationScope } from '@/types/feature'
import type { Plugin, PluginManifestInMarket } from '../../types'

type PluginProps = (Plugin | PluginManifestInMarket) & { from: 'github' | 'marketplace' | 'package' }

export function pluginInstallLimit(plugin: PluginProps, systemFeatures: SystemFeatures) {
  const { plugin_installation_permission } = systemFeatures
  if (plugin_installation_permission.restrict_to_marketplace_only) {
    if (plugin.from === 'github' || plugin.from === 'package')
      return { canInstall: false }
  }

  if (plugin_installation_permission.plugin_installation_scope === InstallationScope.ALL) {
    return {
      canInstall: true,
    }
  }
  if (plugin_installation_permission.plugin_installation_scope === InstallationScope.NONE) {
    return {
      canInstall: false,
    }
  }
  // Copy the verification info so the fallback below does not mutate the
  // plugin object; uncategorized plugins default to 'langgenius'.
  const verification = { ...plugin.verification }
  if (!verification.authorized_category)
    verification.authorized_category = 'langgenius'

  if (plugin_installation_permission.plugin_installation_scope === InstallationScope.OFFICIAL_ONLY) {
    return {
      canInstall: verification.authorized_category === 'langgenius',
    }
  }
  if (plugin_installation_permission.plugin_installation_scope === InstallationScope.OFFICIAL_AND_PARTNER) {
    return {
      canInstall: verification.authorized_category === 'langgenius' || verification.authorized_category === 'partner',
    }
  }
  return {
    canInstall: true,
  }
}

export default function usePluginInstallLimit(plugin: PluginProps) {
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
  return pluginInstallLimit(plugin, systemFeatures)
}
@@ -1,30 +0,0 @@
'use client'
import type { FC } from 'react'
import classNames from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useTheme } from 'next-themes'

type LoginLogoProps = {
  className?: string
}

const LoginLogo: FC<LoginLogoProps> = ({
  className,
}) => {
  const { systemFeatures } = useGlobalPublicStore()
  const { theme } = useTheme()

  let src = theme === 'light' ? '/logo/logo-site.png' : `/logo/logo-site-${theme}.png`
  if (systemFeatures.branding.enabled)
    src = systemFeatures.branding.login_page_logo

  return (
    <img
      src={src}
      className={classNames('block w-auto h-10', className)}
      alt='logo'
    />
  )
}

export default LoginLogo
Some files were not shown because too many files have changed in this diff