@@ -18,8 +18,8 @@ from services.errors.account import NoPermissionError
 from services.errors.dataset import DatasetNameDuplicateError
 from services.errors.document import DocumentIndexingError
 from services.errors.file import FileNotExistsError
-from tasks import document_indexing_update_task
 from tasks.document_indexing_task import document_indexing_task
+from tasks.document_indexing_update_task import document_indexing_update_task
@@ -358,68 +358,68 @@ class DocumentService:
         if dataset.indexing_technique == 'high_quality':
             IndexBuilder.get_default_service_context(dataset.tenant_id)
 
-        if document_data["original_document_id"]:
-            DocumentService.update_document_with_dataset_id(dataset, document_data, account)
-        # save process rule
-        if not dataset_process_rule:
-            process_rule = document_data["process_rule"]
-            if process_rule["mode"] == "custom":
-                dataset_process_rule = DatasetProcessRule(
-                    dataset_id=dataset.id,
-                    mode=process_rule["mode"],
-                    rules=json.dumps(process_rule["rules"]),
-                    created_by=account.id
-                )
-            elif process_rule["mode"] == "automatic":
-                dataset_process_rule = DatasetProcessRule(
-                    dataset_id=dataset.id,
-                    mode=process_rule["mode"],
-                    rules=json.dumps(DatasetProcessRule.AUTOMATIC_RULES),
-                    created_by=account.id
-                )
-            db.session.add(dataset_process_rule)
-            db.session.commit()
+        if 'original_document_id' in document_data and document_data["original_document_id"]:
+            document = DocumentService.update_document_with_dataset_id(dataset, document_data, account)
+        else:
+            # save process rule
+            if not dataset_process_rule:
+                process_rule = document_data["process_rule"]
+                if process_rule["mode"] == "custom":
+                    dataset_process_rule = DatasetProcessRule(
+                        dataset_id=dataset.id,
+                        mode=process_rule["mode"],
+                        rules=json.dumps(process_rule["rules"]),
+                        created_by=account.id
+                    )
+                elif process_rule["mode"] == "automatic":
+                    dataset_process_rule = DatasetProcessRule(
+                        dataset_id=dataset.id,
+                        mode=process_rule["mode"],
+                        rules=json.dumps(DatasetProcessRule.AUTOMATIC_RULES),
+                        created_by=account.id
+                    )
+                db.session.add(dataset_process_rule)
+                db.session.commit()
 
-        file_name = ''
-        data_source_info = {}
-        if document_data["data_source"]["type"] == "upload_file":
-            file_id = document_data["data_source"]["info"]
-            file = db.session.query(UploadFile).filter(
-                UploadFile.tenant_id == dataset.tenant_id,
-                UploadFile.id == file_id
-            ).first()
+            file_name = ''
+            data_source_info = {}
+            if document_data["data_source"]["type"] == "upload_file":
+                file_id = document_data["data_source"]["info"]
+                file = db.session.query(UploadFile).filter(
+                    UploadFile.tenant_id == dataset.tenant_id,
+                    UploadFile.id == file_id
+                ).first()
 
-            # raise error if file not found
-            if not file:
-                raise FileNotExistsError()
+                # raise error if file not found
+                if not file:
+                    raise FileNotExistsError()
 
-            file_name = file.name
-            data_source_info = {
-                "upload_file_id": file_id,
-            }
+                file_name = file.name
+                data_source_info = {
+                    "upload_file_id": file_id,
+                }
 
-        # save document
-        position = DocumentService.get_documents_position(dataset.id)
-        document = Document(
-            tenant_id=dataset.tenant_id,
-            dataset_id=dataset.id,
-            position=position,
-            data_source_type=document_data["data_source"]["type"],
-            data_source_info=json.dumps(data_source_info),
-            dataset_process_rule_id=dataset_process_rule.id,
-            batch=time.strftime('%Y%m%d%H%M%S') + str(random.randint(100000, 999999)),
-            name=file_name,
-            created_from=created_from,
-            created_by=account.id,
-            # created_api_request_id = db.Column(UUID, nullable=True)
-        )
+            # save document
+            position = DocumentService.get_documents_position(dataset.id)
+            document = Document(
+                tenant_id=dataset.tenant_id,
+                dataset_id=dataset.id,
+                position=position,
+                data_source_type=document_data["data_source"]["type"],
+                data_source_info=json.dumps(data_source_info),
+                dataset_process_rule_id=dataset_process_rule.id,
+                batch=time.strftime('%Y%m%d%H%M%S') + str(random.randint(100000, 999999)),
+                name=file_name,
+                created_from=created_from,
+                created_by=account.id,
+                # created_api_request_id = db.Column(UUID, nullable=True)
+            )
 
-        db.session.add(document)
-        db.session.commit()
+            db.session.add(document)
+            db.session.commit()
 
-        # trigger async task
-        document_indexing_task.delay(document.dataset_id, document.id)
+            # trigger async task
+            document_indexing_task.delay(document.dataset_id, document.id)
 
         return document
 
     @staticmethod
@@ -430,7 +430,7 @@ class DocumentService:
         if document.display_status != 'available':
             raise ValueError("Document is not available")
         # save process rule
-        if 'process_rule' in document_data or document_data['process_rule']:
+        if 'process_rule' in document_data and document_data['process_rule']:
             process_rule = document_data["process_rule"]
             if process_rule["mode"] == "custom":
                 dataset_process_rule = DatasetProcessRule(
@@ -450,7 +450,7 @@ class DocumentService:
             db.session.commit()
             document.dataset_process_rule_id = dataset_process_rule.id
         # update document data source
-        if 'data_source' in document_data or document_data['data_source']:
+        if 'data_source' in document_data and document_data['data_source']:
             file_name = ''
             data_source_info = {}
             if document_data["data_source"]["type"] == "upload_file":
@@ -513,17 +513,17 @@ class DocumentService:
 
     @classmethod
     def document_create_args_validate(cls, args: dict):
-        if 'original_document_id ' not in args or not args['original_document_id']:
+        if 'original_document_id' not in args or not args['original_document_id']:
             DocumentService.data_source_args_validate(args)
             DocumentService.process_rule_args_validate(args)
         else:
-            if ('data_source' not in args or not args['data_source']) and (
-                    'process_rule' not in args or not args['process_rule']):
+            if ('data_source' not in args and not args['data_source'])\
+                    and ('process_rule' not in args and not args['process_rule']):
                 raise ValueError("Data source or Process rule is required")
             else:
-                if 'data_source' in args or args['data_source']:
+                if 'data_source' in args and args['data_source']:
                     DocumentService.data_source_args_validate(args)
-                elif 'process_rule' in args or args['process_rule']:
+                if 'process_rule' in args and args['process_rule']:
                     DocumentService.process_rule_args_validate(args)
 
     @classmethod