orm filter -> where (#22801)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Claude <noreply@anthropic.com>
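Context: in SQLAlchemy 1.4+, Query.where() is a documented synonym for Query.filter(), added so the legacy Query API reads like the 2.0-style select().where(). The commit is therefore a mechanical rename with no behavioral change. A minimal sketch of the equivalence, using an illustrative Document model rather than the project's actual models:

    from sqlalchemy import Column, String, create_engine, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Document(Base):  # illustrative stand-in for the models touched by this diff
        __tablename__ = "documents"
        id = Column(String, primary_key=True)
        dataset_id = Column(String)

    engine = create_engine("sqlite://")  # throwaway in-memory database for the sketch
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # Legacy Query API: .where() and .filter() compile to identical SQL.
        by_filter = session.query(Document).filter(Document.dataset_id == "d1")
        by_where = session.query(Document).where(Document.dataset_id == "d1")
        assert str(by_filter) == str(by_where)

        # The 2.0-style form the new spelling aligns with:
        docs = session.scalars(select(Document).where(Document.dataset_id == "d1")).all()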
@@ -25,7 +25,7 @@ def add_document_to_index_task(dataset_document_id: str):
     logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
     start_at = time.perf_counter()
 
-    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document_id).first()
+    dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first()
     if not dataset_document:
         logging.info(click.style("Document not found: {}".format(dataset_document_id), fg="red"))
         db.session.close()
@@ -43,7 +43,7 @@ def add_document_to_index_task(dataset_document_id: str):
 
         segments = (
             db.session.query(DocumentSegment)
-            .filter(
+            .where(
                 DocumentSegment.document_id == dataset_document.id,
                 DocumentSegment.enabled == False,
                 DocumentSegment.status == "completed",
@@ -86,12 +86,10 @@ def add_document_to_index_task(dataset_document_id: str):
         index_processor.load(dataset, documents)
 
         # delete auto disable log
-        db.session.query(DatasetAutoDisableLog).filter(
-            DatasetAutoDisableLog.document_id == dataset_document.id
-        ).delete()
+        db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.document_id == dataset_document.id).delete()
 
         # update segment to enable
-        db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update(
+        db.session.query(DocumentSegment).where(DocumentSegment.document_id == dataset_document.id).update(
             {
                 DocumentSegment.enabled: True,
                 DocumentSegment.disabled_at: None,
@@ -29,7 +29,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
     start_at = time.perf_counter()
     indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
     # get app info
-    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
+    app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
 
     if app:
         try:
@@ -48,7 +48,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
                 documents.append(document)
             # if annotation reply is enabled , batch add annotations' index
             app_annotation_setting = (
-                db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+                db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
             )
 
             if app_annotation_setting:
@@ -19,16 +19,14 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
     logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     # get app info
-    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
-    annotations_count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).count()
+    app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
+    annotations_count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).count()
     if not app:
         logging.info(click.style("App not found: {}".format(app_id), fg="red"))
         db.session.close()
         return
 
-    app_annotation_setting = (
-        db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
-    )
+    app_annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
 
     if not app_annotation_setting:
         logging.info(click.style("App annotation setting not found: {}".format(app_id), fg="red"))
@@ -30,14 +30,14 @@ def enable_annotation_reply_task(
     logging.info(click.style("Start add app annotation to index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     # get app info
-    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
+    app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
 
     if not app:
         logging.info(click.style("App not found: {}".format(app_id), fg="red"))
         db.session.close()
         return
 
-    annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).all()
+    annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).all()
     enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id))
     enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id))
 
@@ -46,9 +46,7 @@ def enable_annotation_reply_task(
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
             embedding_provider_name, embedding_model_name, "annotation"
         )
-        annotation_setting = (
-            db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
-        )
+        annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
         if annotation_setting:
             if dataset_collection_binding.id != annotation_setting.collection_binding_id:
                 old_dataset_collection_binding = (
@@ -27,12 +27,12 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
     start_at = time.perf_counter()
 
     try:
-        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
 
         if not dataset:
             raise Exception("Document has no dataset")
 
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id.in_(document_ids)).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids)).all()
         # check segment is exist
         if segments:
             index_node_ids = [segment.index_node_id for segment in segments]
@@ -42,7 +42,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
             for segment in segments:
                 image_upload_file_ids = get_image_upload_file_ids(segment.content)
                 for upload_file_id in image_upload_file_ids:
-                    image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
+                    image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
                     try:
                         if image_file and image_file.key:
                             storage.delete(image_file.key)
@@ -56,7 +56,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
 
         db.session.commit()
         if file_ids:
-            files = db.session.query(UploadFile).filter(UploadFile.id.in_(file_ids)).all()
+            files = db.session.query(UploadFile).where(UploadFile.id.in_(file_ids)).all()
             for file in files:
                 try:
                     storage.delete(file.key)
@@ -81,7 +81,7 @@ def batch_create_segment_to_index_task(
             segment_hash = helper.generate_text_hash(content)  # type: ignore
             max_position = (
                 db.session.query(func.max(DocumentSegment.position))
-                .filter(DocumentSegment.document_id == dataset_document.id)
+                .where(DocumentSegment.document_id == dataset_document.id)
                 .scalar()
             )
             segment_document = DocumentSegment(
@@ -53,8 +53,8 @@ def clean_dataset_task(
             index_struct=index_struct,
             collection_binding_id=collection_binding_id,
         )
-        documents = db.session.query(Document).filter(Document.dataset_id == dataset_id).all()
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset_id).all()
+        documents = db.session.query(Document).where(Document.dataset_id == dataset_id).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.dataset_id == dataset_id).all()
 
         if documents is None or len(documents) == 0:
             logging.info(click.style("No documents found for dataset: {}".format(dataset_id), fg="green"))
@@ -72,7 +72,7 @@ def clean_dataset_task(
             for segment in segments:
                 image_upload_file_ids = get_image_upload_file_ids(segment.content)
                 for upload_file_id in image_upload_file_ids:
-                    image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
+                    image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
                     if image_file is None:
                         continue
                     try:
@@ -85,12 +85,12 @@ def clean_dataset_task(
                     db.session.delete(image_file)
                 db.session.delete(segment)
 
-        db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()
-        db.session.query(DatasetQuery).filter(DatasetQuery.dataset_id == dataset_id).delete()
-        db.session.query(AppDatasetJoin).filter(AppDatasetJoin.dataset_id == dataset_id).delete()
+        db.session.query(DatasetProcessRule).where(DatasetProcessRule.dataset_id == dataset_id).delete()
+        db.session.query(DatasetQuery).where(DatasetQuery.dataset_id == dataset_id).delete()
+        db.session.query(AppDatasetJoin).where(AppDatasetJoin.dataset_id == dataset_id).delete()
         # delete dataset metadata
-        db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id == dataset_id).delete()
-        db.session.query(DatasetMetadataBinding).filter(DatasetMetadataBinding.dataset_id == dataset_id).delete()
+        db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset_id).delete()
+        db.session.query(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset_id).delete()
         # delete files
         if documents:
             for document in documents:
@@ -102,7 +102,7 @@ def clean_dataset_task(
                     file_id = data_source_info["upload_file_id"]
                     file = (
                         db.session.query(UploadFile)
-                        .filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
+                        .where(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
                         .first()
                     )
                     if not file:
@@ -28,12 +28,12 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
     start_at = time.perf_counter()
 
     try:
-        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
 
         if not dataset:
             raise Exception("Document has no dataset")
 
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
         # check segment is exist
         if segments:
             index_node_ids = [segment.index_node_id for segment in segments]
@@ -43,7 +43,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
             for segment in segments:
                 image_upload_file_ids = get_image_upload_file_ids(segment.content)
                 for upload_file_id in image_upload_file_ids:
-                    image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
+                    image_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
                     if image_file is None:
                         continue
                     try:
@@ -58,7 +58,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
 
         db.session.commit()
         if file_id:
-            file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first()
+            file = db.session.query(UploadFile).where(UploadFile.id == file_id).first()
             if file:
                 try:
                     storage.delete(file.key)
@@ -68,7 +68,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
             db.session.commit()
 
         # delete dataset metadata binding
-        db.session.query(DatasetMetadataBinding).filter(
+        db.session.query(DatasetMetadataBinding).where(
             DatasetMetadataBinding.dataset_id == dataset_id,
             DatasetMetadataBinding.document_id == document_id,
         ).delete()
@@ -24,17 +24,17 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
     start_at = time.perf_counter()
 
     try:
-        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
 
         if not dataset:
             raise Exception("Document has no dataset")
         index_type = dataset.doc_form
         index_processor = IndexProcessorFactory(index_type).init_index_processor()
         for document_id in document_ids:
-            document = db.session.query(Document).filter(Document.id == document_id).first()
+            document = db.session.query(Document).where(Document.id == document_id).first()
             db.session.delete(document)
 
-            segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+            segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
             index_node_ids = [segment.index_node_id for segment in segments]
 
             index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
@@ -24,7 +24,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
     logging.info(click.style("Start create segment to index: {}".format(segment_id), fg="green"))
     start_at = time.perf_counter()
 
-    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
+    segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
         logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
         db.session.close()
@@ -35,7 +35,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
     elif action == "add":
         dataset_documents = (
             db.session.query(DatasetDocument)
-            .filter(
+            .where(
                 DatasetDocument.dataset_id == dataset_id,
                 DatasetDocument.indexing_status == "completed",
                 DatasetDocument.enabled == True,
@@ -46,7 +46,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
 
         if dataset_documents:
             dataset_documents_ids = [doc.id for doc in dataset_documents]
-            db.session.query(DatasetDocument).filter(DatasetDocument.id.in_(dataset_documents_ids)).update(
+            db.session.query(DatasetDocument).where(DatasetDocument.id.in_(dataset_documents_ids)).update(
                 {"indexing_status": "indexing"}, synchronize_session=False
             )
            db.session.commit()
@@ -56,7 +56,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
                 # add from vector index
                 segments = (
                     db.session.query(DocumentSegment)
-                    .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
+                    .where(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
                     .order_by(DocumentSegment.position.asc())
                     .all()
                 )
@@ -76,19 +76,19 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
                     documents.append(document)
                 # save vector index
                 index_processor.load(dataset, documents, with_keywords=False)
-                db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update(
+                db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update(
                     {"indexing_status": "completed"}, synchronize_session=False
                 )
                 db.session.commit()
             except Exception as e:
-                db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update(
+                db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update(
                     {"indexing_status": "error", "error": str(e)}, synchronize_session=False
                 )
                 db.session.commit()
     elif action == "update":
         dataset_documents = (
             db.session.query(DatasetDocument)
-            .filter(
+            .where(
                 DatasetDocument.dataset_id == dataset_id,
                 DatasetDocument.indexing_status == "completed",
                 DatasetDocument.enabled == True,
@@ -100,7 +100,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
         if dataset_documents:
             # update document status
             dataset_documents_ids = [doc.id for doc in dataset_documents]
-            db.session.query(DatasetDocument).filter(DatasetDocument.id.in_(dataset_documents_ids)).update(
+            db.session.query(DatasetDocument).where(DatasetDocument.id.in_(dataset_documents_ids)).update(
                 {"indexing_status": "indexing"}, synchronize_session=False
             )
             db.session.commit()
@@ -113,7 +113,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
             try:
                 segments = (
                     db.session.query(DocumentSegment)
-                    .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
+                    .where(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
                     .order_by(DocumentSegment.position.asc())
                     .all()
                 )
@@ -148,12 +148,12 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
                     documents.append(document)
                 # save vector index
                 index_processor.load(dataset, documents, with_keywords=False)
-                db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update(
+                db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update(
                     {"indexing_status": "completed"}, synchronize_session=False
                 )
                 db.session.commit()
             except Exception as e:
-                db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document.id).update(
+                db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document.id).update(
                     {"indexing_status": "error", "error": str(e)}, synchronize_session=False
                 )
                 db.session.commit()
@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
 
 @shared_task(queue="dataset")
 def delete_account_task(account_id):
-    account = db.session.query(Account).filter(Account.id == account_id).first()
+    account = db.session.query(Account).where(Account.id == account_id).first()
     try:
         BillingService.delete_account(account_id)
     except Exception as e:
@@ -22,11 +22,11 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume
     logging.info(click.style("Start delete segment from index", fg="green"))
     start_at = time.perf_counter()
     try:
-        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
         if not dataset:
             return
 
-        dataset_document = db.session.query(Document).filter(Document.id == document_id).first()
+        dataset_document = db.session.query(Document).where(Document.id == document_id).first()
         if not dataset_document:
             return
 
@@ -21,7 +21,7 @@ def disable_segment_from_index_task(segment_id: str):
     logging.info(click.style("Start disable segment from index: {}".format(segment_id), fg="green"))
     start_at = time.perf_counter()
 
-    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
+    segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
         logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
         db.session.close()
@@ -23,13 +23,13 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
     """
     start_at = time.perf_counter()
 
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
         logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
         db.session.close()
         return
 
-    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
+    dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first()
 
     if not dataset_document:
         logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
@@ -44,7 +44,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
 
     segments = (
         db.session.query(DocumentSegment)
-        .filter(
+        .where(
             DocumentSegment.id.in_(segment_ids),
             DocumentSegment.dataset_id == dataset_id,
             DocumentSegment.document_id == document_id,
@@ -64,7 +64,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
         logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green"))
     except Exception:
         # update segment error msg
-        db.session.query(DocumentSegment).filter(
+        db.session.query(DocumentSegment).where(
             DocumentSegment.id.in_(segment_ids),
             DocumentSegment.dataset_id == dataset_id,
             DocumentSegment.document_id == document_id,
@@ -25,7 +25,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
     logging.info(click.style("Start sync document: {}".format(document_id), fg="green"))
     start_at = time.perf_counter()
 
-    document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
 
     if not document:
         logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
@@ -46,7 +46,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
         page_edited_time = data_source_info["last_edited_time"]
         data_source_binding = (
             db.session.query(DataSourceOauthBinding)
-            .filter(
+            .where(
                 db.and_(
                     DataSourceOauthBinding.tenant_id == document.tenant_id,
                     DataSourceOauthBinding.provider == "notion",
@@ -77,13 +77,13 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
 
         # delete all document segment and index
         try:
-            dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+            dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
             if not dataset:
                 raise Exception("Dataset not found")
             index_type = document.doc_form
             index_processor = IndexProcessorFactory(index_type).init_index_processor()
 
-            segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+            segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
             index_node_ids = [segment.index_node_id for segment in segments]
 
             # delete from vector index
@@ -24,7 +24,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
     documents = []
     start_at = time.perf_counter()
 
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
         logging.info(click.style("Dataset is not found: {}".format(dataset_id), fg="yellow"))
         db.session.close()
@@ -48,7 +48,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
         except Exception as e:
             for document_id in document_ids:
                 document = (
-                    db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+                    db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
                 )
                 if document:
                     document.indexing_status = "error"
@@ -63,7 +63,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
         logging.info(click.style("Start process document: {}".format(document_id), fg="green"))
 
         document = (
-            db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+            db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
        )
 
        if document:
@@ -23,7 +23,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
     logging.info(click.style("Start update document: {}".format(document_id), fg="green"))
     start_at = time.perf_counter()
 
-    document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
 
     if not document:
         logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
@@ -36,14 +36,14 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
 
     # delete all document segment and index
     try:
-        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
         if not dataset:
             raise Exception("Dataset not found")
 
         index_type = document.doc_form
         index_processor = IndexProcessorFactory(index_type).init_index_processor()
 
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
         if segments:
             index_node_ids = [segment.index_node_id for segment in segments]
 
@@ -25,7 +25,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
     documents = []
     start_at = time.perf_counter()
 
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if dataset is None:
         logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red"))
         db.session.close()
@@ -50,7 +50,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         except Exception as e:
             for document_id in document_ids:
                 document = (
-                    db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+                    db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
                 )
                 if document:
                     document.indexing_status = "error"
@@ -66,7 +66,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         logging.info(click.style("Start process document: {}".format(document_id), fg="green"))
 
         document = (
-            db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+            db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
         )
 
         if document:
@@ -74,7 +74,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
             index_type = document.doc_form
             index_processor = IndexProcessorFactory(index_type).init_index_processor()
 
-            segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+            segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
             if segments:
                 index_node_ids = [segment.index_node_id for segment in segments]
 
@@ -24,7 +24,7 @@ def enable_segment_to_index_task(segment_id: str):
     logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green"))
     start_at = time.perf_counter()
 
-    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
+    segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
         logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
         db.session.close()
@@ -25,12 +25,12 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
     Usage: enable_segments_to_index_task.delay(segment_ids, dataset_id, document_id)
     """
     start_at = time.perf_counter()
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
         logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
         return
 
-    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
+    dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first()
 
     if not dataset_document:
         logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
@@ -45,7 +45,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
 
     segments = (
         db.session.query(DocumentSegment)
-        .filter(
+        .where(
             DocumentSegment.id.in_(segment_ids),
             DocumentSegment.dataset_id == dataset_id,
             DocumentSegment.document_id == document_id,
@@ -95,7 +95,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
     except Exception as e:
         logging.exception("enable segments to index failed")
         # update segment error msg
-        db.session.query(DocumentSegment).filter(
+        db.session.query(DocumentSegment).where(
             DocumentSegment.id.in_(segment_ids),
             DocumentSegment.dataset_id == dataset_id,
             DocumentSegment.document_id == document_id,
@@ -21,7 +21,7 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
     logging.info(click.style("Recover document: {}".format(document_id), fg="green"))
     start_at = time.perf_counter()
 
-    document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
 
     if not document:
         logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
@@ -76,7 +76,7 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str):
 
 def _delete_app_model_configs(tenant_id: str, app_id: str):
     def del_model_config(model_config_id: str):
-        db.session.query(AppModelConfig).filter(AppModelConfig.id == model_config_id).delete(synchronize_session=False)
+        db.session.query(AppModelConfig).where(AppModelConfig.id == model_config_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from app_model_configs where app_id=:app_id limit 1000""",
@@ -88,14 +88,14 @@ def _delete_app_model_configs(tenant_id: str, app_id: str):
 
 def _delete_app_site(tenant_id: str, app_id: str):
     def del_site(site_id: str):
-        db.session.query(Site).filter(Site.id == site_id).delete(synchronize_session=False)
+        db.session.query(Site).where(Site.id == site_id).delete(synchronize_session=False)
 
     _delete_records("""select id from sites where app_id=:app_id limit 1000""", {"app_id": app_id}, del_site, "site")
 
 
 def _delete_app_mcp_servers(tenant_id: str, app_id: str):
     def del_mcp_server(mcp_server_id: str):
-        db.session.query(AppMCPServer).filter(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False)
+        db.session.query(AppMCPServer).where(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from app_mcp_servers where app_id=:app_id limit 1000""",
@@ -107,7 +107,7 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):
 
 def _delete_app_api_tokens(tenant_id: str, app_id: str):
     def del_api_token(api_token_id: str):
-        db.session.query(ApiToken).filter(ApiToken.id == api_token_id).delete(synchronize_session=False)
+        db.session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from api_tokens where app_id=:app_id limit 1000""", {"app_id": app_id}, del_api_token, "api token"
@@ -116,7 +116,7 @@ def _delete_app_api_tokens(tenant_id: str, app_id: str):
 
 def _delete_installed_apps(tenant_id: str, app_id: str):
     def del_installed_app(installed_app_id: str):
-        db.session.query(InstalledApp).filter(InstalledApp.id == installed_app_id).delete(synchronize_session=False)
+        db.session.query(InstalledApp).where(InstalledApp.id == installed_app_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from installed_apps where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
@@ -128,7 +128,7 @@ def _delete_installed_apps(tenant_id: str, app_id: str):
 
 def _delete_recommended_apps(tenant_id: str, app_id: str):
     def del_recommended_app(recommended_app_id: str):
-        db.session.query(RecommendedApp).filter(RecommendedApp.id == recommended_app_id).delete(
+        db.session.query(RecommendedApp).where(RecommendedApp.id == recommended_app_id).delete(
             synchronize_session=False
         )
 
@@ -142,9 +142,9 @@ def _delete_recommended_apps(tenant_id: str, app_id: str):
 
 def _delete_app_annotation_data(tenant_id: str, app_id: str):
     def del_annotation_hit_history(annotation_hit_history_id: str):
-        db.session.query(AppAnnotationHitHistory).filter(
-            AppAnnotationHitHistory.id == annotation_hit_history_id
-        ).delete(synchronize_session=False)
+        db.session.query(AppAnnotationHitHistory).where(AppAnnotationHitHistory.id == annotation_hit_history_id).delete(
+            synchronize_session=False
+        )
 
     _delete_records(
         """select id from app_annotation_hit_histories where app_id=:app_id limit 1000""",
@@ -154,7 +154,7 @@ def _delete_app_annotation_data(tenant_id: str, app_id: str):
     )
 
     def del_annotation_setting(annotation_setting_id: str):
-        db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.id == annotation_setting_id).delete(
+        db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.id == annotation_setting_id).delete(
             synchronize_session=False
         )
 
@@ -168,7 +168,7 @@ def _delete_app_annotation_data(tenant_id: str, app_id: str):
 
 def _delete_app_dataset_joins(tenant_id: str, app_id: str):
     def del_dataset_join(dataset_join_id: str):
-        db.session.query(AppDatasetJoin).filter(AppDatasetJoin.id == dataset_join_id).delete(synchronize_session=False)
+        db.session.query(AppDatasetJoin).where(AppDatasetJoin.id == dataset_join_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from app_dataset_joins where app_id=:app_id limit 1000""",
@@ -180,7 +180,7 @@ def _delete_app_dataset_joins(tenant_id: str, app_id: str):
 
 def _delete_app_workflows(tenant_id: str, app_id: str):
     def del_workflow(workflow_id: str):
-        db.session.query(Workflow).filter(Workflow.id == workflow_id).delete(synchronize_session=False)
+        db.session.query(Workflow).where(Workflow.id == workflow_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from workflows where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
@@ -220,7 +220,7 @@ def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
 
 def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
     def del_workflow_app_log(workflow_app_log_id: str):
-        db.session.query(WorkflowAppLog).filter(WorkflowAppLog.id == workflow_app_log_id).delete(
+        db.session.query(WorkflowAppLog).where(WorkflowAppLog.id == workflow_app_log_id).delete(
             synchronize_session=False
         )
 
@@ -234,10 +234,10 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
 
 def _delete_app_conversations(tenant_id: str, app_id: str):
     def del_conversation(conversation_id: str):
-        db.session.query(PinnedConversation).filter(PinnedConversation.conversation_id == conversation_id).delete(
+        db.session.query(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id).delete(
             synchronize_session=False
         )
-        db.session.query(Conversation).filter(Conversation.id == conversation_id).delete(synchronize_session=False)
+        db.session.query(Conversation).where(Conversation.id == conversation_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from conversations where app_id=:app_id limit 1000""",
@@ -257,19 +257,19 @@ def _delete_conversation_variables(*, app_id: str):
 
 def _delete_app_messages(tenant_id: str, app_id: str):
     def del_message(message_id: str):
-        db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message_id).delete(
+        db.session.query(MessageFeedback).where(MessageFeedback.message_id == message_id).delete(
             synchronize_session=False
         )
-        db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message_id).delete(
+        db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message_id).delete(
             synchronize_session=False
         )
-        db.session.query(MessageChain).filter(MessageChain.message_id == message_id).delete(synchronize_session=False)
-        db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message_id).delete(
+        db.session.query(MessageChain).where(MessageChain.message_id == message_id).delete(synchronize_session=False)
+        db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message_id).delete(
             synchronize_session=False
         )
-        db.session.query(MessageFile).filter(MessageFile.message_id == message_id).delete(synchronize_session=False)
-        db.session.query(SavedMessage).filter(SavedMessage.message_id == message_id).delete(synchronize_session=False)
-        db.session.query(Message).filter(Message.id == message_id).delete()
+        db.session.query(MessageFile).where(MessageFile.message_id == message_id).delete(synchronize_session=False)
+        db.session.query(SavedMessage).where(SavedMessage.message_id == message_id).delete(synchronize_session=False)
+        db.session.query(Message).where(Message.id == message_id).delete()
 
     _delete_records(
         """select id from messages where app_id=:app_id limit 1000""", {"app_id": app_id}, del_message, "message"
@@ -278,7 +278,7 @@ def _delete_app_messages(tenant_id: str, app_id: str):
 
 def _delete_workflow_tool_providers(tenant_id: str, app_id: str):
     def del_tool_provider(tool_provider_id: str):
-        db.session.query(WorkflowToolProvider).filter(WorkflowToolProvider.id == tool_provider_id).delete(
+        db.session.query(WorkflowToolProvider).where(WorkflowToolProvider.id == tool_provider_id).delete(
             synchronize_session=False
         )
 
@@ -292,7 +292,7 @@ def _delete_workflow_tool_providers(tenant_id: str, app_id: str):
 
 def _delete_app_tag_bindings(tenant_id: str, app_id: str):
     def del_tag_binding(tag_binding_id: str):
-        db.session.query(TagBinding).filter(TagBinding.id == tag_binding_id).delete(synchronize_session=False)
+        db.session.query(TagBinding).where(TagBinding.id == tag_binding_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from tag_bindings where tenant_id=:tenant_id and target_id=:app_id limit 1000""",
@@ -304,7 +304,7 @@ def _delete_app_tag_bindings(tenant_id: str, app_id: str):
 
 def _delete_end_users(tenant_id: str, app_id: str):
     def del_end_user(end_user_id: str):
-        db.session.query(EndUser).filter(EndUser.id == end_user_id).delete(synchronize_session=False)
+        db.session.query(EndUser).where(EndUser.id == end_user_id).delete(synchronize_session=False)
 
     _delete_records(
         """select id from end_users where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
@@ -316,7 +316,7 @@ def _delete_end_users(tenant_id: str, app_id: str):
 
 def _delete_trace_app_configs(tenant_id: str, app_id: str):
     def del_trace_app_config(trace_app_config_id: str):
-        db.session.query(TraceAppConfig).filter(TraceAppConfig.id == trace_app_config_id).delete(
+        db.session.query(TraceAppConfig).where(TraceAppConfig.id == trace_app_config_id).delete(
             synchronize_session=False
         )
 
@@ -22,7 +22,7 @@ def remove_document_from_index_task(document_id: str):
     logging.info(click.style("Start remove document segments from index: {}".format(document_id), fg="green"))
     start_at = time.perf_counter()
 
-    document = db.session.query(Document).filter(Document.id == document_id).first()
+    document = db.session.query(Document).where(Document.id == document_id).first()
     if not document:
         logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
         db.session.close()
@@ -43,7 +43,7 @@ def remove_document_from_index_task(document_id: str):
 
         index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
 
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).all()
         index_node_ids = [segment.index_node_id for segment in segments]
         if index_node_ids:
             try:
@@ -51,7 +51,7 @@ def remove_document_from_index_task(document_id: str):
             except Exception:
                 logging.exception(f"clean dataset {dataset.id} from index failed")
         # update segment to disable
-        db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).update(
+        db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
            {
                DocumentSegment.enabled: False,
                DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
@@ -25,7 +25,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
     documents: list[Document] = []
     start_at = time.perf_counter()
 
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
         logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red"))
         db.session.close()
@@ -45,7 +45,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             )
         except Exception as e:
             document = (
-                db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+                db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
             )
             if document:
                 document.indexing_status = "error"
@@ -59,7 +59,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
 
         logging.info(click.style("Start retry document: {}".format(document_id), fg="green"))
         document = (
-            db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+            db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
         )
         if not document:
             logging.info(click.style("Document not found: {}".format(document_id), fg="yellow"))
@@ -69,7 +69,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         # clean old data
         index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
 
-        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+        segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
         if segments:
             index_node_ids = [segment.index_node_id for segment in segments]
             # delete from vector index
@@ -24,7 +24,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     """
     start_at = time.perf_counter()
 
-    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if dataset is None:
         raise ValueError("Dataset not found")
 
@@ -41,7 +41,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         )
     except Exception as e:
         document = (
-            db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+            db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
         )
         if document:
             document.indexing_status = "error"
@@ -53,7 +53,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         return
 
     logging.info(click.style("Start sync website document: {}".format(document_id), fg="green"))
-    document = db.session.query(Document).filter(Document.id == document_id, Document.dataset_id == dataset_id).first()
+    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
     if not document:
         logging.info(click.style("Document not found: {}".format(document_id), fg="yellow"))
         return
@@ -61,7 +61,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     # clean old data
     index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
 
-    segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
+    segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
    if segments:
        index_node_ids = [segment.index_node_id for segment in segments]
        # delete from vector index