chore(api/tasks): apply ruff reformatting (#7594)

Author: Bowen Liang
Date: 2024-08-26 13:38:37 +08:00
Committed by: GitHub
Parent: 3be756eaed
Commit: 979422cdc6
29 changed files with 546 additions and 508 deletions
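This commit only changes formatting: ruff's formatter normalizes string literals to double quotes, adds trailing commas, and reflows long signatures and calls to the configured line length, without touching behavior. Below is a minimal sketch of how such a pass might be reproduced locally; it assumes ruff is installed in the environment and that the repository's pyproject.toml carries the formatter settings, and the paths are taken from the commit scope rather than from this diff:

import subprocess

# Run ruff's formatter over the Celery task modules touched by this commit.
subprocess.run(["ruff", "format", "api/tasks"], check=True)

# Verify a second pass would make no further changes (non-zero exit means files would be reformatted).
subprocess.run(["ruff", "format", "--check", "api/tasks"], check=True)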


@@ -10,9 +10,10 @@ from models.dataset import Dataset
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def add_annotation_to_index_task(annotation_id: str, question: str, tenant_id: str, app_id: str,
-                                 collection_binding_id: str):
+@shared_task(queue="dataset")
+def add_annotation_to_index_task(
+    annotation_id: str, question: str, tenant_id: str, app_id: str, collection_binding_id: str
+):
     """
     Add annotation to index.
     :param annotation_id: annotation id
@@ -23,38 +24,34 @@ def add_annotation_to_index_task(annotation_id: str, question: str, tenant_id: s
     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style('Start build index for annotation: {}'.format(annotation_id), fg='green'))
+    logging.info(click.style("Start build index for annotation: {}".format(annotation_id), fg="green"))
     start_at = time.perf_counter()
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-            collection_binding_id,
-            'annotation'
+            collection_binding_id, "annotation"
         )
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
+            indexing_technique="high_quality",
             embedding_model_provider=dataset_collection_binding.provider_name,
             embedding_model=dataset_collection_binding.model_name,
-            collection_binding_id=dataset_collection_binding.id
+            collection_binding_id=dataset_collection_binding.id,
         )
         document = Document(
-            page_content=question,
-            metadata={
-                "annotation_id": annotation_id,
-                "app_id": app_id,
-                "doc_id": annotation_id
-            }
+            page_content=question, metadata={"annotation_id": annotation_id, "app_id": app_id, "doc_id": annotation_id}
         )
-        vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+        vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
         vector.create([document], duplicate_check=True)
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                'Build index successful for annotation: {} latency: {}'.format(annotation_id, end_at - start_at),
-                fg='green'))
+                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                fg="green",
+            )
+        )
     except Exception:
         logging.exception("Build index for annotation failed")
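For context on how a task like the one above is invoked, the sketch below shows the usual Celery dispatch pattern for a shared_task; the import path and the argument values are illustrative assumptions, not part of this commit:

# Hypothetical caller-side dispatch; the IDs are placeholders, not real values.
from tasks.annotation.add_annotation_to_index_task import add_annotation_to_index_task  # import path assumed

add_annotation_to_index_task.delay(
    annotation_id="<annotation-id>",
    question="What is the refund policy?",
    tenant_id="<tenant-id>",
    app_id="<app-id>",
    collection_binding_id="<collection-binding-id>",
)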


@@ -14,9 +14,8 @@ from models.model import App, AppAnnotationSetting, MessageAnnotation
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str,
-                                  user_id: str):
+@shared_task(queue="dataset")
+def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str, user_id: str):
     """
     Add annotation to index.
     :param job_id: job_id
@@ -26,72 +25,66 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
     :param user_id: user_id
     """
-    logging.info(click.style('Start batch import annotation: {}'.format(job_id), fg='green'))
+    logging.info(click.style("Start batch import annotation: {}".format(job_id), fg="green"))
     start_at = time.perf_counter()
-    indexing_cache_key = 'app_annotation_batch_import_{}'.format(str(job_id))
+    indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
     # get app info
-    app = db.session.query(App).filter(
-        App.id == app_id,
-        App.tenant_id == tenant_id,
-        App.status == 'normal'
-    ).first()
+    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
     if app:
         try:
             documents = []
             for content in content_list:
                 annotation = MessageAnnotation(
-                    app_id=app.id,
-                    content=content['answer'],
-                    question=content['question'],
-                    account_id=user_id
+                    app_id=app.id, content=content["answer"], question=content["question"], account_id=user_id
                 )
                 db.session.add(annotation)
                 db.session.flush()
                 document = Document(
-                    page_content=content['question'],
-                    metadata={
-                        "annotation_id": annotation.id,
-                        "app_id": app_id,
-                        "doc_id": annotation.id
-                    }
+                    page_content=content["question"],
+                    metadata={"annotation_id": annotation.id, "app_id": app_id, "doc_id": annotation.id},
                 )
                 documents.append(document)
             # if annotation reply is enabled , batch add annotations' index
-            app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
-                AppAnnotationSetting.app_id == app_id
-            ).first()
+            app_annotation_setting = (
+                db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+            )
             if app_annotation_setting:
-                dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-                    app_annotation_setting.collection_binding_id,
-                    'annotation'
+                dataset_collection_binding = (
+                    DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
+                        app_annotation_setting.collection_binding_id, "annotation"
+                    )
                 )
                 if not dataset_collection_binding:
                     raise NotFound("App annotation setting not found")
                 dataset = Dataset(
                     id=app_id,
                     tenant_id=tenant_id,
-                    indexing_technique='high_quality',
+                    indexing_technique="high_quality",
                     embedding_model_provider=dataset_collection_binding.provider_name,
                     embedding_model=dataset_collection_binding.model_name,
-                    collection_binding_id=dataset_collection_binding.id
+                    collection_binding_id=dataset_collection_binding.id,
                 )
-                vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
                 vector.create(documents, duplicate_check=True)
             db.session.commit()
-            redis_client.setex(indexing_cache_key, 600, 'completed')
+            redis_client.setex(indexing_cache_key, 600, "completed")
             end_at = time.perf_counter()
             logging.info(
                 click.style(
-                    'Build index successful for batch import annotation: {} latency: {}'.format(job_id, end_at - start_at),
-                    fg='green'))
+                    "Build index successful for batch import annotation: {} latency: {}".format(
+                        job_id, end_at - start_at
+                    ),
+                    fg="green",
+                )
+            )
         except Exception as e:
             db.session.rollback()
-            redis_client.setex(indexing_cache_key, 600, 'error')
-            indexing_error_msg_key = 'app_annotation_batch_import_error_msg_{}'.format(str(job_id))
+            redis_client.setex(indexing_cache_key, 600, "error")
+            indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
             redis_client.setex(indexing_error_msg_key, 600, str(e))
             logging.exception("Build index for batch import annotations failed")
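The batch import task reports its outcome through Redis rather than a return value: it writes "completed" or "error" (plus a separate error-message key) with a 600-second TTL. A hedged sketch of how a caller might poll that status follows; the key names are taken from the task above, while the generic Redis client and helper function are assumptions made for illustration (the application would use its own shared redis_client):

import redis

r = redis.Redis()  # stand-in for the application's shared Redis client


def batch_import_status(job_id: str) -> str:
    """Return 'waiting', 'completed', or the recorded error message for a batch import job."""
    status = r.get("app_annotation_batch_import_{}".format(job_id))
    if status is None:
        return "waiting"
    if status.decode("utf-8") == "error":
        error = r.get("app_annotation_batch_import_error_msg_{}".format(job_id))
        return error.decode("utf-8") if error else "error"
    return status.decode("utf-8")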


@@ -9,36 +9,33 @@ from models.dataset import Dataset
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str,
-                                 collection_binding_id: str):
+@shared_task(queue="dataset")
+def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str, collection_binding_id: str):
     """
     Async delete annotation index task
     """
-    logging.info(click.style('Start delete app annotation index: {}'.format(app_id), fg='green'))
+    logging.info(click.style("Start delete app annotation index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-            collection_binding_id,
-            'annotation'
+            collection_binding_id, "annotation"
        )
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
-            collection_binding_id=dataset_collection_binding.id
+            indexing_technique="high_quality",
+            collection_binding_id=dataset_collection_binding.id,
         )
         try:
-            vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
-            vector.delete_by_metadata_field('annotation_id', annotation_id)
+            vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
+            vector.delete_by_metadata_field("annotation_id", annotation_id)
         except Exception:
             logging.exception("Delete annotation index failed when annotation deleted.")
         end_at = time.perf_counter()
         logging.info(
-            click.style('App annotations index deleted : {} latency: {}'.format(app_id, end_at - start_at),
-                        fg='green'))
+            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
+        )
     except Exception as e:
         logging.exception("Annotation deleted index failed:{}".format(str(e)))


@@ -12,49 +12,44 @@ from models.dataset import Dataset
 from models.model import App, AppAnnotationSetting, MessageAnnotation
 
 
-@shared_task(queue='dataset')
+@shared_task(queue="dataset")
 def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
     """
     Async enable annotation reply task
     """
-    logging.info(click.style('Start delete app annotations index: {}'.format(app_id), fg='green'))
+    logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     # get app info
-    app = db.session.query(App).filter(
-        App.id == app_id,
-        App.tenant_id == tenant_id,
-        App.status == 'normal'
-    ).first()
+    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
     annotations_count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).count()
     if not app:
         raise NotFound("App not found")
-    app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
-        AppAnnotationSetting.app_id == app_id
-    ).first()
+    app_annotation_setting = (
+        db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+    )
     if not app_annotation_setting:
         raise NotFound("App annotation setting not found")
-    disable_app_annotation_key = 'disable_app_annotation_{}'.format(str(app_id))
-    disable_app_annotation_job_key = 'disable_app_annotation_job_{}'.format(str(job_id))
+    disable_app_annotation_key = "disable_app_annotation_{}".format(str(app_id))
+    disable_app_annotation_job_key = "disable_app_annotation_job_{}".format(str(job_id))
     try:
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
-            collection_binding_id=app_annotation_setting.collection_binding_id
+            indexing_technique="high_quality",
+            collection_binding_id=app_annotation_setting.collection_binding_id,
         )
         try:
             if annotations_count > 0:
-                vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
-                vector.delete_by_metadata_field('app_id', app_id)
+                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
+                vector.delete_by_metadata_field("app_id", app_id)
         except Exception:
             logging.exception("Delete annotation index failed when annotation deleted.")
-        redis_client.setex(disable_app_annotation_job_key, 600, 'completed')
+        redis_client.setex(disable_app_annotation_job_key, 600, "completed")
         # delete annotation setting
         db.session.delete(app_annotation_setting)
@@ -62,12 +57,12 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
         end_at = time.perf_counter()
         logging.info(
-            click.style('App annotations index deleted : {} latency: {}'.format(app_id, end_at - start_at),
-                        fg='green'))
+            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
+        )
     except Exception as e:
         logging.exception("Annotation batch deleted index failed:{}".format(str(e)))
-        redis_client.setex(disable_app_annotation_job_key, 600, 'error')
-        disable_app_annotation_error_key = 'disable_app_annotation_error_{}'.format(str(job_id))
+        redis_client.setex(disable_app_annotation_job_key, 600, "error")
+        disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id))
         redis_client.setex(disable_app_annotation_error_key, 600, str(e))
     finally:
         redis_client.delete(disable_app_annotation_key)
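The disable task works with two Redis keys: a per-job status key that it writes ("completed" or "error") and a per-app lock key that it only deletes in its finally block, which implies the caller sets that lock before dispatching. A hedged sketch of that caller-side pattern, using a generic Redis client and key names copied from the task above (the helper, the "waiting" value, and the dispatch line are assumptions, not code from this commit):

import redis

r = redis.Redis()  # stand-in for the application's shared redis_client


def start_disable_annotation_reply(app_id: str, job_id: str) -> bool:
    """Set the per-app lock before dispatching; refuse to start if a disable job is already in flight."""
    lock_key = "disable_app_annotation_{}".format(app_id)
    if r.get(lock_key) is not None:
        return False  # previous job's finally block has not cleared the lock yet
    r.setex(lock_key, 600, "waiting")
    r.setex("disable_app_annotation_job_{}".format(job_id), 600, "waiting")
    # disable_annotation_reply_task.delay(job_id, app_id, tenant_id) would be dispatched here.
    return True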


@@ -15,37 +15,39 @@ from models.model import App, AppAnnotationSetting, MessageAnnotation
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def enable_annotation_reply_task(job_id: str, app_id: str, user_id: str, tenant_id: str, score_threshold: float,
-                                 embedding_provider_name: str, embedding_model_name: str):
+@shared_task(queue="dataset")
+def enable_annotation_reply_task(
+    job_id: str,
+    app_id: str,
+    user_id: str,
+    tenant_id: str,
+    score_threshold: float,
+    embedding_provider_name: str,
+    embedding_model_name: str,
+):
     """
     Async enable annotation reply task
     """
-    logging.info(click.style('Start add app annotation to index: {}'.format(app_id), fg='green'))
+    logging.info(click.style("Start add app annotation to index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     # get app info
-    app = db.session.query(App).filter(
-        App.id == app_id,
-        App.tenant_id == tenant_id,
-        App.status == 'normal'
-    ).first()
+    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
     if not app:
         raise NotFound("App not found")
     annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).all()
-    enable_app_annotation_key = 'enable_app_annotation_{}'.format(str(app_id))
-    enable_app_annotation_job_key = 'enable_app_annotation_job_{}'.format(str(job_id))
+    enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id))
+    enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id))
     try:
         documents = []
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-            embedding_provider_name,
-            embedding_model_name,
-            'annotation'
+            embedding_provider_name, embedding_model_name, "annotation"
         )
-        annotation_setting = db.session.query(AppAnnotationSetting).filter(
-            AppAnnotationSetting.app_id == app_id).first()
+        annotation_setting = (
+            db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+        )
         if annotation_setting:
             annotation_setting.score_threshold = score_threshold
             annotation_setting.collection_binding_id = dataset_collection_binding.id
@@ -58,48 +60,42 @@ def enable_annotation_reply_task(job_id: str, app_id: str, user_id: str, tenant_
                 score_threshold=score_threshold,
                 collection_binding_id=dataset_collection_binding.id,
                 created_user_id=user_id,
-                updated_user_id=user_id
+                updated_user_id=user_id,
             )
             db.session.add(new_app_annotation_setting)
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
+            indexing_technique="high_quality",
             embedding_model_provider=embedding_provider_name,
             embedding_model=embedding_model_name,
-            collection_binding_id=dataset_collection_binding.id
+            collection_binding_id=dataset_collection_binding.id,
         )
         if annotations:
             for annotation in annotations:
                 document = Document(
                     page_content=annotation.question,
-                    metadata={
-                        "annotation_id": annotation.id,
-                        "app_id": app_id,
-                        "doc_id": annotation.id
-                    }
+                    metadata={"annotation_id": annotation.id, "app_id": app_id, "doc_id": annotation.id},
                 )
                 documents.append(document)
-            vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+            vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
             try:
-                vector.delete_by_metadata_field('app_id', app_id)
+                vector.delete_by_metadata_field("app_id", app_id)
             except Exception as e:
-                logging.info(
-                    click.style('Delete annotation index error: {}'.format(str(e)),
-                                fg='red'))
+                logging.info(click.style("Delete annotation index error: {}".format(str(e)), fg="red"))
             vector.create(documents)
         db.session.commit()
-        redis_client.setex(enable_app_annotation_job_key, 600, 'completed')
+        redis_client.setex(enable_app_annotation_job_key, 600, "completed")
         end_at = time.perf_counter()
         logging.info(
-            click.style('App annotations added to index: {} latency: {}'.format(app_id, end_at - start_at),
-                        fg='green'))
+            click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green")
+        )
     except Exception as e:
         logging.exception("Annotation batch created index failed:{}".format(str(e)))
-        redis_client.setex(enable_app_annotation_job_key, 600, 'error')
-        enable_app_annotation_error_key = 'enable_app_annotation_error_{}'.format(str(job_id))
+        redis_client.setex(enable_app_annotation_job_key, 600, "error")
+        enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id))
         redis_client.setex(enable_app_annotation_error_key, 600, str(e))
         db.session.rollback()
     finally:


@@ -10,9 +10,10 @@ from models.dataset import Dataset
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def update_annotation_to_index_task(annotation_id: str, question: str, tenant_id: str, app_id: str,
-                                    collection_binding_id: str):
+@shared_task(queue="dataset")
+def update_annotation_to_index_task(
+    annotation_id: str, question: str, tenant_id: str, app_id: str, collection_binding_id: str
+):
     """
     Update annotation to index.
     :param annotation_id: annotation id
@@ -23,39 +24,35 @@ def update_annotation_to_index_task(annotation_id: str, question: str, tenant_id
     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style('Start update index for annotation: {}'.format(annotation_id), fg='green'))
+    logging.info(click.style("Start update index for annotation: {}".format(annotation_id), fg="green"))
     start_at = time.perf_counter()
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-            collection_binding_id,
-            'annotation'
+            collection_binding_id, "annotation"
        )
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
+            indexing_technique="high_quality",
             embedding_model_provider=dataset_collection_binding.provider_name,
             embedding_model=dataset_collection_binding.model_name,
-            collection_binding_id=dataset_collection_binding.id
+            collection_binding_id=dataset_collection_binding.id,
         )
         document = Document(
-            page_content=question,
-            metadata={
-                "annotation_id": annotation_id,
-                "app_id": app_id,
-                "doc_id": annotation_id
-            }
+            page_content=question, metadata={"annotation_id": annotation_id, "app_id": app_id, "doc_id": annotation_id}
        )
-        vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
-        vector.delete_by_metadata_field('annotation_id', annotation_id)
+        vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
+        vector.delete_by_metadata_field("annotation_id", annotation_id)
         vector.add_texts([document])
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                'Build index successful for annotation: {} latency: {}'.format(annotation_id, end_at - start_at),
-                fg='green'))
+                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                fg="green",
+            )
+        )
     except Exception:
         logging.exception("Build index for annotation failed")