Refactor: use logger = logging.getLogger(__name__) in logging (#24515)

Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Yongtao Huang authored on 2025-08-26 18:10:31 +08:00, committed by GitHub
parent 8af2ae973f
commit fa753239ad
102 changed files with 565 additions and 401 deletions
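For context, the refactor replaces direct calls on the root logger (logging.info, logging.exception) with a module-level logger created once via logging.getLogger(__name__), so each record carries the emitting module's dotted name and can be filtered or configured per module. A minimal sketch of the pattern, with an illustrative task name that is not taken from the diff:

import logging

# Module-level logger: created once at import time; __name__ resolves to the
# module's dotted path, so records are attributed to this module rather than
# to the root logger.
logger = logging.getLogger(__name__)


def example_task(job_id: str) -> None:
    # Messages propagate to handlers configured on ancestor loggers,
    # but can now be enabled or silenced by module name.
    logger.info("Start job %s", job_id)
    try:
        ...  # do the actual work here
    except Exception:
        # logger.exception() logs at ERROR level and appends the traceback.
        logger.exception("Job %s failed", job_id)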


@@ -21,6 +21,8 @@ from models.dataset import Dataset, Document, DocumentSegment
 from models.model import UploadFile
 from services.vector_service import VectorService
+logger = logging.getLogger(__name__)
 @shared_task(queue="dataset")
 def batch_create_segment_to_index_task(
@@ -42,7 +44,7 @@ def batch_create_segment_to_index_task(
     Usage: batch_create_segment_to_index_task.delay(job_id, upload_file_id, dataset_id, document_id, tenant_id, user_id)
     """
-    logging.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green"))
+    logger.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green"))
     start_at = time.perf_counter()
     indexing_cache_key = f"segment_batch_import_{job_id}"
@@ -142,14 +144,14 @@ def batch_create_segment_to_index_task(
         db.session.commit()
         redis_client.setex(indexing_cache_key, 600, "completed")
         end_at = time.perf_counter()
-        logging.info(
+        logger.info(
             click.style(
                 f"Segment batch created job: {job_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("Segments batch created index failed")
+        logger.exception("Segments batch created index failed")
         redis_client.setex(indexing_cache_key, 600, "error")
     finally:
         db.session.close()
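With module-level loggers in place, a single root configuration controls where task logs go, and the record's %(name)s field identifies the emitting module. A minimal sketch, assuming a typical basicConfig setup that is not part of this commit:

import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

# For the file changed above, __name__ likely resolves to something like
# "tasks.batch_create_segment_to_index_task", so that one task's verbosity
# can be adjusted without touching the others:
logging.getLogger("tasks.batch_create_segment_to_index_task").setLevel(logging.WARNING)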