[Chore/Refactor] Use centralized naive_utc_now for UTC datetime operations (#24352)

Signed-off-by: -LAN- <laipz8200@outlook.com>
Authored by -LAN- on 2025-08-22 23:53:05 +08:00, committed by GitHub
parent 295b47cbff
commit da9af7b547
34 changed files with 153 additions and 150 deletions
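
The helper itself is not visible in the hunks below, but for the substitution to be behaviour-preserving, libs.datetime_utils.naive_utc_now must return the current UTC time as a naive datetime. A minimal sketch of what the helper presumably looks like, reconstructed from the call sites in this commit rather than copied from the source:

import datetime

def naive_utc_now() -> datetime.datetime:
    """Return the current UTC time with tzinfo stripped (naive datetime)."""
    # Equivalent to the inline expression removed at every call site below.
    return datetime.datetime.now(datetime.UTC).replace(tzinfo=None)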

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -10,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import ChildDocument, Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import DatasetAutoDisableLog, DocumentSegment
from models.dataset import Document as DatasetDocument
@@ -95,7 +95,7 @@ def add_document_to_index_task(dataset_document_id: str):
DocumentSegment.enabled: True,
DocumentSegment.disabled_at: None,
DocumentSegment.disabled_by: None,
-DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+DocumentSegment.updated_at: naive_utc_now(),
}
)
db.session.commit()
@@ -107,7 +107,7 @@ def add_document_to_index_task(dataset_document_id: str):
except Exception as e:
logging.exception("add document to index failed")
dataset_document.enabled = False
-dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+dataset_document.disabled_at = naive_utc_now()
dataset_document.indexing_status = "error"
dataset_document.error = str(e)
db.session.commit()
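
Every call site in this commit is a one-for-one swap of the inline expression for the helper. A small hypothetical pytest (not part of this commit) that checks the two forms agree and that the result stays naive:

import datetime

from libs.datetime_utils import naive_utc_now

def test_naive_utc_now_is_naive_and_close_to_utc():
    # Old inline expression vs. the centralized helper.
    old = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    new = naive_utc_now()
    assert new.tzinfo is None
    assert abs(new - old) < datetime.timedelta(seconds=1)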

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -9,6 +8,7 @@ from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset
from models.model import App, AppAnnotationSetting, MessageAnnotation
from services.dataset_service import DatasetCollectionBindingService
@@ -72,7 +72,7 @@ def enable_annotation_reply_task(
annotation_setting.score_threshold = score_threshold
annotation_setting.collection_binding_id = dataset_collection_binding.id
annotation_setting.updated_user_id = user_id
-annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+annotation_setting.updated_at = naive_utc_now()
db.session.add(annotation_setting)
else:
new_app_annotation_setting = AppAnnotationSetting(

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import tempfile
import time
@@ -17,6 +16,7 @@ from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from libs import helper
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from models.model import UploadFile
from services.vector_service import VectorService
@@ -123,9 +123,9 @@ def batch_create_segment_to_index_task(
word_count=len(content),
tokens=tokens,
created_by=user_id,
-indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+indexing_at=naive_utc_now(),
status="completed",
-completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+completed_at=naive_utc_now(),
)
if dataset_document.doc_form == "qa_model":
segment_document.answer = segment["answer"]

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
from typing import Optional
@@ -10,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import DocumentSegment
@@ -41,7 +41,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
db.session.query(DocumentSegment).filter_by(id=segment.id).update(
{
DocumentSegment.status: "indexing",
-DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+DocumentSegment.indexing_at: naive_utc_now(),
}
)
db.session.commit()
@@ -79,7 +79,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
db.session.query(DocumentSegment).filter_by(id=segment.id).update(
{
DocumentSegment.status: "completed",
-DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+DocumentSegment.completed_at: naive_utc_now(),
}
)
db.session.commit()
@@ -89,7 +89,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
except Exception as e:
logging.exception("create segment to index failed")
segment.enabled = False
-segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+segment.disabled_at = naive_utc_now()
segment.status = "error"
segment.error = str(e)
db.session.commit()

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -9,6 +8,7 @@ from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.extractor.notion_extractor import NotionExtractor
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from models.source import DataSourceOauthBinding
@@ -72,7 +72,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
# check the page is updated
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
-document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.processing_started_at = naive_utc_now()
db.session.commit()
# delete all document segment and index

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -8,6 +7,7 @@ from celery import shared_task # type: ignore
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@@ -31,7 +31,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
return
document.indexing_status = "parsing"
-document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.processing_started_at = naive_utc_now()
db.session.commit()
# delete all document segment and index

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -9,6 +8,7 @@ from configs import dify_config
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from services.feature_service import FeatureService
@@ -55,7 +55,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
if document:
document.indexing_status = "error"
document.error = str(e)
-document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
return
@@ -86,7 +86,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
db.session.commit()
document.indexing_status = "parsing"
-document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.processing_started_at = naive_utc_now()
documents.append(document)
db.session.add(document)
db.session.commit()

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -10,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import ChildDocument, Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import DocumentSegment
@@ -89,7 +89,7 @@ def enable_segment_to_index_task(segment_id: str):
except Exception as e:
logging.exception("enable segment to index failed")
segment.enabled = False
-segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+segment.disabled_at = naive_utc_now()
segment.status = "error"
segment.error = str(e)
db.session.commit()

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -10,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import ChildDocument, Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, DocumentSegment
from models.dataset import Document as DatasetDocument
@@ -103,7 +103,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
{
"error": str(e),
"status": "error",
"disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
"disabled_at": naive_utc_now(),
"enabled": False,
}
)

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -8,6 +7,7 @@ from celery import shared_task # type: ignore
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import Document, DocumentSegment
@@ -54,9 +54,9 @@ def remove_document_from_index_task(document_id: str):
db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
{
DocumentSegment.enabled: False,
-DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+DocumentSegment.disabled_at: naive_utc_now(),
DocumentSegment.disabled_by: document.disabled_by,
-DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+DocumentSegment.updated_at: naive_utc_now(),
}
)
db.session.commit()

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -9,6 +8,7 @@ from core.indexing_runner import IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from services.feature_service import FeatureService
@@ -51,7 +51,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
if document:
document.indexing_status = "error"
document.error = str(e)
-document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
redis_client.delete(retry_indexing_cache_key)
@@ -79,7 +79,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
db.session.commit()
document.indexing_status = "parsing"
document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.processing_started_at = naive_utc_now()
db.session.add(document)
db.session.commit()
@@ -89,7 +89,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
except Exception as ex:
document.indexing_status = "error"
document.error = str(ex)
-document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
logging.info(click.style(str(ex), fg="yellow"))

View File

@@ -1,4 +1,3 @@
-import datetime
import logging
import time
@@ -9,6 +8,7 @@ from core.indexing_runner import IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from services.feature_service import FeatureService
@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
if document:
document.indexing_status = "error"
document.error = str(e)
-document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
redis_client.delete(sync_indexing_cache_key)
@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
db.session.commit()
document.indexing_status = "parsing"
-document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.processing_started_at = naive_utc_now()
db.session.add(document)
db.session.commit()
@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
except Exception as ex:
document.indexing_status = "error"
document.error = str(ex)
-document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+document.stopped_at = naive_utc_now()
db.session.add(document)
db.session.commit()
logging.info(click.style(str(ex), fg="yellow"))
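
One practical payoff of routing every call site through a single helper is testability: freezing time means patching one symbol in the module under test instead of patching datetime.datetime across all the task files. A hypothetical pytest sketch, with the task module path assumed for illustration and not taken from this commit:

import datetime
from unittest.mock import patch

FROZEN = datetime.datetime(2025, 8, 22, 15, 53, 5)

def test_disabled_at_uses_frozen_time():
    # Patch the name where it is imported, since the tasks use
    # `from libs.datetime_utils import naive_utc_now` (module path assumed).
    with patch("tasks.add_document_to_index_task.naive_utc_now", return_value=FROZEN) as mocked:
        assert mocked() == FROZEN  # stand-in for exercising the task body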