[Chore/Refactor] Use centralized naive_utc_now for UTC datetime operations (#24352)

Signed-off-by: -LAN- <laipz8200@outlook.com>
Authored by -LAN- on 2025-08-22 23:53:05 +08:00, committed by GitHub
parent 295b47cbff
commit da9af7b547
34 changed files with 153 additions and 150 deletions
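For context, a minimal sketch of what the centralized helper presumably looks like (the real implementation lives in libs/datetime_utils.py and is not part of this diff); it returns the same value as the expression it replaces throughout this commit:

    from datetime import UTC, datetime

    def naive_utc_now() -> datetime:
        """Current UTC time as a naive datetime (tzinfo stripped)."""
        # Equivalent to the idiom removed below:
        # datetime.now(UTC).replace(tzinfo=None)
        return datetime.now(UTC).replace(tzinfo=None)

Centralizing the idiom keeps naive-UTC timestamps consistent across the codebase and, as the dataset_service tests near the end of this diff show, gives tests a single seam to patch.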

View File

@@ -1,4 +1,3 @@
-import datetime
import uuid
from collections import OrderedDict
from typing import Any, NamedTuple
@@ -13,6 +12,7 @@ from controllers.console.app.workflow_draft_variable import (
)
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from factories.variable_factory import build_segment
+from libs.datetime_utils import naive_utc_now
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList
@@ -57,7 +57,7 @@ class TestWorkflowDraftVariableFields:
)
sys_var.id = str(uuid.uuid4())
-sys_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+sys_var.last_edited_at = naive_utc_now()
sys_var.visible = True
expected_without_value = OrderedDict(
@@ -88,7 +88,7 @@ class TestWorkflowDraftVariableFields:
)
node_var.id = str(uuid.uuid4())
-node_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+node_var.last_edited_at = naive_utc_now()
expected_without_value: OrderedDict[str, Any] = OrderedDict(
{

View File

@@ -5,7 +5,6 @@ These tests verify the Celery-based asynchronous storage functionality
for workflow execution data.
"""
-from datetime import UTC, datetime
from unittest.mock import Mock, patch
from uuid import uuid4
@@ -13,6 +12,7 @@ import pytest
from core.repositories.celery_workflow_execution_repository import CeleryWorkflowExecutionRepository
from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowType
+from libs.datetime_utils import naive_utc_now
from models import Account, EndUser
from models.enums import WorkflowRunTriggeredFrom
@@ -56,7 +56,7 @@ def sample_workflow_execution():
workflow_version="1.0",
graph={"nodes": [], "edges": []},
inputs={"input1": "value1"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
@@ -199,7 +199,7 @@ class TestCeleryWorkflowExecutionRepository:
workflow_version="1.0",
graph={"nodes": [], "edges": []},
inputs={"input1": "value1"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
exec2 = WorkflowExecution.new(
id_=str(uuid4()),
@@ -208,7 +208,7 @@ class TestCeleryWorkflowExecutionRepository:
workflow_version="1.0",
graph={"nodes": [], "edges": []},
inputs={"input2": "value2"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Save both executions
@@ -235,7 +235,7 @@ class TestCeleryWorkflowExecutionRepository:
workflow_version="1.0",
graph={"nodes": [], "edges": []},
inputs={"input1": "value1"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
repo.save(execution)

View File

@@ -5,7 +5,6 @@ These tests verify the Celery-based asynchronous storage functionality
for workflow node execution data.
"""
-from datetime import UTC, datetime
from unittest.mock import Mock, patch
from uuid import uuid4
@@ -18,6 +17,7 @@ from core.workflow.entities.workflow_node_execution import (
)
from core.workflow.nodes.enums import NodeType
from core.workflow.repositories.workflow_node_execution_repository import OrderConfig
+from libs.datetime_utils import naive_utc_now
from models import Account, EndUser
from models.workflow import WorkflowNodeExecutionTriggeredFrom
@@ -65,7 +65,7 @@ def sample_workflow_node_execution():
title="Test Node",
inputs={"input1": "value1"},
status=WorkflowNodeExecutionStatus.RUNNING,
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
@@ -263,7 +263,7 @@ class TestCeleryWorkflowNodeExecutionRepository:
title="Node 1",
inputs={"input1": "value1"},
status=WorkflowNodeExecutionStatus.RUNNING,
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
exec2 = WorkflowNodeExecution(
id=str(uuid4()),
@@ -276,7 +276,7 @@ class TestCeleryWorkflowNodeExecutionRepository:
title="Node 2",
inputs={"input2": "value2"},
status=WorkflowNodeExecutionStatus.RUNNING,
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
# Save both executions
@@ -314,7 +314,7 @@ class TestCeleryWorkflowNodeExecutionRepository:
title="Node 2",
inputs={},
status=WorkflowNodeExecutionStatus.RUNNING,
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
exec2 = WorkflowNodeExecution(
id=str(uuid4()),
@@ -327,7 +327,7 @@ class TestCeleryWorkflowNodeExecutionRepository:
title="Node 1",
inputs={},
status=WorkflowNodeExecutionStatus.RUNNING,
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
# Save in random order

View File

@@ -1,6 +1,5 @@
import uuid
from collections.abc import Generator
-from datetime import UTC, datetime
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine.entities.event import (
@@ -15,6 +14,7 @@ from core.workflow.nodes.answer.answer_stream_processor import AnswerStreamProce
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
+from libs.datetime_utils import naive_utc_now
def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
@@ -29,7 +29,7 @@ def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngine
def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
-route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(UTC).replace(tzinfo=None))
+route_node_state = RouteNodeState(node_id=next_node_id, start_at=naive_utc_now())
parallel_id = graph.node_parallel_mapping.get(next_node_id)
parallel_start_node_id = None
@@ -68,7 +68,7 @@ def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEve
)
route_node_state.status = RouteNodeState.Status.SUCCESS
-route_node_state.finished_at = datetime.now(UTC).replace(tzinfo=None)
+route_node_state.finished_at = naive_utc_now()
yield NodeRunSucceededEvent(
id=node_execution_id,
node_id=next_node_id,

View File

@@ -1,5 +1,4 @@
import json
-from datetime import UTC, datetime
from unittest.mock import MagicMock
import pytest
@@ -23,6 +22,7 @@ from core.workflow.repositories.workflow_execution_repository import WorkflowExe
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
+from libs.datetime_utils import naive_utc_now
from models.enums import CreatorUserRole
from models.model import AppMode
from models.workflow import Workflow, WorkflowRun
@@ -145,8 +145,8 @@ def real_workflow():
workflow.graph = json.dumps(graph_data)
workflow.features = json.dumps({"file_upload": {"enabled": False}})
workflow.created_by = "test-user-id"
-workflow.created_at = datetime.now(UTC).replace(tzinfo=None)
-workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
+workflow.created_at = naive_utc_now()
+workflow.updated_at = naive_utc_now()
workflow._environment_variables = "{}"
workflow._conversation_variables = "{}"
@@ -169,7 +169,7 @@ def real_workflow_run():
workflow_run.outputs = json.dumps({"answer": "test answer"})
workflow_run.created_by_role = CreatorUserRole.ACCOUNT
workflow_run.created_by = "test-user-id"
-workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
+workflow_run.created_at = naive_utc_now()
return workflow_run
@@ -211,7 +211,7 @@ def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execu
workflow_type=WorkflowType.CHAT,
graph={"nodes": [], "edges": []},
inputs={"query": "test query"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Pre-populate the cache with the workflow execution
@@ -245,7 +245,7 @@ def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execut
workflow_type=WorkflowType.CHAT,
graph={"nodes": [], "edges": []},
inputs={"query": "test query"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Pre-populate the cache with the workflow execution
@@ -282,7 +282,7 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execu
workflow_type=WorkflowType.CHAT,
graph={"nodes": [], "edges": []},
inputs={"query": "test query"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Pre-populate the cache with the workflow execution
@@ -335,7 +335,7 @@ def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_work
workflow_type=WorkflowType.CHAT,
graph={"nodes": [], "edges": []},
inputs={"query": "test query"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Pre-populate the cache with the workflow execution
@@ -366,7 +366,7 @@ def test_handle_workflow_node_execution_success(workflow_cycle_manager):
event.process_data = {"process": "test process"}
event.outputs = {"output": "test output"}
event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100}
-event.start_at = datetime.now(UTC).replace(tzinfo=None)
+event.start_at = naive_utc_now()
# Create a real node execution
@@ -379,7 +379,7 @@ def test_handle_workflow_node_execution_success(workflow_cycle_manager):
node_id="test-node-id",
node_type=NodeType.LLM,
title="Test Node",
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
# Pre-populate the cache with the node execution
@@ -409,7 +409,7 @@ def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workfl
workflow_type=WorkflowType.CHAT,
graph={"nodes": [], "edges": []},
inputs={"query": "test query"},
-started_at=datetime.now(UTC).replace(tzinfo=None),
+started_at=naive_utc_now(),
)
# Pre-populate the cache with the workflow execution
@@ -443,7 +443,7 @@ def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
event.process_data = {"process": "test process"}
event.outputs = {"output": "test output"}
event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100}
-event.start_at = datetime.now(UTC).replace(tzinfo=None)
+event.start_at = naive_utc_now()
event.error = "Test error message"
# Create a real node execution
@@ -457,7 +457,7 @@ def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
node_id="test-node-id",
node_type=NodeType.LLM,
title="Test Node",
-created_at=datetime.now(UTC).replace(tzinfo=None),
+created_at=naive_utc_now(),
)
# Pre-populate the cache with the node execution

View File

@@ -93,16 +93,15 @@ class TestDatasetServiceBatchUpdateDocumentStatus:
with (
patch("services.dataset_service.DocumentService.get_document") as mock_get_doc,
patch("extensions.ext_database.db.session") as mock_db,
patch("services.dataset_service.datetime") as mock_datetime,
patch("services.dataset_service.naive_utc_now") as mock_naive_utc_now,
):
current_time = datetime.datetime(2023, 1, 1, 12, 0, 0)
-mock_datetime.datetime.now.return_value = current_time
-mock_datetime.UTC = datetime.UTC
+mock_naive_utc_now.return_value = current_time
yield {
"get_document": mock_get_doc,
"db_session": mock_db,
"datetime": mock_datetime,
"naive_utc_now": mock_naive_utc_now,
"current_time": current_time,
}
@@ -120,21 +119,21 @@ class TestDatasetServiceBatchUpdateDocumentStatus:
assert document.enabled == True
assert document.disabled_at is None
assert document.disabled_by is None
-assert document.updated_at == current_time.replace(tzinfo=None)
+assert document.updated_at == current_time
def _assert_document_disabled(self, document: Mock, user_id: str, current_time: datetime.datetime):
"""Helper method to verify document was disabled correctly."""
assert document.enabled == False
-assert document.disabled_at == current_time.replace(tzinfo=None)
+assert document.disabled_at == current_time
assert document.disabled_by == user_id
-assert document.updated_at == current_time.replace(tzinfo=None)
+assert document.updated_at == current_time
def _assert_document_archived(self, document: Mock, user_id: str, current_time: datetime.datetime):
"""Helper method to verify document was archived correctly."""
assert document.archived == True
-assert document.archived_at == current_time.replace(tzinfo=None)
+assert document.archived_at == current_time
assert document.archived_by == user_id
-assert document.updated_at == current_time.replace(tzinfo=None)
+assert document.updated_at == current_time
def _assert_document_unarchived(self, document: Mock):
"""Helper method to verify document was unarchived correctly."""
@@ -430,7 +429,7 @@ class TestDatasetServiceBatchUpdateDocumentStatus:
# Verify document attributes were updated correctly
self._assert_document_unarchived(archived_doc)
-assert archived_doc.updated_at == mock_document_service_dependencies["current_time"].replace(tzinfo=None)
+assert archived_doc.updated_at == mock_document_service_dependencies["current_time"]
# Verify Redis cache was set (because document is enabled)
redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1)
@@ -495,9 +494,7 @@ class TestDatasetServiceBatchUpdateDocumentStatus:
# Verify document was unarchived
self._assert_document_unarchived(archived_disabled_doc)
-assert archived_disabled_doc.updated_at == mock_document_service_dependencies["current_time"].replace(
-tzinfo=None
-)
+assert archived_disabled_doc.updated_at == mock_document_service_dependencies["current_time"]
# Verify no Redis cache was set (document is disabled)
redis_mock.setex.assert_not_called()