Make logging calls use lazy %-style arguments instead of f-strings; convert remaining str.format()/%-formatting to f-strings (#22882)

This commit is contained in:
Asuka Minato
2025-07-25 11:32:48 +09:00
committed by GitHub
parent 570aee5fe6
commit a189d293f8
164 changed files with 557 additions and 563 deletions

View File

@@ -238,13 +238,13 @@ class GraphEngine:
while True:
# max steps reached
if self.graph_runtime_state.node_run_steps > self.max_execution_steps:
raise GraphRunFailedError("Max steps {} reached.".format(self.max_execution_steps))
raise GraphRunFailedError(f"Max steps {self.max_execution_steps} reached.")
# or max execution time reached
if self._is_timed_out(
start_at=self.graph_runtime_state.start_at, max_execution_time=self.max_execution_time
):
raise GraphRunFailedError("Max execution time {}s reached.".format(self.max_execution_time))
raise GraphRunFailedError(f"Max execution time {self.max_execution_time}s reached.")
# init route node state
route_node_state = self.graph_runtime_state.node_run_state.create_node_state(node_id=next_node_id)
@@ -377,7 +377,7 @@ class GraphEngine:
edge = cast(GraphEdge, sub_edge_mappings[0])
if edge.run_condition is None:
logger.warning(f"Edge {edge.target_node_id} run condition is None")
logger.warning("Edge %s run condition is None", edge.target_node_id)
continue
result = ConditionManager.get_condition_handler(
@@ -848,7 +848,7 @@ class GraphEngine:
)
return
except Exception as e:
logger.exception(f"Node {node.title} run failed")
logger.exception("Node %s run failed", node.title)
raise e
def _append_variables_recursively(self, node_id: str, variable_key_list: list[str], variable_value: VariableValue):

View File

@@ -36,7 +36,7 @@ class StreamProcessor(ABC):
reachable_node_ids: list[str] = []
unreachable_first_node_ids: list[str] = []
if finished_node_id not in self.graph.edge_mapping:
logger.warning(f"node {finished_node_id} has no edge mapping")
logger.warning("node %s has no edge mapping", finished_node_id)
return
for edge in self.graph.edge_mapping[finished_node_id]:
if (

View File

@@ -65,7 +65,7 @@ class BaseNode:
try:
result = self._run()
except Exception as e:
logger.exception(f"Node {self.node_id} failed to run")
logger.exception("Node %s failed to run", self.node_id)
result = NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=str(e),

View File

@@ -363,7 +363,7 @@ def _extract_text_from_docx(file_content: bytes) -> str:
text.append(markdown_table)
except Exception as e:
logger.warning(f"Failed to extract table from DOC: {e}")
logger.warning("Failed to extract table from DOC: %s", e)
continue
return "\n".join(text)

View File

@@ -129,7 +129,7 @@ class HttpRequestNode(BaseNode):
},
)
except HttpRequestNodeError as e:
logger.warning(f"http request node {self.node_id} failed to run: {e}")
logger.warning("http request node %s failed to run: %s", self.node_id, e)
return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=str(e),

View File

@@ -129,7 +129,7 @@ class IfElseNode(BaseNode):
var_mapping: dict[str, list[str]] = {}
for case in typed_node_data.cases or []:
for condition in case.conditions:
key = "{}.#{}#".format(node_id, ".".join(condition.variable_selector))
key = f"{node_id}.#{'.'.join(condition.variable_selector)}#"
var_mapping[key] = condition.variable_selector
return var_mapping

View File

@@ -616,7 +616,7 @@ class IterationNode(BaseNode):
)
except IterationNodeError as e:
logger.warning(f"Iteration run failed:{str(e)}")
logger.warning("Iteration run failed:%s", str(e))
yield IterationRunFailedEvent(
iteration_id=self.id,
iteration_node_id=self.node_id,

View File

@@ -670,7 +670,7 @@ class ParameterExtractorNode(BaseNode):
return cast(dict, json.loads(json_str))
except Exception:
pass
logger.info(f"extra error: {result}")
logger.info("extra error: %s", result)
return None
def _extract_json_from_tool_call(self, tool_call: AssistantPromptMessage.ToolCall) -> Optional[dict]:
@@ -690,7 +690,7 @@ class ParameterExtractorNode(BaseNode):
return cast(dict, json.loads(json_str))
except Exception:
pass
logger.info(f"extra error: {result}")
logger.info("extra error: %s", result)
return None
def _generate_default_result(self, data: ParameterExtractorNodeData) -> dict:

View File

@@ -67,7 +67,7 @@ class WorkflowEntry:
# check call depth
workflow_call_max_depth = dify_config.WORKFLOW_CALL_MAX_DEPTH
if call_depth > workflow_call_max_depth:
raise ValueError("Max workflow call depth {} reached.".format(workflow_call_max_depth))
raise ValueError(f"Max workflow call depth {workflow_call_max_depth} reached.")
# init workflow run state
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
@@ -193,7 +193,13 @@ class WorkflowEntry:
# run node
generator = node.run()
except Exception as e:
logger.exception(f"error while running node, {workflow.id=}, {node.id=}, {node.type_=}, {node.version()=}")
logger.exception(
"error while running node, workflow_id=%s, node_id=%s, node_type=%s, node_version=%s",
workflow.id,
node.id,
node.type_,
node.version(),
)
raise WorkflowNodeRunFailedError(node=node, err_msg=str(e))
return node, generator
@@ -297,7 +303,12 @@ class WorkflowEntry:
return node, generator
except Exception as e:
logger.exception(f"error while running node, {node.id=}, {node.type_=}, {node.version()=}")
logger.exception(
"error while running node, node_id=%s, node_type=%s, node_version=%s",
node.id,
node.type_,
node.version(),
)
raise WorkflowNodeRunFailedError(node=node, err_msg=str(e))
@staticmethod