chore: apply ty checks on api code with script and ci action (#24653)

Author: Bowen Liang
Date: 2025-09-02 16:05:13 +08:00
Committed by: GitHub
Parent: c373b734bc
Commit: 7b379e2a61

48 changed files with 188 additions and 142 deletions
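The excerpts below show the recurring change in this commit: a typing.cast(...) wrapper around model_instance.invoke_llm(...) is replaced with an explicit variable annotation, which a type checker such as ty can verify instead of having to trust. A minimal sketch of the idea, using a hypothetical overloaded invoke() helper in place of the real invoke_llm API:

from collections.abc import Generator
from typing import Literal, Union, cast, overload

# Hypothetical stand-in for ModelInstance.invoke_llm: the overloads tie
# the return type to the value of `stream`, so callers do not need cast().
@overload
def invoke(stream: Literal[False]) -> str: ...
@overload
def invoke(stream: Literal[True]) -> Generator[str, None, None]: ...
def invoke(stream: bool = False) -> Union[str, Generator[str, None, None]]:
    if stream:
        return (c for c in "abc")
    return "abc"

# Before: cast() forces the type; it is a runtime no-op, adds a level of
# nesting around the call, and hides mistakes from the checker.
result_old = cast(str, invoke(stream=False))

# After: a plain annotation states the same expectation, and the checker
# verifies it against the matching overload.
result_new: str = invoke(stream=False)

Note that the pattern relies on the callee's signature (here, the overloads) being precise enough for the checker to accept the annotated assignment.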


@@ -1,4 +1,4 @@
-from typing import Union, cast
+from typing import Union
 
 from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
 from core.model_manager import ModelInstance
@@ -28,14 +28,11 @@ class FunctionCallMultiDatasetRouter:
             SystemPromptMessage(content="You are a helpful AI assistant."),
             UserPromptMessage(content=query),
         ]
-        result = cast(
-            LLMResult,
-            model_instance.invoke_llm(
-                prompt_messages=prompt_messages,
-                tools=dataset_tools,
-                stream=False,
-                model_parameters={"temperature": 0.2, "top_p": 0.3, "max_tokens": 1500},
-            ),
+        result: LLMResult = model_instance.invoke_llm(
+            prompt_messages=prompt_messages,
+            tools=dataset_tools,
+            stream=False,
+            model_parameters={"temperature": 0.2, "top_p": 0.3, "max_tokens": 1500},
         )
         if result.message.tool_calls:
             # get retrieval model config


@@ -1,5 +1,5 @@
 from collections.abc import Generator, Sequence
-from typing import Union, cast
+from typing import Union
 
 from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
 from core.model_manager import ModelInstance
@@ -150,15 +150,12 @@ class ReactMultiDatasetRouter:
         :param stop: stop
         :return:
         """
-        invoke_result = cast(
-            Generator[LLMResult, None, None],
-            model_instance.invoke_llm(
-                prompt_messages=prompt_messages,
-                model_parameters=completion_param,
-                stop=stop,
-                stream=True,
-                user=user_id,
-            ),
+        invoke_result: Generator[LLMResult, None, None] = model_instance.invoke_llm(
+            prompt_messages=prompt_messages,
+            model_parameters=completion_param,
+            stop=stop,
+            stream=True,
+            user=user_id,
         )
 
         # handle invoke result
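The streaming call site gets the same treatment. A short sketch of consuming such an annotated generator result, again with a hypothetical stand-in for invoke_llm(..., stream=True):

from collections.abc import Generator

def invoke_stream() -> Generator[str, None, None]:
    # hypothetical stand-in for invoke_llm(..., stream=True)
    yield from ("chunk-1", "chunk-2")

# After the refactor the annotation replaces cast(Generator[...], ...);
# the generator is then consumed chunk by chunk.
invoke_result: Generator[str, None, None] = invoke_stream()
for chunk in invoke_result:
    print(chunk)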