feat: re-add prompt messages to result and chunks in llm (#17883)

Signed-off-by: -LAN- <laipz8200@outlook.com>
This commit is contained in:
-LAN-
2025-04-11 18:04:49 +09:00
committed by GitHub
parent 5f8d20b5b2
commit 8e6f6d64a4
5 changed files with 24 additions and 15 deletions

View File

@@ -177,7 +177,7 @@ class ModelInstance:
)
def get_llm_num_tokens(
-        self, prompt_messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None
+        self, prompt_messages: Sequence[PromptMessage], tools: Optional[Sequence[PromptMessageTool]] = None
) -> int:
"""
Get number of tokens for llm