feat: re-add prompt messages to result and chunks in llm (#17883)
Signed-off-by: -LAN- <laipz8200@outlook.com>
@@ -177,7 +177,7 @@ class ModelInstance:
         )
 
     def get_llm_num_tokens(
-        self, prompt_messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None
+        self, prompt_messages: Sequence[PromptMessage], tools: Optional[Sequence[PromptMessageTool]] = None
     ) -> int:
         """
         Get number of tokens for llm
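The hunk widens get_llm_num_tokens from concrete list[...] parameters to the read-only Sequence[...] protocol. Below is a minimal sketch of the effect, using hypothetical stand-in classes in place of Dify's real PromptMessage and PromptMessageTool entities, and a dummy body rather than the actual token-counting logic:

    from collections.abc import Sequence
    from typing import Optional


    class PromptMessage:  # stand-in for Dify's real prompt message entity
        pass


    class PromptMessageTool:  # stand-in for Dify's real tool entity
        pass


    def get_llm_num_tokens(
        prompt_messages: Sequence[PromptMessage],
        tools: Optional[Sequence[PromptMessageTool]] = None,
    ) -> int:
        # Sequence only requires __len__, __getitem__, and iteration, so
        # lists, tuples, and other read-only sequences all satisfy it.
        # Dummy count; the real method delegates to the model's tokenizer.
        return len(prompt_messages) + (len(tools) if tools is not None else 0)


    # Under list[PromptMessage], a strict type checker such as mypy would
    # reject the tuple call below; under Sequence[...] both calls pass.
    get_llm_num_tokens([PromptMessage(), PromptMessage()])
    get_llm_num_tokens((PromptMessage(),), tools=(PromptMessageTool(),))

Accepting Sequence instead of list also signals to callers that the method will not mutate its arguments, and saves them from copying tuples or other sequences into lists just to satisfy the annotation.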