feat: filter empty content messages in llm node (#3547)

Author: takatost
Date: 2024-04-17 13:30:33 +08:00
Committed by: GitHub
Parent: 2e27425e93
Commit: b890c11c14
2 changed files with 40 additions and 1 deletion

@@ -438,7 +438,11 @@ class LLMNode(BaseNode):
         stop = model_config.stop
         vision_enabled = node_data.vision.enabled
 
+        filtered_prompt_messages = []
         for prompt_message in prompt_messages:
+            if prompt_message.is_empty():
+                continue
+
             if not isinstance(prompt_message.content, str):
                 prompt_message_content = []
                 for content_item in prompt_message.content:
@@ -453,7 +457,13 @@
                     and prompt_message_content[0].type == PromptMessageContentType.TEXT):
                 prompt_message.content = prompt_message_content[0].data
 
-        return prompt_messages, stop
+            filtered_prompt_messages.append(prompt_message)
+
+        if not filtered_prompt_messages:
+            raise ValueError("No prompt found in the LLM configuration. "
+                             "Please ensure a prompt is properly configured before proceeding.")
+
+        return filtered_prompt_messages, stop
 
     @classmethod
     def deduct_llm_quota(cls, tenant_id: str, model_instance: ModelInstance, usage: LLMUsage) -> None:
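
For readers outside the codebase, here is a minimal, self-contained sketch of the behavior this diff adds: empty messages are dropped before the prompt list is returned, and an error is raised if nothing survives. The PromptMessage class and filter_prompt_messages helper below are hypothetical stand-ins for Dify's internals, not the actual implementation.

    # Sketch only: PromptMessage is a simplified stand-in; Dify's real class
    # holds richer content types and defines its own is_empty().
    from dataclasses import dataclass


    @dataclass
    class PromptMessage:
        content: str

        def is_empty(self) -> bool:
            # Treat missing or whitespace-only content as empty (assumed semantics).
            return not self.content or not self.content.strip()


    def filter_prompt_messages(prompt_messages: list[PromptMessage]) -> list[PromptMessage]:
        # Mirror the diff's loop: skip empty messages, keep the rest in order.
        filtered = [m for m in prompt_messages if not m.is_empty()]
        if not filtered:
            # Same failure mode as the diff: no usable prompt is a configuration error.
            raise ValueError("No prompt found in the LLM configuration. "
                             "Please ensure a prompt is properly configured before proceeding.")
        return filtered


    messages = [PromptMessage("Hello"), PromptMessage("   "), PromptMessage("")]
    assert len(filter_prompt_messages(messages)) == 1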