feat: add completion mode and context size options for LLM configuration (#13325)

Signed-off-by: -LAN- <laipz8200@outlook.com>
Author: -LAN-
Date: 2025-02-07 15:08:53 +08:00
Committed by: GitHub
Parent: f9515901cc
Commit: 413dfd5628

4 changed files with 44 additions and 6 deletions


@@ -3,8 +3,7 @@ from typing import Any, Optional
 from pydantic import BaseModel, Field, field_validator
-from core.model_runtime.entities import ImagePromptMessageContent
-from core.model_runtime.entities.llm_entities import LLMMode
+from core.model_runtime.entities import ImagePromptMessageContent, LLMMode
 from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
 from core.workflow.entities.variable_entities import VariableSelector
 from core.workflow.nodes.base import BaseNodeData
@@ -13,7 +12,7 @@ from core.workflow.nodes.base import BaseNodeData
 class ModelConfig(BaseModel):
     provider: str
     name: str
-    mode: LLMMode = LLMMode.COMPLETION
+    mode: LLMMode
     completion_params: dict[str, Any] = {}
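
Note on the ModelConfig change: removing the `= LLMMode.COMPLETION` default makes `mode` a required field, so the chat/completion mode must now be supplied explicitly from the LLM configuration rather than silently falling back to completion mode. Below is a minimal, self-contained sketch of that behaviour; the LLMMode enum here is a stand-in for core.model_runtime.entities.LLMMode (assumed values), and the provider/model values are placeholders, not part of this commit.

from enum import Enum
from typing import Any

from pydantic import BaseModel, ValidationError


# Stand-in for core.model_runtime.entities.LLMMode (assumed member values).
class LLMMode(str, Enum):
    COMPLETION = "completion"
    CHAT = "chat"


class ModelConfig(BaseModel):
    provider: str
    name: str
    mode: LLMMode  # was: mode: LLMMode = LLMMode.COMPLETION
    completion_params: dict[str, Any] = {}


# With the default removed, the mode must be passed explicitly.
config = ModelConfig(provider="openai", name="gpt-4o-mini", mode=LLMMode.CHAT)

# Omitting it is now a validation error instead of an implicit completion mode.
try:
    ModelConfig(provider="openai", name="gpt-4o-mini")
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # -> "missing"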