feat: LLM prompt Jinja2 template now supports more variables (#24944)
@@ -140,7 +140,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
         <ConfigPrompt
           readOnly={readOnly}
           nodeId={id}
-          filterVar={filterInputVar}
+          filterVar={isShowVars ? filterJinja2InputVar : filterInputVar}
           isChatModel={isChatModel}
           isChatApp={isChatMode}
           isShowContext
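The hunk above swaps in a different variable filter when the Jinja2 variable panel is open. Below is a minimal standalone sketch of that pattern: Var, VarType, isShowVars, and the two filter names come from the diff, while the enum members, their values, and the sample data are illustrative assumptions, not the actual Dify code.

// Sketch of the conditional-filter wiring (simplified stand-in types).
enum VarType { string = 'string', number = 'number', object = 'object' }
interface Var { variable: string; type: VarType }
type VarFilter = (v: Var) => boolean

// Plain prompts keep the narrower filter; Jinja2 mode gets the wider one.
const filterInputVar: VarFilter = v => [VarType.string, VarType.number].includes(v.type)
const filterJinja2InputVar: VarFilter = _v => true // illustrative: accept all types

const pickFilter = (isShowVars: boolean): VarFilter =>
  isShowVars ? filterJinja2InputVar : filterInputVar

// An object-typed variable is offered only when the Jinja2 editor is shown.
const ctx: Var = { variable: 'ctx', type: VarType.object }
console.log(pickFilter(false)(ctx)) // false
console.log(pickFilter(true)(ctx))  // true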
@@ -308,7 +308,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [])
 
   const filterJinja2InputVar = useCallback((varPayload: Var) => {
-    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber].includes(varPayload.type)
+    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean, VarType.arrayObject, VarType.object, VarType.array, VarType.boolean].includes(varPayload.type)
   }, [])
 
   const filterMemoryPromptVar = useCallback((varPayload: Var) => {
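Read on its own, the widened predicate above is an allow-list over VarType. The sketch below restates it with a Set, one idiomatic alternative to Array.prototype.includes; only the member names are taken from the diff, while the enum string values and the Set refactor are assumptions for illustration.

// Standalone restatement of filterJinja2InputVar as a Set-based allow-list.
// Enum values are assumed; the diff only names the members.
enum VarType {
  string = 'string',
  number = 'number',
  secret = 'secret',
  boolean = 'boolean',
  object = 'object',
  array = 'array',
  arrayString = 'array[string]',
  arrayNumber = 'array[number]',
  arrayBoolean = 'array[boolean]',
  arrayObject = 'array[object]',
}

interface Var { variable: string; type: VarType }

// Before the change: only number, string, secret, and string/number arrays.
// After: every VarType listed here is usable inside a Jinja2 prompt template.
const JINJA2_INPUT_TYPES = new Set<VarType>([
  VarType.number, VarType.string, VarType.secret,
  VarType.arrayString, VarType.arrayNumber,
  VarType.arrayBoolean, VarType.arrayObject,
  VarType.object, VarType.array, VarType.boolean,
])

const filterJinja2InputVar = (varPayload: Var): boolean =>
  JINJA2_INPUT_TYPES.has(varPayload.type)

// Example: an object-typed variable is now accepted.
console.log(filterJinja2InputVar({ variable: 'user', type: VarType.object })) // true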