feat: LLM prompt Jinja2 template now supports more variables (#24944)
@@ -308,7 +308,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [])
 
   const filterJinja2InputVar = useCallback((varPayload: Var) => {
-    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber].includes(varPayload.type)
+    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean, VarType.arrayObject, VarType.object, VarType.array, VarType.boolean].includes(varPayload.type)
   }, [])
 
   const filterMemoryPromptVar = useCallback((varPayload: Var) => {