fix: LLM may not return <think> tag, causing thinking time to keep increasing (#13962)

This commit is contained in:
NFish
2025-02-18 21:39:01 +08:00
committed by GitHub
parent 6fc234183a
commit ece25bce1a
2 changed files with 7 additions and 6 deletions

View File

@@ -68,12 +68,13 @@ const preprocessLaTeX = (content: string) => {
}
const preprocessThinkTag = (content: string) => {
if (!content.trim().startsWith('<think>\n'))
if (!(content.trim().startsWith('<think>\n') || content.trim().startsWith('<details style=')))
return content
return flow([
(str: string) => str.replace('<think>\n', '<details>\n'),
(str: string) => str.replace('\n</think>', '\n[ENDTHINKFLAG]</details>'),
(str: string) => str.replaceAll('<think>\n', '<details>\n'),
(str: string) => str.replaceAll('\n</think>', '\n[ENDTHINKFLAG]</details>'),
(str: string) => str.replaceAll('\n</details>', '\n[ENDTHINKFLAG]</details>'),
])(content)
}