improve qa generate prompt (#3132)

Commit: 28b1c48235
Parent: ab9fcbdfb9
Author: Jyong
Co-authored-by: jyong <jyong@dify.ai>
Committed by: GitHub
Date: 2024-04-07 15:21:11 +08:00

2 changed files with 7 additions and 4 deletions


@@ -166,6 +166,7 @@ class LLMGenerator:
         response = model_instance.invoke_llm(
             prompt_messages=prompt_messages,
             model_parameters={
+                'temperature': 0.01,
                 "max_tokens": 2000
             },
             stream=False
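
The near-zero temperature is what keeps the generated QA pairs stable across runs. As a rough sketch of how this hunk is exercised (not the code in this commit: the function name, the message classes and import path, and the result access are assumptions about Dify's model runtime, layered on the invoke_llm call shown above):

    # Sketch only -- illustrative, not the actual generate_qa_document implementation.
    # GENERATOR_QA_PROMPT is the constant changed in the second file of this commit.
    from core.model_runtime.entities.message_entities import (  # assumed import path
        SystemPromptMessage,
        UserPromptMessage,
    )

    def generate_qa_document_sketch(model_instance, text: str, language: str) -> str:
        prompt = GENERATOR_QA_PROMPT.format(language=language)
        prompt_messages = [
            SystemPromptMessage(content=prompt),  # task, constraints and output format
            UserPromptMessage(content=text),      # the long source text to mine for QA pairs
        ]
        response = model_instance.invoke_llm(
            prompt_messages=prompt_messages,
            model_parameters={
                'temperature': 0.01,  # near-deterministic sampling for reproducible pairs
                'max_tokens': 2000,
            },
            stream=False,
        )
        return response.message.content  # assumed LLMResult shape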


@@ -69,13 +69,15 @@ SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = (
 )
 GENERATOR_QA_PROMPT = (
-    'The user will send a long text. Please think step by step.'
+    '<Task> The user will send a long text. Generate a Question and Answer pairs only using the knowledge in the long text. Please think step by step.'
     'Step 1: Understand and summarize the main content of this text.\n'
     'Step 2: What key information or concepts are mentioned in this text?\n'
     'Step 3: Decompose or combine multiple pieces of information and concepts.\n'
-    'Step 4: Generate 20 questions and answers based on these key information and concepts.'
-    'The questions should be clear and detailed, and the answers should be detailed and complete.\n'
-    "Answer MUST according to the the language:{language} and in the following format: Q1:\nA1:\nQ2:\nA2:...\n"
+    'Step 4: Generate questions and answers based on these key information and concepts.\n'
+    '<Constraints> The questions should be clear and detailed, and the answers should be detailed and complete. '
+    'You must answer in {language}, in a style that is clear and detailed in {language}. No language other than {language} should be used. \n'
+    '<Format> Use the following format: Q1:\nA1:\nQ2:\nA2:...\n'
+    '<QA Pairs>'
 )
 RULE_CONFIG_GENERATE_TEMPLATE = """Given MY INTENDED AUDIENCES and HOPING TO SOLVE using a language model, please select \
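
The new '<Format>' constraint only pays off if the caller can split the completion back into question/answer pairs. A minimal parsing sketch under that assumption (the regex and function name are illustrative, not the parser Dify ships):

    import re

    # Sketch only: split a completion that follows the "Q1:\nA1:\nQ2:\nA2:..." format
    # requested by GENERATOR_QA_PROMPT into (question, answer) tuples.
    def parse_qa_pairs(completion: str) -> list[tuple[str, str]]:
        pattern = re.compile(r'Q\d+:\s*(.*?)\s*A\d+:\s*(.*?)(?=\s*Q\d+:|\Z)', re.DOTALL)
        return [(q.strip(), a.strip()) for q, a in pattern.findall(completion)]

    # Example:
    #   parse_qa_pairs("Q1:\nWhat does the text cover?\nA1:\nIt covers ...\nQ2:\n...")
    #   -> [("What does the text cover?", "It covers ..."), ...]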