refactor advanced prompt core. (#1350)
@@ -1,6 +1,8 @@
 import copy
 
+from core.model_providers.models.entity.model_params import ModelMode
+from core.prompt.prompt_transform import AppMode
 from core.prompt.advanced_prompt_templates import CHAT_APP_COMPLETION_PROMPT_CONFIG, CHAT_APP_CHAT_PROMPT_CONFIG, COMPLETION_APP_CHAT_PROMPT_CONFIG, COMPLETION_APP_COMPLETION_PROMPT_CONFIG, \
     BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG, BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG, BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG, BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG, CONTEXT, BAICHUAN_CONTEXT
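
The two new imports replace the hard-coded mode strings used in the comparisons below with enum values. The actual definitions live in core.model_providers.models.entity.model_params and core.prompt.prompt_transform and are not part of this diff; as a rough sketch, with member values inferred from the 'chat' / 'completion' literals that this commit removes, they presumably look like:

    from enum import Enum

    # Assumed shape of the imported enums; member values are inferred from the
    # string literals they replace in this commit, not copied from the source files.
    class ModelMode(Enum):
        COMPLETION = 'completion'
        CHAT = 'chat'

    class AppMode(Enum):
        CHAT = 'chat'
        COMPLETION = 'completion'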
@@ -13,7 +15,7 @@ class AdvancedPromptTemplateService:
         model_name = args['model_name']
         has_context = args['has_context']
 
-        if 'baichuan' in model_name:
+        if 'baichuan' in model_name.lower():
             return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
         else:
             return cls.get_common_prompt(app_mode, model_mode, has_context)
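
The only behavioural change in this hunk is the case-insensitive substring check. Under the old code, a model name with different casing (the name below is hypothetical, used only for illustration) would silently fall through to the common templates:

    model_name = 'Baichuan2-13B-Chat'    # hypothetical model name, for illustration only

    'baichuan' in model_name             # False -- old check misses the capitalised name
    'baichuan' in model_name.lower()     # True  -- new check routes to the Baichuan templates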
@@ -22,15 +24,15 @@ class AdvancedPromptTemplateService:
     def get_common_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
         context_prompt = copy.deepcopy(CONTEXT)
 
-        if app_mode == 'chat':
-            if model_mode == 'completion':
+        if app_mode == AppMode.CHAT.value:
+            if model_mode == ModelMode.COMPLETION.value:
                 return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
-            elif model_mode == 'chat':
+            elif model_mode == ModelMode.CHAT.value:
                 return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
-        elif app_mode == 'completion':
-            if model_mode == 'completion':
+        elif app_mode == AppMode.COMPLETION.value:
+            if model_mode == ModelMode.COMPLETION.value:
                 return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
-            elif model_mode == 'chat':
+            elif model_mode == ModelMode.CHAT.value:
                 return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
 
     @classmethod
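
get_common_prompt deep-copies the selected template constant before handing it to get_completion_prompt / get_chat_prompt, presumably so callers can mutate the returned dict without corrupting the module-level config constants. The nested branching itself is equivalent to a two-key lookup; the table below is not part of the commit, only an alternative reading of the dispatch, using the same constants:

    # Illustration only: the (app_mode, model_mode) dispatch implemented above,
    # expressed as a lookup table (assumes the imports shown in the first hunk).
    COMMON_PROMPT_CONFIGS = {
        (AppMode.CHAT.value, ModelMode.COMPLETION.value): CHAT_APP_COMPLETION_PROMPT_CONFIG,
        (AppMode.CHAT.value, ModelMode.CHAT.value): CHAT_APP_CHAT_PROMPT_CONFIG,
        (AppMode.COMPLETION.value, ModelMode.COMPLETION.value): COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
        (AppMode.COMPLETION.value, ModelMode.CHAT.value): COMPLETION_APP_CHAT_PROMPT_CONFIG,
    }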
@@ -51,13 +53,13 @@ class AdvancedPromptTemplateService:
     def get_baichuan_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
         baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)
 
-        if app_mode == 'chat':
-            if model_mode == 'completion':
+        if app_mode == AppMode.CHAT.value:
+            if model_mode == ModelMode.COMPLETION.value:
                 return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
-            elif model_mode == 'chat':
+            elif model_mode == ModelMode.CHAT.value:
                 return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
-        elif app_mode == 'completion':
-            if model_mode == 'completion':
+        elif app_mode == AppMode.COMPLETION.value:
+            if model_mode == ModelMode.COMPLETION.value:
                 return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
-            elif model_mode == 'chat':
+            elif model_mode == ModelMode.CHAT.value:
                 return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
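
For reference, a call into the refactored service could look roughly like the sketch below. Only model_name and has_context are shown being read from args in this diff; the assumption that app_mode and model_mode come from the same dict, and the string value used for has_context, are guesses for illustration only.

    # Hypothetical usage sketch; keys other than 'model_name' and 'has_context'
    # are assumed, not confirmed by this diff (assumes the imports from the first hunk).
    prompt_config = AdvancedPromptTemplateService.get_prompt({
        'app_mode': AppMode.CHAT.value,
        'model_mode': ModelMode.CHAT.value,
        'model_name': 'baichuan2-13b-chat',   # hypothetical model name
        'has_context': 'true',                # has_context is typed as str in the signatures above
    })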