advanced_prompt_template_service.py

import copy

from core.prompt.advanced_prompt_templates import (
    CHAT_APP_COMPLETION_PROMPT_CONFIG,
    CHAT_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG,
    CONTEXT,
    BAICHUAN_CONTEXT,
)


class AdvancedPromptTemplateService:
    """Selects a default advanced prompt template based on app mode, model mode and model name."""

    @classmethod
    def get_prompt(cls, args: dict) -> dict:
        app_mode = args['app_mode']
        model_mode = args['model_mode']
        model_name = args['model_name']
        has_context = args['has_context']

        # Baichuan models use a dedicated template set; all other models share the common one.
        if 'baichuan' in model_name:
            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
        else:
            return cls.get_common_prompt(app_mode, model_mode, has_context)

    @classmethod
    def get_common_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        context_prompt = copy.deepcopy(CONTEXT)

        if app_mode == 'chat':
            if model_mode == 'completion':
                return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
            elif model_mode == 'chat':
                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
        elif app_mode == 'completion':
            if model_mode == 'completion':
                return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
            elif model_mode == 'chat':
                return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)

    @classmethod
    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context block to the completion prompt text when context is enabled.
        if has_context == 'true':
            prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']

        return prompt_template

    @classmethod
    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context block to the first chat message's text when context is enabled.
        if has_context == 'true':
            prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']

        return prompt_template

    @classmethod
    def get_baichuan_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)

        if app_mode == 'chat':
            if model_mode == 'completion':
                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
            elif model_mode == 'chat':
                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
        elif app_mode == 'completion':
            if model_mode == 'completion':
                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
            elif model_mode == 'chat':
                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)