advanced_prompt_template_service.py

import copy

from core.prompt.prompt_templates.advanced_prompt_templates import (
    BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CONTEXT,
    CHAT_APP_CHAT_PROMPT_CONFIG,
    CHAT_APP_COMPLETION_PROMPT_CONFIG,
    COMPLETION_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    CONTEXT,
)
from models.model import AppMode


class AdvancedPromptTemplateService:
    @classmethod
    def get_prompt(cls, args: dict) -> dict:
        app_mode = args["app_mode"]
        model_mode = args["model_mode"]
        model_name = args["model_name"]
        has_context = args["has_context"]

        # Baichuan models get their own template set; everything else uses the
        # common templates.
        if "baichuan" in model_name.lower():
            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
        else:
            return cls.get_common_prompt(app_mode, model_mode, has_context)

    @classmethod
    def get_common_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        # Select the template matching the app mode (chat / completion) and the
        # model mode (chat / completion); deep copies keep the module-level
        # constants unmodified.
        context_prompt = copy.deepcopy(CONTEXT)

        if app_mode == AppMode.CHAT.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
                )
            elif model_mode == "chat":
                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
        elif app_mode == AppMode.COMPLETION.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
                )
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt
                )

    @classmethod
    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context block to the completion prompt text when the
        # caller passes has_context == "true" (a string flag, not a bool).
        if has_context == "true":
            prompt_template["completion_prompt_config"]["prompt"]["text"] = (
                context + prompt_template["completion_prompt_config"]["prompt"]["text"]
            )

        return prompt_template

    @classmethod
    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Same as get_completion_prompt, but the context is prepended to the
        # first message of the chat prompt.
        if has_context == "true":
            prompt_template["chat_prompt_config"]["prompt"][0]["text"] = (
                context + prompt_template["chat_prompt_config"]["prompt"][0]["text"]
            )

        return prompt_template

    @classmethod
    def get_baichuan_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        # Baichuan counterpart of get_common_prompt, using the Baichuan
        # template constants.
        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)

        if app_mode == AppMode.CHAT.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt
                )
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt
                )
        elif app_mode == AppMode.COMPLETION.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG),
                    has_context,
                    baichuan_context_prompt,
                )
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt
                )
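
For reference, a minimal usage sketch follows. It is an assumption-laden illustration, not part of the module: the import path and the shape of the returned template (a completion_prompt_config with a prompt.text field) are inferred from how get_prompt and get_completion_prompt read their inputs, and the caller shown here is hypothetical.

# Hypothetical call site; the module path below is assumed, not confirmed by
# this file.
from services.advanced_prompt_template_service import AdvancedPromptTemplateService

args = {
    "app_mode": "chat",          # compared against AppMode.CHAT.value
    "model_mode": "completion",  # "completion" or "chat"
    "model_name": "gpt-4",       # any name containing "baichuan" switches to the Baichuan templates
    "has_context": "true",       # string flag, as expected by get_completion_prompt / get_chat_prompt
}

prompt = AdvancedPromptTemplateService.get_prompt(args)
# For this chat-app / completion-model combination, the result is a deep copy
# of CHAT_APP_COMPLETION_PROMPT_CONFIG with CONTEXT prepended to the prompt text.
print(prompt["completion_prompt_config"]["prompt"]["text"])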