localai_provider.py

import json
from typing import Type

from langchain.embeddings import LocalAIEmbeddings
from langchain.schema import HumanMessage

from core.helper import encrypter
from core.model_providers.models.embedding.localai_embedding import LocalAIEmbedding
from core.model_providers.models.entity.model_params import ModelKwargsRules, ModelType, KwargRule, ModelMode
from core.model_providers.models.llm.localai_model import LocalAIModel
from core.model_providers.providers.base import BaseModelProvider, CredentialsValidateFailedError
from core.model_providers.models.base import BaseProviderModel
from core.third_party.langchain.llms.chat_open_ai import EnhanceChatOpenAI
from core.third_party.langchain.llms.open_ai import EnhanceOpenAI
from models.provider import ProviderType


class LocalAIProvider(BaseModelProvider):
    @property
    def provider_name(self):
        """
        Returns the name of the provider.
        """
        return 'localai'

    def _get_fixed_model_list(self, model_type: ModelType) -> list[dict]:
        return []

    def _get_text_generation_model_mode(self, model_name) -> str:
        credentials = self.get_model_credentials(model_name, ModelType.TEXT_GENERATION)
        if credentials['completion_type'] == 'chat_completion':
            return ModelMode.CHAT.value
        else:
            return ModelMode.COMPLETION.value

    def get_model_class(self, model_type: ModelType) -> Type[BaseProviderModel]:
        """
        Returns the model class.

        :param model_type:
        :return:
        """
        if model_type == ModelType.TEXT_GENERATION:
            model_class = LocalAIModel
        elif model_type == ModelType.EMBEDDINGS:
            model_class = LocalAIEmbedding
        else:
            raise NotImplementedError

        return model_class

    def get_model_parameter_rules(self, model_name: str, model_type: ModelType) -> ModelKwargsRules:
        """
        get model parameter rules.

        :param model_name:
        :param model_type:
        :return:
        """
        return ModelKwargsRules(
            temperature=KwargRule[float](min=0, max=2, default=0.7, precision=2),
            top_p=KwargRule[float](min=0, max=1, default=1, precision=2),
            max_tokens=KwargRule[int](min=10, max=4097, default=16, precision=0),
        )

    @classmethod
    def is_model_credentials_valid_or_raise(cls, model_name: str, model_type: ModelType, credentials: dict):
        """
        check model credentials valid.

        :param model_name:
        :param model_type:
        :param credentials:
        """
        if 'server_url' not in credentials:
            raise CredentialsValidateFailedError('LocalAI Server URL must be provided.')

        try:
            if model_type == ModelType.EMBEDDINGS:
                # LocalAI exposes an OpenAI-compatible API and does not require a real
                # API key, so a placeholder value is passed as openai_api_key.
                model = LocalAIEmbeddings(
                    model=model_name,
                    openai_api_key='1',
                    openai_api_base=credentials['server_url']
                )

                # issue a minimal request to verify the server and model are reachable
                model.embed_query("ping")
            else:
                if ('completion_type' not in credentials
                        or credentials['completion_type'] not in ['completion', 'chat_completion']):
                    raise CredentialsValidateFailedError('LocalAI Completion Type must be provided.')

                if credentials['completion_type'] == 'chat_completion':
                    model = EnhanceChatOpenAI(
                        model_name=model_name,
                        openai_api_key='1',
                        openai_api_base=credentials['server_url'] + '/v1',
                        max_tokens=10,
                        request_timeout=60,
                    )

                    model([HumanMessage(content='ping')])
                else:
                    model = EnhanceOpenAI(
                        model_name=model_name,
                        openai_api_key='1',
                        openai_api_base=credentials['server_url'] + '/v1',
                        max_tokens=10,
                        request_timeout=60,
                    )

                    model('ping')
        except Exception as ex:
            raise CredentialsValidateFailedError(str(ex))

    @classmethod
    def encrypt_model_credentials(cls, tenant_id: str, model_name: str, model_type: ModelType,
                                  credentials: dict) -> dict:
        """
        encrypt model credentials for save.

        :param tenant_id:
        :param model_name:
        :param model_type:
        :param credentials:
        :return:
        """
        credentials['server_url'] = encrypter.encrypt_token(tenant_id, credentials['server_url'])
        return credentials

    def get_model_credentials(self, model_name: str, model_type: ModelType, obfuscated: bool = False) -> dict:
        """
        get credentials for llm use.

        :param model_name:
        :param model_type:
        :param obfuscated:
        :return:
        """
        if self.provider.provider_type != ProviderType.CUSTOM.value:
            raise NotImplementedError

        provider_model = self._get_provider_model(model_name, model_type)

        if not provider_model.encrypted_config:
            return {
                'server_url': None,
            }

        credentials = json.loads(provider_model.encrypted_config)

        if credentials['server_url']:
            credentials['server_url'] = encrypter.decrypt_token(
                self.provider.tenant_id,
                credentials['server_url']
            )

            # mask the decrypted URL when credentials are fetched for display
            if obfuscated:
                credentials['server_url'] = encrypter.obfuscated_token(credentials['server_url'])

        return credentials

    @classmethod
    def is_provider_credentials_valid_or_raise(cls, credentials: dict):
        # LocalAI is configured per model, so there are no provider-level credentials to validate
        return

    @classmethod
    def encrypt_provider_credentials(cls, tenant_id: str, credentials: dict) -> dict:
        return {}

    def get_provider_credentials(self, obfuscated: bool = False) -> dict:
        return {}
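
Below is a minimal usage sketch, not part of the original file: it assumes a LocalAI server reachable at http://127.0.0.1:8080, a hypothetical model name ggml-gpt4all-j, and the module path core.model_providers.providers.localai_provider, and simply exercises the credential check implemented above.

# Illustrative only: validate credentials for a hypothetical chat-completion model
# served by a local LocalAI instance (server URL and model name are assumptions).
from core.model_providers.models.entity.model_params import ModelType
from core.model_providers.providers.base import CredentialsValidateFailedError
from core.model_providers.providers.localai_provider import LocalAIProvider

credentials = {
    'server_url': 'http://127.0.0.1:8080',  # assumed LocalAI endpoint
    'completion_type': 'chat_completion',   # or 'completion'
}

try:
    LocalAIProvider.is_model_credentials_valid_or_raise(
        model_name='ggml-gpt4all-j',         # assumed model name
        model_type=ModelType.TEXT_GENERATION,
        credentials=credentials,
    )
    print('LocalAI credentials are valid')
except CredentialsValidateFailedError as e:
    print(f'validation failed: {e}')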