test_baichuan_model.py

import json
import os
from unittest.mock import patch

from core.model_providers.models.entity.message import PromptMessage, MessageType
from core.model_providers.models.entity.model_params import ModelKwargs
from core.model_providers.models.llm.baichuan_model import BaichuanModel
from core.model_providers.providers.baichuan_provider import BaichuanProvider
from models.provider import Provider, ProviderType


def get_mock_provider(valid_api_key, valid_secret_key):
    # Build a custom Provider record carrying the Baichuan credentials.
    return Provider(
        id='provider_id',
        tenant_id='tenant_id',
        provider_name='baichuan',
        provider_type=ProviderType.CUSTOM.value,
        encrypted_config=json.dumps({
            'api_key': valid_api_key,
            'secret_key': valid_secret_key,
        }),
        is_valid=True,
    )


def get_mock_model(model_name: str, streaming: bool = False):
    # Construct a BaichuanModel using credentials taken from the environment.
    model_kwargs = ModelKwargs(
        temperature=0.01,
    )
    valid_api_key = os.environ['BAICHUAN_API_KEY']
    valid_secret_key = os.environ['BAICHUAN_SECRET_KEY']
    model_provider = BaichuanProvider(provider=get_mock_provider(valid_api_key, valid_secret_key))
    return BaichuanModel(
        model_provider=model_provider,
        name=model_name,
        model_kwargs=model_kwargs,
        streaming=streaming
    )


def decrypt_side_effect(tenant_id, encrypted_api_key):
    # Return the "encrypted" value unchanged so the mocked provider config is used as-is.
    return encrypted_api_key


@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_chat_get_num_tokens(mock_decrypt):
    model = get_mock_model('baichuan2-53b')
    rst = model.get_num_tokens([
        PromptMessage(type=MessageType.SYSTEM, content='You are a kind assistant.'),
        PromptMessage(type=MessageType.HUMAN, content='Who is your manufacturer?')
    ])
    assert rst > 0


@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_chat_run(mock_decrypt, mocker):
    mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
    model = get_mock_model('baichuan2-53b')
    messages = [
        PromptMessage(type=MessageType.HUMAN, content='Are you human? You MUST answer only `y` or `n`.')
    ]
    rst = model.run(
        messages,
    )
    assert len(rst.content) > 0


@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_chat_stream_run(mock_decrypt, mocker):
    mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
    model = get_mock_model('baichuan2-53b', streaming=True)
    messages = [
        PromptMessage(type=MessageType.HUMAN, content='Are you human? You MUST answer only `y` or `n`.')
    ]
    rst = model.run(
        messages
    )
    assert len(rst.content) > 0