chat.py 5.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127
  1. import json
  2. import logging
  3. from typing import Generator, Union
  4. from flask import Response, stream_with_context
  5. from flask_login import current_user
  6. from flask_restful import reqparse
  7. from werkzeug.exceptions import InternalServerError, NotFound
  8. import services
  9. from controllers.console import api
  10. from controllers.console.app.error import ConversationCompletedError, AppUnavailableError, ProviderNotInitializeError, \
  11. ProviderQuotaExceededError, ProviderModelCurrentlyNotSupportError, CompletionRequestError
  12. from controllers.console.universal_chat.wraps import UniversalChatResource
  13. from core.constant import llm_constant
  14. from core.conversation_message_task import PubHandler
  15. from core.llm.error import ProviderTokenNotInitError, QuotaExceededError, ModelCurrentlyNotSupportError, \
  16. LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError, LLMRateLimitError, LLMAuthorizationError
  17. from libs.helper import uuid_value
  18. from services.completion_service import CompletionService
  19. class UniversalChatApi(UniversalChatResource):
  20. def post(self, universal_app):
  21. app_model = universal_app
  22. parser = reqparse.RequestParser()
  23. parser.add_argument('query', type=str, required=True, location='json')
  24. parser.add_argument('conversation_id', type=uuid_value, location='json')
  25. parser.add_argument('model', type=str, required=True, location='json')
  26. parser.add_argument('tools', type=list, required=True, location='json')
  27. args = parser.parse_args()
  28. app_model_config = app_model.app_model_config
  29. # update app model config
  30. args['model_config'] = app_model_config.to_dict()
  31. args['model_config']['model']['name'] = args['model']
  32. if not llm_constant.models[args['model']]:
  33. raise ValueError("Model not exists.")
  34. args['model_config']['model']['provider'] = llm_constant.models[args['model']]
  35. args['model_config']['agent_mode']['tools'] = args['tools']
  36. args['inputs'] = {}
  37. del args['model']
  38. del args['tools']
  39. try:
  40. response = CompletionService.completion(
  41. app_model=app_model,
  42. user=current_user,
  43. args=args,
  44. from_source='console',
  45. streaming=True,
  46. is_model_config_override=True,
  47. )
  48. return compact_response(response)
  49. except services.errors.conversation.ConversationNotExistsError:
  50. raise NotFound("Conversation Not Exists.")
  51. except services.errors.conversation.ConversationCompletedError:
  52. raise ConversationCompletedError()
  53. except services.errors.app_model_config.AppModelConfigBrokenError:
  54. logging.exception("App model config broken.")
  55. raise AppUnavailableError()
  56. except ProviderTokenNotInitError:
  57. raise ProviderNotInitializeError()
  58. except QuotaExceededError:
  59. raise ProviderQuotaExceededError()
  60. except ModelCurrentlyNotSupportError:
  61. raise ProviderModelCurrentlyNotSupportError()
  62. except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
  63. LLMRateLimitError, LLMAuthorizationError) as e:
  64. raise CompletionRequestError(str(e))
  65. except ValueError as e:
  66. raise e
  67. except Exception as e:
  68. logging.exception("internal server error.")
  69. raise InternalServerError()
  70. class UniversalChatStopApi(UniversalChatResource):
  71. def post(self, universal_app, task_id):
  72. PubHandler.stop(current_user, task_id)
  73. return {'result': 'success'}, 200
  74. def compact_response(response: Union[dict | Generator]) -> Response:
  75. if isinstance(response, dict):
  76. return Response(response=json.dumps(response), status=200, mimetype='application/json')
  77. else:
  78. def generate() -> Generator:
  79. try:
  80. for chunk in response:
  81. yield chunk
  82. except services.errors.conversation.ConversationNotExistsError:
  83. yield "data: " + json.dumps(api.handle_error(NotFound("Conversation Not Exists.")).get_json()) + "\n\n"
  84. except services.errors.conversation.ConversationCompletedError:
  85. yield "data: " + json.dumps(api.handle_error(ConversationCompletedError()).get_json()) + "\n\n"
  86. except services.errors.app_model_config.AppModelConfigBrokenError:
  87. logging.exception("App model config broken.")
  88. yield "data: " + json.dumps(api.handle_error(AppUnavailableError()).get_json()) + "\n\n"
  89. except ProviderTokenNotInitError:
  90. yield "data: " + json.dumps(api.handle_error(ProviderNotInitializeError()).get_json()) + "\n\n"
  91. except QuotaExceededError:
  92. yield "data: " + json.dumps(api.handle_error(ProviderQuotaExceededError()).get_json()) + "\n\n"
  93. except ModelCurrentlyNotSupportError:
  94. yield "data: " + json.dumps(api.handle_error(ProviderModelCurrentlyNotSupportError()).get_json()) + "\n\n"
  95. except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
  96. LLMRateLimitError, LLMAuthorizationError) as e:
  97. yield "data: " + json.dumps(api.handle_error(CompletionRequestError(str(e))).get_json()) + "\n\n"
  98. except ValueError as e:
  99. yield "data: " + json.dumps(api.handle_error(e).get_json()) + "\n\n"
  100. except Exception:
  101. logging.exception("internal server error.")
  102. yield "data: " + json.dumps(api.handle_error(InternalServerError()).get_json()) + "\n\n"
  103. return Response(stream_with_context(generate()), status=200,
  104. mimetype='text/event-stream')
# Route registration for the console universal-chat API:
# POST /universal-chat/messages                       -> start a completion
# POST /universal-chat/messages/<task_id>/stop        -> abort a running task
api.add_resource(UniversalChatApi, '/universal-chat/messages')
api.add_resource(UniversalChatStopApi, '/universal-chat/messages/<string:task_id>/stop')