streamable_chat_anthropic.py

from typing import Any, Dict, List, Optional

from langchain.callbacks.manager import Callbacks
from langchain.chat_models import ChatAnthropic
from langchain.schema import BaseMessage, LLMResult

from core.llm.wrappers.anthropic_wrapper import handle_anthropic_exceptions

class StreamableChatAnthropic(ChatAnthropic):
    """
    Wrapper around Anthropic's chat large language model.
    """

    @handle_anthropic_exceptions
    def generate(
        self,
        messages: List[List[BaseMessage]],
        stop: Optional[List[str]] = None,
        callbacks: Callbacks = None,
        *,
        tags: Optional[List[str]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> LLMResult:
        return super().generate(
            messages, stop, callbacks, tags=tags, metadata=metadata, **kwargs
        )
    @classmethod
    def get_kwargs_from_model_params(cls, params: dict) -> dict:
        # Anthropic expects "model" and "max_tokens_to_sample" in place of the
        # OpenAI-style "model_name" and "max_tokens" keys.
        params["model"] = params.pop("model_name")
        params["max_tokens_to_sample"] = params.pop("max_tokens")
        # Anthropic's API has no frequency/presence penalty parameters, so drop them.
        del params["frequency_penalty"]
        del params["presence_penalty"]
        return params
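
# A minimal usage sketch (not part of the module itself): it assumes the incoming
# params dict uses OpenAI-style keys, as get_kwargs_from_model_params expects, and
# the model id shown is purely illustrative.
if __name__ == "__main__":
    openai_style_params = {
        "model_name": "claude-2",
        "max_tokens": 256,
        "temperature": 0.7,
        "frequency_penalty": 0.0,
        "presence_penalty": 0.0,
    }
    anthropic_kwargs = StreamableChatAnthropic.get_kwargs_from_model_params(
        openai_style_params
    )
    # anthropic_kwargs is now:
    # {"model": "claude-2", "max_tokens_to_sample": 256, "temperature": 0.7}
    chat = StreamableChatAnthropic(**anthropic_kwargs)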