error.py

from typing import Optional


class LLMError(Exception):
    """Base class for all LLM exceptions."""

    description: Optional[str] = None

    def __init__(self, description: Optional[str] = None) -> None:
        self.description = description


class LLMBadRequestError(LLMError):
    """Raised when the LLM returns a bad request error."""

    description = "Bad Request"


class LLMAPIConnectionError(LLMError):
    """Raised when the LLM returns an API connection error."""

    description = "API Connection Error"


class LLMAPIUnavailableError(LLMError):
    """Raised when the LLM returns an API unavailable error."""

    description = "API Unavailable Error"


class LLMRateLimitError(LLMError):
    """Raised when the LLM returns a rate limit error."""

    description = "Rate Limit Error"


class LLMAuthorizationError(LLMError):
    """Raised when the LLM returns an authorization error."""

    description = "Authorization Error"


class ProviderTokenNotInitError(Exception):
    """Custom exception raised when the provider token is not initialized."""

    description = "Provider Token Not Init"

    def __init__(self, *args, **kwargs):
        # Use the first positional argument as the description if one was
        # given; otherwise keep the class-level default.
        self.description = args[0] if args else self.description


class QuotaExceededError(Exception):
    """Custom exception raised when the quota for a provider has been exceeded."""

    description = "Quota Exceeded"


class ModelCurrentlyNotSupportError(Exception):
    """Custom exception raised when the model is currently not supported."""

    description = "Model Currently Not Support"
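# --- Usage sketch (illustrative, not part of error.py) ----------------------
# A hypothetical caller that raises one of the exceptions defined above and a
# handler that reads its `description`. The `call_provider` function and the
# simulated failure are assumptions made for this example only.

def call_provider(prompt: str) -> str:
    # Pretend this wraps a provider SDK call; here we only simulate a
    # rate-limit failure to show how the exception carries a description.
    raise LLMRateLimitError("Rate limit of 60 requests/minute exceeded")


if __name__ == "__main__":
    try:
        completion = call_provider("hello")
    except LLMRateLimitError as e:
        # A subclass falls back to its class-level description when the
        # constructor received no argument.
        print(f"Rate limited: {e.description or LLMRateLimitError.description}")
    except LLMError as e:
        # Any other LLM error (bad request, connection, auth, ...).
        print(f"LLM call failed: {e.description}")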