entities.py

from typing import Any, Literal, Optional, Union

from pydantic import BaseModel

from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
from core.workflow.entities.base_node_data_entities import BaseNodeData
from core.workflow.entities.variable_entities import VariableSelector


class ModelConfig(BaseModel):
    """
    Model Config.
    """
    provider: str
    name: str
    mode: str
    completion_params: dict[str, Any] = {}


class ContextConfig(BaseModel):
    """
    Context Config.
    """
    enabled: bool
    variable_selector: Optional[list[str]] = None


class VisionConfig(BaseModel):
    """
    Vision Config.
    """
    class Configs(BaseModel):
        """
        Configs.
        """
        detail: Literal['low', 'high']

    enabled: bool
    configs: Optional[Configs] = None


class PromptConfig(BaseModel):
    """
    Prompt Config.
    """
    jinja2_variables: Optional[list[VariableSelector]] = None


class LLMNodeChatModelMessage(ChatModelMessage):
    """
    LLM Node Chat Model Message.
    """
    jinja2_text: Optional[str] = None


class LLMNodeCompletionModelPromptTemplate(CompletionModelPromptTemplate):
    """
    LLM Node Completion Model Prompt Template.
    """
    jinja2_text: Optional[str] = None


class LLMNodeData(BaseNodeData):
    """
    LLM Node Data.
    """
    model: ModelConfig
    prompt_template: Union[list[LLMNodeChatModelMessage], LLMNodeCompletionModelPromptTemplate]
    prompt_config: Optional[PromptConfig] = None
    memory: Optional[MemoryConfig] = None
    context: ContextConfig
    vision: VisionConfig
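
A minimal usage sketch, not part of the source file: it shows how the configuration models defined above could be instantiated. The provider, model name, and parameter values are illustrative assumptions, and only the classes fully defined in this file are constructed here; LLMNodeData additionally requires fields inherited from BaseNodeData, which is defined elsewhere.

# Hypothetical example values; none of these are prescribed by entities.py.
model = ModelConfig(
    provider='openai',                        # assumed provider name
    name='gpt-4o',                            # assumed model name
    mode='chat',                              # assumed mode
    completion_params={'temperature': 0.7},   # free-form per-model parameters
)

context = ContextConfig(enabled=False)

vision = VisionConfig(
    enabled=True,
    configs=VisionConfig.Configs(detail='low'),  # detail must be 'low' or 'high'
)

Because these are plain pydantic models, validation happens at construction time: for example, passing detail='medium' to VisionConfig.Configs raises a validation error rather than being silently accepted.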