
feat: abab6-chat supported (#2184)

takatost · 1 year ago · commit 76c52300a2

+ 1 - 1
api/core/model_runtime/model_providers/minimax/llm/abab5-chat.yaml

@@ -23,7 +23,7 @@ parameter_rules:
   - name: frequency_penalty
     use_template: frequency_penalty
 pricing:
-  input: '0.00'
+  input: '0.015'
   output: '0.015'
   unit: '0.001'
   currency: RMB

+ 1 - 1
api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml

@@ -36,7 +36,7 @@ parameter_rules:
       en_US: Enable Web Search
       zh_Hans: 开启网页搜索
 pricing:
-  input: '0.00'
+  input: '0.015'
   output: '0.015'
   unit: '0.001'
   currency: RMB

+ 1 - 1
api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml

@@ -29,7 +29,7 @@ parameter_rules:
   - name: frequency_penalty
     use_template: frequency_penalty
 pricing:
-  input: '0.00'
+  input: '0.005'
   output: '0.005'
   unit: '0.001'
   currency: RMB
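
The three pricing corrections above all follow the same schema: input and output are per-unit prices, unit scales the token count (0.001, i.e. the listed prices read as per-1K-token prices), and currency names the unit of account. A back-of-the-envelope cost estimate under that reading — a sketch only, assuming cost = tokens × price × unit, not Dify's actual billing code:

from decimal import Decimal

# Pricing fields as corrected above for abab5.5-chat (abab5.5s-chat uses 0.005 / 0.005).
PRICING = {
    'input': Decimal('0.015'),
    'output': Decimal('0.015'),
    'unit': Decimal('0.001'),
    'currency': 'RMB',
}

def estimate_cost(prompt_tokens: int, completion_tokens: int, pricing: dict = PRICING) -> Decimal:
    # Assumed formula: (prompt tokens * input price + completion tokens * output price) * unit scale.
    return (Decimal(prompt_tokens) * pricing['input']
            + Decimal(completion_tokens) * pricing['output']) * pricing['unit']

print(estimate_cost(1000, 1000), PRICING['currency'])  # 0.030000 RMB under this assumption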

+ 35 - 0
api/core/model_runtime/model_providers/minimax/llm/abab6-chat.yaml

@@ -0,0 +1,35 @@
+model: abab6-chat
+label:
+  en_US: Abab6-Chat
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32768
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.1
+  - name: top_p
+    use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.9
+  - name: max_tokens
+    use_template: max_tokens
+    required: true
+    default: 2048
+    min: 1
+    max: 32768
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+pricing:
+  input: '0.1'
+  output: '0.1'
+  unit: '0.001'
+  currency: RMB
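
For abab6-chat, the parameter_rules above declare the ranges and defaults the model runtime is expected to apply before calling MiniMax: temperature and top_p in [0.01, 1], and max_tokens up to the full 32,768-token context. A minimal, self-contained sketch of that kind of defaulting and clamping, mirroring the values in the YAML rather than reproducing the runtime's actual validator:

# Hypothetical validator mirroring the abab6-chat parameter_rules above.
# presence_penalty / frequency_penalty come from provider templates and are omitted here.
ABAB6_RULES = {
    'temperature': {'min': 0.01, 'max': 1, 'default': 0.1},
    'top_p': {'min': 0.01, 'max': 1, 'default': 0.9},
    'max_tokens': {'min': 1, 'max': 32768, 'default': 2048, 'required': True},
}

def apply_rules(params: dict, rules: dict = ABAB6_RULES) -> dict:
    """Fill in defaults and clamp values into the declared [min, max] ranges."""
    out = {}
    for name, rule in rules.items():
        value = params.get(name, rule.get('default'))
        if value is None:
            if rule.get('required'):
                raise ValueError(f'missing required parameter: {name}')
            continue
        out[name] = min(max(value, rule['min']), rule['max'])
    return out

# e.g. apply_rules({'temperature': 5}) -> {'temperature': 1, 'top_p': 0.9, 'max_tokens': 2048}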

+ 0 - 3
api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py

@@ -22,9 +22,6 @@ class MinimaxChatCompletionPro(object):
         """
             generate chat completion
         """
-        if model not in ['abab5.5-chat', 'abab5.5s-chat']:
-            raise BadRequestError(f'Invalid model: {model}')
-        
         if not api_key or not group_id:
             raise InvalidAPIKeyError('Invalid API key or group ID')
         

+ 3 - 3
api/core/model_runtime/model_providers/minimax/llm/llm.py

@@ -1,9 +1,8 @@
-from typing import Generator, List, Optional, Union
+from typing import Generator, List
 
-from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
 from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
                                                           SystemPromptMessage, UserPromptMessage)
-from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, ParameterRule, ParameterType
 from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
                                               InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
@@ -18,6 +17,7 @@ from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
 
 class MinimaxLargeLanguageModel(LargeLanguageModel):
     model_apis = {
+        'abab6-chat': MinimaxChatCompletionPro,
         'abab5.5s-chat': MinimaxChatCompletionPro,
         'abab5.5-chat': MinimaxChatCompletionPro,
         'abab5-chat': MinimaxChatCompletion