"""Integration tests for the ZhipuAI large-language-model provider.

These tests hit the live ZhipuAI API and therefore require the
``ZHIPUAI_API_KEY`` environment variable to be set.
"""

import os
from collections.abc import Generator

import pytest

from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import (
    AssistantPromptMessage,
    PromptMessageTool,
    SystemPromptMessage,
    UserPromptMessage,
)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.zhipuai.llm.llm import ZhipuAILargeLanguageModel


def test_validate_credentials():
    """An invalid key must raise; the real key from the environment must pass."""
    model = ZhipuAILargeLanguageModel()

    with pytest.raises(CredentialsValidateFailedError):
        model.validate_credentials(model="chatglm_turbo", credentials={"api_key": "invalid_key"})

    model.validate_credentials(model="chatglm_turbo", credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")})


def test_invoke_model():
    """A non-streaming invoke returns a single LLMResult with non-empty content."""
    model = ZhipuAILargeLanguageModel()

    response = model.invoke(
        model="chatglm_turbo",
        credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")},
        prompt_messages=[UserPromptMessage(content="Who are you?")],
        model_parameters={"temperature": 0.9, "top_p": 0.7},
        stop=["How"],
        stream=False,
        user="abc-123",
    )

    assert isinstance(response, LLMResult)
    assert len(response.message.content) > 0


def test_invoke_stream_model():
    """A streaming invoke yields well-formed LLMResultChunk deltas."""
    model = ZhipuAILargeLanguageModel()

    response = model.invoke(
        model="chatglm_turbo",
        credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")},
        prompt_messages=[UserPromptMessage(content="Hello World!")],
        model_parameters={"temperature": 0.9, "top_p": 0.7},
        stream=True,
        user="abc-123",
    )

    assert isinstance(response, Generator)

    for chunk in response:
        assert isinstance(chunk, LLMResultChunk)
        assert isinstance(chunk.delta, LLMResultChunkDelta)
        assert isinstance(chunk.delta.message, AssistantPromptMessage)
        # The terminal chunk (finish_reason set) may carry empty content;
        # every intermediate chunk must contain text.
        if chunk.delta.finish_reason is None:
            assert len(chunk.delta.message.content) > 0


def test_get_num_tokens():
    """Token counting for a plain system + user prompt matches the known value."""
    model = ZhipuAILargeLanguageModel()

    num_tokens = model.get_num_tokens(
        model="chatglm_turbo",
        credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")},
        prompt_messages=[
            SystemPromptMessage(
                content="You are a helpful AI assistant.",
            ),
            UserPromptMessage(content="Hello World!"),
        ],
    )

    assert num_tokens == 14


def test_get_tools_num_tokens():
    """Token counting including a tool definition matches the known value."""
    model = ZhipuAILargeLanguageModel()

    num_tokens = model.get_num_tokens(
        model="tools",
        credentials={"api_key": os.environ.get("ZHIPUAI_API_KEY")},
        tools=[
            PromptMessageTool(
                name="get_current_weather",
                description="Get the current weather in a given location",
                parameters={
                    "type": "object",
                    "properties": {
                        "location": {"type": "string", "description": "The city and state e.g. San Francisco, CA"},
                        "unit": {"type": "string", "enum": ["c", "f"]},
                    },
                    "required": ["location"],
                },
            )
        ],
        prompt_messages=[
            SystemPromptMessage(
                content="You are a helpful AI assistant.",
            ),
            UserPromptMessage(content="Hello World!"),
        ],
    )

    assert num_tokens == 88