| File | Commit | Commit message | Last updated |
|---|---|---|---|
| __init__.py | 5fa2161b05 | feat: server multi models support (#799) | 2 years ago |
| anthropic_model.py | 2eba98a465 | feat: optimize anthropic connection pool (#1066) | 1 year ago |
| azure_openai_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| base.py | f9082104ed | feat: add hosted moderation (#1158) | 1 year ago |
| chatglm_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| huggingface_hub_model.py | c4d8bdc3db | fix: hf hosted inference check (#1128) | 1 year ago |
| localai_model.py | 417c19577a | feat: add LocalAI local embedding model support (#1021) | 1 year ago |
| minimax_model.py | fd0fc8f4fe | Fix/price calc (#862) | 2 years ago |
| openai_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| openllm_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| replicate_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| spark_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| tongyi_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| wenxin_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |
| xinference_model.py | 0796791de5 | feat: hf inference endpoint stream support (#1028) | 1 year ago |