add support for lepton (#1866)

### What problem does this PR solve?

add support for lepton
#1853

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
This commit is contained in:
黄腾
2024-08-08 12:09:50 +08:00
committed by GitHub
parent 19ded65c66
commit aed1bbbcaa
5 changed files with 154 additions and 3 deletions

View File

@@ -2326,6 +2326,104 @@
"model_type": "rerank"
}
]
},
{
"name": "Lepton",
"logo": "",
"tags": "LLM",
"status": "1",
"llm": [
{
"llm_name": "dolphin-mixtral-8x7b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "gemma-7b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama3-1-8b",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "llama3-8b",
        "tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama2-13b",
        "tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "llama3-1-70b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama3-70b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama3-1-405b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "mistral-7b",
        "tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "mistral-8x7b",
        "tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "nous-hermes-llama2",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "openchat-3-5",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "toppy-m-7b",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "wizardlm-2-7b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "wizardlm-2-8x22b",
        "tags": "LLM,CHAT,64k",
"max_tokens": 65536,
"model_type": "chat"
}
]
}
]
}