Chore(config): Add rank values for the LLM vendors and remove deprecated LLMs (#11133)

### What problem does this PR solve?

Added vendor ranking so that frequently used model providers appear
higher on the page for easier access.
Removed deprecated LLM configurations from llm_factories.json to
streamline model management.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
redredrrred
2025-11-10 19:17:35 +08:00
committed by GitHub
parent df16a80f25
commit ba6470a7a5

View File

@ -5,7 +5,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,TTS,TEXT RE-RANK,SPEECH2TEXT,MODERATION",
"status": "1",
"rank": 999999990,
"rank": "99",
"llm": [
{
"llm_name": "gpt-5",
@ -175,6 +175,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "92",
"llm": [
{
"llm_name": "grok-4",
@ -331,6 +332,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK,TTS,SPEECH2TEXT,MODERATION",
"status": "1",
"rank": "94",
"llm": [
{
"llm_name": "Moonshot-Kimi-K2-Instruct",
@ -715,6 +717,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
"status": "1",
"rank": "93",
"llm": [
{
"llm_name": "glm-4.5",
@ -860,6 +863,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
"status": "1",
"rank": "84",
"llm": []
},
{
@ -895,6 +899,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
"status": "1",
"rank": "95",
"llm": [
{
"llm_name": "kimi-thinking-preview",
@ -1015,6 +1020,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "96",
"llm": [
{
"llm_name": "deepseek-chat",
@ -1193,6 +1199,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING",
"status": "1",
"rank": "82",
"llm": [
{
"llm_name": "abab6.5-chat",
@ -1232,6 +1239,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,MODERATION",
"status": "1",
"rank": "90",
"llm": [
{
"llm_name": "codestral-latest",
@ -1325,6 +1333,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
"status": "1",
"rank": "85",
"llm": [
{
"llm_name": "gpt-4o-mini",
@ -1409,6 +1418,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING",
"status": "1",
"rank": "86",
"llm": []
},
{
@ -1416,6 +1426,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
"status": "1",
"rank": "97",
"llm": [
{
"llm_name": "gemini-2.5-flash",
@ -1471,6 +1482,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "81",
"llm": [
{
"llm_name": "gemma2-9b-it",
@ -1580,6 +1592,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING, TEXT RE-RANK",
"status": "1",
"rank": "80",
"llm": [
{
"llm_name": "01-ai/yi-large",
@ -2334,6 +2347,7 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING, TEXT RE-RANK",
"status": "1",
"rank": "89",
"llm": [
{
"llm_name": "command-r-plus",
@ -2441,108 +2455,6 @@
}
]
},
{
"name": "LeptonAI",
"logo": "",
"tags": "LLM",
"status": "1",
"llm": [
{
"llm_name": "dolphin-mixtral-8x7b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemma-7b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama3-1-8b",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "llama3-8b",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama2-13b",
"tags": "LLM,CHAT,4K",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "llama3-1-70b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "llama3-70b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "llama3-1-405b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "mistral-7b",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "mistral-8x7b",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "nous-hermes-llama2",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "openchat-3-5",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "toppy-m-7b",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "wizardlm-2-7b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "wizardlm-2-8x22b",
"tags": "LLM,CHAT,64K",
"max_tokens": 65536,
"model_type": "chat"
}
]
},
{
"name": "TogetherAI",
"logo": "",
@ -2550,167 +2462,6 @@
"status": "1",
"llm": []
},
{
"name": "PerfXCloud",
"logo": "",
"tags": "LLM,TEXT EMBEDDING",
"status": "1",
"llm": [
{
"llm_name": "deepseek-v2-chat",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "llama3.1:405b",
"tags": "LLM,CHAT,128k",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-72B-Instruct",
"tags": "LLM,CHAT,128k",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-72B-Instruct-GPTQ-Int4",
"tags": "LLM,CHAT,2k",
"max_tokens": 2048,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-72B-Instruct-awq-int4",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Llama3-Chinese_v2",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Yi-1_5-9B-Chat-16K",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "Qwen1.5-72B-Chat-GPTQ-Int4",
"tags": "LLM,CHAT,2k",
"max_tokens": 2048,
"model_type": "chat"
},
{
"llm_name": "Meta-Llama-3.1-8B-Instruct",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-v2-lite-chat",
"tags": "LLM,CHAT,2k",
"max_tokens": 2048,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-7B",
"tags": "LLM,CHAT,128k",
"max_tokens": 131072,
"model_type": "chat"
},
{
"llm_name": "chatglm3-6b",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Meta-Llama-3-70B-Instruct-GPTQ-Int4",
"tags": "LLM,CHAT,1k",
"max_tokens": 1024,
"model_type": "chat"
},
{
"llm_name": "Meta-Llama-3-8B-Instruct",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Mistral-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "MindChat-Qwen-7B-v2",
"tags": "LLM,CHAT,2k",
"max_tokens": 2048,
"model_type": "chat"
},
{
"llm_name": "phi-2",
"tags": "LLM,CHAT,2k",
"max_tokens": 2048,
"model_type": "chat"
},
{
"llm_name": "SOLAR-10_7B-Instruct",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "Mixtral-8x7B-Instruct-v0.1-GPTQ",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen1.5-7B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "BAAI/bge-large-en-v1.5",
"tags": "TEXT EMBEDDING",
"max_tokens": 512,
"model_type": "embedding"
},
{
"llm_name": "BAAI/bge-large-zh-v1.5",
"tags": "TEXT EMBEDDING",
"max_tokens": 1024,
"model_type": "embedding"
},
{
"llm_name": "BAAI/bge-m3",
"tags": "TEXT EMBEDDING",
"max_tokens": 8192,
"model_type": "embedding"
}
]
},
{
"name": "Upstage",
"logo": "",
@ -2875,12 +2626,13 @@
"logo": "",
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK,IMAGE2TEXT",
"status": "1",
"rank": "79",
"llm": [
{
"llm_name":"THUDM/GLM-4.1V-9B-Thinking",
"tags":"LLM,CHAT,IMAGE2TEXT, 64k",
"max_tokens":64000,
"model_type":"chat",
"llm_name": "THUDM/GLM-4.1V-9B-Thinking",
"tags": "LLM,CHAT,IMAGE2TEXT, 64k",
"max_tokens": 64000,
"model_type": "chat",
"is_tools": false
},
{
@ -3374,75 +3126,6 @@
}
]
},
{
"name": "01.AI",
"logo": "",
"tags": "LLM,IMAGE2TEXT",
"status": "1",
"llm": [
{
"llm_name": "yi-lightning",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-large",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "yi-medium",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-medium-200k",
"tags": "LLM,CHAT,200k",
"max_tokens": 204800,
"model_type": "chat"
},
{
"llm_name": "yi-spark",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-large-rag",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-large-fc",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "yi-large-turbo",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-large-preview",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "yi-vision",
"tags": "LLM,CHAT,IMAGE2TEXT,16k",
"max_tokens": 16384,
"model_type": "image2text"
}
]
},
{
"name": "Replicate",
"logo": "",
@ -3494,6 +3177,7 @@
"logo": "",
"tags": "LLM,TTS",
"status": "1",
"rank": "83",
"llm": []
},
{
@ -3501,6 +3185,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "88",
"llm": []
},
{
@ -3522,6 +3207,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "98",
"llm": [
{
"llm_name": "claude-opus-4-1-20250805",
@ -4123,6 +3809,7 @@
"logo": "",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"status": "1",
"rank": "91",
"llm": []
},
{
@ -4866,6 +4553,7 @@
"logo": "",
"tags": "LLM",
"status": "1",
"rank": "87",
"llm": [
{
"llm_name": "LongCat-Flash-Chat",
@ -5151,4 +4839,4 @@
]
}
]
}
}