Chore(config): remove Youdao and BAAI embedding model providers (#10873)

### What problem does this PR solve?

This commit removes the Youdao and BAAI entries from the LLM factories
configuration as they are no longer needed or supported.

### Type of change

- [x] Config update
This commit is contained in:
Liu An
2025-10-29 19:38:57 +08:00
committed by GitHub
parent 55eb525fdc
commit 40b2c48957
2 changed files with 1 additions and 29 deletions

View File

@@ -368,7 +368,7 @@ def my_llms():
@manager.route('/list', methods=['GET'])  # noqa: F821
@login_required
def list_app():
-    self_deployed = ["Youdao", "FastEmbed", "BAAI", "Ollama", "Xinference", "LocalAI", "LM-Studio", "GPUStack"]
+    self_deployed = ["FastEmbed", "Ollama", "Xinference", "LocalAI", "LM-Studio", "GPUStack"]
    weighted = []
    model_type = request.args.get("model_type")
    try:

View File

@ -974,20 +974,6 @@
"status": "1", "status": "1",
"llm": [] "llm": []
}, },
{
"name": "Youdao",
"logo": "",
"tags": "TEXT EMBEDDING",
"status": "1",
"llm": [
{
"llm_name": "maidalun1020/bce-embedding-base_v1",
"tags": "TEXT EMBEDDING,",
"max_tokens": 512,
"model_type": "embedding"
}
]
},
{
"name": "DeepSeek",
"logo": "",
@@ -1140,20 +1126,6 @@
}
]
},
{
"name": "BAAI",
"logo": "",
"tags": "TEXT EMBEDDING",
"status": "1",
"llm": [
{
"llm_name": "BAAI/bge-large-zh-v1.5",
"tags": "TEXT EMBEDDING,",
"max_tokens": 1024,
"model_type": "embedding"
}
]
},
{
"name": "Builtin",
"logo": "",