diff --git a/api/apps/llm_app.py b/api/apps/llm_app.py
index d14f6fa15..19b25325f 100644
--- a/api/apps/llm_app.py
+++ b/api/apps/llm_app.py
@@ -368,7 +368,7 @@ def my_llms():
 @manager.route('/list', methods=['GET'])  # noqa: F821
 @login_required
 def list_app():
-    self_deployed = ["Youdao", "FastEmbed", "BAAI", "Ollama", "Xinference", "LocalAI", "LM-Studio", "GPUStack"]
+    self_deployed = ["FastEmbed", "Ollama", "Xinference", "LocalAI", "LM-Studio", "GPUStack"]
     weighted = []
     model_type = request.args.get("model_type")
     try:
diff --git a/conf/llm_factories.json b/conf/llm_factories.json
index 0d8c11035..1c069e8dc 100644
--- a/conf/llm_factories.json
+++ b/conf/llm_factories.json
@@ -974,20 +974,6 @@
             "status": "1",
             "llm": []
         },
-        {
-            "name": "Youdao",
-            "logo": "",
-            "tags": "TEXT EMBEDDING",
-            "status": "1",
-            "llm": [
-                {
-                    "llm_name": "maidalun1020/bce-embedding-base_v1",
-                    "tags": "TEXT EMBEDDING,",
-                    "max_tokens": 512,
-                    "model_type": "embedding"
-                }
-            ]
-        },
         {
             "name": "DeepSeek",
             "logo": "",
@@ -1140,20 +1126,6 @@
                 }
             ]
         },
-        {
-            "name": "BAAI",
-            "logo": "",
-            "tags": "TEXT EMBEDDING",
-            "status": "1",
-            "llm": [
-                {
-                    "llm_name": "BAAI/bge-large-zh-v1.5",
-                    "tags": "TEXT EMBEDDING,",
-                    "max_tokens": 1024,
-                    "model_type": "embedding"
-                }
-            ]
-        },
         {
             "name": "Builtin",
             "logo": "",
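
The `self_deployed` list in `list_app()` appears to mark providers that are served locally, so their models can be surfaced without a configured API key; dropping "Youdao" and "BAAI" from it lines up with deleting their factory entries in conf/llm_factories.json. Below is a minimal sketch of that pattern only, assuming that reading of the flag; `list_models()`, `catalog`, and `configured_keys` are hypothetical stand-ins, not RAGFlow code.

# Minimal sketch, NOT RAGFlow's implementation: illustrates how an allow-list
# like `self_deployed` (edited in the diff above) is typically used -- models
# from locally served providers are marked available even without an API key.
# `list_models`, `catalog`, and `configured_keys` are hypothetical examples.

self_deployed = ["FastEmbed", "Ollama", "Xinference", "LocalAI", "LM-Studio", "GPUStack"]

configured_keys = {"DeepSeek": "sk-..."}  # providers the user supplied an API key for
catalog = [
    {"fid": "DeepSeek", "llm_name": "deepseek-chat", "model_type": "chat"},
    {"fid": "Ollama", "llm_name": "bge-m3", "model_type": "embedding"},
    {"fid": "Youdao", "llm_name": "maidalun1020/bce-embedding-base_v1", "model_type": "embedding"},
]


def list_models(model_type=None):
    """Return catalog entries, flagging each model as available when its
    provider has a configured key or is on the self-deployed allow-list."""
    result = []
    for model in catalog:
        if model_type and model["model_type"] != model_type:
            continue
        available = model["fid"] in configured_keys or model["fid"] in self_deployed
        result.append(dict(model, available=available))
    return result


if __name__ == "__main__":
    # With "Youdao" no longer on the list, its embedding model is reported
    # as unavailable unless an API key were configured for it.
    for model in list_models("embedding"):
        print(model["fid"], model["llm_name"], "->", "available" if model["available"] else "unavailable")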