From 938d8dd87830e4775a7dbfb8c8824f57c4c4b652 Mon Sep 17 00:00:00 2001 From: Stephen Hu Date: Fri, 27 Jun 2025 09:41:12 +0800 Subject: [PATCH] Fix: user_default_llm configuration doesn't work for OpenAI API compatible LLM factory (#8502) ### What problem does this PR solve? https://github.com/infiniflow/ragflow/issues/8467 When adding an LLM from an OpenAI-API-compatible factory, the stored llm_name looks like "llm1___OpenAI-API": https://github.com/infiniflow/ragflow/blob/f09ca8e79500c0c99ba9703ccbc86965ef05de02/api/apps/llm_app.py#L173 so querying with the bare name "llm1" finds nothing; retry the lookup with the factory suffix appended. ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) --- api/db/services/llm_service.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/api/db/services/llm_service.py b/api/db/services/llm_service.py index e124b5b16..462eb9d4a 100644 --- a/api/db/services/llm_service.py +++ b/api/db/services/llm_service.py @@ -45,6 +45,18 @@ class TenantLLMService(CommonService): objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm) else: objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid) + + if (not objs) and fid: + if fid == "LocalAI": + mdlnm += "___LocalAI" + elif fid == "HuggingFace": + mdlnm += "___HuggingFace" + elif fid == "OpenAI-API-Compatible": + mdlnm += "___OpenAI-API" + elif fid == "VLLM": + mdlnm += "___VLLM" + + objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid) if not objs: return return objs[0]