Fix: user_default_llm configuration doesn't work for OpenAI API compatible LLM factory (#8502)

### What problem does this PR solve?

https://github.com/infiniflow/ragflow/issues/8467
When an LLM is added, the stored `llm_name` looks like `llm1___OpenAI-API`
(see f09ca8e795/api/apps/llm_app.py (L173)),
so we should not query with the bare name `llm1`.
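
For illustration, a minimal sketch of the naming convention described above (the `llm1` name and `___OpenAI-API` suffix come from the linked `llm_app.py` behavior; the variable names here are hypothetical):

```python
# Hypothetical illustration: when a model is added under the
# OpenAI-API-Compatible factory, a factory suffix is appended to the
# user-supplied name before it is persisted.
llm_name = "llm1"                          # name entered by the user
stored_name = f"{llm_name}___OpenAI-API"   # what actually gets stored
assert stored_name == "llm1___OpenAI-API"
# A query using the bare "llm1" therefore matches no row.
```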


### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Author: Stephen Hu (committed by GitHub), 2025-06-27 09:41:12 +08:00
Parent: daf6c82066 · Commit: 938d8dd878


```diff
@@ -45,6 +45,18 @@ class TenantLLMService(CommonService):
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm)
         else:
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
+        if (not objs) and fid:
+            if fid == "LocalAI":
+                mdlnm += "___LocalAI"
+            elif fid == "HuggingFace":
+                mdlnm += "___HuggingFace"
+            elif fid == "OpenAI-API-Compatible":
+                mdlnm += "___OpenAI-API"
+            elif fid == "VLLM":
+                mdlnm += "___VLLM"
+            objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
         if not objs:
             return
         return objs[0]
```
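
To trace the fallback, here is a self-contained sketch of the new lookup path; `FACTORY_SUFFIX`, `db`, and `fake_query` are hypothetical stand-ins for `cls.query` and the tenant LLM table, not the real service:

```python
# Hypothetical suffix map mirroring the if/elif chain in the fix above.
FACTORY_SUFFIX = {
    "LocalAI": "___LocalAI",
    "HuggingFace": "___HuggingFace",
    "OpenAI-API-Compatible": "___OpenAI-API",
    "VLLM": "___VLLM",
}

# Stand-in for the tenant LLM table, keyed on (llm_name, llm_factory).
db = {("llm1___OpenAI-API", "OpenAI-API-Compatible"): {"api_key": "sk-..."}}

def fake_query(llm_name, llm_factory):
    row = db.get((llm_name, llm_factory))
    return [row] if row else []

def lookup(mdlnm, fid):
    objs = fake_query(mdlnm, fid)
    if not objs and fid in FACTORY_SUFFIX:
        # Retry with the factory suffix appended, as the fix does.
        objs = fake_query(mdlnm + FACTORY_SUFFIX[fid], fid)
    return objs[0] if objs else None

# Before the fix the bare name found nothing; with the fallback it resolves.
assert lookup("llm1", "OpenAI-API-Compatible") == {"api_key": "sk-..."}
```

A dict-based map like this could also collapse the if/elif chain into a single lookup, though the committed fix keeps the branches explicit.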