Fix: user_default_llm configuration doesn't work for OpenAI API compatible LLM factory (#8502)
### What problem does this PR solve?
https://github.com/infiniflow/ragflow/issues/8467
When an LLM is added, its `llm_name` is stored as something like `"llm1___OpenAI-API"`:

f09ca8e795/api/apps/llm_app.py (L173)

so querying with the bare name `llm1` finds nothing.
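
For illustration, a minimal sketch of this naming convention (the helper name is hypothetical, not RAGFlow's actual code; it only mirrors the `<name>___<suffix>` pattern described above):

```python
# Hypothetical helper illustrating the "<name>___<suffix>" convention described
# above; not the actual RAGFlow code at llm_app.py (L173).
def stored_llm_name(llm_name: str, factory_suffix: str) -> str:
    """Compose the name as persisted when adding an LLM."""
    return f"{llm_name}___{factory_suffix}"

# Adding "llm1" via an OpenAI-API-compatible factory stores "llm1___OpenAI-API",
# so a lookup for the bare "llm1" fails.
assert stored_llm_name("llm1", "OpenAI-API") == "llm1___OpenAI-API"
```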
### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
```diff
@@ -45,6 +45,18 @@ class TenantLLMService(CommonService):
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm)
         else:
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
+
+        if (not objs) and fid:
+            if fid == "LocalAI":
+                mdlnm += "___LocalAI"
+            elif fid == "HuggingFace":
+                mdlnm += "___HuggingFace"
+            elif fid == "OpenAI-API-Compatible":
+                mdlnm += "___OpenAI-API"
+            elif fid == "VLLM":
+                mdlnm += "___VLLM"
+
+            objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
         if not objs:
             return
         return objs[0]
```
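
The effect of the change, as a minimal standalone sketch: the in-memory `records` list and the `query` closure below stand in for `TenantLLMService`'s real database query and are not RAGFlow's actual API.

```python
# Minimal sketch of the patched fallback lookup; `records` and `query` are
# stand-ins for the real cls.query database call.
FACTORY_SUFFIXES = {
    "LocalAI": "___LocalAI",
    "HuggingFace": "___HuggingFace",
    "OpenAI-API-Compatible": "___OpenAI-API",
    "VLLM": "___VLLM",
}

def find_tenant_llm(records, tenant_id, mdlnm, fid=None):
    def query(name):
        return [r for r in records
                if r["tenant_id"] == tenant_id
                and r["llm_name"] == name
                and (fid is None or r["llm_factory"] == fid)]

    objs = query(mdlnm)
    if (not objs) and fid in FACTORY_SUFFIXES:
        # Retry with the factory suffix appended, matching how the
        # name was stored when the model was added.
        objs = query(mdlnm + FACTORY_SUFFIXES[fid])
    return objs[0] if objs else None

records = [{"tenant_id": "t1", "llm_name": "llm1___OpenAI-API",
            "llm_factory": "OpenAI-API-Compatible"}]
# Before the fix, looking up the bare name "llm1" found nothing; now it resolves.
assert find_tenant_llm(records, "t1", "llm1", "OpenAI-API-Compatible") is not None
```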