Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
Fix: self-deployed LLM error (#9217)
### What problem does this PR solve?

Close #9197
Close #9145

### Type of change

- [x] Refactoring
- [x] Bug fixing
```diff
@@ -225,6 +225,9 @@ class TenantLLMService(CommonService):
                 if llm_id == llm["llm_name"]:
                     return llm["model_type"].split(",")[-1]
 
+        for llm in LLMService.query(llm_name=llm_id):
+            return llm.model_type
+
 
 class LLMBundle:
     def __init__(self, tenant_id, llm_type, llm_name=None, lang="Chinese", **kwargs):
```
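For context, here is a minimal, self-contained sketch of what the lookup with the new fallback does. Only the `if llm_id == llm["llm_name"]` branch and the added `LLMService.query(llm_name=llm_id)` loop come from the diff; the function name, the factory catalog, and the stub `LLMService` below are illustrative assumptions, not ragflow's actual API.

```python
from types import SimpleNamespace

# Stand-in for the service backed by the LLM table, where self-deployed
# models are registered. Hypothetical stub, not ragflow's real class.
class LLMService:
    _self_deployed = [{"llm_name": "my-vllm-qwen", "model_type": "chat"}]

    @classmethod
    def query(cls, llm_name):
        return [SimpleNamespace(**row)
                for row in cls._self_deployed if row["llm_name"] == llm_name]

# Assumed shape of the built-in factory catalog (illustrative data only).
FACTORY_LLM_CATALOG = [
    {"llm": [{"llm_name": "gpt-4o", "model_type": "chat,image2text"}]},
]

def llm_id_to_type(llm_id):
    # 1) Built-in factory models: model_type can list several comma-separated
    #    capabilities; the last entry is returned, as in the diff's context line.
    for factory in FACTORY_LLM_CATALOG:
        for llm in factory["llm"]:
            if llm_id == llm["llm_name"]:
                return llm["model_type"].split(",")[-1]

    # 2) The fallback added by this commit: self-deployed models are absent
    #    from the factory catalog, so look them up in the LLM table instead.
    for llm in LLMService.query(llm_name=llm_id):
        return llm.model_type
    return None

print(llm_id_to_type("gpt-4o"))        # "image2text" via the factory catalog
print(llm_id_to_type("my-vllm-qwen"))  # "chat" via the new fallback
```

Without the fallback, a model registered only through the self-deployment path would fall through the factory loop and return nothing, which appears to be the failure mode behind the linked issues.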