From f819378fb0c93730a0384be78f3def72faeb28a3 Mon Sep 17 00:00:00 2001 From: Stephen Hu Date: Thu, 5 Jun 2025 12:05:58 +0800 Subject: [PATCH] Update api_utils.py (#8069) ### What problem does this PR solve? https://github.com/infiniflow/ragflow/issues/8059#issuecomment-2942407486 Lazily throw the exception to better support custom embedding models ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) --- api/utils/api_utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/api/utils/api_utils.py b/api/utils/api_utils.py index c0f2c1957..68c703b99 100644 --- a/api/utils/api_utils.py +++ b/api/utils/api_utils.py @@ -460,14 +460,15 @@ def verify_embedding_availability(embd_id: str, tenant_id: str) -> tuple[bool, R """ try: llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(embd_id) - if not LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding"): - return False, get_error_argument_result(f"Unsupported model: <{embd_id}>") - + in_llm_service = bool(LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding")) # Tongyi-Qianwen is added to TenantLLM by default, but remains unusable with empty api_key tenant_llms = TenantLLMService.get_my_llms(tenant_id=tenant_id) is_tenant_model = any(llm["llm_name"] == llm_name and llm["llm_factory"] == llm_factory and llm["model_type"] == "embedding" for llm in tenant_llms) is_builtin_model = embd_id in settings.BUILTIN_EMBEDDING_MODELS + if not ((is_builtin_model or is_tenant_model or in_llm_service)): + return False, get_error_argument_result(f"Unsupported model: <{embd_id}>") + if not (is_builtin_model or is_tenant_model): return False, get_error_argument_result(f"Unauthorized model: <{embd_id}>") except OperationalError as e: