From 2d83c64eedf09697e893ced544fbe274e9e3a572 Mon Sep 17 00:00:00 2001 From: buua436 <66937541+buua436@users.noreply.github.com> Date: Mon, 3 Nov 2025 19:16:41 +0800 Subject: [PATCH] Fix: wrong prompt argument in describe_with_prompt() for OllamaCV (#10963) ### What problem does this PR solve? Fix the wrong prompt argument passed to `client.generate()` in `describe_with_prompt()` for the Ollama CV model: `vision_prompt[0]["content"]` is already the prompt string, so pass it directly instead of indexing into it with `[0]["text"]`. ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) --- rag/llm/cv_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rag/llm/cv_model.py b/rag/llm/cv_model.py index c0f90807d..9ea945907 100644 --- a/rag/llm/cv_model.py +++ b/rag/llm/cv_model.py @@ -539,7 +539,7 @@ class OllamaCV(Base): try: response = self.client.generate( model=self.model_name, - prompt=vision_prompt[0]["content"][0]["text"], + prompt=vision_prompt[0]["content"], images=[image], ) ans = response["response"].strip()