From 35034fed73923d41158fc206095305b659f6b876 Mon Sep 17 00:00:00 2001
From: Stephen Hu
Date: Wed, 18 Jun 2025 16:40:57 +0800
Subject: [PATCH] Fix: Raptor: [Bug]: **ERROR**: Unknown field for GenerationConfig: max_tokens (#8331)

### What problem does this PR solve?

https://github.com/infiniflow/ragflow/issues/8324

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---
 rag/llm/chat_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py
index 3e96f75f4..fbef34781 100644
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -1217,7 +1217,7 @@ class GeminiChat(Base):
 
     def _clean_conf(self, gen_conf):
         for k in list(gen_conf.keys()):
-            if k not in ["temperature", "top_p", "max_tokens"]:
+            if k not in ["temperature", "top_p"]:
                 del gen_conf[k]
         return gen_conf
 
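
For context, a minimal standalone sketch of what the corrected `_clean_conf` does: any key other than `temperature` and `top_p` (notably `max_tokens`, which triggers the `Unknown field for GenerationConfig` error) is stripped from the generation config before the Gemini request is built. The sample values below are illustrative only, not taken from the codebase.

```python
# Sketch of the corrected GeminiChat._clean_conf behaviour: keep only the
# generation parameters accepted here, so the unsupported "max_tokens" key
# never reaches Gemini's GenerationConfig.
def clean_conf(gen_conf: dict) -> dict:
    for k in list(gen_conf.keys()):
        if k not in ["temperature", "top_p"]:
            del gen_conf[k]
    return gen_conf


# Illustrative input (hypothetical values):
conf = {"temperature": 0.3, "top_p": 0.9, "max_tokens": 512}
print(clean_conf(conf))  # -> {'temperature': 0.3, 'top_p': 0.9}
```

The patch drops the unsupported key rather than remapping it to another field, which is enough to resolve the error reported in the linked issue.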