From 3947da10ae26cdae398f235df921bc172ecd377e Mon Sep 17 00:00:00 2001
From: Yongteng Lei
Date: Fri, 22 Aug 2025 19:33:09 +0800
Subject: [PATCH] Fix: unexpected LLM parameters (#9661)

### What problem does this PR solve?

Remove unexpected LLM parameters by filtering `gen_conf` against an allowlist of supported chat-completion options.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 rag/llm/chat_model.py | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py
index 1f14cbf72..27c9a6c99 100644
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -112,6 +112,32 @@ class Base(ABC):
     def _clean_conf(self, gen_conf):
         if "max_tokens" in gen_conf:
             del gen_conf["max_tokens"]
+
+        allowed_conf = {
+            "temperature",
+            "max_completion_tokens",
+            "top_p",
+            "stream",
+            "stream_options",
+            "stop",
+            "n",
+            "presence_penalty",
+            "frequency_penalty",
+            "functions",
+            "function_call",
+            "logit_bias",
+            "user",
+            "response_format",
+            "seed",
+            "tools",
+            "tool_choice",
+            "logprobs",
+            "top_logprobs",
+            "extra_headers",
+        }
+
+        gen_conf = {k: v for k, v in gen_conf.items() if k in allowed_conf}
+        return gen_conf
 
     def _chat(self, history, gen_conf, **kwargs):
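
For reference, a minimal standalone sketch of the filtering behavior this patch introduces. The names `clean_conf` and `ALLOWED_CONF` are illustrative stand-ins for the patched `Base._clean_conf` and its local `allowed_conf`; the class and client wiring from `rag/llm/chat_model.py` are omitted here.

```python
# Illustrative sketch of the allowlist filtering added in this patch (not the repo's exact code).

# Generation options accepted downstream; anything else is dropped.
ALLOWED_CONF = {
    "temperature", "max_completion_tokens", "top_p", "stream", "stream_options",
    "stop", "n", "presence_penalty", "frequency_penalty", "functions",
    "function_call", "logit_bias", "user", "response_format", "seed",
    "tools", "tool_choice", "logprobs", "top_logprobs", "extra_headers",
}


def clean_conf(gen_conf: dict) -> dict:
    """Return a copy of gen_conf with unexpected LLM parameters removed."""
    return {k: v for k, v in gen_conf.items() if k in ALLOWED_CONF}


if __name__ == "__main__":
    conf = {"temperature": 0.7, "top_p": 0.9, "max_tokens": 512, "unexpected_key": True}
    # "max_tokens" and "unexpected_key" are filtered out:
    print(clean_conf(conf))  # {'temperature': 0.7, 'top_p': 0.9}
```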