From cd77425b87264e76d1c60af971b8cdebd148b52c Mon Sep 17 00:00:00 2001
From: Yongteng Lei
Date: Tue, 21 Oct 2025 15:49:51 +0800
Subject: [PATCH] Fix: potential negative max_tokens in RAPTOR (#10701)

### What problem does this PR solve?

Fix potential negative max_tokens in RAPTOR. #10235.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---
 rag/raptor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rag/raptor.py b/rag/raptor.py
index 6ce776a68..191ecdeb4 100644
--- a/rag/raptor.py
+++ b/rag/raptor.py
@@ -114,7 +114,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
                         ),
                     }
                 ],
-                {"max_tokens": self._max_token},
+                {"max_tokens": max(self._max_token, 512)},  # fix issue: #10235
             )
             cnt = re.sub(
                 "(······\n由于长度的原因,回答被截断了,要继续吗?|For the content length reason, it stopped, continue?)",
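
For context, a minimal standalone sketch of the guard this patch introduces: the PR title notes that `self._max_token` can potentially be negative, and clamping it with `max(..., 512)` ensures the chat call always receives a usable token budget. The helper name and sample values below are illustrative only and do not appear in `rag/raptor.py`, which applies `max()` inline.

```python
# Minimal sketch (not part of the patch): illustrates the clamp applied in the diff.
# `effective_max_tokens` and the sample values are hypothetical; the real code
# simply passes max(self._max_token, 512) as the chat "max_tokens" option.

def effective_max_tokens(configured_max_token: int, floor: int = 512) -> int:
    """Return a token budget that never drops below `floor`."""
    return max(configured_max_token, floor)


if __name__ == "__main__":
    print(effective_max_tokens(-128))   # -> 512: a negative budget is clamped
    print(effective_max_tokens(4096))   # -> 4096: larger budgets pass through unchanged
```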