enlarge the default token length of RAPTOR summarization (#3454)

### What problem does this PR solve?

#3426

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Commit a1d01a1b2f (parent dc05f43eee), authored by Kevin Hu on 2024-11-18 10:15:26 +08:00, committed via GitHub.
5 changed files with 892 additions and 519 deletions.


```diff
@@ -26,7 +26,7 @@ from rag.utils import truncate
 class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
-    def __init__(self, max_cluster, llm_model, embd_model, prompt, max_token=256, threshold=0.1):
+    def __init__(self, max_cluster, llm_model, embd_model, prompt, max_token=512, threshold=0.1):
         self._max_cluster = max_cluster
         self._llm_model = llm_model
         self._embd_model = embd_model
```
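
The only behavioral change is the default for `max_token`, which caps the length of each cluster summary generated during RAPTOR tree building: callers that do not pass `max_token` explicitly now get 512 tokens instead of 256. Below is a minimal usage sketch of the two call patterns; the `rag.raptor` import path, the placeholder model handles, and the prompt string are assumptions for illustration and are not part of this diff.

```python
# Hypothetical usage sketch; the module path, model handles, and prompt
# below are assumptions, not part of this PR.
from rag.raptor import RecursiveAbstractiveProcessing4TreeOrganizedRetrieval

my_chat_model = None        # placeholder: a chat/LLM model handle in real use
my_embedding_model = None   # placeholder: an embedding model handle in real use
PROMPT = "Summarize the following passages."  # illustrative prompt only

# Relying on the new default: each cluster summary may now use up to 512 tokens.
raptor_default = RecursiveAbstractiveProcessing4TreeOrganizedRetrieval(
    max_cluster=64,
    llm_model=my_chat_model,
    embd_model=my_embedding_model,
    prompt=PROMPT,
)

# Pinning the previous behavior explicitly, if shorter summaries are preferred.
raptor_short = RecursiveAbstractiveProcessing4TreeOrganizedRetrieval(
    max_cluster=64,
    llm_model=my_chat_model,
    embd_model=my_embedding_model,
    prompt=PROMPT,
    max_token=256,
)
```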