Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
Cache the result from llm for graphrag and raptor (#4051)
### What problem does this PR solve?

#4045

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
```diff
@@ -8,6 +8,7 @@ Reference:
 import json
 from dataclasses import dataclass
 
+from graphrag.extractor import Extractor
 from graphrag.utils import ErrorHandlerFn, perform_variable_replacements
 from rag.llm.chat_model import Base as CompletionLLM
 
@@ -42,10 +43,9 @@ class SummarizationResult:
     description: str
 
 
-class SummarizeExtractor:
+class SummarizeExtractor(Extractor):
     """Unipartite graph extractor class definition."""
 
-    _llm: CompletionLLM
     _entity_name_key: str
     _input_descriptions_key: str
     _summarization_prompt: str
@@ -143,4 +143,4 @@ class SummarizeExtractor:
             self._input_descriptions_key: json.dumps(sorted(descriptions)),
         }
         text = perform_variable_replacements(self._summarization_prompt, variables=variables)
-        return self._llm.chat("", [{"role": "user", "content": text}])
+        return self._chat("", [{"role": "user", "content": text}])
```
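The new `from graphrag.extractor import Extractor` import and the switch from `self._llm.chat(...)` to `self._chat(...)` suggest that the caching promised in the commit title lives in the shared `Extractor` base class. The snippet below is only a minimal sketch of what such a cache-aware `_chat` wrapper could look like; the in-memory dict cache, the hashing scheme, and the `llm_invoker` parameter are assumptions for illustration, not the actual ragflow implementation.

```python
# Minimal, hypothetical sketch of a cache-aware Extractor base class.
# Assumption: subclasses such as SummarizeExtractor call self._chat(...)
# instead of self._llm.chat(...), so repeated graphrag/raptor prompts can
# be answered from a cache instead of re-querying the LLM.
import hashlib
import json


class Extractor:
    def __init__(self, llm_invoker):
        self._llm = llm_invoker   # CompletionLLM-compatible chat model
        self._cache = {}          # assumed in-memory cache; a persistent store would also work

    def _chat(self, system, history, gen_conf=None):
        gen_conf = gen_conf or {}
        # Key the cache on everything that influences the completion:
        # system prompt, conversation history, and generation settings.
        key = hashlib.md5(
            json.dumps([system, history, gen_conf], sort_keys=True).encode("utf-8")
        ).hexdigest()
        if key in self._cache:
            return self._cache[key]
        response = self._llm.chat(system, history, gen_conf)
        self._cache[key] = response
        return response
```

With a base class along these lines, the one-line change in the last hunk (`self._chat(...)` instead of `self._llm.chat(...)`) is all a subclass needs for its summarization prompts to be served from the cache on repeated calls.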