Refactor: remove the temperature parameter since some LLMs fail to support it. (#8981)

### What problem does this PR solve?


### Type of change

- [x] Refactoring
This commit is contained in:
Kevin Hu
2025-07-23 10:17:04 +08:00
committed by GitHub
parent 0020c50000
commit 935ce872d8
7 changed files with 10 additions and 12 deletions

View File

@ -90,11 +90,10 @@ class CommunityReportsExtractor(Extractor):
"relation_df": rela_df.to_csv(index_label="id")
}
text = perform_variable_replacements(self._extraction_prompt, variables=prompt_variables)
gen_conf = {"temperature": 0.3}
async with chat_limiter:
try:
with trio.move_on_after(80) as cancel_scope:
response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], gen_conf)
response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], {})
if cancel_scope.cancelled_caught:
logging.warning("extract_community_report._chat timeout, skipping...")
return