Fix: Change Ollama Embedding Keep Alive (#8734)

### What problem does this PR solve?

Passes `keep_alive=-1` on the Ollama embedding calls so the embedding model stays loaded in memory between requests, instead of being unloaded after Ollama's default keep-alive timeout and reloaded on every call.

Fixes https://github.com/infiniflow/ragflow/issues/8733

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Stephen Hu
2025-07-09 12:17:26 +08:00
committed by GitHub
parent 2f79a2a04d
commit 19419281c3

View File

@ -286,7 +286,7 @@ class OllamaEmbed(Base):
# remove special tokens if they exist
for token in OllamaEmbed._special_tokens:
txt = txt.replace(token, "")
res = self.client.embeddings(prompt=txt, model=self.model_name, options={"use_mmap": True})
res = self.client.embeddings(prompt=txt, model=self.model_name, options={"use_mmap": True}, keep_alive=-1)
try:
arr.append(res["embedding"])
except Exception as _e:
@ -298,7 +298,7 @@ class OllamaEmbed(Base):
# remove special tokens if they exist
for token in OllamaEmbed._special_tokens:
text = text.replace(token, "")
res = self.client.embeddings(prompt=text, model=self.model_name, options={"use_mmap": True})
res = self.client.embeddings(prompt=text, model=self.model_name, options={"use_mmap": True}, keep_alive=-1)
try:
return np.array(res["embedding"]), 128
except Exception as _e: