Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
Fix: chat_completion answer data incorrect (#6041)
### What problem does this PR solve?

Fix incorrect answer data returned by chat_completion.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Co-authored-by: renqi <renqi08266@fxomail.com>
```diff
@@ -259,7 +259,7 @@ def chat_completion_openai_like(tenant_id, chat_id):
     # The choices field on the last chunk will always be an empty array [].
     def streamed_response_generator(chat_id, dia, msg):
         token_used = 0
-        should_split_index = 0
+        answer_cache = ""
         response = {
             "id": f"chatcmpl-{chat_id}",
             "choices": [
@@ -285,13 +285,9 @@ def chat_completion_openai_like(tenant_id, chat_id):
         try:
             for ans in chat(dia, msg, True):
                 answer = ans["answer"]
-                incremental = answer[should_split_index:]
+                incremental = answer.replace(answer_cache, "", 1)
+                answer_cache = answer.rstrip("</think>")
                 token_used += len(incremental)
-                if incremental.endswith("</think>"):
-                    response_data_len = len(incremental.rstrip("</think>"))
-                else:
-                    response_data_len = len(incremental)
-                should_split_index += response_data_len
                 response["choices"][0]["delta"]["content"] = incremental
                 yield f"data:{json.dumps(response, ensure_ascii=False)}\n\n"
         except Exception as e:
```
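For context, here is a minimal standalone sketch (not part of the PR; the helper name `iter_deltas` and the sample chunk values are made up) of how the patched loop turns the accumulated answers yielded by `chat()` into the per-chunk deltas placed in `choices[0].delta.content`:

```python
# Minimal sketch, not from the PR: the patched delta computation in isolation.
# chat() yields the accumulated answer so far on every iteration; only the
# newly appended text should be streamed to the client.
def iter_deltas(accumulated_answers):
    answer_cache = ""
    for answer in accumulated_answers:
        # Remove the already-seen text (first occurrence only), keep the new tail.
        incremental = answer.replace(answer_cache, "", 1)
        # Cache the full answer minus a trailing "</think>" marker. Note that
        # rstrip() treats its argument as a character set, as in the real code.
        answer_cache = answer.rstrip("</think>")
        yield incremental

# Illustrative chunk values only.
chunks = ["Hel", "Hello", "Hello, wor", "Hello, world"]
print(list(iter_deltas(chunks)))  # ['Hel', 'lo', ', wor', 'ld']
```

The difference from the removed `should_split_index` logic is that the delta is now derived from the previously seen answer text cached in `answer_cache`, rather than from a manually maintained character offset into the growing answer string.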