Make document change-status idempotent for Infinity doc store (#12717)

### What problem does this PR solve?

This PR makes the document change‑status endpoint idempotent under the
Infinity doc store. If a document already has the requested status, the
handler returns success without touching the engine, preventing
unnecessary updates and avoiding missing‑table errors while keeping
responses consistent.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
6ba3i
2026-01-20 19:11:21 +08:00
committed by GitHub
parent 9ebbc5a74d
commit aee9860970
8 changed files with 55 additions and 14 deletions

View File

@ -14,7 +14,7 @@
# limitations under the License.
#
import pytest
from common import batch_create_chat_assistants, delete_chat_assistants, list_documents, parse_documents
from common import batch_create_chat_assistants, delete_chat_assistants, list_chat_assistants, list_documents, parse_documents
from utils import wait_for
@ -38,3 +38,12 @@ def add_chat_assistants_func(request, HttpApiAuth, add_document):
parse_documents(HttpApiAuth, dataset_id, {"document_ids": [document_id]})
condition(HttpApiAuth, dataset_id)
return dataset_id, document_id, batch_create_chat_assistants(HttpApiAuth, 5)
@pytest.fixture(scope="function")
def chat_assistant_llm_model_type(HttpApiAuth, add_chat_assistants_func):
    """Return the model_type of the first created chat assistant.

    Looks up the first assistant produced by ``add_chat_assistants_func``
    and reads ``llm.model_type`` from the listing response. Falls back to
    ``"chat"`` whenever the lookup fails or the field is absent.
    """
    _, _, assistant_ids = add_chat_assistants_func
    response = list_chat_assistants(HttpApiAuth, {"id": assistant_ids[0]})
    records = response.get("data")
    if response.get("code") != 0 or not records:
        return "chat"
    return records[0].get("llm", {}).get("model_type", "chat")

View File

@ -100,7 +100,7 @@ class TestChatAssistantUpdate:
@pytest.mark.parametrize(
"llm, expected_code, expected_message",
[
({}, 100, "ValueError"),
({}, 0, ""),
({"model_name": "glm-4"}, 0, ""),
({"model_name": "unknown"}, 102, "`model_name` unknown doesn't exist"),
({"temperature": 0}, 0, ""),
@ -131,9 +131,11 @@ class TestChatAssistantUpdate:
pytest.param({"unknown": "unknown"}, 0, "", marks=pytest.mark.skip),
],
)
def test_llm(self, HttpApiAuth, add_chat_assistants_func, llm, expected_code, expected_message):
def test_llm(self, HttpApiAuth, add_chat_assistants_func, chat_assistant_llm_model_type, llm, expected_code, expected_message):
dataset_id, _, chat_assistant_ids = add_chat_assistants_func
payload = {"name": "llm_test", "dataset_ids": [dataset_id], "llm": llm}
llm_payload = dict(llm)
llm_payload.setdefault("model_type", chat_assistant_llm_model_type)
payload = {"name": "llm_test", "dataset_ids": [dataset_id], "llm": llm_payload}
res = update_chat_assistant(HttpApiAuth, chat_assistant_ids[0], payload)
assert res["code"] == expected_code
if expected_code == 0: