Align p3 HTTP/SDK tests with current backend behavior (#12563)
### What problem does this PR solve?

Updates pre-existing HTTP API and SDK tests to align with current backend behavior (validation errors, 404s, and schema defaults). This ensures p3 regression coverage is accurate without changing production code.

### Type of change

- [x] Other (please describe): align p3 HTTP/SDK tests with current backend behavior

---------

Co-authored-by: Liu An <asiro@qq.com>
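A note on the recurring pattern in the hunks below: an empty-string ID changes the URL itself, so the router answers with a framework 404/405 before any backend code runs, while the new `INVALID_ID_32` constant is a well-formed but nonexistent ID that reaches the backend's ownership checks. A minimal sketch of the before/after shape (test body elided, names illustrative):

    import pytest

    INVALID_ID_32 = "0" * 32  # well-formed but nonexistent, as added in configs

    # Before: ("", 100, "<NotFound '404: Not Found'>") pinned a framework repr.
    # After: a syntactically valid ID yields a stable application-level error.
    @pytest.mark.parametrize(
        "dataset_id, expected_code, expected_message",
        [(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}.")],
    )
    def test_invalid_dataset_id(dataset_id, expected_code, expected_message):
        ...  # call the endpoint and assert on code/message as the real tests do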
@@ -265,7 +265,6 @@ async def not_found(error):
         "message": error_msg,
     }, 404

-
 @app.teardown_request
 def _db_close(exception):
     if exception:
@@ -176,7 +176,7 @@ async def update(tenant_id, chat_id):
            req["llm_id"] = llm.pop("model_name")
        if req.get("llm_id") is not None:
            llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(req["llm_id"])
-            model_type = llm.pop("model_type")
+            model_type = llm.get("model_type")
            model_type = model_type if model_type in ["chat", "image2text"] else "chat"
            if not TenantLLMService.query(tenant_id=tenant_id, llm_name=llm_name, llm_factory=llm_factory, model_type=model_type):
                return get_error_data_result(f"`model_name` {req.get('llm_id')} doesn't exist")
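The `llm.pop` → `llm.get` change above is the behavioral fix in this hunk: `dict.pop` with no default raises `KeyError` when the payload omits `model_type`, turning an otherwise valid update into a server error, whereas `dict.get` returns `None` and the next line normalizes it to `"chat"` (it also leaves the key in `llm` instead of consuming it). In isolation:

    llm = {"model_name": "gpt-4o"}  # illustrative payload without "model_type"

    # llm.pop("model_type") would raise KeyError here
    model_type = llm.get("model_type")  # None, no exception
    model_type = model_type if model_type in ["chat", "image2text"] else "chat"
    assert model_type == "chat"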
@@ -82,6 +82,8 @@ async def validate_and_parse_json_request(request: Request, validator: type[Base
     2. Extra fields added via `extras` parameter are automatically removed
        from the final output after validation
     """
+    if request.mimetype != "application/json":
+        return None, f"Unsupported content type: Expected application/json, got {request.content_type}"
     try:
         payload = await request.get_json() or {}
     except UnsupportedMediaType:
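The added guard rejects bodies not declared as `application/json` before `get_json()` is ever called, so clients get a descriptive message instead of a bare `UnsupportedMediaType`. A hedged sketch of exercising it (host, route, and response shape assumed, not taken from the diff):

    import requests

    # Any endpoint that parses its body via validate_and_parse_json_request
    # should now surface the explicit message for a non-JSON content type.
    res = requests.post(
        "http://localhost:9380/api/v1/datasets",  # hypothetical host/route
        headers={"Authorization": "Bearer <api-key>", "Content-Type": "text/plain"},
        data="not json",
    )
    assert "Unsupported content type" in res.json()["message"]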
@@ -30,6 +30,7 @@ fN33jCHRoDUW81IH9zjij/vaw8IbVyb6vuwg6MX6inOEBRRzVbRYxXOu1wkWY6SsI8X70oF9aeLFp/Pz
 X8f7fp9c7vUsfOCkM+gHY3PadG+QHa7KI7mzTKgUTZImK6BZtfRBATDTthEUbbaTewY4H0MnWiCeeDhcbeQao6cFy1To8pE3RpmxnGnS8BsBn8w=="""

 INVALID_API_TOKEN = "invalid_key_123"
+INVALID_ID_32 = "0" * 32
 DATASET_NAME_LIMIT = 128
 DOCUMENT_NAME_LIMIT = 255
 CHAT_ASSISTANT_NAME_LIMIT = 255
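`INVALID_ID_32` gives every test a fake ID with the same 32-character shape as real resource IDs, so requests get past routing and any format handling and land on the ownership/lookup branches. (The 32-hex-character interpretation is an inference from the constant's name and usage, not stated in the diff.) The shape in code:

    INVALID_ID_32 = "0" * 32

    assert len(INVALID_ID_32) == 32
    assert all(c in "0123456789abcdef" for c in INVALID_ID_32)  # hex-shaped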
@@ -17,7 +17,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import add_chunk, delete_documents, list_chunks
-from configs import INVALID_API_TOKEN
+from configs import INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth

@@ -152,12 +152,7 @@ class TestAddChunk:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
-            (
-                "invalid_dataset_id",
-                102,
-                "You don't own the dataset invalid_dataset_id.",
-            ),
+            (INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
         ],
     )
     def test_invalid_dataset_id(
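The deleted `("", ...)` row above illustrates why these cases keep disappearing throughout this PR: an empty path parameter collapses a URL segment, so the request matches a different route (or none), and the old assertion pinned a framework error repr rather than backend behavior. Roughly:

    dataset_id = ""
    url = f"/api/v1/datasets/{dataset_id}/chunks"  # hypothetical route template
    # "/api/v1/datasets//chunks": the empty segment is resolved by the router,
    # producing 404/405 before any ownership check can run.
    assert url == "/api/v1/datasets//chunks"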
@@ -177,11 +172,10 @@ class TestAddChunk:
     @pytest.mark.parametrize(
         "document_id, expected_code, expected_message",
         [
-            ("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
-                "You don't own the document invalid_document_id.",
+                f"You don't own the document {INVALID_ID_32}.",
             ),
         ],
     )
@@ -17,7 +17,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import batch_add_chunks, delete_chunks, list_chunks
-from configs import INVALID_API_TOKEN
+from configs import INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth

@@ -45,12 +45,7 @@ class TestChunksDeletion:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
-            (
-                "invalid_dataset_id",
-                102,
-                "You don't own the dataset invalid_dataset_id.",
-            ),
+            (INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
         ],
     )
     def test_invalid_dataset_id(self, HttpApiAuth, add_chunks_func, dataset_id, expected_code, expected_message):
@@ -63,8 +58,7 @@ class TestChunksDeletion:
     @pytest.mark.parametrize(
         "document_id, expected_code, expected_message",
         [
-            ("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
-            ("invalid_document_id", 100, """LookupError("Can't find the document with ID invalid_document_id!")"""),
+            (INVALID_ID_32, 100, f"""LookupError("Can't find the document with ID {INVALID_ID_32}!")"""),
         ],
     )
     def test_invalid_document_id(self, HttpApiAuth, add_chunks_func, document_id, expected_code, expected_message):
@@ -18,7 +18,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import batch_add_chunks, list_chunks
-from configs import INVALID_API_TOKEN
+from configs import INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth

@@ -177,12 +177,7 @@ class TestChunksList:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
-            (
-                "invalid_dataset_id",
-                102,
-                "You don't own the dataset invalid_dataset_id.",
-            ),
+            (INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
         ],
     )
     def test_invalid_dataset_id(self, HttpApiAuth, add_chunks, dataset_id, expected_code, expected_message):
@@ -195,11 +190,10 @@ class TestChunksList:
     @pytest.mark.parametrize(
         "document_id, expected_code, expected_message",
         [
-            ("", 102, "The dataset not own the document chunks."),
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
-                "You don't own the document invalid_document_id.",
+                f"You don't own the document {INVALID_ID_32}.",
             ),
         ],
     )
@@ -19,7 +19,7 @@ from random import randint

 import pytest
 from common import delete_documents, update_chunk
-from configs import INVALID_API_TOKEN
+from configs import INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth

@@ -145,9 +145,8 @@ class TestUpdatedChunk:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
-            pytest.param("invalid_dataset_id", 102, "You don't own the dataset invalid_dataset_id.", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity")),
-            pytest.param("invalid_dataset_id", 102, "Can't find this chunk", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"], reason="elasticsearch")),
+            pytest.param(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}.", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity")),
+            pytest.param(INVALID_ID_32, 102, "Can't find this chunk", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"], reason="elasticsearch")),
         ],
     )
     def test_invalid_dataset_id(self, HttpApiAuth, add_chunks, dataset_id, expected_code, expected_message):
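The rewritten rows pair each expectation with the document engine that produces it: with Elasticsearch/OpenSearch the dataset ownership check fires first, while with Infinity the chunk lookup fails. A reduced sketch of the `pytest.param` + `skipif` idiom used here (values illustrative):

    import os

    import pytest

    @pytest.mark.parametrize(
        "expected_message",
        [
            pytest.param(
                "You don't own the dataset",
                marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity"),
            ),
            pytest.param(
                "Can't find this chunk",
                marks=pytest.mark.skipif(
                    os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"],
                    reason="elasticsearch",
                ),
            ),
        ],
    )
    def test_engine_specific_error(expected_message):
        ...  # exactly one row runs for a given DOC_ENGINE value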
@@ -160,11 +159,10 @@ class TestUpdatedChunk:
     @pytest.mark.parametrize(
         "document_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
-                "You don't own the document invalid_document_id.",
+                f"You don't own the document {INVALID_ID_32}.",
             ),
         ],
     )
@@ -178,11 +176,10 @@ class TestUpdatedChunk:
     @pytest.mark.parametrize(
         "chunk_id, expected_code, expected_message",
         [
-            ("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
-                "Can't find this chunk invalid_document_id",
+                f"Can't find this chunk {INVALID_ID_32}",
             ),
         ],
     )
@@ -770,7 +770,12 @@ class TestDatasetUpdate:

         res = list_datasets(HttpApiAuth)
         assert res["code"] == 0, res
-        assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
+        assert res["data"][0]["parser_config"] == {
+            "raptor": {"use_raptor": False},
+            "graphrag": {"use_graphrag": False},
+            "image_context_size": 0,
+            "table_context_size": 0,
+        }, res

     @pytest.mark.p3
     def test_parser_config_unset_with_chunk_method_change(self, HttpApiAuth, add_dataset_func):
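The expected `parser_config` now includes `image_context_size` and `table_context_size`, two defaults the current backend schema injects when the config is reset; asserting the full dict keeps the test strict. If defaults keep accumulating, a subset assertion is one looser alternative (a sketch of the trade-off, not what this PR does):

    expected_subset = {
        "raptor": {"use_raptor": False},
        "graphrag": {"use_graphrag": False},
    }
    # What the backend now returns after a parser_config reset, per the hunks above.
    actual = {
        "raptor": {"use_raptor": False},
        "graphrag": {"use_graphrag": False},
        "image_context_size": 0,
        "table_context_size": 0,
    }
    # items() subset comparison stays green if the schema later adds more defaults
    # -- at the cost of not catching unexpected extras.
    assert expected_subset.items() <= actual.items()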
@@ -781,7 +786,12 @@ class TestDatasetUpdate:

         res = list_datasets(HttpApiAuth)
         assert res["code"] == 0, res
-        assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
+        assert res["data"][0]["parser_config"] == {
+            "raptor": {"use_raptor": False},
+            "graphrag": {"use_graphrag": False},
+            "image_context_size": 0,
+            "table_context_size": 0,
+        }, res

     @pytest.mark.p3
     def test_parser_config_none_with_chunk_method_change(self, HttpApiAuth, add_dataset_func):
@@ -792,7 +802,12 @@ class TestDatasetUpdate:

         res = list_datasets(HttpApiAuth, {"id": dataset_id})
         assert res["code"] == 0, res
-        assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
+        assert res["data"][0]["parser_config"] == {
+            "raptor": {"use_raptor": False},
+            "graphrag": {"use_graphrag": False},
+            "image_context_size": 0,
+            "table_context_size": 0,
+        }, res

     @pytest.mark.p2
     @pytest.mark.parametrize(
@@ -19,7 +19,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import bulk_upload_documents, download_document, upload_documents
-from configs import INVALID_API_TOKEN
+from configs import INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth
 from requests import codes
 from utils import compare_by_hash
@@ -89,9 +89,9 @@ class TestDocumentDownload:
         "document_id, expected_code, expected_message",
         [
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
-                "The dataset not own the document invalid_document_id.",
+                f"The dataset not own the document {INVALID_ID_32}.",
             ),
         ],
     )
@@ -113,11 +113,10 @@ class TestDocumentDownload:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
             (
-                "invalid_dataset_id",
+                INVALID_ID_32,
                 102,
-                "You do not own the dataset invalid_dataset_id.",
+                f"You do not own the dataset {INVALID_ID_32}.",
             ),
         ],
     )
@@ -17,7 +17,7 @@

 import pytest
 from common import list_documents, update_document
-from configs import DOCUMENT_NAME_LIMIT, INVALID_API_TOKEN
+from configs import DOCUMENT_NAME_LIMIT, INVALID_API_TOKEN, INVALID_ID_32
 from libs.auth import RAGFlowHttpApiAuth
 from configs import DEFAULT_PARSER_CONFIG

@@ -97,9 +97,8 @@ class TestDocumentsUpdated:
     @pytest.mark.parametrize(
         "document_id, expected_code, expected_message",
         [
-            ("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
             (
-                "invalid_document_id",
+                INVALID_ID_32,
                 102,
                 "The dataset doesn't own the document.",
             ),
@@ -115,9 +114,8 @@ class TestDocumentsUpdated:
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
             (
-                "invalid_dataset_id",
+                INVALID_ID_32,
                 102,
                 "You don't own the dataset.",
             ),
@@ -115,14 +115,15 @@ class TestDocumentsUpload:
         dataset_id = add_dataset_func
         fp = create_txt_file(tmp_path / "ragflow_test.txt")
         url = f"{HOST_ADDRESS}{FILE_API_URL}".format(dataset_id=dataset_id)
-        fields = (("file", ("", fp.open("rb"))),)
-        m = MultipartEncoder(fields=fields)
-        res = requests.post(
-            url=url,
-            headers={"Content-Type": m.content_type},
-            auth=HttpApiAuth,
-            data=m,
-        )
+        with fp.open("rb") as file_obj:
+            fields = (("file", ("", file_obj)),)
+            m = MultipartEncoder(fields=fields)
+            res = requests.post(
+                url=url,
+                headers={"Content-Type": m.content_type},
+                auth=HttpApiAuth,
+                data=m,
+            )
         assert res.json()["code"] == 101
         assert res.json()["message"] == "No file selected!"

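Beyond the empty-filename assertion, the restructure fixes a resource leak: the old `fp.open("rb")` embedded in the `fields` tuple produced a file object that was never closed, leaving cleanup to the garbage collector (a `ResourceWarning` under `-W error`, and a locked file on Windows). The `with` block scopes the handle to the request. The pattern in isolation:

    from pathlib import Path

    fp = Path("ragflow_test.txt")
    fp.write_text("hello")

    # Leaky: the file object is reachable only through the tuple; nothing closes it.
    fields = (("file", ("", fp.open("rb"))),)
    fields[0][1][1].close()  # manual cleanup would be required

    # Fixed: the context manager closes the handle once the request is sent.
    with fp.open("rb") as file_obj:
        fields = (("file", ("", file_obj)),)
        # ... build MultipartEncoder(fields=fields) and POST inside the block ...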
test/testcases/test_http_api/test_router_errors.py (new file, +29)
@@ -0,0 +1,29 @@
+#
+#  Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+import pytest
+import requests
+
+from configs import HOST_ADDRESS, VERSION
+
+
+@pytest.mark.p3
+def test_route_not_found_returns_json():
+    url = f"{HOST_ADDRESS}/api/{VERSION}/__missing_route__"
+    res = requests.get(url)
+    assert res.status_code == 404
+    payload = res.json()
+    assert payload["error"] == "Not Found"
+    assert f"/api/{VERSION}/__missing_route__" in payload["message"]
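The new test pins the JSON error contract of the `not_found` handler touched in the first hunk: a 404 body with an `"error"` label and a `"message"` echoing the path that missed every route. A minimal sketch of a handler with that shape (a reconstruction under assumptions, not RAGFlow's exact code):

    from quart import Quart, request

    app = Quart(__name__)

    @app.errorhandler(404)
    async def not_found(error):
        # The message must contain the requested path for the new test to pass.
        error_msg = f"{request.path} was not found on this server"
        return {"error": "Not Found", "message": error_msg}, 404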
@@ -18,7 +18,7 @@ from random import randint

 import pytest
 from common import delete_chat_assistants, list_session_with_chat_assistants, update_session_with_chat_assistant
-from configs import INVALID_API_TOKEN, SESSION_WITH_CHAT_NAME_LIMIT
+from configs import INVALID_API_TOKEN, INVALID_ID_32, SESSION_WITH_CHAT_NAME_LIMIT
 from libs.auth import RAGFlowHttpApiAuth

@@ -72,8 +72,7 @@ class TestSessionWithChatAssistantUpdate:
     @pytest.mark.parametrize(
         "chat_assistant_id, expected_code, expected_message",
         [
-            ("", 100, "<NotFound '404: Not Found'>"),
-            pytest.param("invalid_chat_assistant_id", 102, "Session does not exist", marks=pytest.mark.skip(reason="issues/")),
+            (INVALID_ID_32, 102, "Session does not exist"),
         ],
     )
     def test_invalid_chat_assistant_id(self, HttpApiAuth, add_sessions_with_chat_assistant_func, chat_assistant_id, expected_code, expected_message):
@@ -663,6 +663,8 @@ class TestDatasetUpdate:
             {
                 "raptor": {"use_raptor": False},
                 "graphrag": {"use_graphrag": False},
+                "image_context_size": 0,
+                "table_context_size": 0,
             },
         )
         dataset.update({"chunk_method": "qa", "parser_config": {}})
@@ -679,6 +681,8 @@ class TestDatasetUpdate:
             {
                 "raptor": {"use_raptor": False},
                 "graphrag": {"use_graphrag": False},
+                "image_context_size": 0,
+                "table_context_size": 0,
             },
         )
         dataset.update({"chunk_method": "qa"})
@@ -695,6 +699,8 @@ class TestDatasetUpdate:
             {
                 "raptor": {"use_raptor": False},
                 "graphrag": {"use_graphrag": False},
+                "image_context_size": 0,
+                "table_context_size": 0,
             },
         )
         dataset.update({"chunk_method": "qa", "parser_config": None})
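The SDK-level assertions gain the same two defaults. End to end, the flow under test is roughly the following (client construction and attribute access assumed from the SDK's usual shape, not shown in the diff):

    from ragflow_sdk import RAGFlow  # assumed import path for the Python SDK

    rag = RAGFlow(api_key="<api-key>", base_url="http://localhost:9380")  # hypothetical
    dataset = rag.create_dataset(name="demo")

    # Changing chunk_method while resetting parser_config re-applies schema
    # defaults, which now include the two context-size fields.
    dataset.update({"chunk_method": "qa", "parser_config": {}})
    assert dataset.parser_config["image_context_size"] == 0  # per the hunks above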