mirror of
https://github.com/infiniflow/ragflow.git
synced 2026-01-23 11:36:38 +08:00
Feat: update and add new tests for web api apps (#12714)
### What problem does this PR solve? This PR adds missing web API tests (system, search, KB, LLM, plugin, connector). It also addresses a contract mismatch that was causing test failures: metadata updates did not persist new keys (update‑only behavior). ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) - [x] New Feature (non-breaking change which adds functionality) - [x] Other (please describe): Test coverage expansion and test helper instrumentation
This commit is contained in:
@ -13,6 +13,10 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
@ -30,6 +34,137 @@ DIALOG_APP_URL = f"/{VERSION}/dialog"
|
||||
# SESSION_WITH_AGENT_API_URL = "/api/v1/agents/{agent_id}/sessions"
|
||||
# Web-API route prefixes used by the request helpers in this module.
# NOTE(review): MEMORY/MESSAGE include a leading "/api" segment while the
# others do not — presumably matching the server's route registration; confirm.
MEMORY_API_URL = f"/api/{VERSION}/memories"
MESSAGE_API_URL = f"/api/{VERSION}/messages"
API_APP_URL = f"/{VERSION}/api"
SYSTEM_APP_URL = f"/{VERSION}/system"
LLM_APP_URL = f"/{VERSION}/llm"
PLUGIN_APP_URL = f"/{VERSION}/plugin"
SEARCH_APP_URL = f"/{VERSION}/search"
|
||||
|
||||
|
||||
def _http_debug_enabled():
|
||||
return os.getenv("TEST_HTTP_DEBUG") == "1"
|
||||
|
||||
|
||||
def _redact_payload(payload):
|
||||
if not isinstance(payload, dict):
|
||||
return payload
|
||||
redacted = {}
|
||||
for key, value in payload.items():
|
||||
if any(token in key.lower() for token in ("api_key", "password", "token", "secret", "authorization")):
|
||||
redacted[key] = "***redacted***"
|
||||
else:
|
||||
redacted[key] = value
|
||||
return redacted
|
||||
|
||||
|
||||
def _log_http_debug(method, url, req_id, payload, status, text, resp_json, elapsed_ms):
    """Print a per-request trace when TEST_HTTP_DEBUG=1; no-op otherwise.

    The request payload is redacted before printing so credentials never
    land in CI logs.
    """
    if not _http_debug_enabled():
        return
    request_body = json.dumps(_redact_payload(payload), default=str)
    response_body = json.dumps(resp_json, default=str) if resp_json is not None else None
    for line in (
        f"{method} {url} req_id={req_id} elapsed_ms={elapsed_ms:.1f}",
        f"request_payload={request_body}",
        f"status={status}",
        f"response_text={text}",
        f"response_json={response_body}",
    ):
        print(f"[HTTP DEBUG] {line}")
|
||||
|
||||
|
||||
# API APP
def api_new_token(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /api/new_token (empty JSON body by default); return decoded JSON."""
    payload = {} if payload is None else payload
    return requests.post(url=f"{HOST_ADDRESS}{API_APP_URL}/new_token", headers=headers, auth=auth, json=payload, data=data).json()


def api_token_list(auth, params=None, *, headers=HEADERS):
    """GET /api/token_list; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{API_APP_URL}/token_list", headers=headers, auth=auth, params=params).json()


def api_rm_token(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /api/rm; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{API_APP_URL}/rm", headers=headers, auth=auth, json=payload, data=data).json()


def api_stats(auth, params=None, *, headers=HEADERS):
    """GET /api/stats; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{API_APP_URL}/stats", headers=headers, auth=auth, params=params).json()
|
||||
|
||||
|
||||
# SYSTEM APP
def system_new_token(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /system/new_token; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/new_token", headers=headers, auth=auth, json=payload, data=data).json()


def system_token_list(auth, params=None, *, headers=HEADERS):
    """GET /system/token_list; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/token_list", headers=headers, auth=auth, params=params).json()


def system_delete_token(auth, token, *, headers=HEADERS):
    """DELETE /system/token/<token>; return decoded JSON."""
    return requests.delete(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/token/{token}", headers=headers, auth=auth).json()


def system_status(auth, params=None, *, headers=HEADERS):
    """GET /system/status; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/status", headers=headers, auth=auth, params=params).json()


def system_version(auth, params=None, *, headers=HEADERS):
    """GET /system/version; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/version", headers=headers, auth=auth, params=params).json()


def system_config(auth=None, params=None, *, headers=HEADERS):
    """GET /system/config; auth is optional — the endpoint is also hit anonymously."""
    return requests.get(url=f"{HOST_ADDRESS}{SYSTEM_APP_URL}/config", headers=headers, auth=auth, params=params).json()
|
||||
|
||||
|
||||
# LLM APP
def llm_factories(auth, params=None, *, headers=HEADERS):
    """GET /llm/factories; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{LLM_APP_URL}/factories", headers=headers, auth=auth, params=params).json()


def llm_list(auth, params=None, *, headers=HEADERS):
    """GET /llm/list; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{LLM_APP_URL}/list", headers=headers, auth=auth, params=params).json()
|
||||
|
||||
|
||||
# PLUGIN APP
def plugin_llm_tools(auth, params=None, *, headers=HEADERS):
    """GET /plugin/llm_tools; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{PLUGIN_APP_URL}/llm_tools", headers=headers, auth=auth, params=params).json()
|
||||
|
||||
|
||||
# SEARCH APP
def search_create(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /search/create; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{SEARCH_APP_URL}/create", headers=headers, auth=auth, json=payload, data=data).json()


def search_update(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /search/update; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{SEARCH_APP_URL}/update", headers=headers, auth=auth, json=payload, data=data).json()


def search_detail(auth, params=None, *, headers=HEADERS):
    """GET /search/detail; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{SEARCH_APP_URL}/detail", headers=headers, auth=auth, params=params).json()


def search_list(auth, params=None, payload=None, *, headers=HEADERS, data=None):
    """POST /search/list (empty JSON body by default); return decoded JSON."""
    payload = {} if payload is None else payload
    return requests.post(url=f"{HOST_ADDRESS}{SEARCH_APP_URL}/list", headers=headers, auth=auth, params=params, json=payload, data=data).json()


def search_rm(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /search/rm; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{SEARCH_APP_URL}/rm", headers=headers, auth=auth, json=payload, data=data).json()
|
||||
|
||||
|
||||
# KB APP
|
||||
@ -60,6 +195,77 @@ def detail_kb(auth, params=None, *, headers=HEADERS):
|
||||
return res.json()
|
||||
|
||||
|
||||
def kb_get_meta(auth, params=None, *, headers=HEADERS):
    """GET /kb/get_meta; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/get_meta", headers=headers, auth=auth, params=params).json()


def kb_basic_info(auth, params=None, *, headers=HEADERS):
    """GET /kb/basic_info; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/basic_info", headers=headers, auth=auth, params=params).json()


def kb_update_metadata_setting(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/update_metadata_setting; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/update_metadata_setting", headers=headers, auth=auth, json=payload, data=data).json()


def kb_list_pipeline_logs(auth, params=None, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/list_pipeline_logs (empty JSON body by default); return decoded JSON."""
    payload = {} if payload is None else payload
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/list_pipeline_logs", headers=headers, auth=auth, params=params, json=payload, data=data).json()


def kb_list_pipeline_dataset_logs(auth, params=None, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/list_pipeline_dataset_logs (empty JSON body by default); return decoded JSON."""
    payload = {} if payload is None else payload
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/list_pipeline_dataset_logs", headers=headers, auth=auth, params=params, json=payload, data=data).json()


def kb_delete_pipeline_logs(auth, params=None, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/delete_pipeline_logs (empty JSON body by default); return decoded JSON."""
    payload = {} if payload is None else payload
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/delete_pipeline_logs", headers=headers, auth=auth, params=params, json=payload, data=data).json()


def kb_pipeline_log_detail(auth, params=None, *, headers=HEADERS):
    """GET /kb/pipeline_log_detail; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/pipeline_log_detail", headers=headers, auth=auth, params=params).json()


def kb_run_graphrag(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/run_graphrag; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/run_graphrag", headers=headers, auth=auth, json=payload, data=data).json()


def kb_trace_graphrag(auth, params=None, *, headers=HEADERS):
    """GET /kb/trace_graphrag; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/trace_graphrag", headers=headers, auth=auth, params=params).json()


def kb_run_raptor(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/run_raptor; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/run_raptor", headers=headers, auth=auth, json=payload, data=data).json()


def kb_trace_raptor(auth, params=None, *, headers=HEADERS):
    """GET /kb/trace_raptor; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/trace_raptor", headers=headers, auth=auth, params=params).json()


def kb_run_mindmap(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/run_mindmap; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/run_mindmap", headers=headers, auth=auth, json=payload, data=data).json()


def kb_trace_mindmap(auth, params=None, *, headers=HEADERS):
    """GET /kb/trace_mindmap; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/trace_mindmap", headers=headers, auth=auth, params=params).json()


def list_tags_from_kbs(auth, params=None, *, headers=HEADERS):
    """GET /kb/tags; return decoded JSON."""
    return requests.get(url=f"{HOST_ADDRESS}{KB_APP_URL}/tags", headers=headers, auth=auth, params=params).json()
|
||||
@ -76,7 +282,7 @@ def rm_tags(auth, dataset_id, payload=None, *, headers=HEADERS, data=None):
|
||||
|
||||
|
||||
def rename_tags(auth, dataset_id, payload=None, *, headers=HEADERS, data=None):
    """POST /kb/<dataset_id>/rename_tag; return decoded JSON.

    Fix: the block contained two consecutive requests.post calls (diff
    residue) — the first hit the legacy "/rename_tags" route and its result
    was discarded, issuing a duplicate network call per invocation. Only
    the corrected singular "/rename_tag" endpoint is kept.
    """
    res = requests.post(url=f"{HOST_ADDRESS}{KB_APP_URL}/{dataset_id}/rename_tag", headers=headers, auth=auth, json=payload, data=data)
    return res.json()
|
||||
|
||||
|
||||
@ -154,6 +360,46 @@ def parse_documents(auth, payload=None, *, headers=HEADERS, data=None):
|
||||
return res.json()
|
||||
|
||||
|
||||
def document_filter(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/filter; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/filter", headers=headers, auth=auth, json=payload, data=data).json()


def document_infos(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/infos; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/infos", headers=headers, auth=auth, json=payload, data=data).json()


def document_metadata_summary(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/metadata/summary; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/metadata/summary", headers=headers, auth=auth, json=payload, data=data).json()


def document_metadata_update(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/metadata/update; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/metadata/update", headers=headers, auth=auth, json=payload, data=data).json()


def document_update_metadata_setting(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/update_metadata_setting; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/update_metadata_setting", headers=headers, auth=auth, json=payload, data=data).json()


def document_change_status(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/change_status; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/change_status", headers=headers, auth=auth, json=payload, data=data).json()


def document_rename(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/rename; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/rename", headers=headers, auth=auth, json=payload, data=data).json()


def document_set_meta(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /document/set_meta; return decoded JSON."""
    return requests.post(url=f"{HOST_ADDRESS}{DOCUMENT_APP_URL}/set_meta", headers=headers, auth=auth, json=payload, data=data).json()
|
||||
|
||||
|
||||
def bulk_upload_documents(auth, kb_id, num, tmp_path):
|
||||
fps = []
|
||||
for i in range(num):
|
||||
@ -208,8 +454,33 @@ def batch_add_chunks(auth, doc_id, num):
|
||||
|
||||
# DIALOG APP
|
||||
def create_dialog(auth, payload=None, *, headers=HEADERS, data=None):
    """POST /dialog/set and return the decoded JSON body.

    Instrumented helper: every request is tagged with an X-Request-ID, and
    when TEST_HTTP_DEBUG=1 a full trace is printed and an AssertionError is
    raised on a non-2xx status or non-zero application code, so CI logs
    pinpoint the failing request.

    Fix: the block contained diff residue — the pre-instrumentation body
    (`res = requests.post(...); return res.json()`) sat above the new code,
    short-circuiting before any of it ran. Only the instrumented version is
    kept.

    Raises:
        AssertionError: debug mode only, when the request did not succeed.
        ValueError: when the response body is not valid JSON.
    """
    if payload is None:
        payload = {}
    url = f"{HOST_ADDRESS}{DIALOG_APP_URL}/set"
    req_id = str(uuid.uuid4())
    # Copy so the caller's headers dict is never mutated.
    req_headers = dict(headers)
    req_headers["X-Request-ID"] = req_id
    start = time.monotonic()
    res = requests.post(url=url, headers=req_headers, auth=auth, json=payload, data=data)
    elapsed_ms = (time.monotonic() - start) * 1000
    resp_json = None
    json_error = None
    try:
        resp_json = res.json()
    except ValueError as exc:
        # Defer the decode error so the debug trace is still emitted.
        json_error = exc
    _log_http_debug("POST", url, req_id, payload, res.status_code, res.text, resp_json, elapsed_ms)
    if _http_debug_enabled():
        if not res.ok or (resp_json is not None and resp_json.get("code") != 0):
            payload_summary = _redact_payload(payload)
            raise AssertionError(
                "HTTP helper failure: "
                f"req_id={req_id} url={url} status={res.status_code} "
                f"payload={payload_summary} response={res.text}"
            )
    if json_error:
        raise json_error
    return resp_json
|
||||
|
||||
|
||||
def update_dialog(auth, payload=None, *, headers=HEADERS, data=None):
|
||||
@ -238,11 +509,21 @@ def batch_create_dialogs(auth, num, kb_ids=None):
|
||||
|
||||
dialog_ids = []
|
||||
for i in range(num):
|
||||
if kb_ids:
|
||||
prompt_config = {
|
||||
"system": "You are a helpful assistant. Use the following knowledge to answer questions: {knowledge}",
|
||||
"parameters": [{"key": "knowledge", "optional": False}],
|
||||
}
|
||||
else:
|
||||
prompt_config = {
|
||||
"system": "You are a helpful assistant.",
|
||||
"parameters": [],
|
||||
}
|
||||
payload = {
|
||||
"name": f"dialog_{i}",
|
||||
"description": f"Test dialog {i}",
|
||||
"kb_ids": kb_ids,
|
||||
"prompt_config": {"system": "You are a helpful assistant. Use the following knowledge to answer questions: {knowledge}", "parameters": [{"key": "knowledge", "optional": False}]},
|
||||
"prompt_config": prompt_config,
|
||||
"top_n": 6,
|
||||
"top_k": 1024,
|
||||
"similarity_threshold": 0.1,
|
||||
@ -250,6 +531,12 @@ def batch_create_dialogs(auth, num, kb_ids=None):
|
||||
"llm_setting": {"model": "gpt-3.5-turbo", "temperature": 0.7},
|
||||
}
|
||||
res = create_dialog(auth, payload)
|
||||
if res is None or res.get("code") != 0:
|
||||
uses_knowledge = "{knowledge}" in payload["prompt_config"]["system"]
|
||||
raise AssertionError(
|
||||
"batch_create_dialogs failed: "
|
||||
f"res={res} kb_ids_len={len(kb_ids)} uses_knowledge={uses_knowledge}"
|
||||
)
|
||||
if res["code"] == 0:
|
||||
dialog_ids.append(res["data"]["id"])
|
||||
return dialog_ids
|
||||
|
||||
@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import os
|
||||
from time import sleep
|
||||
from ragflow_sdk import RAGFlow
|
||||
from configs import HOST_ADDRESS, VERSION
|
||||
@ -91,6 +92,15 @@ def WebApiAuth(auth):
|
||||
return RAGFlowWebApiAuth(auth)
|
||||
|
||||
|
||||
@pytest.fixture
def require_env_flag():
    """Fixture factory: skip the calling test unless an env flag has the wanted value."""

    def _require(flag, value="1"):
        # pytest.skip aborts the test immediately when the gate is not open.
        if os.environ.get(flag) != value:
            pytest.skip(f"Requires {flag}={value}")

    return _require
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def clear_datasets(request: FixtureRequest, WebApiAuth: RAGFlowWebApiAuth):
|
||||
def cleanup():
|
||||
|
||||
87
test/testcases/test_web_api/test_api_app/test_api_tokens.py
Normal file
87
test/testcases/test_web_api/test_api_app/test_api_tokens.py
Normal file
@ -0,0 +1,87 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import api_new_token, api_rm_token, api_stats, api_token_list, batch_create_dialogs
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
|
||||
# Parametrize rows: (auth object, expected application code, expected message fragment).
INVALID_AUTH_CASES = [
    (None, 401, "Unauthorized"),
    (RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "Unauthorized"),
]
|
||||
|
||||
|
||||
class TestAuthorization:
    """Every token endpoint must reject missing or invalid credentials."""

    @staticmethod
    def _expect_rejected(res, expected_code, expected_fragment):
        # Shared assertion pair for all invalid-auth cases.
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_new_token(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(api_new_token(invalid_auth, {"dialog_id": "dummy_dialog_id"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_token_list(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(api_token_list(invalid_auth, {"dialog_id": "dummy_dialog_id"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_rm(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(api_rm_token(invalid_auth, {"tokens": ["dummy_token"], "tenant_id": "dummy_tenant"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_stats(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(api_stats(invalid_auth), expected_code, expected_fragment)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("clear_dialogs")
class TestApiTokens:
    """Happy-path coverage for the dialog API-token endpoints."""

    @pytest.mark.p2
    def test_token_lifecycle(self, WebApiAuth):
        """Create a token for a fresh dialog, see it listed, then remove it."""
        dialog_id = batch_create_dialogs(WebApiAuth, 1)[0]
        create_res = api_new_token(WebApiAuth, {"dialog_id": dialog_id})
        assert create_res["code"] == 0, create_res
        token = create_res["data"]["token"]
        tenant_id = create_res["data"]["tenant_id"]

        # The freshly minted token must appear in the dialog's token list.
        list_res = api_token_list(WebApiAuth, {"dialog_id": dialog_id})
        assert list_res["code"] == 0, list_res
        assert any(item["token"] == token for item in list_res["data"]), list_res

        rm_res = api_rm_token(WebApiAuth, {"tokens": [token], "tenant_id": tenant_id})
        assert rm_res["code"] == 0, rm_res
        assert rm_res["data"] is True, rm_res

    @pytest.mark.p2
    def test_stats_basic(self, WebApiAuth):
        """/api/stats exposes every expected metric key."""
        res = api_stats(WebApiAuth)
        assert res["code"] == 0, res
        for key in ["pv", "uv", "speed", "tokens", "round", "thumb_up"]:
            assert key in res["data"], res

    @pytest.mark.p3
    def test_rm_missing_tokens(self, WebApiAuth):
        """Omitting the required `tokens` argument yields application code 101."""
        res = api_rm_token(WebApiAuth, {"tenant_id": "dummy_tenant"})
        assert res["code"] == 101, res
        assert "required argument are missing" in res["message"], res
|
||||
@ -0,0 +1,150 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import os
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from configs import HOST_ADDRESS, VERSION
|
||||
|
||||
# Endpoints under test, built from the deployment address and API version.
CONNECTOR_BASE_URL = f"{HOST_ADDRESS}/{VERSION}/connector"
LLM_API_KEY_URL = f"{HOST_ADDRESS}/{VERSION}/llm/set_api_key"
LANGFUSE_API_KEY_URL = f"{HOST_ADDRESS}/{VERSION}/langfuse/api_key"

# Whole module is priority-3: external OAuth providers are only exercised
# in opt-in e2e runs (gated by the autouse fixture below).
pytestmark = pytest.mark.p3
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def _require_oauth_env(require_env_flag):
    """Module-wide gate: auto-skip every test here unless RAGFLOW_E2E_OAUTH=1."""
    require_env_flag("RAGFLOW_E2E_OAUTH")
|
||||
|
||||
|
||||
def _skip_unless_provider(allowed):
|
||||
provider = os.getenv("RAGFLOW_OAUTH_PROVIDER")
|
||||
if provider and provider not in allowed:
|
||||
pytest.skip(f"RAGFLOW_OAUTH_PROVIDER={provider} not in {sorted(allowed)}")
|
||||
|
||||
|
||||
def _assert_unauthorized(payload):
|
||||
assert payload["code"] == 401, payload
|
||||
assert "Unauthorized" in payload["message"], payload
|
||||
|
||||
|
||||
def _assert_unauthorized_response(res, *, allow_405=False):
|
||||
if allow_405 and res.status_code == 405:
|
||||
pytest.skip("method not supported in this deployment")
|
||||
content_type = res.headers.get("Content-Type", "")
|
||||
payload = None
|
||||
if "json" in content_type:
|
||||
payload = res.json()
|
||||
else:
|
||||
try:
|
||||
payload = res.json()
|
||||
except ValueError:
|
||||
assert False, f"Expected JSON response, status={res.status_code}, content_type={content_type}"
|
||||
_assert_unauthorized(payload)
|
||||
|
||||
|
||||
def _assert_callback_response(res, expected_fragment):
|
||||
assert res.status_code in {200, 302}, {"status": res.status_code, "headers": dict(res.headers)}
|
||||
if res.status_code == 200:
|
||||
assert "text/html" in res.headers.get("Content-Type", ""), res.headers
|
||||
assert expected_fragment in res.text
|
||||
else:
|
||||
location = res.headers.get("Location", "")
|
||||
assert location, res.headers
|
||||
markers = ("error", "oauth", "callback", "state", "code")
|
||||
assert any(marker in location for marker in markers), location
|
||||
|
||||
|
||||
_GOOGLE_PROVIDERS = {"google", "google-drive", "gmail"}


def test_google_oauth_start_requires_auth():
    """Starting a Google OAuth flow without auth must return 401."""
    _skip_unless_provider(_GOOGLE_PROVIDERS)
    res = requests.post(f"{CONNECTOR_BASE_URL}/google/oauth/web/start")
    _assert_unauthorized(res.json())


def test_google_oauth_start_missing_credentials(WebApiAuth):
    """Authenticated start with an empty body reports the missing `credentials`."""
    _skip_unless_provider(_GOOGLE_PROVIDERS)
    res = requests.post(f"{CONNECTOR_BASE_URL}/google/oauth/web/start", auth=WebApiAuth, json={})
    body = res.json()
    assert body["code"] == 101, body
    for fragment in ("required argument are missing", "credentials"):
        assert fragment in body["message"], body


@pytest.mark.parametrize("path", ["google-drive/oauth/web/callback", "gmail/oauth/web/callback"])
def test_google_oauth_callback_missing_state(path):
    """A callback hit without a state parameter renders/redirects an error."""
    _skip_unless_provider(_GOOGLE_PROVIDERS)
    res = requests.get(f"{CONNECTOR_BASE_URL}/{path}", allow_redirects=False)
    _assert_callback_response(res, "Missing OAuth state parameter.")


def test_google_oauth_result_missing_flow_id(WebApiAuth):
    """Polling the OAuth result without a flow_id reports the missing argument."""
    _skip_unless_provider(_GOOGLE_PROVIDERS)
    res = requests.post(
        f"{CONNECTOR_BASE_URL}/google/oauth/web/result",
        params={"type": "google-drive"},
        auth=WebApiAuth,
        json={},
    )
    body = res.json()
    assert body["code"] == 101, body
    for fragment in ("required argument are missing", "flow_id"):
        assert fragment in body["message"], body
|
||||
|
||||
|
||||
def test_box_oauth_start_missing_params(WebApiAuth):
    """Box OAuth start with an empty body reports missing client credentials."""
    _skip_unless_provider({"box"})
    body = requests.post(f"{CONNECTOR_BASE_URL}/box/oauth/web/start", auth=WebApiAuth, json={}).json()
    assert body["code"] == 101, body
    for fragment in ("client_id", "client_secret"):
        assert fragment in body["message"], body


def test_box_oauth_callback_missing_state():
    """A Box callback hit without OAuth params renders/redirects an error."""
    _skip_unless_provider({"box"})
    res = requests.get(f"{CONNECTOR_BASE_URL}/box/oauth/web/callback", allow_redirects=False)
    _assert_callback_response(res, "Missing OAuth parameters.")


def test_box_oauth_result_missing_flow_id(WebApiAuth):
    """Polling the Box OAuth result without a flow_id reports the missing argument."""
    _skip_unless_provider({"box"})
    body = requests.post(f"{CONNECTOR_BASE_URL}/box/oauth/web/result", auth=WebApiAuth, json={}).json()
    assert body["code"] == 101, body
    for fragment in ("required argument are missing", "flow_id"):
        assert fragment in body["message"], body
|
||||
|
||||
|
||||
def test_langfuse_api_key_requires_auth():
    """POST to the Langfuse key endpoint without auth is rejected."""
    _assert_unauthorized_response(requests.post(LANGFUSE_API_KEY_URL, json={}))


def test_langfuse_api_key_requires_auth_get():
    """GET variant; a 405 is tolerated on deployments without that method."""
    _assert_unauthorized_response(requests.get(LANGFUSE_API_KEY_URL), allow_405=True)


def test_langfuse_api_key_requires_auth_put():
    """PUT variant; a 405 is tolerated on deployments without that method."""
    _assert_unauthorized_response(requests.put(LANGFUSE_API_KEY_URL, json={}), allow_405=True)


def test_llm_set_api_key_requires_auth():
    """POST to the LLM set_api_key endpoint without auth is rejected."""
    _assert_unauthorized_response(requests.post(LLM_API_KEY_URL, json={}))
|
||||
@ -0,0 +1,232 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import (
|
||||
document_change_status,
|
||||
document_filter,
|
||||
document_infos,
|
||||
document_metadata_summary,
|
||||
document_metadata_update,
|
||||
document_rename,
|
||||
document_set_meta,
|
||||
document_update_metadata_setting,
|
||||
)
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
# Parametrize rows: (auth object, expected application code, expected message fragment).
INVALID_AUTH_CASES = [
    (None, 401, "Unauthorized"),
    (RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "Unauthorized"),
]
|
||||
|
||||
|
||||
class TestAuthorization:
    """Every document endpoint must reject missing or invalid credentials."""

    @staticmethod
    def _expect_rejected(res, expected_code, expected_fragment):
        # Shared assertion pair for all invalid-auth cases.
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_filter_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_filter(invalid_auth, {"kb_id": "kb_id"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_infos_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_infos(invalid_auth, {"doc_ids": ["doc_id"]}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_metadata_summary_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_metadata_summary(invalid_auth, {"kb_id": "kb_id"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_metadata_update_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        res = document_metadata_update(invalid_auth, {"kb_id": "kb_id", "selector": {"document_ids": ["doc_id"]}, "updates": []})
        self._expect_rejected(res, expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_update_metadata_setting_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_update_metadata_setting(invalid_auth, {"doc_id": "doc_id", "metadata": {}}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_change_status_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_change_status(invalid_auth, {"doc_ids": ["doc_id"], "status": "1"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_rename_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_rename(invalid_auth, {"doc_id": "doc_id", "name": "rename.txt"}), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_set_meta_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        self._expect_rejected(document_set_meta(invalid_auth, {"doc_id": "doc_id", "meta": "{}"}), expected_code, expected_fragment)
|
||||
|
||||
|
||||
class TestDocumentMetadata:
    """Happy-path tests for document metadata, status, rename and info endpoints."""

    @pytest.mark.p2
    def test_filter(self, WebApiAuth, add_dataset_func):
        """Document filter responds with ``filter`` and ``total`` fields."""
        kb_id = add_dataset_func
        res = document_filter(WebApiAuth, {"kb_id": kb_id})
        assert res["code"] == 0, res
        assert "filter" in res["data"], res
        assert "total" in res["data"], res

    @pytest.mark.p2
    def test_infos(self, WebApiAuth, add_document_func):
        """Document infos returns exactly the requested document."""
        _, doc_id = add_document_func
        res = document_infos(WebApiAuth, {"doc_ids": [doc_id]})
        assert res["code"] == 0, res
        assert len(res["data"]) == 1, res
        assert res["data"][0]["id"] == doc_id, res

    @pytest.mark.p2
    def test_metadata_summary(self, WebApiAuth, add_document_func):
        """Metadata summary returns a dict under ``data.summary``."""
        kb_id, _ = add_document_func
        res = document_metadata_summary(WebApiAuth, {"kb_id": kb_id})
        assert res["code"] == 0, res
        assert isinstance(res["data"]["summary"], dict), res

    @pytest.mark.p2
    def test_metadata_update(self, WebApiAuth, add_document_func):
        """Batch metadata update persists a new key on the selected document."""
        kb_id, doc_id = add_document_func
        payload = {
            "kb_id": kb_id,
            "selector": {"document_ids": [doc_id]},
            "updates": [{"key": "author", "value": "alice"}],
            "deletes": [],
        }
        res = document_metadata_update(WebApiAuth, payload)
        assert res["code"] == 0, res
        assert res["data"]["matched_docs"] == 1, res
        # Re-read the document to confirm the new key was persisted, not just accepted.
        info_res = document_infos(WebApiAuth, {"doc_ids": [doc_id]})
        assert info_res["code"] == 0, info_res
        meta_fields = info_res["data"][0].get("meta_fields", {})
        assert meta_fields.get("author") == "alice", info_res

    @pytest.mark.p2
    def test_update_metadata_setting(self, WebApiAuth, add_document_func):
        """Per-document metadata setting is echoed back in ``parser_config.metadata``."""
        _, doc_id = add_document_func
        metadata = {"source": "test"}
        res = document_update_metadata_setting(WebApiAuth, {"doc_id": doc_id, "metadata": metadata})
        assert res["code"] == 0, res
        assert res["data"]["id"] == doc_id, res
        assert res["data"]["parser_config"]["metadata"] == metadata, res

    @pytest.mark.p2
    def test_change_status(self, WebApiAuth, add_document_func):
        """Status change is reported per doc id and visible on re-read."""
        _, doc_id = add_document_func
        res = document_change_status(WebApiAuth, {"doc_ids": [doc_id], "status": "1"})
        assert res["code"] == 0, res
        assert res["data"][doc_id]["status"] == "1", res
        info_res = document_infos(WebApiAuth, {"doc_ids": [doc_id]})
        assert info_res["code"] == 0, info_res
        assert info_res["data"][0]["status"] == "1", info_res

    @pytest.mark.p2
    def test_rename(self, WebApiAuth, add_document_func):
        """Rename (same extension) succeeds and the new name is visible on re-read."""
        _, doc_id = add_document_func
        name = f"renamed_{doc_id}.txt"
        res = document_rename(WebApiAuth, {"doc_id": doc_id, "name": name})
        assert res["code"] == 0, res
        assert res["data"] is True, res
        info_res = document_infos(WebApiAuth, {"doc_ids": [doc_id]})
        assert info_res["code"] == 0, info_res
        assert info_res["data"][0]["name"] == name, info_res

    @pytest.mark.p2
    def test_set_meta(self, WebApiAuth, add_document_func):
        """Setting meta via a JSON string persists into ``meta_fields``."""
        _, doc_id = add_document_func
        res = document_set_meta(WebApiAuth, {"doc_id": doc_id, "meta": "{\"author\": \"alice\"}"})
        assert res["code"] == 0, res
        assert res["data"] is True, res
        info_res = document_infos(WebApiAuth, {"doc_ids": [doc_id]})
        assert info_res["code"] == 0, info_res
        meta_fields = info_res["data"][0].get("meta_fields", {})
        assert meta_fields.get("author") == "alice", info_res
|
||||
|
||||
|
||||
class TestDocumentMetadataNegative:
    """Negative-path tests: missing arguments, invalid ids, bad values."""

    @pytest.mark.p3
    def test_filter_missing_kb_id(self, WebApiAuth, add_document_func):
        """Filter without kb_id is rejected with code 101."""
        _, doc_id = add_document_func
        res = document_filter(WebApiAuth, {"doc_ids": [doc_id]})
        assert res["code"] == 101, res
        assert "KB ID" in res["message"], res

    @pytest.mark.p3
    def test_metadata_summary_missing_kb_id(self, WebApiAuth, add_document_func):
        """Metadata summary without kb_id is rejected with code 101."""
        _, doc_id = add_document_func
        res = document_metadata_summary(WebApiAuth, {"doc_ids": [doc_id]})
        assert res["code"] == 101, res
        assert "KB ID" in res["message"], res

    @pytest.mark.p3
    def test_metadata_update_missing_kb_id(self, WebApiAuth, add_document_func):
        """Metadata update without kb_id is rejected with code 101."""
        _, doc_id = add_document_func
        res = document_metadata_update(WebApiAuth, {"selector": {"document_ids": [doc_id]}, "updates": []})
        assert res["code"] == 101, res
        assert "KB ID" in res["message"], res

    @pytest.mark.p3
    def test_infos_invalid_doc_id(self, WebApiAuth):
        """Unknown doc id yields authorization error 109, not an empty result."""
        res = document_infos(WebApiAuth, {"doc_ids": ["invalid_id"]})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_update_metadata_setting_missing_metadata(self, WebApiAuth, add_document_func):
        """Omitting ``metadata`` triggers the missing-argument error (101)."""
        _, doc_id = add_document_func
        res = document_update_metadata_setting(WebApiAuth, {"doc_id": doc_id})
        assert res["code"] == 101, res
        assert "required argument are missing" in res["message"], res
        assert "metadata" in res["message"], res

    @pytest.mark.p3
    def test_change_status_invalid_status(self, WebApiAuth, add_document_func):
        """Statuses other than the accepted set (e.g. "2") are rejected with 101."""
        _, doc_id = add_document_func
        res = document_change_status(WebApiAuth, {"doc_ids": [doc_id], "status": "2"})
        assert res["code"] == 101, res
        assert "Status" in res["message"], res

    @pytest.mark.p3
    def test_rename_extension_mismatch(self, WebApiAuth, add_document_func):
        """Renaming across file extensions (.txt fixture -> .pdf) is rejected."""
        _, doc_id = add_document_func
        res = document_rename(WebApiAuth, {"doc_id": doc_id, "name": "renamed.pdf"})
        assert res["code"] == 101, res
        assert "extension" in res["message"], res

    @pytest.mark.p3
    def test_set_meta_invalid_type(self, WebApiAuth, add_document_func):
        """Meta must deserialize to a dictionary; a JSON array is rejected."""
        _, doc_id = add_document_func
        res = document_set_meta(WebApiAuth, {"doc_id": doc_id, "meta": "[]"})
        assert res["code"] == 101, res
        assert "dictionary" in res["message"], res
|
||||
@ -0,0 +1,208 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import (
|
||||
kb_delete_pipeline_logs,
|
||||
kb_list_pipeline_dataset_logs,
|
||||
kb_list_pipeline_logs,
|
||||
kb_pipeline_log_detail,
|
||||
kb_run_graphrag,
|
||||
kb_run_mindmap,
|
||||
kb_run_raptor,
|
||||
kb_trace_graphrag,
|
||||
kb_trace_mindmap,
|
||||
kb_trace_raptor,
|
||||
list_documents,
|
||||
parse_documents,
|
||||
)
|
||||
from utils import wait_for
|
||||
|
||||
TASK_STATUS_DONE = "3"
|
||||
|
||||
def _find_task(data, task_id):
|
||||
if isinstance(data, dict):
|
||||
if data.get("id") == task_id:
|
||||
return data
|
||||
tasks = data.get("tasks")
|
||||
if isinstance(tasks, list):
|
||||
for item in tasks:
|
||||
if isinstance(item, dict) and item.get("id") == task_id:
|
||||
return item
|
||||
elif isinstance(data, list):
|
||||
for item in data:
|
||||
if isinstance(item, dict) and item.get("id") == task_id:
|
||||
return item
|
||||
return None
|
||||
|
||||
|
||||
def _assert_progress_in_scale(progress, payload):
|
||||
assert isinstance(progress, (int, float)), payload
|
||||
if progress < 0:
|
||||
assert False, f"Negative progress is not expected: {payload}"
|
||||
scale = 100 if progress > 1 else 1
|
||||
# Infer scale from observed payload (0..1 or 0..100).
|
||||
assert 0 <= progress <= scale, payload
|
||||
return scale
|
||||
|
||||
|
||||
def _wait_for_task(trace_func, auth, kb_id, task_id, timeout=60):
    """Poll ``trace_func`` until ``task_id`` appears in its payload for ``kb_id``."""

    @wait_for(timeout, 1, "Pipeline task trace timeout")
    def _task_visible():
        trace = trace_func(auth, {"kb_id": kb_id})
        return trace["code"] == 0 and _find_task(trace["data"], task_id) is not None

    _task_visible()
|
||||
|
||||
|
||||
def _wait_for_docs_parsed(auth, kb_id, timeout=60):
    """Block until every document in ``kb_id`` finished parsing.

    A document counts as done when ``run`` equals TASK_STATUS_DONE and its
    progress reached the top of its scale (1 or 100, inferred per document).
    Raises via ``wait_for`` after ``timeout`` seconds.
    """

    @wait_for(timeout, 2, "Document parsing timeout")
    def _condition():
        res = list_documents(auth, {"kb_id": kb_id})
        if res["code"] != 0:
            return False
        for doc in res["data"]["docs"]:
            progress = doc.get("progress", 0)
            # Use the helper's returned scale instead of re-deriving it inline,
            # so the inference logic lives in exactly one place.
            scale = _assert_progress_in_scale(progress, doc)
            if doc.get("run") != TASK_STATUS_DONE or progress < scale:
                return False
        return True

    _condition()
|
||||
|
||||
|
||||
def _wait_for_pipeline_logs(auth, kb_id, timeout=30):
    """Poll until at least one pipeline log exists for ``kb_id``."""

    @wait_for(timeout, 1, "Pipeline log timeout")
    def _logs_present():
        res = kb_list_pipeline_logs(auth, params={"kb_id": kb_id}, payload={})
        return res["code"] == 0 and bool(res["data"]["logs"])

    _logs_present()
|
||||
|
||||
|
||||
class TestKbPipelineTasks:
    """Run GraphRAG / RAPTOR / mindmap pipeline tasks and verify their traces.

    The three tests were byte-identical except for the run/trace functions and
    the task-id key, so the shared flow is factored into ``_run_and_trace``.
    """

    def _run_and_trace(self, auth, kb_id, run_func, trace_func, task_id_key):
        """Start a pipeline task, wait for it to surface in the trace endpoint,
        then check the traced task's id and that its progress is in scale."""
        run_res = run_func(auth, {"kb_id": kb_id})
        assert run_res["code"] == 0, run_res
        task_id = run_res["data"][task_id_key]
        assert task_id, run_res

        _wait_for_task(trace_func, auth, kb_id, task_id)
        trace_res = trace_func(auth, {"kb_id": kb_id})
        assert trace_res["code"] == 0, trace_res
        task = _find_task(trace_res["data"], task_id)
        assert task, trace_res
        assert task["id"] == task_id, trace_res
        progress = task.get("progress")
        _assert_progress_in_scale(progress, task)

    @pytest.mark.p3
    def test_graphrag_run_and_trace(self, WebApiAuth, add_chunks):
        """GraphRAG task can be started and shows up in its trace."""
        kb_id, _, _ = add_chunks
        self._run_and_trace(WebApiAuth, kb_id, kb_run_graphrag, kb_trace_graphrag, "graphrag_task_id")

    @pytest.mark.p3
    def test_raptor_run_and_trace(self, WebApiAuth, add_chunks):
        """RAPTOR task can be started and shows up in its trace."""
        kb_id, _, _ = add_chunks
        self._run_and_trace(WebApiAuth, kb_id, kb_run_raptor, kb_trace_raptor, "raptor_task_id")

    @pytest.mark.p3
    def test_mindmap_run_and_trace(self, WebApiAuth, add_chunks):
        """Mindmap task can be started and shows up in its trace."""
        kb_id, _, _ = add_chunks
        self._run_and_trace(WebApiAuth, kb_id, kb_run_mindmap, kb_trace_mindmap, "mindmap_task_id")
|
||||
|
||||
|
||||
class TestKbPipelineLogs:
    """Lifecycle and negative tests for KB pipeline log endpoints."""

    @pytest.mark.p3
    def test_pipeline_log_lifecycle(self, WebApiAuth, add_document):
        """Parse a document, then list -> detail -> delete its pipeline log."""
        kb_id, document_id = add_document
        # Trigger parsing so a pipeline log is produced for this KB.
        parse_documents(WebApiAuth, {"doc_ids": [document_id], "run": "1"})
        _wait_for_docs_parsed(WebApiAuth, kb_id)
        _wait_for_pipeline_logs(WebApiAuth, kb_id)

        list_res = kb_list_pipeline_logs(WebApiAuth, params={"kb_id": kb_id}, payload={})
        assert list_res["code"] == 0, list_res
        assert "total" in list_res["data"], list_res
        assert isinstance(list_res["data"]["logs"], list), list_res
        assert list_res["data"]["logs"], list_res

        log_id = list_res["data"]["logs"][0]["id"]
        detail_res = kb_pipeline_log_detail(WebApiAuth, {"log_id": log_id})
        assert detail_res["code"] == 0, detail_res
        detail = detail_res["data"]
        assert detail["id"] == log_id, detail_res
        assert detail["kb_id"] == kb_id, detail_res
        for key in ["document_id", "task_type", "operation_status", "progress"]:
            assert key in detail, detail_res

        delete_res = kb_delete_pipeline_logs(WebApiAuth, params={"kb_id": kb_id}, payload={"log_ids": [log_id]})
        assert delete_res["code"] == 0, delete_res
        assert delete_res["data"] is True, delete_res

        # Deletion may be asynchronous on the backend; poll until the log is gone.
        @wait_for(30, 1, "Pipeline log delete timeout")
        def _condition():
            res = kb_list_pipeline_logs(WebApiAuth, params={"kb_id": kb_id}, payload={})
            if res["code"] != 0:
                return False
            return all(log.get("id") != log_id for log in res["data"]["logs"])

        _condition()

    @pytest.mark.p3
    def test_list_pipeline_dataset_logs(self, WebApiAuth, add_document):
        """Dataset-level log listing returns ``total`` and a ``logs`` list."""
        kb_id, _ = add_document
        res = kb_list_pipeline_dataset_logs(WebApiAuth, params={"kb_id": kb_id}, payload={})
        assert res["code"] == 0, res
        assert "total" in res["data"], res
        assert isinstance(res["data"]["logs"], list), res

    @pytest.mark.p3
    def test_pipeline_log_detail_missing_id(self, WebApiAuth):
        """Detail without a log id is rejected with 101."""
        res = kb_pipeline_log_detail(WebApiAuth, {})
        assert res["code"] == 101, res
        assert "Pipeline log ID" in res["message"], res

    @pytest.mark.p3
    def test_delete_pipeline_logs_empty(self, WebApiAuth, add_document):
        """Deleting an empty id list is a no-op success."""
        kb_id, _ = add_document
        res = kb_delete_pipeline_logs(WebApiAuth, params={"kb_id": kb_id}, payload={"log_ids": []})
        assert res["code"] == 0, res
        assert res["data"] is True, res
|
||||
251
test/testcases/test_web_api/test_kb_app/test_kb_tags_meta.py
Normal file
251
test/testcases/test_web_api/test_kb_app/test_kb_tags_meta.py
Normal file
@ -0,0 +1,251 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from common import (
|
||||
kb_basic_info,
|
||||
kb_get_meta,
|
||||
kb_update_metadata_setting,
|
||||
list_tags,
|
||||
list_tags_from_kbs,
|
||||
rename_tags,
|
||||
rm_tags,
|
||||
update_chunk,
|
||||
)
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
from utils import wait_for
|
||||
|
||||
INVALID_AUTH_CASES = [
|
||||
(None, 401, "Unauthorized"),
|
||||
(RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "Unauthorized"),
|
||||
]
|
||||
|
||||
TAG_SEED_TIMEOUT = 20
|
||||
|
||||
|
||||
def _wait_for_tag(auth, kb_id, tag, timeout=TAG_SEED_TIMEOUT):
    """Poll ``list_tags`` until ``tag`` appears; True on success, False on timeout."""

    @wait_for(timeout, 1, "Tag seed timeout")
    def _tag_visible():
        res = list_tags(auth, kb_id)
        return res["code"] == 0 and tag in res["data"]

    try:
        _tag_visible()
    except AssertionError:
        # wait_for reports a timeout via AssertionError; surface it as a bool
        # so callers can decide whether to skip.
        return False
    return True
|
||||
|
||||
|
||||
def _seed_tag(auth, kb_id, document_id, chunk_id):
    """Write a unique tag onto a chunk and wait for it to surface in KB tags.

    KB tags are derived from chunk tag_kwd, not document metadata. Returns the
    tag name, or None when the tag never shows up in ``list_tags``.
    """
    tag = f"tag_{uuid.uuid4().hex[:8]}"
    payload = {
        "doc_id": document_id,
        "chunk_id": chunk_id,
        "content_with_weight": f"tag seed {tag}",
        "tag_kwd": [tag],
    }
    res = update_chunk(auth, payload)
    assert res["code"] == 0, res
    return tag if _wait_for_tag(auth, kb_id, tag) else None
|
||||
|
||||
|
||||
class TestAuthorization:
    """Every KB tag/meta endpoint must reject missing or invalid credentials."""

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_list_tags_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """list_tags rejects bad credentials."""
        res = list_tags(invalid_auth, "kb_id")
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_list_tags_from_kbs_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """list_tags_from_kbs rejects bad credentials."""
        res = list_tags_from_kbs(invalid_auth, {"kb_ids": "kb_id"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_rm_tags_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """rm_tags rejects bad credentials."""
        res = rm_tags(invalid_auth, "kb_id", {"tags": ["tag"]})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_rename_tag_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """rename_tags rejects bad credentials."""
        res = rename_tags(invalid_auth, "kb_id", {"from_tag": "old", "to_tag": "new"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_get_meta_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """kb_get_meta rejects bad credentials."""
        res = kb_get_meta(invalid_auth, {"kb_ids": "kb_id"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_basic_info_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """kb_basic_info rejects bad credentials."""
        res = kb_basic_info(invalid_auth, {"kb_id": "kb_id"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_update_metadata_setting_auth_invalid(self, invalid_auth, expected_code, expected_fragment):
        """kb_update_metadata_setting rejects bad credentials."""
        res = kb_update_metadata_setting(invalid_auth, {"kb_id": "kb_id", "metadata": {}})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res
|
||||
|
||||
|
||||
class TestKbTagsMeta:
    """Happy-path tests for KB tag listing/removal/rename and meta endpoints."""

    @pytest.mark.p2
    def test_list_tags(self, WebApiAuth, add_dataset):
        """Tag listing for a fresh dataset returns a list."""
        kb_id = add_dataset
        res = list_tags(WebApiAuth, kb_id)
        assert res["code"] == 0, res
        assert isinstance(res["data"], list), res

    @pytest.mark.p2
    def test_list_tags_from_kbs(self, WebApiAuth, add_dataset):
        """Cross-KB tag listing returns a list."""
        kb_id = add_dataset
        res = list_tags_from_kbs(WebApiAuth, {"kb_ids": kb_id})
        assert res["code"] == 0, res
        assert isinstance(res["data"], list), res

    @pytest.mark.p3
    def test_rm_tags(self, WebApiAuth, add_chunks):
        """A seeded chunk tag can be removed and eventually disappears."""
        kb_id, document_id, chunk_ids = add_chunks
        tag_to_remove = _seed_tag(WebApiAuth, kb_id, document_id, chunk_ids[0])
        if not tag_to_remove:
            # Tag aggregation is index-backed; skip if it never surfaces.
            pytest.skip("Seeded tag did not appear in list_tags.")

        res = rm_tags(WebApiAuth, kb_id, {"tags": [tag_to_remove]})
        assert res["code"] == 0, res
        assert res["data"] is True, res

        # Removal is reflected asynchronously; poll until the tag is gone.
        @wait_for(TAG_SEED_TIMEOUT, 1, "Tag removal timeout")
        def _condition():
            after_res = list_tags(WebApiAuth, kb_id)
            if after_res["code"] != 0:
                return False
            return tag_to_remove not in after_res["data"]

        _condition()

    @pytest.mark.p3
    def test_rename_tag(self, WebApiAuth, add_chunks):
        """A seeded chunk tag can be renamed; old name goes away, new appears."""
        kb_id, document_id, chunk_ids = add_chunks
        from_tag = _seed_tag(WebApiAuth, kb_id, document_id, chunk_ids[0])
        if not from_tag:
            # Tag aggregation is index-backed; skip if it never surfaces.
            pytest.skip("Seeded tag did not appear in list_tags.")

        to_tag = f"{from_tag}_renamed"
        res = rename_tags(WebApiAuth, kb_id, {"from_tag": from_tag, "to_tag": to_tag})
        assert res["code"] == 0, res
        assert res["data"] is True, res

        # Rename is reflected asynchronously; poll until both sides are updated.
        @wait_for(TAG_SEED_TIMEOUT, 1, "Tag rename timeout")
        def _condition():
            after_res = list_tags(WebApiAuth, kb_id)
            if after_res["code"] != 0:
                return False
            tags = after_res["data"]
            return to_tag in tags and from_tag not in tags

        _condition()

    @pytest.mark.p2
    def test_get_meta(self, WebApiAuth, add_dataset):
        """KB meta endpoint returns a dict."""
        kb_id = add_dataset
        res = kb_get_meta(WebApiAuth, {"kb_ids": kb_id})
        assert res["code"] == 0, res
        assert isinstance(res["data"], dict), res

    @pytest.mark.p2
    def test_basic_info(self, WebApiAuth, add_dataset):
        """Basic info exposes the expected counter fields."""
        kb_id = add_dataset
        res = kb_basic_info(WebApiAuth, {"kb_id": kb_id})
        assert res["code"] == 0, res
        for key in ["processing", "finished", "failed", "cancelled", "downloaded"]:
            assert key in res["data"], res

    @pytest.mark.p2
    def test_update_metadata_setting(self, WebApiAuth, add_dataset):
        """KB metadata setting is echoed back in ``parser_config.metadata``."""
        kb_id = add_dataset
        metadata = {"source": "test"}
        res = kb_update_metadata_setting(WebApiAuth, {"kb_id": kb_id, "metadata": metadata, "enable_metadata": True})
        assert res["code"] == 0, res
        assert res["data"]["id"] == kb_id, res
        assert res["data"]["parser_config"]["metadata"] == metadata, res
|
||||
|
||||
|
||||
class TestKbTagsMetaNegative:
    """Negative-path tests: unknown KB ids and missing arguments."""

    @pytest.mark.p3
    def test_list_tags_invalid_kb(self, WebApiAuth):
        """Unknown KB id yields authorization error 109."""
        res = list_tags(WebApiAuth, "invalid_kb_id")
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_list_tags_from_kbs_invalid_kb(self, WebApiAuth):
        """Unknown KB id in batch listing yields authorization error 109."""
        res = list_tags_from_kbs(WebApiAuth, {"kb_ids": "invalid_kb_id"})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_rm_tags_invalid_kb(self, WebApiAuth):
        """Removing tags on an unknown KB yields authorization error 109."""
        res = rm_tags(WebApiAuth, "invalid_kb_id", {"tags": ["tag"]})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_rename_tag_invalid_kb(self, WebApiAuth):
        """Renaming a tag on an unknown KB yields authorization error 109."""
        res = rename_tags(WebApiAuth, "invalid_kb_id", {"from_tag": "old", "to_tag": "new"})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_get_meta_invalid_kb(self, WebApiAuth):
        """Meta lookup on an unknown KB yields authorization error 109."""
        res = kb_get_meta(WebApiAuth, {"kb_ids": "invalid_kb_id"})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_basic_info_invalid_kb(self, WebApiAuth):
        """Basic info on an unknown KB yields authorization error 109."""
        res = kb_basic_info(WebApiAuth, {"kb_id": "invalid_kb_id"})
        assert res["code"] == 109, res
        assert "No authorization" in res["message"], res

    @pytest.mark.p3
    def test_update_metadata_setting_missing_metadata(self, WebApiAuth, add_dataset):
        """Omitting ``metadata`` triggers the missing-argument error (101)."""
        res = kb_update_metadata_setting(WebApiAuth, {"kb_id": add_dataset})
        assert res["code"] == 101, res
        assert "required argument are missing" in res["message"], res
        assert "metadata" in res["message"], res
|
||||
@ -77,7 +77,8 @@ class TestDatasetUpdate:
|
||||
@pytest.mark.p1
|
||||
@given(name=valid_names())
|
||||
@example("a" * 128)
|
||||
@settings(max_examples=20, suppress_health_check=[HealthCheck.function_scoped_fixture])
|
||||
# Network-bound API call; disable Hypothesis deadline to avoid flaky timeouts.
|
||||
@settings(max_examples=20, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None)
|
||||
def test_name(self, WebApiAuth, add_dataset_func, name):
|
||||
dataset_id = add_dataset_func
|
||||
payload = {"name": name, "description": "", "parser_id": "naive", "kb_id": dataset_id}
|
||||
|
||||
55
test/testcases/test_web_api/test_llm_app/test_llm_list.py
Normal file
55
test/testcases/test_web_api/test_llm_app/test_llm_list.py
Normal file
@ -0,0 +1,55 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import llm_factories, llm_list
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
|
||||
INVALID_AUTH_CASES = [
|
||||
(None, 401, "<Unauthorized '401: Unauthorized'>"),
|
||||
(RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "<Unauthorized '401: Unauthorized'>"),
|
||||
]
|
||||
|
||||
|
||||
class TestAuthorization:
    """LLM endpoints must reject missing or invalid credentials."""

    @pytest.mark.p1
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_message", INVALID_AUTH_CASES)
    def test_auth_invalid_factories(self, invalid_auth, expected_code, expected_message):
        """Factory listing rejects bad credentials with the exact 401 message."""
        response = llm_factories(invalid_auth)
        assert response["code"] == expected_code, response
        assert response["message"] == expected_message, response

    @pytest.mark.p1
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_message", INVALID_AUTH_CASES)
    def test_auth_invalid_list(self, invalid_auth, expected_code, expected_message):
        """Model listing rejects bad credentials with the exact 401 message."""
        response = llm_list(invalid_auth)
        assert response["code"] == expected_code, response
        assert response["message"] == expected_message, response
|
||||
|
||||
|
||||
class TestLLMList:
    """Happy-path listing of LLM factories and configured models."""

    @pytest.mark.p1
    def test_factories(self, WebApiAuth):
        """Factory listing succeeds and returns a list payload."""
        response = llm_factories(WebApiAuth)
        assert response["code"] == 0, response
        assert isinstance(response["data"], list), response

    @pytest.mark.p1
    def test_list(self, WebApiAuth):
        """Model listing succeeds and returns a dict payload."""
        response = llm_list(WebApiAuth)
        assert response["code"] == 0, response
        assert isinstance(response["data"], dict), response
|
||||
@ -0,0 +1,42 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import plugin_llm_tools
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
|
||||
INVALID_AUTH_CASES = [
|
||||
(None, 401, "<Unauthorized '401: Unauthorized'>"),
|
||||
(RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "<Unauthorized '401: Unauthorized'>"),
|
||||
]
|
||||
|
||||
|
||||
class TestAuthorization:
    """Plugin endpoints must reject missing or invalid credentials."""

    @pytest.mark.p1
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_message", INVALID_AUTH_CASES)
    def test_auth_invalid(self, invalid_auth, expected_code, expected_message):
        """LLM-tool listing rejects bad credentials with the exact 401 message."""
        response = plugin_llm_tools(invalid_auth)
        assert response["code"] == expected_code, response
        assert response["message"] == expected_message, response
|
||||
|
||||
|
||||
class TestPluginTools:
    """Happy-path listing of plugin-provided LLM tools."""

    @pytest.mark.p1
    def test_llm_tools(self, WebApiAuth):
        """Tool listing succeeds and returns a list payload."""
        response = plugin_llm_tools(WebApiAuth)
        assert response["code"] == 0, response
        assert isinstance(response["data"], list), response
|
||||
154
test/testcases/test_web_api/test_search_app/test_search_crud.py
Normal file
154
test/testcases/test_web_api/test_search_app/test_search_crud.py
Normal file
@ -0,0 +1,154 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from common import search_create, search_detail, search_list, search_rm, search_update
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
|
||||
INVALID_AUTH_CASES = [
|
||||
(None, 401, "Unauthorized"),
|
||||
(RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "Unauthorized"),
|
||||
]
|
||||
|
||||
|
||||
def _search_name(prefix="search"):
|
||||
return f"{prefix}_{uuid.uuid4().hex[:8]}"
|
||||
|
||||
|
||||
def _find_tenant_id(WebApiAuth, search_id):
    """Return the tenant_id of the search app with ``search_id``; fail if absent."""
    res = search_list(WebApiAuth, payload={})
    assert res["code"] == 0, res
    matches = [app for app in res["data"]["search_apps"] if app.get("id") == search_id]
    assert matches, res
    return matches[0].get("tenant_id")
|
||||
|
||||
|
||||
@pytest.fixture
def search_app(WebApiAuth):
    """Create a throwaway search app, yield its id, and delete it on teardown."""
    create_res = search_create(WebApiAuth, {"name": _search_name(), "description": "test search"})
    assert create_res["code"] == 0, create_res
    search_id = create_res["data"]["search_id"]

    yield search_id

    rm_res = search_rm(WebApiAuth, {"search_id": search_id})
    assert rm_res["code"] == 0, rm_res
    assert rm_res["data"] is True, rm_res
|
||||
|
||||
|
||||
class TestAuthorization:
    """Every search-app endpoint must reject missing or invalid credentials."""

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_create(self, invalid_auth, expected_code, expected_fragment):
        """search_create rejects bad credentials."""
        res = search_create(invalid_auth, {"name": "dummy"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_list(self, invalid_auth, expected_code, expected_fragment):
        """search_list rejects bad credentials."""
        res = search_list(invalid_auth, payload={})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_detail(self, invalid_auth, expected_code, expected_fragment):
        """search_detail rejects bad credentials."""
        res = search_detail(invalid_auth, {"search_id": "dummy_search_id"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_update(self, invalid_auth, expected_code, expected_fragment):
        """search_update rejects bad credentials."""
        res = search_update(invalid_auth, {"search_id": "dummy", "name": "dummy", "search_config": {}, "tenant_id": "dummy"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_rm(self, invalid_auth, expected_code, expected_fragment):
        """search_rm rejects bad credentials."""
        res = search_rm(invalid_auth, {"search_id": "dummy_search_id"})
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res
|
||||
|
||||
|
||||
class TestSearchCrud:
    """Happy-path and negative CRUD coverage for the search app endpoints."""

    @pytest.mark.p2
    def test_create_and_rm(self, WebApiAuth):
        created = search_create(WebApiAuth, {"name": _search_name("create"), "description": "test search"})
        assert created["code"] == 0, created
        app_id = created["data"]["search_id"]

        removed = search_rm(WebApiAuth, {"search_id": app_id})
        assert removed["code"] == 0, removed
        assert removed["data"] is True, removed

    @pytest.mark.p2
    def test_list(self, WebApiAuth, search_app):
        res = search_list(WebApiAuth, payload={})
        assert res["code"] == 0, res
        listed_ids = [app.get("id") for app in res["data"]["search_apps"]]
        assert search_app in listed_ids, res

    @pytest.mark.p2
    def test_detail(self, WebApiAuth, search_app):
        res = search_detail(WebApiAuth, {"search_id": search_app})
        assert res["code"] == 0, res
        assert res["data"].get("id") == search_app, res

    @pytest.mark.p2
    def test_update(self, WebApiAuth, search_app):
        # The update endpoint requires the owning tenant, so resolve it first.
        owner = _find_tenant_id(WebApiAuth, search_app)
        renamed = _search_name("updated")
        res = search_update(
            WebApiAuth,
            {
                "search_id": search_app,
                "name": renamed,
                "search_config": {"top_k": 3},
                "tenant_id": owner,
            },
        )
        assert res["code"] == 0, res
        assert res["data"].get("name") == renamed, res

    @pytest.mark.p3
    def test_create_invalid_name(self, WebApiAuth):
        res = search_create(WebApiAuth, {"name": ""})
        assert res["code"] == 102, res
        assert "empty" in res["message"], res

    @pytest.mark.p3
    def test_update_invalid_search_id(self, WebApiAuth):
        created = search_create(WebApiAuth, {"name": _search_name("invalid"), "description": "test search"})
        assert created["code"] == 0, created
        app_id = created["data"]["search_id"]
        owner = _find_tenant_id(WebApiAuth, app_id)
        try:
            res = search_update(
                WebApiAuth,
                {
                    "search_id": "invalid_search_id",
                    "name": "invalid",
                    "search_config": {},
                    "tenant_id": owner,
                },
            )
            assert res["code"] == 109, res
            assert "No authorization" in res["message"], res
        finally:
            # Always clean up the helper app, even when the assertions fail.
            removed = search_rm(WebApiAuth, {"search_id": app_id})
            assert removed["code"] == 0, removed
||||
# ---------------------------------------------------------------------------
# test/testcases/test_web_api/test_system_app/test_system_basic.py (new file)
# ---------------------------------------------------------------------------
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import (
|
||||
system_config,
|
||||
system_delete_token,
|
||||
system_new_token,
|
||||
system_status,
|
||||
system_token_list,
|
||||
system_version,
|
||||
)
|
||||
from configs import INVALID_API_TOKEN
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
|
||||
|
||||
# Shared negative-auth cases: missing credentials and an invalid token must
# both be rejected with code 401 and an "Unauthorized" message fragment.
INVALID_AUTH_CASES = [
    (None, 401, "Unauthorized"),
    (RAGFlowWebApiAuth(INVALID_API_TOKEN), 401, "Unauthorized"),
]
|
||||
|
||||
|
||||
class TestAuthorization:
    """Protected system endpoints must reject missing or invalid credentials."""

    @staticmethod
    def _check_rejection(res, expected_code, expected_fragment):
        # All endpoints are expected to answer with the same code/message pair.
        assert res["code"] == expected_code, res
        assert expected_fragment in res["message"], res

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_status(self, invalid_auth, expected_code, expected_fragment):
        self._check_rejection(system_status(invalid_auth), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_version(self, invalid_auth, expected_code, expected_fragment):
        self._check_rejection(system_version(invalid_auth), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_token_list(self, invalid_auth, expected_code, expected_fragment):
        self._check_rejection(system_token_list(invalid_auth), expected_code, expected_fragment)

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth, expected_code, expected_fragment", INVALID_AUTH_CASES)
    def test_auth_invalid_delete_token(self, invalid_auth, expected_code, expected_fragment):
        self._check_rejection(system_delete_token(invalid_auth, "dummy_token"), expected_code, expected_fragment)
|
||||
|
||||
|
||||
class TestSystemConfig:
    """The config endpoint is reachable without valid credentials."""

    @pytest.mark.p2
    @pytest.mark.parametrize("invalid_auth", [None, RAGFlowWebApiAuth(INVALID_API_TOKEN)])
    def test_config_no_auth_required(self, invalid_auth):
        # Even with no/bad auth the call succeeds and exposes registration state.
        response = system_config(invalid_auth)
        assert response["code"] == 0, response
        assert "registerEnabled" in response["data"], response
|
||||
|
||||
|
||||
class TestSystemEndpoints:
    """Authenticated smoke tests for status, version and token management."""

    @pytest.mark.p2
    def test_status(self, WebApiAuth):
        res = system_status(WebApiAuth)
        assert res["code"] == 0, res
        # The status payload reports health for each backing service.
        expected_sections = ("doc_engine", "storage", "database", "redis")
        for section in expected_sections:
            assert section in res["data"], res

    @pytest.mark.p2
    def test_version(self, WebApiAuth):
        res = system_version(WebApiAuth)
        assert res["code"] == 0, res
        assert res["data"], res

    @pytest.mark.p2
    def test_token_list(self, WebApiAuth):
        res = system_token_list(WebApiAuth)
        assert res["code"] == 0, res
        assert isinstance(res["data"], list), res

    @pytest.mark.p2
    def test_delete_token(self, WebApiAuth):
        minted = system_new_token(WebApiAuth)
        assert minted["code"] == 0, minted
        token = minted["data"]["token"]

        deleted = system_delete_token(WebApiAuth, token)
        assert deleted["code"] == 0, deleted
        assert deleted["data"] is True, deleted

    @pytest.mark.p3
    def test_delete_missing_token(self, WebApiAuth):
        # NOTE(review): deleting an unknown token still reports success —
        # the API appears to treat deletion as idempotent; confirm intent.
        res = system_delete_token(WebApiAuth, "missing_token")
        assert res["code"] == 0, res
        assert res["data"] is True, res
|
||||
# (end of file)