mirror of https://github.com/infiniflow/ragflow.git
Test: Add tests for chunk API endpoints (#8616)
### What problem does this PR solve?

- Add comprehensive test suite for chunk operations, including:
  - Test files for create, list, retrieve, update, and delete chunks
  - Authorization tests
  - Batch operations tests
- Update test configurations and common utilities
- Validate that the `important_kwd` and `question_kwd` fields are lists in chunk_app.py (see the sketch below)
- Reorganize imports and clean up duplicate code

### Type of change

- [x] Add test cases
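The keyword-field validation called out above is not part of the excerpt below; a minimal sketch of what such a guard in chunk_app.py could look like (the function name, error wording, and return convention here are assumptions, not the repository's actual code):

```python
# Hypothetical sketch of the keyword-field validation described in the PR
# notes; the real guard in chunk_app.py may use different helpers and wording.
def validate_keyword_fields(req: dict) -> str | None:
    """Return an error message if a keyword field is present but not a list."""
    for field in ("important_kwd", "question_kwd"):
        if field in req and not isinstance(req[field], list):
            return f"`{field}` should be a list"
    return None


# Example: a bare string is rejected, a list passes.
assert validate_keyword_fields({"important_kwd": "kw"}) is not None
assert validate_keyword_fields({"question_kwd": ["q1", "q2"]}) is None
```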
```
@@ -13,18 +13,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from time import sleep

import pytest
from common import (
    batch_add_chunks,
    batch_create_datasets,
    bulk_upload_documents,
    delete_chunks,
    list_chunks,
    list_documents,
    list_kbs,
    parse_documents,
    rm_kb,
)

# from configs import HOST_ADDRESS, VERSION
from libs.auth import RAGFlowWebApiAuth
from pytest import FixtureRequest

# from ragflow_sdk import RAGFlow
from utils import wait_for
from utils.file_utils import (
    create_docx_file,
    create_eml_file,
@@ -39,6 +44,15 @@ from utils.file_utils import (
)


@wait_for(30, 1, "Document parsing timeout")
def condition(_auth, _kb_id):
    res = list_documents(_auth, {"kb_id": _kb_id})
    for doc in res["data"]["docs"]:
        if doc["run"] != "3":
            return False
    return True


@pytest.fixture
def generate_test_files(request: FixtureRequest, tmp_path):
    file_creators = {
```
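`wait_for` is imported from the suite's `utils` package and its implementation is not part of this diff. Judging from the call site, it retries the decorated predicate until it returns a truthy value or `timeout` seconds elapse, sleeping `interval` seconds between attempts; a minimal sketch under that assumption:

```python
import time
from functools import wraps


# Assumed shape of wait_for(timeout, interval, error_msg); the real helper in
# the suite's utils package may differ (e.g. it might assert on timeout).
def wait_for(timeout: float, interval: float, error_msg: str):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            deadline = time.monotonic() + timeout
            while True:
                if func(*args, **kwargs):
                    return True  # predicate satisfied, stop polling
                if time.monotonic() >= deadline:
                    raise TimeoutError(error_msg)
                time.sleep(interval)  # wait before the next attempt
        return wrapper
    return decorator
```

With `@wait_for(30, 1, "Document parsing timeout")`, `condition` is polled once per second for up to 30 seconds until every document in the knowledge base reports run status `"3"`.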
|
||||
```
@@ -73,11 +87,6 @@ def WebApiAuth(auth):
    return RAGFlowWebApiAuth(auth)


# @pytest.fixture(scope="session")
# def client(token: str) -> RAGFlow:
#     return RAGFlow(api_key=token, base_url=HOST_ADDRESS, version=VERSION)


@pytest.fixture(scope="function")
def clear_datasets(request: FixtureRequest, WebApiAuth: RAGFlowWebApiAuth):
    def cleanup():
```
```
@@ -108,3 +117,35 @@ def add_dataset_func(request: FixtureRequest, WebApiAuth: RAGFlowWebApiAuth) ->

    request.addfinalizer(cleanup)
    return batch_create_datasets(WebApiAuth, 1)[0]


@pytest.fixture(scope="class")
def add_document(request, WebApiAuth, add_dataset, ragflow_tmp_dir):
    # def cleanup():
    #     res = list_documents(WebApiAuth, {"kb_id": dataset_id})
    #     for doc in res["data"]["docs"]:
    #         delete_document(WebApiAuth, {"doc_id": doc["id"]})

    # request.addfinalizer(cleanup)

    dataset_id = add_dataset
    return dataset_id, bulk_upload_documents(WebApiAuth, dataset_id, 1, ragflow_tmp_dir)[0]


@pytest.fixture(scope="class")
def add_chunks(request, WebApiAuth, add_document):
    def cleanup():
        res = list_chunks(WebApiAuth, {"doc_id": document_id})
        if res["code"] == 0:
            chunk_ids = [chunk["chunk_id"] for chunk in res["data"]["chunks"]]
            delete_chunks(WebApiAuth, {"doc_id": document_id, "chunk_ids": chunk_ids})

    request.addfinalizer(cleanup)

    kb_id, document_id = add_document
    parse_documents(WebApiAuth, {"doc_ids": [document_id], "run": "1"})
    condition(WebApiAuth, kb_id)
    chunk_ids = batch_add_chunks(WebApiAuth, document_id, 4)
    # issues/6487
    sleep(1)
    return kb_id, document_id, chunk_ids
```
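The new `add_document` and `add_chunks` fixtures are class-scoped and chained (`add_chunks` builds on `add_document`, which builds on `add_dataset`), and `add_chunks` registers its cleanup with `request.addfinalizer`, so pytest deletes the created chunks once after the last test in the consuming class. A self-contained illustration of that teardown pattern, with hypothetical stand-ins for the chunk helpers:

```python
import pytest


@pytest.fixture(scope="class")
def tracked_resource(request):
    # Hypothetical create step standing in for batch_add_chunks.
    resource = {"ids": ["r1", "r2"]}

    def cleanup():
        # Runs once, after the last test in the consuming class finishes,
        # standing in for the delete_chunks call in the real fixture.
        resource["ids"].clear()

    request.addfinalizer(cleanup)
    return resource


class TestTeardownPattern:
    def test_resource_available(self, tracked_resource):
        assert tracked_resource["ids"] == ["r1", "r2"]
```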