mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
Test: add SDK Dataset test cases (#8077)

### What problem does this PR solve?

Add SDK dataset test cases.

### Type of change

- [x] Add test case
This commit is contained in:
@ -80,10 +80,11 @@ class TestCapability:
|
||||
|
||||
@pytest.mark.p3
|
||||
def test_create_dataset_concurrent(self, api_key):
|
||||
count = 100
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
futures = [executor.submit(create_dataset, api_key, {"name": f"dataset_{i}"}) for i in range(100)]
|
||||
futures = [executor.submit(create_dataset, api_key, {"name": f"dataset_{i}"}) for i in range(count)]
|
||||
responses = list(as_completed(futures))
|
||||
assert all(r["code"] == 0 for r in responses), responses
|
||||
assert len(responses) == count, responses
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("clear_datasets")
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import uuid
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import pytest
|
||||
from common import (
|
||||
@ -86,13 +86,13 @@ class TestCapability:
|
||||
|
||||
@pytest.mark.p3
|
||||
def test_concurrent_deletion(self, api_key):
|
||||
dataset_num = 1_000
|
||||
ids = batch_create_datasets(api_key, dataset_num)
|
||||
count = 1_000
|
||||
ids = batch_create_datasets(api_key, count)
|
||||
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
futures = [executor.submit(delete_datasets, api_key, {"ids": ids[i : i + 1]}) for i in range(dataset_num)]
|
||||
responses = [f.result() for f in futures]
|
||||
assert all(r["code"] == 0 for r in responses), responses
|
||||
futures = [executor.submit(delete_datasets, api_key, {"ids": ids[i : i + 1]}) for i in range(count)]
|
||||
responses = list(as_completed(futures))
|
||||
assert len(responses) == count, responses
|
||||
|
||||
|
||||
class TestDatasetsDelete:
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import uuid
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import pytest
|
||||
from common import INVALID_API_TOKEN, list_datasets
|
||||
@ -44,10 +44,11 @@ class TestAuthorization:
|
||||
class TestCapability:
|
||||
@pytest.mark.p3
|
||||
def test_concurrent_list(self, api_key):
|
||||
count = 100
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
futures = [executor.submit(list_datasets, api_key) for i in range(100)]
|
||||
responses = [f.result() for f in futures]
|
||||
assert all(r["code"] == 0 for r in responses), responses
|
||||
futures = [executor.submit(list_datasets, api_key) for i in range(count)]
|
||||
responses = list(as_completed(futures))
|
||||
assert len(responses) == count, responses
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("add_datasets")
|
||||
@ -173,7 +174,7 @@ class TestDatasetsList:
|
||||
|
||||
@pytest.mark.p3
|
||||
def test_orderby_none(self, api_key):
|
||||
params = {"order_by": None}
|
||||
params = {"orderby": None}
|
||||
res = list_datasets(api_key, params)
|
||||
assert res["code"] == 0, res
|
||||
assert is_sorted(res["data"], "create_time", True), res
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import uuid
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import pytest
|
||||
from common import DATASET_NAME_LIMIT, INVALID_API_TOKEN, list_datasets, update_dataset
|
||||
@ -90,10 +90,11 @@ class TestCapability:
|
||||
@pytest.mark.p3
|
||||
def test_update_dateset_concurrent(self, api_key, add_dataset_func):
|
||||
dataset_id = add_dataset_func
|
||||
count = 100
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
futures = [executor.submit(update_dataset, api_key, dataset_id, {"name": f"dataset_{i}"}) for i in range(100)]
|
||||
responses = [f.result() for f in futures]
|
||||
assert all(r["code"] == 0 for r in responses), responses
|
||||
futures = [executor.submit(update_dataset, api_key, dataset_id, {"name": f"dataset_{i}"}) for i in range(count)]
|
||||
responses = list(as_completed(futures))
|
||||
assert len(responses) == count, responses
|
||||
|
||||
|
||||
class TestDatasetUpdate:
|
||||
@ -811,10 +812,4 @@ class TestDatasetUpdate:
|
||||
assert res["data"][0]["permission"] == original_data["permission"], res
|
||||
assert res["data"][0]["chunk_method"] == original_data["chunk_method"], res
|
||||
assert res["data"][0]["pagerank"] == original_data["pagerank"], res
|
||||
assert res["data"][0]["parser_config"] == {
|
||||
"chunk_token_num": 128,
|
||||
"delimiter": r"\n",
|
||||
"html4excel": False,
|
||||
"layout_recognize": "DeepDOC",
|
||||
"raptor": {"use_raptor": False},
|
||||
}, res
|
||||
assert res["data"][0]["parser_config"] == original_data["parser_config"], res
|
||||
|
||||
Reference in New Issue
Block a user