Test Cases (#2993)

### What problem does this PR solve?

Adds and reworks test cases for the Python SDK: the tests become standalone pytest functions backed by a shared API-key fixture, with matching adjustments to the dataset endpoints and the SDK client.

### Type of change

- [x] Refactoring

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
Committed by liuhua on 2024-10-23 22:58:27 +08:00 (committed via GitHub)
parent 2174c350be
commit 50b425cf89
11 changed files with 556 additions and 418 deletions

View File

@@ -37,9 +37,9 @@ def create(tenant_id):
     language = req.get("language")
     chunk_method = req.get("chunk_method")
     parser_config = req.get("parser_config")
-    valid_permission = {"me", "team"}
-    valid_language ={"Chinese", "English"}
-    valid_chunk_method = {"naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"}
+    valid_permission = ["me", "team"]
+    valid_language =["Chinese", "English"]
+    valid_chunk_method = ["naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"]
     check_validation=valid(permission,valid_permission,language,valid_language,chunk_method,valid_chunk_method)
     if check_validation:
         return check_validation
@@ -47,10 +47,8 @@ def create(tenant_id):
     if "tenant_id" in req:
         return get_error_data_result(
             retmsg="`tenant_id` must not be provided")
-    chunk_count=req.get("chunk_count")
-    document_count=req.get("document_count")
-    if chunk_count or document_count:
-        return get_error_data_result(retmsg="`chunk_count` or `document_count` must be 0 or not be provided")
+    if "chunk_count" in req or "document_count" in req:
+        return get_error_data_result(retmsg="`chunk_count` or `document_count` must not be provided")
     if "name" not in req:
         return get_error_data_result(
             retmsg="`name` is not empty!")
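Worth spelling out what changed in this hunk: the old guard inspected the values, so a falsy `chunk_count` of `0` was silently accepted, whereas the new guard rejects the request as soon as either key is present. A minimal sketch of the two checks (plain Python, nothing RAGFlow-specific):

```python
req = {"name": "demo", "chunk_count": 0}

# Old behaviour: 0 is falsy, so the request slipped past the check.
chunk_count = req.get("chunk_count")
document_count = req.get("document_count")
print(bool(chunk_count or document_count))               # False -> accepted

# New behaviour: mere presence of the key triggers the error response.
print("chunk_count" in req or "document_count" in req)   # True -> rejected
```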
@@ -123,10 +121,10 @@ def update(tenant_id,dataset_id):
     language = req.get("language")
     chunk_method = req.get("chunk_method")
     parser_config = req.get("parser_config")
-    valid_permission = {"me", "team"}
-    valid_language = {"Chinese", "English"}
-    valid_chunk_method = {"naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
-                          "knowledge_graph", "email"}
+    valid_permission = ["me", "team"]
+    valid_language = ["Chinese", "English"]
+    valid_chunk_method = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
+                          "knowledge_graph", "email"]
     check_validation = valid(permission, valid_permission, language, valid_language, chunk_method, valid_chunk_method)
     if check_validation:
         return check_validation

View File

@@ -44,6 +44,7 @@ from rag.nlp import search
 from rag.utils import rmSpace
 from rag.utils.es_conn import ELASTICSEARCH
 from rag.utils.storage_factory import STORAGE_IMPL
+import os
 MAXIMUM_OF_UPLOADING_FILES = 256

View File

@@ -337,7 +337,7 @@ def valid(permission,valid_permission,language,valid_language,chunk_method,valid
 def valid_parameter(parameter,valid_values):
     if parameter and parameter not in valid_values:
-        return get_error_data_result(f"{parameter} not in {valid_values}")
+        return get_error_data_result(f"{parameter} is not in {valid_values}")
 def get_parser_config(chunk_method,parser_config):
     if parser_config:
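This wording tweak goes hand in hand with the set-to-list switch above: the error message interpolates the container of valid values, and a list has a stable, reproducible repr that the SDK test further down can assert against verbatim, while a set's ordering varies. A small sketch of the difference:

```python
chunk_method = "invalid_chunk_method"

as_set = {"naive", "manual", "qa"}
as_list = ["naive", "manual", "qa"]

# Set ordering is arbitrary, so the interpolated text can change between runs.
print(f"{chunk_method} is not in {as_set}")
# List ordering is fixed, so the message is stable enough to assert on exactly.
print(f"{chunk_method} is not in {as_list}")
```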

View File

@@ -51,6 +51,8 @@ class RAGFlow:
     def create_dataset(self, name: str, avatar: str = "", description: str = "", language: str = "English",
                        permission: str = "me",chunk_method: str = "naive",
                        parser_config: DataSet.ParserConfig = None) -> DataSet:
+        if parser_config:
+            parser_config = parser_config.to_json()
         res = self.post("/dataset",
                         {"name": name, "avatar": avatar, "description": description, "language": language,
                          "permission": permission, "chunk_method": chunk_method,

@@ -91,7 +93,7 @@ class RAGFlow:
                     llm: Chat.LLM = None, prompt: Chat.Prompt = None) -> Chat:
         dataset_list = []
         for dataset in datasets:
-            dataset_list.append(dataset.to_json())
+            dataset_list.append(dataset.id)
         if llm is None:
             llm = Chat.LLM(self, {"model_name": None,
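From the caller's side, the two SDK changes look roughly like the sketch below. The `ParserConfig` construction and the `chunk_token_num` field are illustrative assumptions, not taken from this diff:

```python
from ragflow import RAGFlow, DataSet

API_KEY = "<your-api-key>"
HOST_ADDRESS = "http://127.0.0.1:9380"
rag = RAGFlow(API_KEY, HOST_ADDRESS)

# create_dataset now serializes a ParserConfig object via to_json() before
# posting, so callers can pass the object instead of a plain dict.
parser_config = DataSet.ParserConfig(rag, {"chunk_token_num": 128})  # hypothetical fields
ds = rag.create_dataset(name="sketch_dataset", chunk_method="naive",
                        parser_config=parser_config)

# create_chat now sends only dataset IDs (dataset.id) in the request payload
# rather than each dataset's full JSON representation.
chat = rag.create_chat("sketch_chat", datasets=[ds])
```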

View File

@@ -1,4 +0,0 @@
-API_KEY = 'ragflow-NiYmZjNTVjODYwNzExZWZiODEwMDI0Mm'
-HOST_ADDRESS = 'http://127.0.0.1:9380'

View File

@@ -0,0 +1,52 @@
+import pytest
+import requests
+import string
+import random
+
+HOST_ADDRESS = 'http://127.0.0.1:9380'
+
+def generate_random_email():
+    return 'user_' + ''.join(random.choices(string.ascii_lowercase + string.digits, k=8))+'@1.com'
+
+EMAIL = generate_random_email()
+# password is "123"
+PASSWORD='''ctAseGvejiaSWWZ88T/m4FQVOpQyUvP+x7sXtdv3feqZACiQleuewkUi35E16wSd5C5QcnkkcV9cYc8TKPTRZlxappDuirxghxoOvFcJxFU4ixLsD
+fN33jCHRoDUW81IH9zjij/vaw8IbVyb6vuwg6MX6inOEBRRzVbRYxXOu1wkWY6SsI8X70oF9aeLFp/PzQpjoe/YbSqpTq8qqrmHzn9vO+yvyYyvmDsphXe
+X8f7fp9c7vUsfOCkM+gHY3PadG+QHa7KI7mzTKgUTZImK6BZtfRBATDTthEUbbaTewY4H0MnWiCeeDhcbeQao6cFy1To8pE3RpmxnGnS8BsBn8w=='''
+
+def get_email():
+    return EMAIL
+
+def register():
+    url = HOST_ADDRESS + "/v1/user/register"
+    name = "user"
+    register_data = {"email":EMAIL,"nickname":name,"password":PASSWORD}
+    res = requests.post(url=url,json=register_data)
+    res = res.json()
+    if res.get("retcode") != 0:
+        raise Exception(res.get("retmsg"))
+
+def login():
+    url = HOST_ADDRESS + "/v1/user/login"
+    login_data = {"email":EMAIL,"password":PASSWORD}
+    response=requests.post(url=url,json=login_data)
+    res = response.json()
+    if res.get("retcode")!=0:
+        raise Exception(res.get("retmsg"))
+    auth = response.headers["Authorization"]
+    return auth
+
+@pytest.fixture(scope="session")
+def get_api_key_fixture():
+    register()
+    auth = login()
+    url = HOST_ADDRESS + "/v1/system/new_token"
+    auth = {"Authorization": auth}
+    response = requests.post(url=url,headers=auth)
+    res = response.json()
+    if res.get("retcode") != 0:
+        raise Exception(res.get("retmsg"))
+    return res["data"].get("token")
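Because the fixture is session-scoped, registration and login happen once per test run, and every test that lists `get_api_key_fixture` as a parameter receives the same freshly minted API token. A minimal sketch of how the test modules below consume it (the test name here is just a placeholder):

```python
from ragflow import RAGFlow

HOST_ADDRESS = 'http://127.0.0.1:9380'

def test_something(get_api_key_fixture):
    # pytest injects the token returned by the session-scoped fixture
    API_KEY = get_api_key_fixture
    rag = RAGFlow(API_KEY, HOST_ADDRESS)
```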

View File

@@ -1,57 +1,67 @@
-from ragflow import RAGFlow, Chat
-from xgboost.testing import datasets
-
-from common import API_KEY, HOST_ADDRESS
-from test_sdkbase import TestSdk
-
-class TestChat(TestSdk):
-    def test_create_chat_with_success(self):
-        """
-        Test creating an chat with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_create_chat")
-        chat = rag.create_chat("test_create", datasets=[kb])
-        if isinstance(chat, Chat):
-            assert chat.name == "test_create", "Name does not match."
-        else:
-            assert False, f"Failed to create chat, error: {chat}"
-
-    def test_update_chat_with_success(self):
-        """
-        Test updating an chat with success.
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_update_chat")
-        chat = rag.create_chat("test_update", datasets=[kb])
-        if isinstance(chat, Chat):
-            assert chat.name == "test_update", "Name does not match."
-            res=chat.update({"name":"new_chat"})
-            assert res is None, f"Failed to update chat, error: {res}"
-        else:
-            assert False, f"Failed to create chat, error: {chat}"
-
-    def test_delete_chats_with_success(self):
-        """
-        Test deleting an chat with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_delete_chat")
-        chat = rag.create_chat("test_delete", datasets=[kb])
-        if isinstance(chat, Chat):
-            assert chat.name == "test_delete", "Name does not match."
-            res = rag.delete_chats(ids=[chat.id])
-            assert res is None, f"Failed to delete chat, error: {res}"
-        else:
-            assert False, f"Failed to create chat, error: {chat}"
-
-    def test_list_chats_with_success(self):
-        """
-        Test listing chats with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        list_chats = rag.list_chats()
-        assert len(list_chats) > 0, "Do not exist any chat"
-        for chat in list_chats:
-            assert isinstance(chat, Chat), "Existence type is not chat."
+from ragflow import RAGFlow, Chat
+import time
+HOST_ADDRESS = 'http://127.0.0.1:9380'
+
+def test_create_chat_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_create_chat")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt","rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    rag.create_chat("test_create", datasets=[kb])
+
+def test_update_chat_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_update_chat")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name": displayed_name, "blob": blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs = kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    chat = rag.create_chat("test_update", datasets=[kb])
+    chat.update({"name": "new_chat"})
+
+def test_delete_chats_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_delete_chat")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name": displayed_name, "blob": blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs = kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    chat = rag.create_chat("test_delete", datasets=[kb])
+    rag.delete_chats(ids=[chat.id])
+
+def test_list_chats_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    rag.list_chats()

View File

@@ -1,53 +1,54 @@
-from ragflow import RAGFlow, DataSet
-
-from common import API_KEY, HOST_ADDRESS
-from test_sdkbase import TestSdk
-
-class TestDataset(TestSdk):
-    def test_create_dataset_with_success(self):
-        """
-        Test creating a dataset with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        ds = rag.create_dataset("God")
-        if isinstance(ds, DataSet):
-            assert ds.name == "God", "Name does not match."
-        else:
-            assert False, f"Failed to create dataset, error: {ds}"
-
-    def test_update_dataset_with_success(self):
-        """
-        Test updating a dataset with success.
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        ds = rag.create_dataset("ABC")
-        if isinstance(ds, DataSet):
-            assert ds.name == "ABC", "Name does not match."
-            res = ds.update({"name":"DEF"})
-            assert res is None, f"Failed to update dataset, error: {res}"
-        else:
-            assert False, f"Failed to create dataset, error: {ds}"
-
-    def test_delete_datasets_with_success(self):
-        """
-        Test deleting a dataset with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        ds = rag.create_dataset("MA")
-        if isinstance(ds, DataSet):
-            assert ds.name == "MA", "Name does not match."
-            res = rag.delete_datasets(ids=[ds.id])
-            assert res is None, f"Failed to delete dataset, error: {res}"
-        else:
-            assert False, f"Failed to create dataset, error: {ds}"
-
-    def test_list_datasets_with_success(self):
-        """
-        Test listing datasets with success
-        """
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        list_datasets = rag.list_datasets()
-        assert len(list_datasets) > 0, "Do not exist any dataset"
-        for ds in list_datasets:
-            assert isinstance(ds, DataSet), "Existence type is not dataset."
+from ragflow import RAGFlow
+import random
+import pytest
+HOST_ADDRESS = 'http://127.0.0.1:9380'
+
+def test_create_dataset_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    rag.create_dataset("test_create_dataset_with_name")
+
+def test_create_dataset_with_duplicated_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    with pytest.raises(Exception) as exc_info:
+        rag.create_dataset("test_create_dataset_with_name")
+    assert str(exc_info.value) == "Duplicated dataset name in creating dataset."
+
+def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    valid_chunk_methods = ["naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"]
+    random_chunk_method = random.choice(valid_chunk_methods)
+    rag.create_dataset("test_create_dataset_with_random_chunk_method",chunk_method=random_chunk_method)
+
+def test_create_dataset_with_invalid_parameter(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    valid_chunk_methods = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
+                           "knowledge_graph", "email"]
+    chunk_method = "invalid_chunk_method"
+    with pytest.raises(Exception) as exc_info:
+        rag.create_dataset("test_create_dataset_with_name",chunk_method=chunk_method)
+    assert str(exc_info.value) == f"{chunk_method} is not in {valid_chunk_methods}"
+
+def test_update_dataset_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    ds = rag.create_dataset("test_update_dataset")
+    ds.update({"name": "updated_dataset"})
+
+def test_delete_datasets_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    ds = rag.create_dataset("MA")
+    rag.delete_datasets(ids=[ds.id])
+
+def test_list_datasets_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    rag.list_datasets()

View File

@@ -1,15 +1,14 @@
 from ragflow import RAGFlow, DataSet, Document, Chunk
-from common import API_KEY, HOST_ADDRESS
-from test_sdkbase import TestSdk
+HOST_ADDRESS = 'http://127.0.0.1:9380'

-class TestDocument(TestSdk):
-    def test_upload_document_with_success(self):
+def test_upload_document_with_success(get_api_key_fixture):
     """
     Test ingesting a document into a dataset with success.
     """
     # Initialize RAGFlow instance
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

     # Step 1: Create a new dataset

@@ -32,11 +31,13 @@ class TestDocument(TestSdk):
     # Ensure document ingestion was successful
     assert res is None, f"Failed to create document, error: {res}"

-    def test_update_document_with_success(self):
+def test_update_document_with_success(get_api_key_fixture):
     """
     Test updating a document with success.
     Update name or chunk_method are supported
     """
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.list_datasets(name="God")
     ds = ds[0]

@@ -48,10 +49,12 @@ class TestDocument(TestSdk):
     else:
         assert False, f"Failed to get document, error: {doc}"

-    def test_download_document_with_success(self):
+def test_download_document_with_success(get_api_key_fixture):
     """
     Test downloading a document with success.
     """
+    API_KEY = get_api_key_fixture
     # Initialize RAGFlow instance
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

@@ -74,10 +77,12 @@ class TestDocument(TestSdk):
     # If the document retrieval fails, assert failure
     assert False, f"Failed to get document, error: {doc}"

-    def test_list_documents_in_dataset_with_success(self):
+def test_list_documents_in_dataset_with_success(get_api_key_fixture):
     """
     Test list all documents into a dataset with success.
     """
+    API_KEY = get_api_key_fixture
     # Initialize RAGFlow instance
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

@@ -99,10 +104,12 @@ class TestDocument(TestSdk):
     for d in ds.list_documents(keywords="test", offset=0, limit=12):
         assert isinstance(d, Document), "Failed to upload documents"

-    def test_delete_documents_in_dataset_with_success(self):
+def test_delete_documents_in_dataset_with_success(get_api_key_fixture):
     """
     Test list all documents into a dataset with success.
     """
+    API_KEY = get_api_key_fixture
     # Initialize RAGFlow instance
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

@@ -126,7 +133,9 @@ class TestDocument(TestSdk):
     remaining_docs = ds.list_documents(keywords="rag", offset=0, limit=12)
     assert len(remaining_docs) == 0, "Documents were not properly deleted."

-    def test_parse_and_cancel_document(self):
+def test_parse_and_cancel_document(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     # Initialize RAGFlow with API key and host address
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

@@ -160,7 +169,9 @@ class TestDocument(TestSdk):
     # Print message to confirm parsing has been cancelled successfully
     print("Parsing cancelled successfully")

-    def test_bulk_parse_and_cancel_documents(self):
+def test_bulk_parse_and_cancel_documents(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     # Initialize RAGFlow with API key and host address
     rag = RAGFlow(API_KEY, HOST_ADDRESS)

@@ -211,7 +222,9 @@ class TestDocument(TestSdk):
     assert cancel_result is None or isinstance(cancel_result, type(None)), "Failed to cancel document parsing"
     print("Async bulk parsing cancelled")

-    def test_parse_document_and_chunk_list(self):
+def test_parse_document_and_chunk_list(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.create_dataset(name="God7")
     name = 'story.txt'

@@ -235,14 +248,18 @@ class TestDocument(TestSdk):
     assert c is not None, "Chunk is None"
     assert "rag" in c['content_with_weight'].lower(), f"Keyword 'rag' not found in chunk content: {c.content}"

-    def test_add_chunk_to_chunk_list(self):
+def test_add_chunk_to_chunk_list(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     doc = rag.get_document(name='story.txt')
     chunk = doc.add_chunk(content="assssdd")
     assert chunk is not None, "Chunk is None"
     assert isinstance(chunk, Chunk), "Chunk was not added to chunk list"

-    def test_delete_chunk_of_chunk_list(self):
+def test_delete_chunk_of_chunk_list(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     doc = rag.get_document(name='story.txt')
     chunk = doc.add_chunk(content="assssdd")

@@ -254,7 +271,9 @@ class TestDocument(TestSdk):
     doc = rag.get_document(name='story.txt')
     assert doc.chunk_count == chunk_count_before - 1, "Chunk was not deleted"

-    def test_update_chunk_content(self):
+def test_update_chunk_content(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     doc = rag.get_document(name='story.txt')
     chunk = doc.add_chunk(content="assssddd")

@@ -264,7 +283,9 @@ class TestDocument(TestSdk):
     res = chunk.save()
     assert res is True, f"Failed to update chunk content, error: {res}"

-    def test_update_chunk_available(self):
+def test_update_chunk_available(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     doc = rag.get_document(name='story.txt')
     chunk = doc.add_chunk(content="ragflow")

@@ -274,7 +295,9 @@ class TestDocument(TestSdk):
     res = chunk.save()
     assert res is True, f"Failed to update chunk status, error: {res}"

-    def test_retrieval_chunks(self):
+def test_retrieval_chunks(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.create_dataset(name="God8")
     name = 'ragflow_test.txt'

View File

@@ -1,52 +1,110 @@
-from ragflow import RAGFlow,Session
-
-from common import API_KEY, HOST_ADDRESS
-
-class TestSession:
-    def test_create_session(self):
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_create_session")
-        assistant = rag.create_chat(name="test_create_session", datasets=[kb])
-        session = assistant.create_session()
-        assert isinstance(session,Session), "Failed to create a session."
-
-    def test_create_chat_with_success(self):
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_create_chat")
-        assistant = rag.create_chat(name="test_create_chat", datasets=[kb])
-        session = assistant.create_session()
-        question = "What is AI"
-        for ans in session.ask(question, stream=True):
-            pass
-        assert not ans.content.startswith("**ERROR**"), "Please check this error."
-
-    def test_delete_sessions_with_success(self):
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_delete_session")
-        assistant = rag.create_chat(name="test_delete_session", datasets=[kb])
-        session = assistant.create_session()
-        res=assistant.delete_sessions(ids=[session.id])
-        assert res is None, "Failed to delete the dataset."
-
-    def test_update_session_with_success(self):
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_update_session")
-        assistant = rag.create_chat(name="test_update_session", datasets=[kb])
-        session = assistant.create_session(name="old session")
-        res=session.update({"name":"new session"})
-        assert res is None,"Failed to update the session"
-
-    def test_list_sessions_with_success(self):
-        rag = RAGFlow(API_KEY, HOST_ADDRESS)
-        kb = rag.create_dataset(name="test_list_session")
-        assistant = rag.create_chat(name="test_list_session", datasets=[kb])
-        assistant.create_session("test_1")
-        assistant.create_session("test_2")
-        sessions=assistant.list_sessions()
-        if isinstance(sessions,list):
-            for session in sessions:
-                assert isinstance(session,Session),"Non-Session elements exist in the list"
-        else :
-            assert False,"Failed to retrieve the session list."
+from ragflow import RAGFlow,Session
+import time
+HOST_ADDRESS = 'http://127.0.0.1:9380'
+
+def test_create_session_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_create_session")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name": displayed_name, "blob": blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs = kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    assistant = rag.create_chat(name="test_create_session", datasets=[kb])
+    assistant.create_session()
+
+def test_create_conversation_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_create_conversation")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt","rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    assistant = rag.create_chat(name="test_create_conversation", datasets=[kb])
+    session = assistant.create_session()
+    question = "What is AI"
+    for ans in session.ask(question, stream=True):
+        pass
+    assert not ans.content.startswith("**ERROR**"), "Please check this error."
+
+def test_delete_sessions_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_delete_session")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt","rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    assistant = rag.create_chat(name="test_delete_session", datasets=[kb])
+    session = assistant.create_session()
+    assistant.delete_sessions(ids=[session.id])
+
+def test_update_session_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_update_session")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt","rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    assistant = rag.create_chat(name="test_update_session", datasets=[kb])
+    session = assistant.create_session(name="old session")
+    session.update({"name": "new session"})
+
+def test_list_sessions_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_list_session")
+    displayed_name = "ragflow.txt"
+    with open("./ragflow.txt","rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    doc_ids = []
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc_ids.append(doc.id)
+    kb.async_parse_documents(doc_ids)
+    time.sleep(60)
+    assistant = rag.create_chat(name="test_list_session", datasets=[kb])
+    assistant.create_session("test_1")
+    assistant.create_session("test_2")
+    assistant.list_sessions()

View File

@@ -1,3 +0,0 @@
-class TestSdk():
-    def test_version(self):
-        print("test_sdk")