mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
Add tests for frontend API (#3552)
### What problem does this PR solve? Add tests for frontend API ### Type of change - [x] New Feature (non-breaking change which adds functionality) --------- Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
This commit is contained in:
2
sdk/python/test/test_sdk_api/common.py
Normal file
2
sdk/python/test/test_sdk_api/common.py
Normal file
@ -0,0 +1,2 @@
|
||||
import os
|
||||
# Base URL of the RAGFlow service under test; override with the HOST_ADDRESS env var.
HOST_ADDRESS = os.environ.get("HOST_ADDRESS", "http://127.0.0.1:9380")
|
||||
3
sdk/python/test/test_sdk_api/get_email.py
Normal file
3
sdk/python/test/test_sdk_api/get_email.py
Normal file
@ -0,0 +1,3 @@
|
||||
def test_get_email(get_email):
|
||||
print(f"\nEmail account:",flush=True)
|
||||
print(f"{get_email}\n",flush=True)
|
||||
70
sdk/python/test/test_sdk_api/t_chat.py
Normal file
70
sdk/python/test/test_sdk_api/t_chat.py
Normal file
@ -0,0 +1,70 @@
|
||||
from ragflow_sdk import RAGFlow
|
||||
from common import HOST_ADDRESS
|
||||
|
||||
def _chat_ready_dataset(rag, name):
    """Create dataset *name* holding one chunked copy of test_data/ragflow.txt.

    Factored out of the four chat tests below, which previously repeated this
    upload-and-chunk boilerplate verbatim. Returns the created dataset.
    """
    kb = rag.create_dataset(name=name)
    with open("test_data/ragflow.txt", "rb") as file:
        blob = file.read()
    docs = kb.upload_documents([{"displayed_name": "ragflow.txt", "blob": blob}])
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    return kb


def test_create_chat_with_name(get_api_key_fixture):
    """A chat assistant can be created over a freshly populated dataset."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    kb = _chat_ready_dataset(rag, "test_create_chat")
    rag.create_chat("test_create_chat", dataset_ids=[kb.id])


def test_update_chat_with_name(get_api_key_fixture):
    """An existing chat assistant can be renamed via ``update``."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    kb = _chat_ready_dataset(rag, "test_update_chat")
    chat = rag.create_chat("test_update_chat", dataset_ids=[kb.id])
    chat.update({"name": "new_chat"})


def test_delete_chats_with_success(get_api_key_fixture):
    """A chat assistant can be deleted by id."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    kb = _chat_ready_dataset(rag, "test_delete_chat")
    chat = rag.create_chat("test_delete_chat", dataset_ids=[kb.id])
    rag.delete_chats(ids=[chat.id])


def test_list_chats_with_success(get_api_key_fixture):
    """Created chat assistants can be listed without error."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    kb = _chat_ready_dataset(rag, "test_list_chats")
    rag.create_chat("test_list_1", dataset_ids=[kb.id])
    rag.create_chat("test_list_2", dataset_ids=[kb.id])
    rag.list_chats()
|
||||
|
||||
|
||||
193
sdk/python/test/test_sdk_api/t_chunk.py
Normal file
193
sdk/python/test/test_sdk_api/t_chunk.py
Normal file
@ -0,0 +1,193 @@
|
||||
from ragflow_sdk import RAGFlow
|
||||
from common import HOST_ADDRESS
|
||||
from time import sleep
|
||||
|
||||
def _upload_one(ds, path, displayed_name):
    """Upload local file *path* into dataset *ds*; return the created document.

    Shared by the parse tests below, which previously repeated this
    open/read/upload boilerplate verbatim.
    """
    with open(path, "rb") as file:
        blob = file.read()
    return ds.upload_documents([{"displayed_name": displayed_name, "blob": blob}])[0]


def test_parse_document_with_txt(get_api_key_fixture):
    """Async parsing of a single uploaded txt document can be started."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_parse_document")
    doc = _upload_one(ds, "test_data/ragflow_test.txt", "ragflow_test.txt")
    ds.async_parse_documents(document_ids=[doc.id])
    # NOTE(review): upstream disabled the completion-polling loop here, so this
    # test only starts parsing; it does not wait for it to finish.


def test_parse_and_cancel_document(get_api_key_fixture):
    """Parsing can be cancelled while it is still in flight."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_parse_and_cancel_document")
    doc = _upload_one(ds, "test_data/ragflow_test.txt", "ragflow_test.txt")
    ds.async_parse_documents(document_ids=[doc.id])
    sleep(1)
    # Only cancel if parsing has started but not yet completed.
    if 0 < doc.progress < 1:
        ds.async_cancel_parse_documents(document_ids=[doc.id])


def test_bulk_parse_documents(get_api_key_fixture):
    """Several documents can be parsed with one async_parse_documents call."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_bulk_parse_and_cancel_documents")
    with open("test_data/ragflow.txt", "rb") as file:
        blob = file.read()
    documents = [{"displayed_name": f"test{i}.txt", "blob": blob} for i in (1, 2, 3)]
    docs = ds.upload_documents(documents)
    ds.async_parse_documents([doc.id for doc in docs])
    # NOTE(review): upstream disabled the completion-polling loop here, so this
    # test only starts parsing; it does not wait for it to finish.
|
||||
|
||||
def _dataset_with_doc(rag, dataset_name, file_name):
    """Create *dataset_name* and upload test_data/ragflow_test.txt into it.

    Returns ``(dataset, docs)``. Shared by the chunk CRUD tests below, which
    previously repeated this boilerplate (including stale commented-out
    chunk-splitting code) verbatim.
    """
    ds = rag.create_dataset(name=dataset_name)
    with open("test_data/ragflow_test.txt", "rb") as file:
        blob = file.read()
    docs = ds.upload_documents([{"displayed_name": file_name, "blob": blob}])
    return ds, docs


def test_list_chunks_with_success(get_api_key_fixture):
    """Chunks of a parsed document can be listed."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds, docs = _dataset_with_doc(rag, "test_list_chunks_with_success",
                                 "test_list_chunks_with_success.txt")
    ds.async_parse_documents([doc.id for doc in docs])
    # NOTE(review): upstream disabled the parse-completion polling loop.
    docs[0].list_chunks()


def test_add_chunk_with_success(get_api_key_fixture):
    """A chunk can be added to an uploaded document."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    # Fix: the displayed name was copy-pasted from
    # test_list_chunks_with_success; it now matches this test.
    _, docs = _dataset_with_doc(rag, "test_add_chunk_with_success",
                                "test_add_chunk_with_success.txt")
    docs[0].add_chunk(content="This is a chunk addition test")


def test_delete_chunk_with_success(get_api_key_fixture):
    """An added chunk can be deleted by id."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    _, docs = _dataset_with_doc(rag, "test_delete_chunk_with_success",
                                "test_delete_chunk_with_success.txt")
    chunk = docs[0].add_chunk(content="This is a chunk addition test")
    sleep(5)  # give the backend time to index the chunk before deleting it
    docs[0].delete_chunks([chunk.id])


def test_update_chunk_content(get_api_key_fixture):
    """A chunk's content can be updated."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    _, docs = _dataset_with_doc(rag, "test_update_chunk_content_with_success",
                                "test_update_chunk_content_with_success.txt")
    chunk = docs[0].add_chunk(content="This is a chunk addition test")
    # For Elasticsearch, the chunk is not searchable for a short time (~2s).
    sleep(3)
    chunk.update({"content": "This is a updated content"})


def test_update_chunk_available(get_api_key_fixture):
    """A chunk's availability flag can be toggled off."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    _, docs = _dataset_with_doc(rag, "test_update_chunk_available_with_success",
                                "test_update_chunk_available_with_success.txt")
    chunk = docs[0].add_chunk(content="This is a chunk addition test")
    # For Elasticsearch, the chunk is not searchable for a short time (~2s).
    sleep(3)
    chunk.update({"available": 0})


def test_retrieve_chunks(get_api_key_fixture):
    """Retrieval over a dataset/document pair returns without error."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds, docs = _dataset_with_doc(rag, "retrieval", "test_retrieve_chunks.txt")
    docs[0].add_chunk(content="This is a chunk addition test")
    rag.retrieve(dataset_ids=[ds.id], document_ids=[docs[0].id])
|
||||
54
sdk/python/test/test_sdk_api/t_dataset.py
Normal file
54
sdk/python/test/test_sdk_api/t_dataset.py
Normal file
@ -0,0 +1,54 @@
|
||||
from ragflow_sdk import RAGFlow
|
||||
import random
|
||||
import pytest
|
||||
from common import HOST_ADDRESS
|
||||
|
||||
# Chunk methods accepted by the service. Previously this list was duplicated
# (with different formatting) in two tests below; it is now shared.
_VALID_CHUNK_METHODS = ["naive", "manual", "qa", "table", "paper", "book", "laws",
                        "presentation", "picture", "one", "knowledge_graph", "email"]


def test_create_dataset_with_name(get_api_key_fixture):
    """A dataset can be created with just a name."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    rag.create_dataset("test_create_dataset_with_name")


def test_create_dataset_with_duplicated_name(get_api_key_fixture):
    """Creating two datasets with the same name raises a duplication error."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    rag.create_dataset("test_create_dataset_with_duplicated_name")
    with pytest.raises(Exception) as exc_info:
        rag.create_dataset("test_create_dataset_with_duplicated_name")
    assert str(exc_info.value) == "Duplicated dataset name in creating dataset."


def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
    """Any valid chunk method is accepted at dataset creation."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    random_chunk_method = random.choice(_VALID_CHUNK_METHODS)
    rag.create_dataset("test_create_dataset_with_random_chunk_method",
                       chunk_method=random_chunk_method)


def test_create_dataset_with_invalid_parameter(get_api_key_fixture):
    """An unknown chunk method is rejected with a descriptive message."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    chunk_method = "invalid_chunk_method"
    with pytest.raises(Exception) as exc_info:
        rag.create_dataset("test_create_dataset_with_invalid_chunk_method",
                           chunk_method=chunk_method)
    assert str(exc_info.value) == f"'{chunk_method}' is not in {_VALID_CHUNK_METHODS}"


def test_update_dataset_with_name(get_api_key_fixture):
    """A dataset can be renamed via ``update``."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset("test_update_dataset")
    ds.update({"name": "updated_dataset"})


def test_delete_datasets_with_success(get_api_key_fixture):
    """A dataset can be deleted by id."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset("test_delete_dataset")
    rag.delete_datasets(ids=[ds.id])


def test_list_datasets_with_success(get_api_key_fixture):
    """Datasets can be listed without error."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    rag.list_datasets()
|
||||
166
sdk/python/test/test_sdk_api/t_document.py
Normal file
166
sdk/python/test/test_sdk_api/t_document.py
Normal file
@ -0,0 +1,166 @@
|
||||
from ragflow_sdk import RAGFlow
|
||||
from common import HOST_ADDRESS
|
||||
import pytest
|
||||
|
||||
def test_upload_document_with_success(get_api_key_fixture):
    """Uploading two documents (inline bytes plus a local file) succeeds."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_upload_document")
    blob = b"Sample document content for test."
    with open("test_data/ragflow.txt", "rb") as file:
        blob_2 = file.read()
    document_infos = [
        {"displayed_name": "test_1.txt", "blob": blob},
        {"displayed_name": "test_2.txt", "blob": blob_2},
    ]
    ds.upload_documents(document_infos)


def test_update_document_with_success(get_api_key_fixture):
    """An uploaded document's chunk method and name can be updated."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_update_document")
    docs = ds.upload_documents(
        [{"displayed_name": "test.txt", "blob": b"Sample document content for test."}]
    )
    docs[0].update({"chunk_method": "manual", "name": "manual.txt"})


def test_download_document_with_success(get_api_key_fixture):
    """An uploaded document can be downloaded and written to disk."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_download_document")
    docs = ds.upload_documents(
        [{"displayed_name": "test_1.txt", "blob": b"Sample document content for test."}]
    )
    with open("test_download.txt", "wb+") as file:
        file.write(docs[0].download())


def test_list_documents_in_dataset_with_success(get_api_key_fixture):
    """Uploaded documents can be listed with keyword and paging filters."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_list_documents")
    ds.upload_documents(
        [{"displayed_name": "test.txt", "blob": b"Sample document content for test."}]
    )
    ds.list_documents(keywords="test", page=1, page_size=12)


def test_delete_documents_in_dataset_with_success(get_api_key_fixture):
    """An uploaded document can be deleted by id."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    ds = rag.create_dataset(name="test_delete_documents")
    docs = ds.upload_documents(
        [{"displayed_name": "test_delete_documents.txt",
          "blob": b"Sample document content for test."}]
    )
    ds.delete_documents([docs[0].id])
|
||||
|
||||
# Upload and parse documents of different file types with the general parse method.
def _upload_and_parse(api_key, dataset_name, file_name):
    """Upload ``test_data/<file_name>`` into a new dataset and start async parsing.

    Factored out of the per-filetype tests below, which previously repeated
    the same five statements verbatim for each format.
    """
    rag = RAGFlow(api_key, HOST_ADDRESS)
    ds = rag.create_dataset(name=dataset_name)
    with open(f"test_data/{file_name}", "rb") as file:
        blob = file.read()
    docs = ds.upload_documents([{"displayed_name": file_name, "blob": blob}])
    ds.async_parse_documents([docs[0].id])


def test_upload_and_parse_pdf_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_pdf_document", "test.pdf")


def test_upload_and_parse_docx_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_docx_document", "test.docx")


def test_upload_and_parse_excel_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_excel_document", "test.xlsx")


def test_upload_and_parse_ppt_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_ppt_document", "test.ppt")


def test_upload_and_parse_image_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_image_document", "test.jpg")


def test_upload_and_parse_txt_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_txt_document", "test.txt")


def test_upload_and_parse_md_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_md_document", "test.md")


def test_upload_and_parse_json_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_json_document", "test.json")


@pytest.mark.skip(reason="")
def test_upload_and_parse_eml_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_eml_document", "test.eml")


def test_upload_and_parse_html_documents_with_general_parse_method(get_api_key_fixture):
    _upload_and_parse(get_api_key_fixture, "test_html_document", "test.html")
|
||||
110
sdk/python/test/test_sdk_api/t_session.py
Normal file
110
sdk/python/test/test_sdk_api/t_session.py
Normal file
@ -0,0 +1,110 @@
|
||||
from ragflow_sdk import RAGFlow,Agent
|
||||
from common import HOST_ADDRESS
|
||||
import pytest
|
||||
|
||||
|
||||
def _assistant_over_new_kb(rag, name):
    """Create dataset *name*, chunk one test document, and return a chat
    assistant named *name* over that dataset.

    Factored out of the five session tests below, which previously repeated
    this setup boilerplate verbatim.
    """
    kb = rag.create_dataset(name=name)
    with open("test_data/ragflow.txt", "rb") as file:
        blob = file.read()
    docs = kb.upload_documents([{"displayed_name": "ragflow.txt", "blob": blob}])
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    return rag.create_chat(name, dataset_ids=[kb.id])


def test_create_session_with_success(get_api_key_fixture):
    """A session can be created on a chat assistant."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    assistant = _assistant_over_new_kb(rag, "test_create_session")
    assistant.create_session()


def test_create_conversation_with_success(get_api_key_fixture):
    """A session can stream answers to a question."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    assistant = _assistant_over_new_kb(rag, "test_create_conversation")
    session = assistant.create_session()
    question = "What is AI"
    # Drain the streamed answer; content is not asserted here.
    for ans in session.ask(question):
        pass
    # assert not ans.content.startswith("**ERROR**"), "Please check this error."


def test_delete_sessions_with_success(get_api_key_fixture):
    """A session can be deleted by id."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    assistant = _assistant_over_new_kb(rag, "test_delete_session")
    session = assistant.create_session()
    assistant.delete_sessions(ids=[session.id])


def test_update_session_with_name(get_api_key_fixture):
    """A session can be renamed via ``update``."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    assistant = _assistant_over_new_kb(rag, "test_update_session")
    session = assistant.create_session(name="old session")
    session.update({"name": "new session"})


def test_list_sessions_with_success(get_api_key_fixture):
    """Created sessions can be listed without error."""
    rag = RAGFlow(get_api_key_fixture, HOST_ADDRESS)
    assistant = _assistant_over_new_kb(rag, "test_list_session")
    assistant.create_session("test_1")
    assistant.create_session("test_2")
    assistant.list_sessions()
|
||||
|
||||
@pytest.mark.skip(reason="")
def test_create_agent_session_with_success(get_api_key_fixture):
    """Create a session on a pre-existing agent.

    Skipped: relies on a hard-coded API key and agent id from a specific
    deployment instead of the ``get_api_key_fixture`` (which is ignored).
    NOTE(review): replace the literals with fixture-provided values before
    enabling this test.
    """
    API_KEY = "ragflow-BkOGNhYjIyN2JiODExZWY5MzVhMDI0Mm"
    rag = RAGFlow(API_KEY,HOST_ADDRESS)
    # Hard-coded agent id — only valid on the deployment it was copied from.
    Agent.create_session("2e45b5209c1011efa3e90242ac120006", rag)


@pytest.mark.skip(reason="")
def test_create_agent_conversation_with_success(get_api_key_fixture):
    """Ask a question through an agent session.

    Skipped: same hard-coded API key and agent id as the test above; the
    ``get_api_key_fixture`` parameter is ignored.
    """
    API_KEY = "ragflow-BkOGNhYjIyN2JiODExZWY5MzVhMDI0Mm"
    rag = RAGFlow(API_KEY,HOST_ADDRESS)
    session = Agent.create_session("2e45b5209c1011efa3e90242ac120006", rag)
    # Streamed answer is consumed by the SDK call; content is not asserted.
    session.ask("What is this job")
|
||||
1
sdk/python/test/test_sdk_api/test_data/ragflow.txt
Normal file
1
sdk/python/test/test_sdk_api/test_data/ragflow.txt
Normal file
@ -0,0 +1 @@
|
||||
{"data":null,"code":100,"message":"TypeError(\"download_document() got an unexpected keyword argument 'tenant_id'\")"}
|
||||
29
sdk/python/test/test_sdk_api/test_data/ragflow_test.txt
Normal file
29
sdk/python/test/test_sdk_api/test_data/ragflow_test.txt
Normal file
@ -0,0 +1,29 @@
|
||||
|
||||
|
||||
Introducing RagFlow: Revolutionizing Natural Language Processing with Retrieval-Augmented Generation
|
||||
|
||||
In the ever-evolving landscape of Natural Language Processing (NLP), new techniques and frameworks continue to push the boundaries of what machines can understand and generate from human language. Among these innovative advancements, RagFlow stands out as a pioneering approach that combines the power of retrieval and generation to revolutionize the way we interact with text-based data.
|
||||
|
||||
What is RagFlow?
|
||||
|
||||
RagFlow, short for Retrieval-Augmented Generation Flow, is a framework designed to enhance the capabilities of NLP models by integrating a retrieval component into the generation process. This approach leverages large-scale knowledge bases and text corpora to retrieve relevant information that can inform and enrich the output generated by the model. By doing so, RagFlow enables models to produce more accurate, informative, and contextually relevant responses, surpassing the limitations of traditional generation-only or retrieval-only systems.
|
||||
|
||||
The Core Concept
|
||||
|
||||
At its core, RagFlow operates on two fundamental principles:
|
||||
|
||||
Retrieval: The first step involves identifying and retrieving relevant information from a vast collection of text sources. This can include web pages, academic articles, books, or any other form of unstructured text data. RagFlow employs advanced retrieval algorithms, often based on neural networks and vector similarity, to quickly and accurately locate the most pertinent information for a given query or task.
|
||||
Generation: Once relevant information has been retrieved, RagFlow leverages generative NLP models to produce the final output. These models, such as transformers or GPT-like architectures, are trained to understand the context provided by the retrieved information and generate coherent, fluent text that incorporates this knowledge. The integration of retrieval and generation allows RagFlow to generate responses that are not only grammatically correct but also semantically rich and contextually appropriate.
|
||||
Advantages of RagFlow
|
||||
|
||||
Increased Accuracy and Relevance: By incorporating retrieved information, RagFlow can generate responses that are more accurate and relevant to the user's query or task. This is particularly useful in domains where factual accuracy and contextual relevance are crucial, such as question answering, summarization, and knowledge-intensive dialogue systems.
|
||||
Scalability and Flexibility: RagFlow's reliance on large-scale text corpora and retrieval algorithms makes it highly scalable to new domains and datasets. As more data becomes available, the retrieval component can be easily updated to incorporate new information, while the generative model can be fine-tuned to adapt to specific tasks or user preferences.
|
||||
Improved Efficiency: By leveraging pre-existing knowledge bases and retrieval algorithms, RagFlow can reduce the computational burden on the generative model. This allows the model to focus on generating high-quality output rather than searching for relevant information from scratch, resulting in improved efficiency and faster response times.
|
||||
Applications and Future Directions
|
||||
|
||||
RagFlow has the potential to transform a wide range of NLP applications, including but not limited to:
|
||||
|
||||
Question Answering Systems: By retrieving relevant passages and generating precise answers, RagFlow can enhance the accuracy and comprehensiveness of question answering systems.
|
||||
Document Summarization: By identifying key information and generating concise summaries, RagFlow can help users quickly grasp the main points of lengthy documents.
|
||||
Creative Writing and Storytelling: By incorporating retrieved elements into the generation process, RagFlow can inspire and augment creative writing, enabling machines to produce more engaging and original stories.
|
||||
As the field of NLP continues to evolve, RagFlow represents a promising direction for leveraging the power of both retrieval and generation. With further research and development, we can expect to see even more sophisticated and versatile RagFlow-based systems that push the boundaries of what machines can achieve with human language.
|
||||
BIN
sdk/python/test/test_sdk_api/test_data/test.docx
Normal file
BIN
sdk/python/test/test_sdk_api/test_data/test.docx
Normal file
Binary file not shown.
148
sdk/python/test/test_sdk_api/test_data/test.html
Normal file
148
sdk/python/test/test_sdk_api/test_data/test.html
Normal file
File diff suppressed because one or more lines are too long
BIN
sdk/python/test/test_sdk_api/test_data/test.jpg
Normal file
BIN
sdk/python/test/test_sdk_api/test_data/test.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 87 KiB |
107
sdk/python/test/test_sdk_api/test_data/test.json
Normal file
107
sdk/python/test/test_sdk_api/test_data/test.json
Normal file
@ -0,0 +1,107 @@
|
||||
{
|
||||
"单车": [
|
||||
"自行车"
|
||||
],
|
||||
"青禾服装": [
|
||||
"青禾服饰"
|
||||
],
|
||||
"救济灾民": [
|
||||
"救助",
|
||||
"灾民救济",
|
||||
"赈济"
|
||||
],
|
||||
"左移": [],
|
||||
"低速": [],
|
||||
"雨果网": [],
|
||||
"钢小二": [
|
||||
"成立于2013年,位于江苏省无锡市,是一家以从事研究和试验发展为主的企业"
|
||||
],
|
||||
"第五项": [
|
||||
"5项"
|
||||
],
|
||||
"铸排机": [
|
||||
"机排",
|
||||
"排铸机",
|
||||
"排铸"
|
||||
],
|
||||
"金淳高分子": [],
|
||||
"麦门冬汤": [],
|
||||
"错位": [],
|
||||
"佰特吉姆": [],
|
||||
"楼体": [],
|
||||
"展美科技": [
|
||||
"美展"
|
||||
],
|
||||
"中寮": [],
|
||||
"贪官汙吏": [
|
||||
"...",
|
||||
"贪吏",
|
||||
"贪官污吏"
|
||||
],
|
||||
"掩蔽部": [
|
||||
"掩 蔽 部"
|
||||
],
|
||||
"海宏智能": [],
|
||||
"中寰": [],
|
||||
"万次": [],
|
||||
"领星资本": [
|
||||
"星领"
|
||||
],
|
||||
"肯讯": [],
|
||||
"坎肩": [],
|
||||
"爱农人": [],
|
||||
"易美餐": [],
|
||||
"寸丝半粟": [],
|
||||
"罗丹萍": [],
|
||||
"转导物": [],
|
||||
"泊寓": [],
|
||||
"万欧": [
|
||||
"欧万"
|
||||
],
|
||||
"友聚惠": [
|
||||
"友惠",
|
||||
"惠友"
|
||||
],
|
||||
"舞牙弄爪": [
|
||||
":形容凶猛的样子,比喻威胁、恐吓",
|
||||
"原形容猛兽的凶相,后常用来比喻猖狂凶恶的样子",
|
||||
"成语解释:原形容猛兽的凶相,后常用来比喻猖狂凶恶的样子",
|
||||
"原形容猛兽的凶相,后常用来比喻猖狂(好工具hao86.com",
|
||||
"牙舞爪",
|
||||
"形容猛兽凶恶可怕。也比喻猖狂凶恶",
|
||||
"舞爪"
|
||||
],
|
||||
"上海致上": [
|
||||
"上海上",
|
||||
"上海市"
|
||||
],
|
||||
"迪因加": [],
|
||||
"李正茂": [],
|
||||
"君来投": [],
|
||||
"双掌空": [
|
||||
"双掌 空",
|
||||
"空掌",
|
||||
"两手空空"
|
||||
],
|
||||
"浩石": [
|
||||
"石浩",
|
||||
"皓石"
|
||||
],
|
||||
"云阅文学": [],
|
||||
"阿斯帕": [],
|
||||
"中导": [],
|
||||
"以诚相待": [],
|
||||
"中融金服": [],
|
||||
"尚股网": [],
|
||||
"叶立钦": [
|
||||
"叶利钦"
|
||||
],
|
||||
"新信钱包": [
|
||||
"信信"
|
||||
],
|
||||
"赛苏投资": [
|
||||
"投资者"
|
||||
],
|
||||
"售价": [],
|
||||
"帮医网": []
|
||||
}
|
||||
21
sdk/python/test/test_sdk_api/test_data/test.md
Normal file
21
sdk/python/test/test_sdk_api/test_data/test.md
Normal file
@ -0,0 +1,21 @@
|
||||
Quod equidem non reprehendo;
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quibus natura iure responderit non esse verum aliunde finem beate vivendi, a se principia rei gerendae peti; Quae enim adhuc protulisti, popularia sunt, ego autem a te elegantiora desidero. Duo Reges: constructio interrete. Tum Lucius: Mihi vero ista valde probata sunt, quod item fratri puto. Bestiarum vero nullum iudicium puto. Nihil enim iam habes, quod ad corpus referas; Deinde prima illa, quae in congressu solemus: Quid tu, inquit, huc? Et homini, qui ceteris animantibus plurimum praestat, praecipue a natura nihil datum esse dicemus?
|
||||
|
||||
Iam id ipsum absurdum, maximum malum neglegi. Quod ea non occurrentia fingunt, vincunt Aristonem; Atqui perspicuum est hominem e corpore animoque constare, cum primae sint animi partes, secundae corporis. Fieri, inquam, Triari, nullo pacto potest, ut non dicas, quid non probes eius, a quo dissentias. Equidem e Cn. An dubium est, quin virtus ita maximam partem optineat in rebus humanis, ut reliquas obruat?
|
||||
|
||||
Quis istum dolorem timet?
|
||||
Summus dolor plures dies manere non potest? Dicet pro me ipsa virtus nec dubitabit isti vestro beato M. Tubulum fuisse, qua illum, cuius is condemnatus est rogatione, P. Quod si ita sit, cur opera philosophiae sit danda nescio.
|
||||
|
||||
Ex eorum enim scriptis et institutis cum omnis doctrina liberalis, omnis historia.
|
||||
Quod si ita est, sequitur id ipsum, quod te velle video, omnes semper beatos esse sapientes. Cum enim fertur quasi torrens oratio, quamvis multa cuiusque modi rapiat, nihil tamen teneas, nihil apprehendas, nusquam orationem rapidam coerceas. Ita redarguitur ipse a sese, convincunturque scripta eius probitate ipsius ac moribus. At quanta conantur! Mundum hunc omnem oppidum esse nostrum! Incendi igitur eos, qui audiunt, vides. Vide, ne magis, inquam, tuum fuerit, cum re idem tibi, quod mihi, videretur, non nova te rebus nomina inponere. Qui-vere falsone, quaerere mittimus-dicitur oculis se privasse; Si ista mala sunt, in quae potest incidere sapiens, sapientem esse non esse ad beate vivendum satis. At vero si ad vitem sensus accesserit, ut appetitum quendam habeat et per se ipsa moveatur, quid facturam putas?
|
||||
|
||||
Quem si tenueris, non modo meum Ciceronem, sed etiam me ipsum abducas licebit.
|
||||
Stulti autem malorum memoria torquentur, sapientes bona praeterita grata recordatione renovata delectant.
|
||||
Esse enim quam vellet iniquus iustus poterat inpune.
|
||||
Quae autem natura suae primae institutionis oblita est?
|
||||
Verum tamen cum de rebus grandioribus dicas, ipsae res verba rapiunt;
|
||||
Hoc est non modo cor non habere, sed ne palatum quidem.
|
||||
Voluptatem cum summum bonum diceret, primum in eo ipso parum vidit, deinde hoc quoque alienum; Sed tu istuc dixti bene Latine, parum plane. Nam haec ipsa mihi erunt in promptu, quae modo audivi, nec ante aggrediar, quam te ab istis, quos dicis, instructum videro. Fatebuntur Stoici haec omnia dicta esse praeclare, neque eam causam Zenoni desciscendi fuisse. Non autem hoc: igitur ne illud quidem. Ratio quidem vestra sic cogit. Cum audissem Antiochum, Brute, ut solebam, cum M. An quod ita callida est, ut optime possit architectari voluptates?
|
||||
|
||||
Idemne, quod iucunde?
|
||||
Haec mihi videtur delicatior, ut ita dicam, molliorque ratio, quam virtutis vis gravitasque postulat. Sed quoniam et advesperascit et mihi ad villam revertendum est, nunc quidem hactenus; Cuius ad naturam apta ratio vera illa et summa lex a philosophis dicitur. Neque solum ea communia, verum etiam paria esse dixerunt. Sed nunc, quod agimus; A mene tu?
|
||||
BIN
sdk/python/test/test_sdk_api/test_data/test.pdf
Normal file
BIN
sdk/python/test/test_sdk_api/test_data/test.pdf
Normal file
Binary file not shown.
BIN
sdk/python/test/test_sdk_api/test_data/test.ppt
Normal file
BIN
sdk/python/test/test_sdk_api/test_data/test.ppt
Normal file
Binary file not shown.
21
sdk/python/test/test_sdk_api/test_data/test.txt
Normal file
21
sdk/python/test/test_sdk_api/test_data/test.txt
Normal file
@ -0,0 +1,21 @@
|
||||
Quod equidem non reprehendo;
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quibus natura iure responderit non esse verum aliunde finem beate vivendi, a se principia rei gerendae peti; Quae enim adhuc protulisti, popularia sunt, ego autem a te elegantiora desidero. Duo Reges: constructio interrete. Tum Lucius: Mihi vero ista valde probata sunt, quod item fratri puto. Bestiarum vero nullum iudicium puto. Nihil enim iam habes, quod ad corpus referas; Deinde prima illa, quae in congressu solemus: Quid tu, inquit, huc? Et homini, qui ceteris animantibus plurimum praestat, praecipue a natura nihil datum esse dicemus?
|
||||
|
||||
Iam id ipsum absurdum, maximum malum neglegi. Quod ea non occurrentia fingunt, vincunt Aristonem; Atqui perspicuum est hominem e corpore animoque constare, cum primae sint animi partes, secundae corporis. Fieri, inquam, Triari, nullo pacto potest, ut non dicas, quid non probes eius, a quo dissentias. Equidem e Cn. An dubium est, quin virtus ita maximam partem optineat in rebus humanis, ut reliquas obruat?
|
||||
|
||||
Quis istum dolorem timet?
|
||||
Summus dolor plures dies manere non potest? Dicet pro me ipsa virtus nec dubitabit isti vestro beato M. Tubulum fuisse, qua illum, cuius is condemnatus est rogatione, P. Quod si ita sit, cur opera philosophiae sit danda nescio.
|
||||
|
||||
Ex eorum enim scriptis et institutis cum omnis doctrina liberalis, omnis historia.
|
||||
Quod si ita est, sequitur id ipsum, quod te velle video, omnes semper beatos esse sapientes. Cum enim fertur quasi torrens oratio, quamvis multa cuiusque modi rapiat, nihil tamen teneas, nihil apprehendas, nusquam orationem rapidam coerceas. Ita redarguitur ipse a sese, convincunturque scripta eius probitate ipsius ac moribus. At quanta conantur! Mundum hunc omnem oppidum esse nostrum! Incendi igitur eos, qui audiunt, vides. Vide, ne magis, inquam, tuum fuerit, cum re idem tibi, quod mihi, videretur, non nova te rebus nomina inponere. Qui-vere falsone, quaerere mittimus-dicitur oculis se privasse; Si ista mala sunt, in quae potest incidere sapiens, sapientem esse non esse ad beate vivendum satis. At vero si ad vitem sensus accesserit, ut appetitum quendam habeat et per se ipsa moveatur, quid facturam putas?
|
||||
|
||||
Quem si tenueris, non modo meum Ciceronem, sed etiam me ipsum abducas licebit.
|
||||
Stulti autem malorum memoria torquentur, sapientes bona praeterita grata recordatione renovata delectant.
|
||||
Esse enim quam vellet iniquus iustus poterat inpune.
|
||||
Quae autem natura suae primae institutionis oblita est?
|
||||
Verum tamen cum de rebus grandioribus dicas, ipsae res verba rapiunt;
|
||||
Hoc est non modo cor non habere, sed ne palatum quidem.
|
||||
Voluptatem cum summum bonum diceret, primum in eo ipso parum vidit, deinde hoc quoque alienum; Sed tu istuc dixti bene Latine, parum plane. Nam haec ipsa mihi erunt in promptu, quae modo audivi, nec ante aggrediar, quam te ab istis, quos dicis, instructum videro. Fatebuntur Stoici haec omnia dicta esse praeclare, neque eam causam Zenoni desciscendi fuisse. Non autem hoc: igitur ne illud quidem. Ratio quidem vestra sic cogit. Cum audissem Antiochum, Brute, ut solebam, cum M. An quod ita callida est, ut optime possit architectari voluptates?
|
||||
|
||||
Idemne, quod iucunde?
|
||||
Haec mihi videtur delicatior, ut ita dicam, molliorque ratio, quam virtutis vis gravitasque postulat. Sed quoniam et advesperascit et mihi ad villam revertendum est, nunc quidem hactenus; Cuius ad naturam apta ratio vera illa et summa lex a philosophis dicitur. Neque solum ea communia, verum etiam paria esse dixerunt. Sed nunc, quod agimus; A mene tu?
|
||||
BIN
sdk/python/test/test_sdk_api/test_data/test.xlsx
Normal file
BIN
sdk/python/test/test_sdk_api/test_data/test.xlsx
Normal file
Binary file not shown.
Reference in New Issue
Block a user