Mirror of https://github.com/infiniflow/ragflow.git
Update displayed_name to display_name (#4311)
### What problem does this PR solve?

Update displayed_name to display_name

### Type of change

- [x] Refactoring

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
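For reference, a minimal usage sketch of the renamed key after this change. The client setup (`RAGFlow(api_key=..., base_url=...)` and `create_dataset(...)`) is assumed from the ragflow_sdk package and is not part of this diff; only `upload_documents` and `async_parse_documents` appear in the changed tests.

```python
from ragflow_sdk import RAGFlow  # assumed client import; not part of this diff

# Assumed setup: placeholder API key and a locally running RAGFlow server.
rag = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://localhost:9380")
ds = rag.create_dataset(name="display_name_demo")

with open("test_data/ragflow_test.txt", "rb") as f:
    blob = f.read()

# The upload dict key is now "display_name" (previously "displayed_name").
docs = ds.upload_documents([{"display_name": "ragflow_test.txt", "blob": blob}])
ds.async_parse_documents(document_ids=[docs[0].id])
```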
@@ -9,7 +9,7 @@ def test_parse_document_with_txt(get_api_key_fixture):
     name = 'ragflow_test.txt'
     with open("test_data/ragflow_test.txt", "rb") as file :
         blob = file.read()
-    docs = ds.upload_documents([{"displayed_name": name, "blob": blob}])
+    docs = ds.upload_documents([{"display_name": name, "blob": blob}])
     doc = docs[0]
     ds.async_parse_documents(document_ids=[doc.id])
     '''
@@ -28,7 +28,7 @@ def test_parse_and_cancel_document(get_api_key_fixture):
     name = 'ragflow_test.txt'
     with open("test_data/ragflow_test.txt", "rb") as file :
         blob = file.read()
-    docs=ds.upload_documents([{"displayed_name": name, "blob": blob}])
+    docs=ds.upload_documents([{"display_name": name, "blob": blob}])
     doc = docs[0]
     ds.async_parse_documents(document_ids=[doc.id])
     sleep(1)
@@ -43,9 +43,9 @@ def test_bulk_parse_documents(get_api_key_fixture):
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
     documents = [
-        {'displayed_name': 'test1.txt', 'blob': blob},
-        {'displayed_name': 'test2.txt', 'blob': blob},
-        {'displayed_name': 'test3.txt', 'blob': blob}
+        {'display_name': 'test1.txt', 'blob': blob},
+        {'display_name': 'test2.txt', 'blob': blob},
+        {'display_name': 'test3.txt', 'blob': blob}
     ]
     docs = ds.upload_documents(documents)
     ids = [doc.id for doc in docs]
@@ -70,10 +70,10 @@ def test_list_chunks_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_list_chunks_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_list_chunks_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     ids = [doc.id for doc in docs]
     ds.async_parse_documents(ids)
@@ -100,10 +100,10 @@ def test_add_chunk_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_list_chunks_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_list_chunks_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     doc.add_chunk(content="This is a chunk addition test")
@@ -119,10 +119,10 @@ def test_delete_chunk_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_delete_chunk_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_delete_chunk_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -140,10 +140,10 @@ def test_update_chunk_content(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_update_chunk_content_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_update_chunk_content_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -161,10 +161,10 @@ def test_update_chunk_available(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_update_chunk_available_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_update_chunk_available_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -183,10 +183,10 @@ def test_retrieve_chunks(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_retrieve_chunks.txt","blob":blob}]
+    documents =[{"display_name":"test_retrieve_chunks.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     doc.add_chunk(content="This is a chunk addition test")