Fix some issues in API (#2982)

### What problem does this PR solve?

Fix some issues in API

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
This commit is contained in:
liuhua
2024-10-23 12:02:18 +08:00
committed by GitHub
parent 43b959fe58
commit 8714754afc
14 changed files with 206 additions and 106 deletions

View File

@@ -9,7 +9,7 @@ class Chat(Base):
self.id = ""
self.name = "assistant"
self.avatar = "path/to/avatar"
self.knowledgebases = ["kb1"]
self.datasets = ["kb1"]
self.llm = Chat.LLM(rag, {})
self.prompt = Chat.Prompt(rag, {})
super().__init__(rag, res_dict)

View File

@@ -8,10 +8,10 @@ class Chunk(Base):
self.important_keywords = []
self.create_time = ""
self.create_timestamp = 0.0
self.knowledgebase_id = None
self.dataset_id = None
self.document_name = ""
self.document_id = ""
self.available = 1
self.available = True
for k in list(res_dict.keys()):
if k not in self.__dict__:
res_dict.pop(k)
@@ -19,7 +19,7 @@ class Chunk(Base):
def update(self,update_message:dict):
res = self.put(f"/dataset/{self.knowledgebase_id}/document/{self.document_id}/chunk/{self.id}",update_message)
res = self.put(f"/dataset/{self.dataset_id}/document/{self.document_id}/chunk/{self.id}",update_message)
res = res.json()
if res.get("code") != 0 :
raise Exception(res["message"])

View File

@@ -10,10 +10,6 @@ from .base import Base
class DataSet(Base):
class ParserConfig(Base):
def __init__(self, rag, res_dict):
self.chunk_token_count = 128
self.layout_recognize = True
self.delimiter = '\n!?。;!?'
self.task_page_size = 12
super().__init__(rag, res_dict)
def __init__(self, rag, res_dict):
@@ -43,11 +39,16 @@ class DataSet(Base):
def upload_documents(self,document_list: List[dict]):
url = f"/dataset/{self.id}/document"
files = [("file",(ele["name"],ele["blob"])) for ele in document_list]
files = [("file",(ele["displayed_name"],ele["blob"])) for ele in document_list]
res = self.post(path=url,json=None,files=files)
res = res.json()
if res.get("code") != 0:
raise Exception(res.get("message"))
if res.get("code") == 0:
doc_list=[]
for doc in res["data"]:
document = Document(self.rag,doc)
doc_list.append(document)
return doc_list
raise Exception(res.get("message"))
def list_documents(self, id: str = None, keywords: str = None, offset: int =1, limit: int = 1024, orderby: str = "create_time", desc: bool = True):
res = self.get(f"/dataset/{self.id}/info",params={"id": id,"keywords": keywords,"offset": offset,"limit": limit,"orderby": orderby,"desc": desc})

View File

@@ -5,12 +5,16 @@ from typing import List
class Document(Base):
class ParserConfig(Base):
def __init__(self, rag, res_dict):
super().__init__(rag, res_dict)
def __init__(self, rag, res_dict):
self.id = ""
self.name = ""
self.thumbnail = None
self.knowledgebase_id = None
self.chunk_method = ""
self.dataset_id = None
self.chunk_method = "naive"
self.parser_config = {"pages": [[1, 1000000]]}
self.source_type = "local"
self.type = ""
@@ -31,14 +35,14 @@ class Document(Base):
def update(self, update_message: dict):
res = self.put(f'/dataset/{self.knowledgebase_id}/info/{self.id}',
res = self.put(f'/dataset/{self.dataset_id}/info/{self.id}',
update_message)
res = res.json()
if res.get("code") != 0:
raise Exception(res["message"])
def download(self):
res = self.get(f"/dataset/{self.knowledgebase_id}/document/{self.id}")
res = self.get(f"/dataset/{self.dataset_id}/document/{self.id}")
try:
res = res.json()
raise Exception(res.get("message"))
@@ -48,7 +52,7 @@ class Document(Base):
def list_chunks(self,offset=0, limit=30, keywords="", id:str=None):
data={"document_id": self.id,"keywords": keywords,"offset":offset,"limit":limit,"id":id}
res = self.get(f'/dataset/{self.knowledgebase_id}/document/{self.id}/chunk', data)
res = self.get(f'/dataset/{self.dataset_id}/document/{self.id}/chunk', data)
res = res.json()
if res.get("code") == 0:
chunks=[]
@@ -59,15 +63,15 @@ class Document(Base):
raise Exception(res.get("message"))
def add_chunk(self, content: str):
res = self.post(f'/dataset/{self.knowledgebase_id}/document/{self.id}/chunk', {"content":content})
def add_chunk(self, content: str,important_keywords:List[str]=[]):
res = self.post(f'/dataset/{self.dataset_id}/document/{self.id}/chunk', {"content":content,"important_keywords":important_keywords})
res = res.json()
if res.get("code") == 0:
return Chunk(self.rag,res["data"].get("chunk"))
raise Exception(res.get("message"))
def delete_chunks(self,ids:List[str]):
res = self.rm(f"dataset/{self.knowledgebase_id}/document/{self.id}/chunk",{"ids":ids})
res = self.rm(f"dataset/{self.dataset_id}/document/{self.id}/chunk",{"ids":ids})
res = res.json()
if res.get("code")!=0:
raise Exception(res.get("message"))

View File

@@ -40,7 +40,7 @@ class Session(Base):
"content": chunk["content_with_weight"],
"document_id": chunk["doc_id"],
"document_name": chunk["docnm_kwd"],
"knowledgebase_id": chunk["kb_id"],
"dataset_id": chunk["kb_id"],
"image_id": chunk["img_id"],
"similarity": chunk["similarity"],
"vector_similarity": chunk["vector_similarity"],
@@ -75,7 +75,7 @@ class Chunk(Base):
self.content = None
self.document_id = ""
self.document_name = ""
self.knowledgebase_id = ""
self.dataset_id = ""
self.image_id = ""
self.similarity = None
self.vector_similarity = None

View File

@@ -49,17 +49,11 @@ class RAGFlow:
return res
def create_dataset(self, name: str, avatar: str = "", description: str = "", language: str = "English",
permission: str = "me",
document_count: int = 0, chunk_count: int = 0, chunk_method: str = "naive",
permission: str = "me",chunk_method: str = "naive",
parser_config: DataSet.ParserConfig = None) -> DataSet:
if parser_config is None:
parser_config = DataSet.ParserConfig(self, {"chunk_token_count": 128, "layout_recognize": True,
"delimiter": "\n!?。;!?", "task_page_size": 12})
parser_config = parser_config.to_json()
res = self.post("/dataset",
{"name": name, "avatar": avatar, "description": description, "language": language,
"permission": permission,
"document_count": document_count, "chunk_count": chunk_count, "chunk_method": chunk_method,
"permission": permission, "chunk_method": chunk_method,
"parser_config": parser_config
}
)
@@ -93,11 +87,11 @@ class RAGFlow:
return result_list
raise Exception(res["message"])
def create_chat(self, name: str, avatar: str = "", knowledgebases: List[DataSet] = [],
def create_chat(self, name: str, avatar: str = "", datasets: List[DataSet] = [],
llm: Chat.LLM = None, prompt: Chat.Prompt = None) -> Chat:
datasets = []
for dataset in knowledgebases:
datasets.append(dataset.to_json())
dataset_list = []
for dataset in datasets:
dataset_list.append(dataset.to_json())
if llm is None:
llm = Chat.LLM(self, {"model_name": None,
@@ -130,7 +124,7 @@ class RAGFlow:
temp_dict = {"name": name,
"avatar": avatar,
"knowledgebases": datasets,
"datasets": dataset_list,
"llm": llm.to_json(),
"prompt": prompt.to_json()}
res = self.post("/chat", temp_dict)
@@ -158,25 +152,22 @@ class RAGFlow:
raise Exception(res["message"])
def retrieve(self, question="",datasets=None,documents=None, offset=1, limit=30, similarity_threshold=0.2,vector_similarity_weight=0.3,top_k=1024,rerank_id:str=None,keyword:bool=False,):
data_params = {
def retrieve(self, datasets,documents,question="", offset=1, limit=1024, similarity_threshold=0.2,vector_similarity_weight=0.3,top_k=1024,rerank_id:str=None,keyword:bool=False,):
data_json ={
"offset": offset,
"limit": limit,
"similarity_threshold": similarity_threshold,
"vector_similarity_weight": vector_similarity_weight,
"top_k": top_k,
"knowledgebase_id": datasets,
"rerank_id":rerank_id,
"keyword":keyword
}
data_json ={
"rerank_id": rerank_id,
"keyword": keyword,
"question": question,
"datasets": datasets,
"documents": documents
}
# Send a POST request to the backend service (using requests library as an example, actual implementation may vary)
res = self.get(f'/retrieval', data_params,data_json)
res = self.post(f'/retrieval',json=data_json)
res = res.json()
if res.get("code") ==0:
chunks=[]

View File

@@ -1,4 +1,5 @@
from ragflow import RAGFlow, Chat
from xgboost.testing import datasets
from common import API_KEY, HOST_ADDRESS
from test_sdkbase import TestSdk
@@ -11,7 +12,7 @@ class TestChat(TestSdk):
"""
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_create_chat")
chat = rag.create_chat("test_create", knowledgebases=[kb])
chat = rag.create_chat("test_create", datasets=[kb])
if isinstance(chat, Chat):
assert chat.name == "test_create", "Name does not match."
else:
@@ -23,7 +24,7 @@ class TestChat(TestSdk):
"""
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_update_chat")
chat = rag.create_chat("test_update", knowledgebases=[kb])
chat = rag.create_chat("test_update", datasets=[kb])
if isinstance(chat, Chat):
assert chat.name == "test_update", "Name does not match."
res=chat.update({"name":"new_chat"})
@@ -37,7 +38,7 @@ class TestChat(TestSdk):
"""
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_delete_chat")
chat = rag.create_chat("test_delete", knowledgebases=[kb])
chat = rag.create_chat("test_delete", datasets=[kb])
if isinstance(chat, Chat):
assert chat.name == "test_delete", "Name does not match."
res = rag.delete_chats(ids=[chat.id])

View File

@@ -7,14 +7,14 @@ class TestSession:
def test_create_session(self):
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_create_session")
assistant = rag.create_chat(name="test_create_session", knowledgebases=[kb])
assistant = rag.create_chat(name="test_create_session", datasets=[kb])
session = assistant.create_session()
assert isinstance(session,Session), "Failed to create a session."
def test_create_chat_with_success(self):
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_create_chat")
assistant = rag.create_chat(name="test_create_chat", knowledgebases=[kb])
assistant = rag.create_chat(name="test_create_chat", datasets=[kb])
session = assistant.create_session()
question = "What is AI"
for ans in session.ask(question, stream=True):
@@ -24,7 +24,7 @@ class TestSession:
def test_delete_sessions_with_success(self):
rag = RAGFlow(API_KEY, HOST_ADDRESS)
kb = rag.create_dataset(name="test_delete_session")
assistant = rag.create_chat(name="test_delete_session",knowledgebases=[kb])
assistant = rag.create_chat(name="test_delete_session",datasets=[kb])
session=assistant.create_session()
res=assistant.delete_sessions(ids=[session.id])
assert res is None, "Failed to delete the dataset."
@@ -32,7 +32,7 @@ class TestSession:
def test_update_session_with_success(self):
rag=RAGFlow(API_KEY,HOST_ADDRESS)
kb=rag.create_dataset(name="test_update_session")
assistant = rag.create_chat(name="test_update_session",knowledgebases=[kb])
assistant = rag.create_chat(name="test_update_session",datasets=[kb])
session=assistant.create_session(name="old session")
res=session.update({"name":"new session"})
assert res is None,"Failed to update the session"
@@ -41,7 +41,7 @@ class TestSession:
def test_list_sessions_with_success(self):
rag=RAGFlow(API_KEY,HOST_ADDRESS)
kb=rag.create_dataset(name="test_list_session")
assistant=rag.create_chat(name="test_list_session",knowledgebases=[kb])
assistant=rag.create_chat(name="test_list_session",datasets=[kb])
assistant.create_session("test_1")
assistant.create_session("test_2")
sessions=assistant.list_sessions()