mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
Manage ragflow-sdk with poetry (#3115)
### What problem does this PR solve?

Manage ragflow-sdk with Poetry.

### Type of change

- [x] Refactoring
This commit is contained in:
0
sdk/python/ragflow_sdk/modules/__init__.py
Normal file
0
sdk/python/ragflow_sdk/modules/__init__.py
Normal file
38
sdk/python/ragflow_sdk/modules/base.py
Normal file
38
sdk/python/ragflow_sdk/modules/base.py
Normal file
@ -0,0 +1,38 @@
|
||||
class Base(object):
    """Common base for all SDK resource objects.

    Copies every key of ``res_dict`` onto the instance; nested dicts are
    wrapped in :class:`Base` so attribute access works recursively.  Also
    provides thin HTTP helpers that delegate to the client in ``self.rag``.
    """

    def __init__(self, rag, res_dict):
        self.rag = rag
        for key, val in res_dict.items():
            self.__dict__[key] = Base(rag, val) if isinstance(val, dict) else val

    def to_json(self):
        """Return a plain-dict snapshot of the public, non-callable attributes."""
        snapshot = {}
        for name in dir(self):
            # Skip dunders and the client handle; keep only data attributes.
            if name.startswith('__') or name == "rag":
                continue
            value = getattr(self, name)
            if callable(value):
                continue
            snapshot[name] = value.to_json() if isinstance(value, Base) else value
        return snapshot

    def post(self, path, json=None, stream=False, files=None):
        """POST ``json`` (and optional ``files``) to ``path`` via the client."""
        return self.rag.post(path, json, stream=stream, files=files)

    def get(self, path, params=None):
        """GET ``path`` with optional query ``params`` via the client."""
        return self.rag.get(path, params)

    def rm(self, path, json):
        """DELETE ``path`` with a ``json`` body via the client."""
        return self.rag.delete(path, json)

    def put(self, path, json):
        """PUT ``json`` to ``path`` via the client."""
        return self.rag.put(path, json)

    def __str__(self):
        return str(self.to_json())
|
||||
75
sdk/python/ragflow_sdk/modules/chat.py
Normal file
75
sdk/python/ragflow_sdk/modules/chat.py
Normal file
@ -0,0 +1,75 @@
|
||||
from typing import List
|
||||
|
||||
from .base import Base
|
||||
from .session import Session
|
||||
|
||||
|
||||
class Chat(Base):
    """A chat assistant configured over one or more datasets."""

    def __init__(self, rag, res_dict):
        # Client-side defaults; overwritten by whatever the server returned.
        self.id = ""
        self.name = "assistant"
        self.avatar = "path/to/avatar"
        self.dataset_ids = ["kb1"]
        self.llm = Chat.LLM(rag, {})
        self.prompt = Chat.Prompt(rag, {})
        super().__init__(rag, res_dict)

    class LLM(Base):
        """Generation settings for the underlying chat model."""

        def __init__(self, rag, res_dict):
            self.model_name = "deepseek-chat"
            self.temperature = 0.1
            self.top_p = 0.3
            self.presence_penalty = 0.4
            self.frequency_penalty = 0.7
            self.max_tokens = 512
            super().__init__(rag, res_dict)

    class Prompt(Base):
        """Retrieval and prompting configuration for the assistant."""

        def __init__(self, rag, res_dict):
            self.similarity_threshold = 0.2
            self.keywords_similarity_weight = 0.7
            self.top_n = 8
            self.variables = [{"key": "knowledge", "optional": True}]
            self.rerank_model = None
            self.empty_response = None
            self.opener = "Hi! I'm your assistant, what can I do for you?"
            self.show_quote = True
            self.prompt = (
                "You are an intelligent assistant. Please summarize the content of the knowledge base to answer the question. "
                "Please list the data in the knowledge base and answer in detail. When all knowledge base content is irrelevant to the question, "
                "your answer must include the sentence 'The answer you are looking for is not found in the knowledge base!' "
                "Answers need to consider chat history.\nHere is the knowledge base:\n{knowledge}\nThe above is the knowledge base."
            )
            super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this assistant on the server."""
        body = self.put(f'/chats/{self.id}', update_message).json()
        if body.get("code") != 0:
            raise Exception(body["message"])

    def create_session(self, name: str = "New session") -> Session:
        """Create and return a new :class:`Session` under this assistant."""
        body = self.post(f"/chats/{self.id}/sessions", {"name": name}).json()
        if body.get("code") != 0:
            raise Exception(body["message"])
        return Session(self.rag, body['data'])

    def list_sessions(self, page: int = 1, page_size: int = 1024, orderby: str = "create_time", desc: bool = True,
                      id: str = None, name: str = None) -> List[Session]:
        """List sessions of this assistant, optionally filtered by id/name."""
        query = {"page": page, "page_size": page_size, "orderby": orderby,
                 "desc": desc, "id": id, "name": name}
        body = self.get(f'/chats/{self.id}/sessions', query).json()
        if body.get("code") != 0:
            raise Exception(body["message"])
        return [Session(self.rag, item) for item in body["data"]]

    def delete_sessions(self, ids: List[str] = None):
        """Delete the sessions with the given ids."""
        body = self.rm(f"/chats/{self.id}/sessions", {"ids": ids}).json()
        if body.get("code") != 0:
            raise Exception(body.get("message"))
|
||||
27
sdk/python/ragflow_sdk/modules/chunk.py
Normal file
27
sdk/python/ragflow_sdk/modules/chunk.py
Normal file
@ -0,0 +1,27 @@
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Chunk(Base):
    """A single chunk of a parsed document."""

    def __init__(self, rag, res_dict):
        # Defaults mirror the server-side chunk schema.
        self.id = ""
        self.content = ""
        self.important_keywords = []
        self.create_time = ""
        self.create_timestamp = 0.0
        self.dataset_id = None
        self.document_name = ""
        self.document_id = ""
        self.available = True
        # Discard any response keys the SDK does not model.
        for key in list(res_dict.keys()):
            if key not in self.__dict__:
                res_dict.pop(key)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Push ``update_message`` for this chunk to the server."""
        body = self.put(
            f"/datasets/{self.dataset_id}/documents/{self.document_id}/chunks/{self.id}",
            update_message,
        ).json()
        if body.get("code") != 0:
            raise Exception(body["message"])
|
||||
|
||||
|
||||
77
sdk/python/ragflow_sdk/modules/dataset.py
Normal file
77
sdk/python/ragflow_sdk/modules/dataset.py
Normal file
@ -0,0 +1,77 @@
|
||||
from typing import List
|
||||
|
||||
from .document import Document
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class DataSet(Base):
    """A knowledge-base dataset and its document-management operations."""

    class ParserConfig(Base):
        """Thin wrapper around the dataset's parser configuration dict."""

        def __init__(self, rag, res_dict):
            super().__init__(rag, res_dict)

    def __init__(self, rag, res_dict):
        # Defaults mirror the server-side dataset schema.
        self.id = ""
        self.name = ""
        self.avatar = ""
        self.tenant_id = None
        self.description = ""
        self.language = "English"
        self.embedding_model = ""
        self.permission = "me"
        self.document_count = 0
        self.chunk_count = 0
        self.chunk_method = "naive"
        self.parser_config = None
        # Discard any response keys the SDK does not model.
        for key in list(res_dict.keys()):
            if key not in self.__dict__:
                res_dict.pop(key)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this dataset on the server."""
        body = self.put(f'/datasets/{self.id}', update_message).json()
        if body.get("code") != 0:
            raise Exception(body["message"])

    def upload_documents(self, document_list: List[dict]):
        """Upload files to this dataset.

        Each item in ``document_list`` must carry ``displayed_name`` and
        ``blob``.  Returns the created :class:`Document` objects.
        """
        payload = [("file", (item["displayed_name"], item["blob"])) for item in document_list]
        body = self.post(path=f"/datasets/{self.id}/documents", json=None, files=payload).json()
        if body.get("code") != 0:
            raise Exception(body.get("message"))
        return [Document(self.rag, doc) for doc in body["data"]]

    def list_documents(self, id: str = None, keywords: str = None, offset: int = 1, limit: int = 1024,
                       orderby: str = "create_time", desc: bool = True):
        """List documents of this dataset, optionally filtered and paged."""
        query = {"id": id, "keywords": keywords, "offset": offset,
                 "limit": limit, "orderby": orderby, "desc": desc}
        body = self.get(f"/datasets/{self.id}/documents", params=query).json()
        if body.get("code") != 0:
            raise Exception(body["message"])
        return [Document(self.rag, doc) for doc in body["data"].get("docs")]

    def delete_documents(self, ids: List[str] = None):
        """Delete the documents with the given ids from this dataset."""
        body = self.rm(f"/datasets/{self.id}/documents", {"ids": ids}).json()
        if body.get("code") != 0:
            raise Exception(body["message"])

    def async_parse_documents(self, document_ids):
        """Start asynchronous chunk parsing for the given documents."""
        body = self.post(f"/datasets/{self.id}/chunks", {"document_ids": document_ids}).json()
        if body.get("code") != 0:
            raise Exception(body.get("message"))

    def async_cancel_parse_documents(self, document_ids):
        """Cancel in-flight chunk parsing for the given documents."""
        body = self.rm(f"/datasets/{self.id}/chunks", {"document_ids": document_ids}).json()
        if body.get("code") != 0:
            raise Exception(body.get("message"))
|
||||
77
sdk/python/ragflow_sdk/modules/document.py
Normal file
77
sdk/python/ragflow_sdk/modules/document.py
Normal file
@ -0,0 +1,77 @@
|
||||
import json
|
||||
from .base import Base
|
||||
from .chunk import Chunk
|
||||
from typing import List
|
||||
|
||||
|
||||
class Document(Base):
    """A document inside a dataset, with chunk-level operations."""

    class ParserConfig(Base):
        """Thin wrapper around the per-document parser configuration dict."""

        def __init__(self, rag, res_dict):
            super().__init__(rag, res_dict)

    def __init__(self, rag, res_dict):
        # Defaults mirror the server-side document schema.
        self.id = ""
        self.name = ""
        self.thumbnail = None
        self.dataset_id = None
        self.chunk_method = "naive"
        self.parser_config = {"pages": [[1, 1000000]]}
        self.source_type = "local"
        self.type = ""
        self.created_by = ""
        self.size = 0
        self.token_count = 0
        self.chunk_count = 0
        self.progress = 0.0
        self.progress_msg = ""
        self.process_begin_at = None
        self.process_duration = 0.0
        self.run = "0"
        self.status = "1"
        # Discard any response keys the SDK does not model.
        for k in list(res_dict.keys()):
            if k not in self.__dict__:
                res_dict.pop(k)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this document on the server.

        Raises:
            Exception: if the server reports a non-zero code.
        """
        res = self.put(f'/datasets/{self.dataset_id}/documents/{self.id}',
                       update_message)
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def download(self):
        """Download the raw document content as bytes.

        A successful download returns non-JSON binary content; a JSON body
        indicates a server-side error, whose message is raised instead.
        """
        res = self.get(f"/datasets/{self.dataset_id}/documents/{self.id}")
        try:
            res = res.json()
            raise Exception(res.get("message"))
        except json.JSONDecodeError:
            return res.content

    def list_chunks(self, offset=0, limit=30, keywords="", id: str = None):
        """List chunks of this document, optionally filtered and paged."""
        data = {"document_id": self.id, "keywords": keywords,
                "offset": offset, "limit": limit, "id": id}
        res = self.get(f'/datasets/{self.dataset_id}/documents/{self.id}/chunks', data)
        res = res.json()
        if res.get("code") == 0:
            return [Chunk(self.rag, item) for item in res["data"].get("chunks")]
        raise Exception(res.get("message"))

    def add_chunk(self, content: str, important_keywords: List[str] = None):
        """Create a new chunk with ``content``; return the created :class:`Chunk`.

        Fix: ``important_keywords`` previously defaulted to a mutable ``[]``
        (shared across calls); it now uses a ``None`` sentinel with the same
        effective behavior.
        """
        if important_keywords is None:
            important_keywords = []
        res = self.post(f'/datasets/{self.dataset_id}/documents/{self.id}/chunks',
                        {"content": content, "important_keywords": important_keywords})
        res = res.json()
        if res.get("code") == 0:
            return Chunk(self.rag, res["data"].get("chunk"))
        raise Exception(res.get("message"))

    def delete_chunks(self, ids: List[str] = None):
        """Delete the chunks with the given ids from this document.

        Fix: the endpoint path previously lacked the leading slash
        ("datasets/...") unlike every other endpoint in this SDK.
        """
        res = self.rm(f"/datasets/{self.dataset_id}/documents/{self.id}/chunks", {"ids": ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))
|
||||
85
sdk/python/ragflow_sdk/modules/session.py
Normal file
85
sdk/python/ragflow_sdk/modules/session.py
Normal file
@ -0,0 +1,85 @@
|
||||
import json
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Session(Base):
    """A chat session: holds message history and streams completions."""

    def __init__(self, rag, res_dict):
        # Client-side defaults; overwritten by the server response.
        self.id = None
        self.name = "New session"
        self.messages = [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
        self.chat_id = None
        super().__init__(rag, res_dict)

    def ask(self, question: str, stream: bool = False):
        """Ask *question*; generator yielding one Message per streamed answer frame."""
        # Strip stale reference payloads from history before asking again.
        for message in self.messages:
            if "reference" in message:
                message.pop("reference")
        # NOTE(review): the request body hardcodes "stream": True regardless of
        # the *stream* argument, which only controls client-side streaming of
        # the HTTP response — confirm this is intentional.
        res = self.post(f"/chats/{self.chat_id}/completions",
                        {"question": question, "stream": True,"session_id":self.id}, stream=stream)
        for line in res.iter_lines():
            line = line.decode("utf-8")
            # A bare JSON object (not an SSE "data:" frame) signals an error body.
            if line.startswith("{"):
                json_data = json.loads(line)
                raise Exception(json_data["message"])
            if line.startswith("data:"):
                json_data = json.loads(line[5:])
                # The terminal frame carries data == True as an end-of-stream marker.
                if json_data["data"] != True:
                    answer = json_data["data"]["answer"]
                    reference = json_data["data"]["reference"]
                    temp_dict = {
                        "content": answer,
                        "role": "assistant"
                    }
                    if "chunks" in reference:
                        chunks = reference["chunks"]
                        chunk_list = []
                        for chunk in chunks:
                            # Map server field names onto the SDK's Chunk schema.
                            new_chunk = {
                                "id": chunk["chunk_id"],
                                "content": chunk["content_with_weight"],
                                "document_id": chunk["doc_id"],
                                "document_name": chunk["docnm_kwd"],
                                "dataset_id": chunk["kb_id"],
                                "image_id": chunk["img_id"],
                                "similarity": chunk["similarity"],
                                "vector_similarity": chunk["vector_similarity"],
                                "term_similarity": chunk["term_similarity"],
                                "positions": chunk["positions"],
                            }
                            chunk_list.append(new_chunk)
                        temp_dict["reference"] = chunk_list
                    message = Message(self.rag, temp_dict)
                    yield message

    def update(self,update_message):
        """Apply *update_message* to this session on the server."""
        res = self.put(f"/chats/{self.chat_id}/sessions/{self.id}",
                       update_message)
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))
|
||||
|
||||
class Message(Base):
    """One message inside a session's conversation history."""

    def __init__(self, rag, res_dict):
        # Defaults match the opener message used by a fresh session.
        self.role = "assistant"
        self.content = "Hi! I am your assistant,can I help you?"
        self.id = None
        self.prompt = None
        self.reference = None
        super().__init__(rag, res_dict)
|
||||
|
||||
|
||||
class Chunk(Base):
    """A retrieved reference chunk attached to a streamed answer."""

    def __init__(self, rag, res_dict):
        # Identity and content of the chunk.
        self.id = None
        self.content = None
        # Provenance of the chunk.
        self.document_id = ""
        self.document_name = ""
        self.dataset_id = ""
        self.image_id = ""
        # Retrieval scores and layout positions.
        self.similarity = None
        self.vector_similarity = None
        self.term_similarity = None
        self.positions = None
        super().__init__(rag, res_dict)
|
||||
|
||||
Reference in New Issue
Block a user