mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
Manage ragflow-sdk with poetry (#3115)
### What problem does this PR solve?

Manage ragflow-sdk with poetry.

### Type of change

- [x] Refactoring
This commit is contained in:
10
sdk/python/ragflow_sdk/__init__.py
Normal file
10
sdk/python/ragflow_sdk/__init__.py
Normal file
@ -0,0 +1,10 @@
|
||||
# Package entry point for the ragflow_sdk client library.
import importlib.metadata

# Resolve the version from the installed distribution metadata (managed by
# poetry), so it never drifts from pyproject.toml.
__version__ = importlib.metadata.version("ragflow_sdk")

# Re-export the public API at package level.
from .ragflow import RAGFlow
from .modules.dataset import DataSet
from .modules.chat import Chat
from .modules.session import Session
from .modules.document import Document
from .modules.chunk import Chunk
|
||||
21
sdk/python/ragflow_sdk/dataset.py
Normal file
21
sdk/python/ragflow_sdk/dataset.py
Normal file
@ -0,0 +1,21 @@
|
||||
#
|
||||
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
class DataSet:
    """Lightweight record describing a remote dataset."""

    def __init__(self, user_key, dataset_url, uuid, name):
        """Store the credentials and identity of one dataset.

        :param user_key: API key used to authorize requests.
        :param dataset_url: Base URL of the dataset endpoint.
        :param uuid: Server-side unique identifier of the dataset.
        :param name: Human-readable dataset name.
        """
        # Assign the fields pairwise instead of one statement per field.
        self.user_key, self.dataset_url = user_key, dataset_url
        self.uuid, self.name = uuid, name
|
||||
0
sdk/python/ragflow_sdk/modules/__init__.py
Normal file
0
sdk/python/ragflow_sdk/modules/__init__.py
Normal file
38
sdk/python/ragflow_sdk/modules/base.py
Normal file
38
sdk/python/ragflow_sdk/modules/base.py
Normal file
@ -0,0 +1,38 @@
|
||||
class Base(object):
    """Common plumbing for SDK resource objects.

    Copies every key of ``res_dict`` onto the instance (nested dicts become
    nested ``Base`` objects) and proxies the HTTP verbs to the owning client.
    """

    def __init__(self, rag, res_dict):
        self.rag = rag
        for key, value in res_dict.items():
            # Wrap nested dicts so attribute access works recursively.
            self.__dict__[key] = Base(rag, value) if isinstance(value, dict) else value

    def to_json(self):
        """Return a plain-dict snapshot of the public, non-callable attributes."""
        snapshot = {}
        for attr in dir(self):
            if attr.startswith('__') or attr == "rag":
                continue
            value = getattr(self, attr)
            if callable(value):
                continue
            snapshot[attr] = value.to_json() if isinstance(value, Base) else value
        return snapshot

    def post(self, path, json=None, stream=False, files=None):
        """POST through the owning client; returns the raw response."""
        return self.rag.post(path, json, stream=stream, files=files)

    def get(self, path, params=None):
        """GET through the owning client; returns the raw response."""
        return self.rag.get(path, params)

    def rm(self, path, json):
        """DELETE through the owning client; returns the raw response."""
        return self.rag.delete(path, json)

    def put(self, path, json):
        """PUT through the owning client; returns the raw response."""
        return self.rag.put(path, json)

    def __str__(self):
        return str(self.to_json())
|
||||
75
sdk/python/ragflow_sdk/modules/chat.py
Normal file
75
sdk/python/ragflow_sdk/modules/chat.py
Normal file
@ -0,0 +1,75 @@
|
||||
from typing import List
|
||||
|
||||
from .base import Base
|
||||
from .session import Session
|
||||
|
||||
|
||||
class Chat(Base):
    """Client-side handle for a chat assistant and its sessions."""

    class LLM(Base):
        """Model choice and sampling parameters of a chat assistant."""

        def __init__(self, rag, res_dict):
            # Defaults; any key present in res_dict overrides them.
            self.model_name = "deepseek-chat"
            self.temperature = 0.1
            self.top_p = 0.3
            self.presence_penalty = 0.4
            self.frequency_penalty = 0.7
            self.max_tokens = 512
            super().__init__(rag, res_dict)

    class Prompt(Base):
        """Retrieval and prompting configuration of a chat assistant."""

        def __init__(self, rag, res_dict):
            # Defaults; any key present in res_dict overrides them.
            self.similarity_threshold = 0.2
            self.keywords_similarity_weight = 0.7
            self.top_n = 8
            self.variables = [{"key": "knowledge", "optional": True}]
            self.rerank_model = None
            self.empty_response = None
            self.opener = "Hi! I'm your assistant, what can I do for you?"
            self.show_quote = True
            self.prompt = (
                "You are an intelligent assistant. Please summarize the content of the knowledge base to answer the question. "
                "Please list the data in the knowledge base and answer in detail. When all knowledge base content is irrelevant to the question, "
                "your answer must include the sentence 'The answer you are looking for is not found in the knowledge base!' "
                "Answers need to consider chat history.\nHere is the knowledge base:\n{knowledge}\nThe above is the knowledge base."
            )
            super().__init__(rag, res_dict)

    def __init__(self, rag, res_dict):
        # Defaults; any key present in res_dict overrides them.
        self.id = ""
        self.name = "assistant"
        self.avatar = "path/to/avatar"
        self.dataset_ids = ["kb1"]
        self.llm = Chat.LLM(rag, {})
        self.prompt = Chat.Prompt(rag, {})
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this chat via PUT; raise on API error."""
        payload = self.put(f'/chats/{self.id}', update_message).json()
        if payload.get("code") != 0:
            raise Exception(payload["message"])

    def create_session(self, name: str = "New session") -> Session:
        """Open a new session under this chat and return it."""
        payload = self.post(f"/chats/{self.id}/sessions", {"name": name}).json()
        if payload.get("code") != 0:
            raise Exception(payload["message"])
        return Session(self.rag, payload['data'])

    def list_sessions(self, page: int = 1, page_size: int = 1024, orderby: str = "create_time", desc: bool = True,
                      id: str = None, name: str = None) -> List[Session]:
        """List this chat's sessions with paging/sorting/filter parameters."""
        query = {"page": page, "page_size": page_size, "orderby": orderby,
                 "desc": desc, "id": id, "name": name}
        payload = self.get(f'/chats/{self.id}/sessions', query).json()
        if payload.get("code") != 0:
            raise Exception(payload["message"])
        return [Session(self.rag, item) for item in payload["data"]]

    def delete_sessions(self, ids: List[str] = None):
        """Delete sessions by id via DELETE; raise on API error."""
        payload = self.rm(f"/chats/{self.id}/sessions", {"ids": ids}).json()
        if payload.get("code") != 0:
            raise Exception(payload.get("message"))
|
||||
27
sdk/python/ragflow_sdk/modules/chunk.py
Normal file
27
sdk/python/ragflow_sdk/modules/chunk.py
Normal file
@ -0,0 +1,27 @@
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Chunk(Base):
    """One indexed chunk of a document."""

    def __init__(self, rag, res_dict):
        # Defaults in one pass; any matching key in res_dict overrides them.
        self.__dict__.update({
            "id": "",
            "content": "",
            "important_keywords": [],
            "create_time": "",
            "create_timestamp": 0.0,
            "dataset_id": None,
            "document_name": "",
            "document_id": "",
            "available": True,
        })
        # Strip server fields this model does not declare (mutates res_dict,
        # matching the original behavior).
        for key in list(res_dict.keys()):
            if key not in self.__dict__:
                res_dict.pop(key)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this chunk via PUT; raise on API error."""
        path = f"/datasets/{self.dataset_id}/documents/{self.document_id}/chunks/{self.id}"
        payload = self.put(path, update_message).json()
        if payload.get("code") != 0:
            raise Exception(payload["message"])
|
||||
|
||||
|
||||
77
sdk/python/ragflow_sdk/modules/dataset.py
Normal file
77
sdk/python/ragflow_sdk/modules/dataset.py
Normal file
@ -0,0 +1,77 @@
|
||||
from typing import List
|
||||
|
||||
from .document import Document
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class DataSet(Base):
    """Client-side view of a dataset (knowledge base) and its documents."""

    class ParserConfig(Base):
        # Free-form wrapper: the server's parser settings are copied verbatim.
        def __init__(self, rag, res_dict):
            super().__init__(rag, res_dict)

    def __init__(self, rag, res_dict):
        # Defaults; overwritten below by whatever the server returned.
        self.id = ""
        self.name = ""
        self.avatar = ""
        self.tenant_id = None
        self.description = ""
        self.language = "English"
        self.embedding_model = ""
        self.permission = "me"
        self.document_count = 0
        self.chunk_count = 0
        self.chunk_method = "naive"
        self.parser_config = None
        # Discard server fields this model does not declare (mutates res_dict).
        for k in list(res_dict.keys()):
            if k not in self.__dict__:
                res_dict.pop(k)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this dataset via PUT; raises Exception on API error."""
        res = self.put(f'/datasets/{self.id}',
                       update_message)
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def upload_documents(self,document_list: List[dict]):
        """Upload files into this dataset.

        Each item must carry "displayed_name" and "blob" (raw file bytes).
        Returns the created ``Document`` objects; raises Exception on failure.
        """
        url = f"/datasets/{self.id}/documents"
        files = [("file",(ele["displayed_name"],ele["blob"])) for ele in document_list]
        res = self.post(path=url,json=None,files=files)
        res = res.json()
        if res.get("code") == 0:
            doc_list=[]
            for doc in res["data"]:
                document = Document(self.rag,doc)
                doc_list.append(document)
            return doc_list
        raise Exception(res.get("message"))

    def list_documents(self, id: str = None, keywords: str = None, offset: int =1, limit: int = 1024, orderby: str = "create_time", desc: bool = True):
        """List documents of this dataset, optionally filtered by id or keywords."""
        res = self.get(f"/datasets/{self.id}/documents",params={"id": id,"keywords": keywords,"offset": offset,"limit": limit,"orderby": orderby,"desc": desc})
        res = res.json()
        documents = []
        if res.get("code") == 0:
            for document in res["data"].get("docs"):
                documents.append(Document(self.rag,document))
            return documents
        raise Exception(res["message"])

    def delete_documents(self,ids: List[str] = None):
        """Delete documents by id; raises Exception on a non-zero API code."""
        res = self.rm(f"/datasets/{self.id}/documents",{"ids":ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def async_parse_documents(self,document_ids):
        """Start asynchronous chunk parsing for the given documents."""
        res = self.post(f"/datasets/{self.id}/chunks",{"document_ids":document_ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))

    def async_cancel_parse_documents(self,document_ids):
        """Cancel in-flight chunk parsing for the given documents."""
        res = self.rm(f"/datasets/{self.id}/chunks",{"document_ids":document_ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))
|
||||
77
sdk/python/ragflow_sdk/modules/document.py
Normal file
77
sdk/python/ragflow_sdk/modules/document.py
Normal file
@ -0,0 +1,77 @@
|
||||
import json
|
||||
from .base import Base
|
||||
from .chunk import Chunk
|
||||
from typing import List
|
||||
|
||||
|
||||
class Document(Base):
    """Client-side view of one document inside a dataset."""

    class ParserConfig(Base):
        # Free-form wrapper: the server's parser settings are copied verbatim.
        def __init__(self, rag, res_dict):
            super().__init__(rag, res_dict)

    def __init__(self, rag, res_dict):
        # Defaults; overwritten below by whatever the server returned.
        self.id = ""
        self.name = ""
        self.thumbnail = None
        self.dataset_id = None
        self.chunk_method = "naive"
        self.parser_config = {"pages": [[1, 1000000]]}
        self.source_type = "local"
        self.type = ""
        self.created_by = ""
        self.size = 0
        self.token_count = 0
        self.chunk_count = 0
        self.progress = 0.0
        self.progress_msg = ""
        self.process_begin_at = None
        self.process_duration = 0.0
        self.run = "0"
        self.status = "1"
        # Discard server fields this model does not declare (mutates res_dict).
        for k in list(res_dict.keys()):
            if k not in self.__dict__:
                res_dict.pop(k)
        super().__init__(rag, res_dict)

    def update(self, update_message: dict):
        """Apply ``update_message`` to this document via PUT; raises Exception on API error."""
        res = self.put(f'/datasets/{self.dataset_id}/documents/{self.id}',
                       update_message)
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def download(self):
        """Download the raw document content as bytes.

        A successful download is a binary payload; a body that parses as JSON
        is therefore an error envelope and raises Exception.
        """
        res = self.get(f"/datasets/{self.dataset_id}/documents/{self.id}")
        try:
            res = res.json()
            raise Exception(res.get("message"))
        except json.JSONDecodeError:
            return res.content

    def list_chunks(self, offset=0, limit=30, keywords="", id: str = None):
        """List chunks of this document, optionally filtered by keywords or id."""
        data = {"document_id": self.id, "keywords": keywords, "offset": offset, "limit": limit, "id": id}
        res = self.get(f'/datasets/{self.dataset_id}/documents/{self.id}/chunks', data)
        res = res.json()
        if res.get("code") == 0:
            chunks = []
            for chunk_data in res["data"].get("chunks"):
                chunks.append(Chunk(self.rag, chunk_data))
            return chunks
        raise Exception(res.get("message"))

    def add_chunk(self, content: str, important_keywords: List[str] = None):
        """Create a chunk with the given content; returns the new ``Chunk``.

        FIX: ``important_keywords`` previously used a mutable default ``[]``
        (shared across calls); it now defaults to None and is normalized per
        call, which is behaviorally identical for every caller.
        """
        if important_keywords is None:
            important_keywords = []
        res = self.post(f'/datasets/{self.dataset_id}/documents/{self.id}/chunks',
                        {"content": content, "important_keywords": important_keywords})
        res = res.json()
        if res.get("code") == 0:
            return Chunk(self.rag, res["data"].get("chunk"))
        raise Exception(res.get("message"))

    def delete_chunks(self, ids: List[str] = None):
        """Delete chunks by id; raises Exception on a non-zero API code.

        FIX: the path previously lacked its leading slash ("datasets/..."),
        which after client-side concatenation produced the broken URL
        ".../api/v1datasets/..." — every other endpoint starts with "/".
        """
        res = self.rm(f"/datasets/{self.dataset_id}/documents/{self.id}/chunks", {"ids": ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))
|
||||
85
sdk/python/ragflow_sdk/modules/session.py
Normal file
85
sdk/python/ragflow_sdk/modules/session.py
Normal file
@ -0,0 +1,85 @@
|
||||
import json
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Session(Base):
    """One conversation thread inside a chat assistant."""

    def __init__(self, rag, res_dict):
        # Defaults; overwritten by whatever the server returned in res_dict.
        self.id = None
        self.name = "New session"
        self.messages = [{"role": "assistant", "content": "Hi! I am your assistant,can I help you?"}]
        self.chat_id = None
        super().__init__(rag, res_dict)

    def ask(self, question: str, stream: bool = False):
        """Send ``question`` to the chat and yield assistant ``Message`` objects.

        Generator: each server-sent "data:" frame yields one Message whose
        "reference" (when chunks are present) is a list of normalized dicts.
        """
        # Drop stale references so they are not resent with the history.
        for message in self.messages:
            if "reference" in message:
                message.pop("reference")
        # NOTE(review): the request body hardcodes "stream": True while the
        # ``stream`` argument only controls HTTP-level streaming — confirm
        # whether the body should use the argument instead.
        res = self.post(f"/chats/{self.chat_id}/completions",
                        {"question": question, "stream": True,"session_id":self.id}, stream=stream)
        for line in res.iter_lines():
            line = line.decode("utf-8")
            # A bare JSON object (not an SSE "data:" frame) is an error envelope.
            if line.startswith("{"):
                json_data = json.loads(line)
                raise Exception(json_data["message"])
            if line.startswith("data:"):
                json_data = json.loads(line[5:])
                # The terminal frame carries data == True; skip it.
                if json_data["data"] != True:
                    answer = json_data["data"]["answer"]
                    reference = json_data["data"]["reference"]
                    temp_dict = {
                        "content": answer,
                        "role": "assistant"
                    }
                    if "chunks" in reference:
                        chunks = reference["chunks"]
                        chunk_list = []
                        for chunk in chunks:
                            # Map the server's field names onto the SDK's.
                            new_chunk = {
                                "id": chunk["chunk_id"],
                                "content": chunk["content_with_weight"],
                                "document_id": chunk["doc_id"],
                                "document_name": chunk["docnm_kwd"],
                                "dataset_id": chunk["kb_id"],
                                "image_id": chunk["img_id"],
                                "similarity": chunk["similarity"],
                                "vector_similarity": chunk["vector_similarity"],
                                "term_similarity": chunk["term_similarity"],
                                "positions": chunk["positions"],
                            }
                            chunk_list.append(new_chunk)
                        temp_dict["reference"] = chunk_list
                    message = Message(self.rag, temp_dict)
                    yield message

    def update(self,update_message):
        """Apply ``update_message`` to this session via PUT; raises Exception on API error."""
        res = self.put(f"/chats/{self.chat_id}/sessions/{self.id}",
                       update_message)
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res.get("message"))
|
||||
|
||||
class Message(Base):
    """A single assistant message, optionally carrying retrieval references."""

    def __init__(self, rag, res_dict):
        # Defaults in one pass; any matching key in res_dict overrides them.
        self.__dict__.update({
            "content": "Hi! I am your assistant,can I help you?",
            "reference": None,
            "role": "assistant",
            "prompt": None,
            "id": None,
        })
        super().__init__(rag, res_dict)
|
||||
|
||||
|
||||
class Chunk(Base):
    """A retrieval result chunk returned by the /retrieval endpoint."""

    def __init__(self, rag, res_dict):
        # Defaults in one pass; any matching key in res_dict overrides them.
        self.__dict__.update({
            "id": None,
            "content": None,
            "document_id": "",
            "document_name": "",
            "dataset_id": "",
            "image_id": "",
            "similarity": None,
            "vector_similarity": None,
            "term_similarity": None,
            "positions": None,
        })
        super().__init__(rag, res_dict)
|
||||
|
||||
181
sdk/python/ragflow_sdk/ragflow.py
Normal file
181
sdk/python/ragflow_sdk/ragflow.py
Normal file
@ -0,0 +1,181 @@
|
||||
#
|
||||
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import List
|
||||
|
||||
import requests
|
||||
|
||||
from .modules.chat import Chat
|
||||
from .modules.chunk import Chunk
|
||||
from .modules.dataset import DataSet
|
||||
from .modules.document import Document
|
||||
|
||||
|
||||
class RAGFlow:
    """HTTP client for the RAGFlow REST API (datasets, chats, retrieval)."""

    def __init__(self, api_key, base_url, version='v1'):
        """
        api_url: http://<host_address>/api/v1
        """
        self.user_key = api_key
        self.api_url = f"{base_url}/api/{version}"
        self.authorization_header = {"Authorization": "{} {}".format("Bearer", self.user_key)}

    def post(self, path, json=None, stream=False, files=None):
        """POST to ``api_url + path``; returns the raw requests.Response."""
        return requests.post(url=self.api_url + path, json=json,
                             headers=self.authorization_header, stream=stream, files=files)

    def get(self, path, params=None, json=None):
        """GET from ``api_url + path``; returns the raw requests.Response."""
        return requests.get(url=self.api_url + path, params=params,
                            headers=self.authorization_header, json=json)

    def delete(self, path, json):
        """DELETE on ``api_url + path``; returns the raw requests.Response."""
        return requests.delete(url=self.api_url + path, json=json,
                               headers=self.authorization_header)

    def put(self, path, json):
        """PUT to ``api_url + path``; returns the raw requests.Response."""
        return requests.put(url=self.api_url + path, json=json,
                            headers=self.authorization_header)

    def create_dataset(self, name: str, avatar: str = "", description: str = "", language: str = "English",
                       permission: str = "me", chunk_method: str = "naive",
                       parser_config: DataSet.ParserConfig = None) -> DataSet:
        """Create a dataset and return its ``DataSet`` wrapper; raises on API error."""
        if parser_config:
            parser_config = parser_config.to_json()
        res = self.post("/datasets",
                        {"name": name, "avatar": avatar, "description": description, "language": language,
                         "permission": permission, "chunk_method": chunk_method,
                         "parser_config": parser_config
                         }
                        )
        res = res.json()
        if res.get("code") == 0:
            return DataSet(self, res["data"])
        raise Exception(res["message"])

    def delete_datasets(self, ids: List[str] = None):
        """Delete datasets by id; raises Exception on a non-zero API code."""
        res = self.delete("/datasets", {"ids": ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def get_dataset(self, name: str):
        """Return the first dataset matching ``name``; raises if none is found."""
        _list = self.list_datasets(name=name)
        if len(_list) > 0:
            return _list[0]
        raise Exception("Dataset %s not found" % name)

    def list_datasets(self, page: int = 1, page_size: int = 1024, orderby: str = "create_time", desc: bool = True,
                      id: str = None, name: str = None) -> List[DataSet]:
        """List datasets visible to this API key, with paging/sorting/filtering."""
        res = self.get("/datasets",
                       {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
        res = res.json()
        result_list = []
        if res.get("code") == 0:
            for data in res['data']:
                result_list.append(DataSet(self, data))
            return result_list
        raise Exception(res["message"])

    def create_chat(self, name: str, avatar: str = "", dataset_ids: List[str] = None,
                    llm: Chat.LLM = None, prompt: Chat.Prompt = None) -> Chat:
        """Create a chat assistant over the given datasets; raises on API error.

        FIX: ``dataset_ids`` previously used a mutable default ``[]`` (shared
        across calls); it now defaults to None and is normalized per call.
        The element-by-element copy loop was replaced by ``list(...)``.
        """
        dataset_list = list(dataset_ids) if dataset_ids else []

        if llm is None:
            llm = Chat.LLM(self, {"model_name": None,
                                  "temperature": 0.1,
                                  "top_p": 0.3,
                                  "presence_penalty": 0.4,
                                  "frequency_penalty": 0.7,
                                  "max_tokens": 512, })
        if prompt is None:
            prompt = Chat.Prompt(self, {"similarity_threshold": 0.2,
                                        "keywords_similarity_weight": 0.7,
                                        "top_n": 8,
                                        "variables": [{
                                            "key": "knowledge",
                                            "optional": True
                                        }], "rerank_model": "",
                                        "empty_response": None,
                                        "opener": None,
                                        "show_quote": True,
                                        "prompt": None})
        # Fill in the two prompt fields the server requires but the caller may omit.
        if prompt.opener is None:
            prompt.opener = "Hi! I'm your assistant, what can I do for you?"
        if prompt.prompt is None:
            prompt.prompt = (
                "You are an intelligent assistant. Please summarize the content of the knowledge base to answer the question. "
                "Please list the data in the knowledge base and answer in detail. When all knowledge base content is irrelevant to the question, "
                "your answer must include the sentence 'The answer you are looking for is not found in the knowledge base!' "
                "Answers need to consider chat history.\nHere is the knowledge base:\n{knowledge}\nThe above is the knowledge base."
            )

        temp_dict = {"name": name,
                     "avatar": avatar,
                     "dataset_ids": dataset_list,
                     "llm": llm.to_json(),
                     "prompt": prompt.to_json()}
        res = self.post("/chats", temp_dict)
        res = res.json()
        if res.get("code") == 0:
            return Chat(self, res["data"])
        raise Exception(res["message"])

    def delete_chats(self, ids: List[str] = None):
        """Delete chats by id; raises Exception on a non-zero API code.

        FIX: dropped the misleading ``-> bool`` annotation — the method never
        returned a value (implicitly None), and callers cannot have relied on
        a bool.
        """
        res = self.delete('/chats', {"ids": ids})
        res = res.json()
        if res.get("code") != 0:
            raise Exception(res["message"])

    def list_chats(self, page: int = 1, page_size: int = 1024, orderby: str = "create_time", desc: bool = True,
                   id: str = None, name: str = None) -> List[Chat]:
        """List chat assistants, with paging/sorting/filtering."""
        res = self.get("/chats", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
        res = res.json()
        result_list = []
        if res.get("code") == 0:
            for data in res['data']:
                result_list.append(Chat(self, data))
            return result_list
        raise Exception(res["message"])

    def retrieve(self, dataset_ids, document_ids=None, question="", offset=1, limit=1024, similarity_threshold=0.2,
                 vector_similarity_weight=0.3, top_k=1024, rerank_id: str = None, keyword: bool = False, ):
        """Run retrieval over the given datasets/documents and return Chunk objects."""
        if document_ids is None:
            document_ids = []
        data_json = {
            "offset": offset,
            "limit": limit,
            "similarity_threshold": similarity_threshold,
            "vector_similarity_weight": vector_similarity_weight,
            "top_k": top_k,
            "rerank_id": rerank_id,
            "keyword": keyword,
            "question": question,
            "datasets": dataset_ids,
            "documents": document_ids
        }
        # POST the query to the retrieval endpoint (plain string path — the
        # original used an f-string with no placeholders).
        res = self.post("/retrieval", json=data_json)
        res = res.json()
        if res.get("code") == 0:
            chunks = []
            for chunk_data in res["data"].get("chunks"):
                chunks.append(Chunk(self, chunk_data))
            return chunks
        raise Exception(res.get("message"))
|
||||
Reference in New Issue
Block a user