Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
use onnx models, new deepdoc (#68)
@@ -33,38 +33,17 @@ def set_dialog():
     name = req.get("name", "New Dialog")
     description = req.get("description", "A helpful Dialog")
     language = req.get("language", "Chinese")
-    llm_setting_type = req.get("llm_setting_type", "Precise")
+    top_n = req.get("top_n", 6)
+    similarity_threshold = req.get("similarity_threshold", 0.1)
+    vector_similarity_weight = req.get("vector_similarity_weight", 0.3)
     llm_setting = req.get("llm_setting", {
-        "Creative": {
-            "temperature": 0.9,
-            "top_p": 0.9,
-            "frequency_penalty": 0.2,
-            "presence_penalty": 0.4,
-            "max_tokens": 512
-        },
-        "Precise": {
-            "temperature": 0.1,
-            "top_p": 0.3,
-            "frequency_penalty": 0.7,
-            "presence_penalty": 0.4,
-            "max_tokens": 215
-        },
-        "Evenly": {
-            "temperature": 0.5,
-            "top_p": 0.5,
-            "frequency_penalty": 0.7,
-            "presence_penalty": 0.4,
-            "max_tokens": 215
-        },
-        "Custom": {
-            "temperature": 0.2,
-            "top_p": 0.3,
-            "frequency_penalty": 0.6,
-            "presence_penalty": 0.3,
-            "max_tokens": 215
-        },
+        "temperature": 0.1,
+        "top_p": 0.3,
+        "frequency_penalty": 0.7,
+        "presence_penalty": 0.4,
+        "max_tokens": 215
     })
-    prompt_config = req.get("prompt_config", {
+    default_prompt = {
         "system": """你是一个智能助手,请总结知识库的内容来回答问题,请列举知识库中的数据详细回答。当所有知识库内容都与问题无关时,你的回答必须包括“知识库中未找到您要的答案!”这句话。回答需要考虑聊天历史。
       以下是知识库:
       {knowledge}
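This hunk drops the old llm_setting_type presets ("Creative", "Precise", "Evenly", "Custom") in favour of a single flat llm_setting default and adds three retrieval knobs: top_n, similarity_threshold and vector_similarity_weight. A minimal sketch of a request body a client could now send to this handler follows; only the field names come from the diff, everything else (placeholder ids, the endpoint path in the comment) is an assumption for illustration.

    # Sketch of a create-dialog request body under the new defaults.
    payload = {
        "name": "My Dialog",
        "kb_ids": ["<knowledgebase-id>"],   # required when creating a new dialog
        "top_n": 6,                         # number of retrieved chunks passed to the LLM
        "similarity_threshold": 0.1,        # minimum retrieval score for a chunk to be kept
        "vector_similarity_weight": 0.3,    # presumably the weight of vector vs. term similarity
        "llm_setting": {                    # flat settings, replacing the llm_setting_type presets
            "temperature": 0.1,
            "top_p": 0.3,
            "frequency_penalty": 0.7,
            "presence_penalty": 0.4,
            "max_tokens": 215
        }
    }
    # e.g. requests.post("http://<host>/v1/dialog/set", json=payload, cookies=session_cookies)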
@@ -74,30 +53,40 @@ def set_dialog():
             {"key": "knowledge", "optional": False}
         ],
         "empty_response": "Sorry! 知识库中未找到相关内容!"
-    })
+    }
+    prompt_config = req.get("prompt_config", default_prompt)
+
-    if len(prompt_config["parameters"]) < 1:
-        return get_data_error_result(retmsg="'knowledge' should be in parameters")
+    if not prompt_config["system"]: prompt_config["system"] = default_prompt["system"]
+    # if len(prompt_config["parameters"]) < 1:
+    #     prompt_config["parameters"] = default_prompt["parameters"]
+    # for p in prompt_config["parameters"]:
+    #     if p["key"] == "knowledge":break
+    # else: prompt_config["parameters"].append(default_prompt["parameters"][0])
+
     for p in prompt_config["parameters"]:
-        if prompt_config["system"].find("{%s}"%p["key"]) < 0:
+        if p["optional"]: continue
+        if prompt_config["system"].find("{%s}" % p["key"]) < 0:
             return get_data_error_result(retmsg="Parameter '{}' is not used".format(p["key"]))

     try:
         e, tenant = TenantService.get_by_id(current_user.id)
-        if not e:return get_data_error_result(retmsg="Tenant not found!")
+        if not e: return get_data_error_result(retmsg="Tenant not found!")
         llm_id = req.get("llm_id", tenant.llm_id)
         if not dialog_id:
             if not req.get("kb_ids"):return get_data_error_result(retmsg="Fail! Please select knowledgebase!")
             dia = {
                 "id": get_uuid(),
                 "tenant_id": current_user.id,
                 "name": name,
                 "kb_ids": req["kb_ids"],
                 "description": description,
                 "language": language,
                 "llm_id": llm_id,
-                "llm_setting_type": llm_setting_type,
                 "llm_setting": llm_setting,
-                "prompt_config": prompt_config
+                "prompt_config": prompt_config,
+                "top_n": top_n,
+                "similarity_threshold": similarity_threshold,
+                "vector_similarity_weight": vector_similarity_weight
             }
             if not DialogService.save(**dia): return get_data_error_result(retmsg="Fail to new a dialog!")
             e, dia = DialogService.get_by_id(dia["id"])
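The prompt_config handling also changes shape: the default becomes a named default_prompt dict, a missing system prompt falls back to default_prompt["system"], and the placeholder check now skips parameters marked optional, rejecting the config only when a required key never appears as {key} in the system prompt. A standalone sketch of that rule; the helper name check_prompt_config is invented for illustration, the logic mirrors the loop above.

    # Standalone illustration of the placeholder check performed in set_dialog().
    def check_prompt_config(prompt_config):
        """Return an error message, or None when every required key is referenced."""
        for p in prompt_config["parameters"]:
            if p.get("optional"):
                continue  # optional parameters need not appear in the system prompt
            if prompt_config["system"].find("{%s}" % p["key"]) < 0:
                return "Parameter '{}' is not used".format(p["key"])
        return None

    ok = {"system": "Answer using the knowledge base:\n{knowledge}",
          "parameters": [{"key": "knowledge", "optional": False}]}
    bad = {"system": "Answer freely, ignore retrieval.",
           "parameters": [{"key": "knowledge", "optional": False}]}
    assert check_prompt_config(ok) is None
    assert check_prompt_config(bad) == "Parameter 'knowledge' is not used"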
@@ -122,7 +111,7 @@ def set_dialog():
 def get():
     dialog_id = request.args["dialog_id"]
     try:
-        e,dia = DialogService.get_by_id(dialog_id)
+        e, dia = DialogService.get_by_id(dialog_id)
         if not e: return get_data_error_result(retmsg="Dialog not found!")
         dia = dia.to_dict()
         dia["kb_ids"], dia["kb_names"] = get_kb_names(dia["kb_ids"])
@@ -130,20 +119,22 @@ def get():
     except Exception as e:
         return server_error_response(e)


 def get_kb_names(kb_ids):
     ids, nms = [], []
     for kid in kb_ids:
         e, kb = KnowledgebaseService.get_by_id(kid)
-        if not e or kb.status != StatusEnum.VALID.value:continue
+        if not e or kb.status != StatusEnum.VALID.value: continue
         ids.append(kid)
         nms.append(kb.name)
     return ids, nms


 @manager.route('/list', methods=['GET'])
 @login_required
 def list():
     try:
-        diags = DialogService.query(tenant_id=current_user.id, status=StatusEnum.VALID.value)
+        diags = DialogService.query(tenant_id=current_user.id, status=StatusEnum.VALID.value, reverse=True, order_by=DialogService.model.create_time)
         diags = [d.to_dict() for d in diags]
         for d in diags:
             d["kb_ids"], d["kb_names"] = get_kb_names(d["kb_ids"])
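The /list handler now asks DialogService.query for valid dialogs sorted newest-first (reverse=True, ordered on the model's create_time). For reference, the same ordering applied client-side would look roughly like the sketch below, assuming each serialized dialog dict exposes a create_time value, which is an assumption about the model rather than something shown in this diff.

    # Hypothetical client-side equivalent of the new server-side ordering:
    # newest dialogs first, keyed on create_time.
    def newest_first(dialogs):
        return sorted(dialogs, key=lambda d: d["create_time"], reverse=True)

    dialogs = [{"id": "a", "create_time": 1}, {"id": "b", "create_time": 3}]
    assert [d["id"] for d in newest_first(dialogs)] == ["b", "a"]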
@@ -154,12 +145,11 @@ def list():

 @manager.route('/rm', methods=['POST'])
 @login_required
-@validate_request("dialog_id")
+@validate_request("dialog_ids")
 def rm():
     req = request.json
     try:
-        if not DialogService.update_by_id(req["dialog_id"], {"status": StatusEnum.INVALID.value}):
-            return get_data_error_result(retmsg="Dialog not found!")
+        DialogService.update_many_by_id([{"id": id, "status": StatusEnum.INVALID.value} for id in req["dialog_ids"]])
         return get_json_result(data=True)
     except Exception as e:
         return server_error_response(e)
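/rm now validates dialog_ids instead of dialog_id and soft-deletes every listed dialog in a single DialogService.update_many_by_id call, rather than flipping one record and erroring when it is missing. The request body therefore changes from a single id to a batch, roughly as sketched below; the field names follow the diff, while the endpoint prefix in the comment is an assumption.

    # The /rm request body moves from a single id to a batch of ids.
    old_body = {"dialog_id": "<dialog-id>"}                        # accepted before this commit
    new_body = {"dialog_ids": ["<dialog-id-1>", "<dialog-id-2>"]}  # accepted after this commit
    # e.g. requests.post("http://<host>/v1/dialog/rm", json=new_body, cookies=session_cookies)
    # On success the handler returns get_json_result(data=True).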