Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-02-02 08:35:08 +08:00)

Compare commits: v0.20.3 ... 8d8a5f73b6 (45 commits)
Commits (SHA1):

- 8d8a5f73b6
- d0fa66f4d5
- 9dd22e141b
- b6c1ca828e
- d367c7e226
- a3aa3f0d36
- 7b8752fe24
- 5e2c33e5b0
- e40be8e541
- 23d0b564d3
- ecaa9de843
- 2f74727bb9
- adbb038a87
- 3947da10ae
- 4862be28ad
- 035e8ed0f7
- cc167ae619
- f8847e7bcd
- 3baebd709b
- 3e6a4b2628
- 312635cb13
- 756d454122
- a4cab371fa
- 0d7e52338e
- 4110f7f5ce
- 0af57ff772
- 0bd58038a8
- 0cbcfcfedf
- fbdde0259a
- d482173c9b
- 929dc97509
- 30005c0203
- 382458ace7
- 4080f6a54a
- 09570c7eef
- 312f1a0477
- 1ca226e43b
- 830cda6a3a
- c66dbbe433
- 3b218b2dc0
- d58ef6127f
- 55173c7201
- f860bdf0ad
- 997627861a
- 9f9d32d2cd
@@ -131,7 +131,16 @@ class Canvas:
         self.path = self.dsl["path"]
         self.history = self.dsl["history"]
-        self.globals = self.dsl["globals"]
+        if "globals" in self.dsl:
+            self.globals = self.dsl["globals"]
+        else:
+            self.globals = {
+                "sys.query": "",
+                "sys.user_id": "",
+                "sys.conversation_turns": 0,
+                "sys.files": []
+            }
         self.retrieval = self.dsl["retrieval"]
         self.memory = self.dsl.get("memory", [])
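For context, the hunk above lets older DSL payloads that lack a `globals` section load cleanly. A minimal standalone sketch of the same fallback, using a plain dict rather than the project's Canvas API (names here are illustrative):

```python
# Hypothetical illustration of the fallback introduced above.
DEFAULT_GLOBALS = {
    "sys.query": "",
    "sys.user_id": "",
    "sys.conversation_turns": 0,
    "sys.files": [],
}

def load_globals(dsl: dict) -> dict:
    # Use the DSL's own globals when present, otherwise seed the defaults.
    return dsl["globals"] if "globals" in dsl else dict(DEFAULT_GLOBALS)

print(load_globals({}))                      # seeded defaults
print(load_globals({"globals": {"x": 1}}))   # {'x': 1}
```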
@@ -417,7 +426,7 @@ class Canvas:
         convs = []
         if window_size <= 0:
             return convs
-        for role, obj in self.history[window_size * -1:]:
+        for role, obj in self.history[window_size * -2:]:
             if isinstance(obj, dict):
                 convs.append({"role": role, "content": obj.get("content", "")})
             else:
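The widened slice presumably reflects that `history` holds one entry per message, so a window of N conversation turns spans 2N entries (user plus assistant). A standalone sketch under that assumption:

```python
history = [
    ("user", {"content": "hi"}),
    ("assistant", {"content": "hello"}),
    ("user", {"content": "how are you?"}),
    ("assistant", {"content": "fine"}),
]

window_size = 1  # one conversation turn
# window_size * -1 returns only the assistant half of the last turn;
# window_size * -2 keeps the matching user message as well.
print(history[window_size * -1:])  # [('assistant', {...})]
print(history[window_size * -2:])  # [('user', {...}), ('assistant', {...})]
```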
@@ -36,7 +36,7 @@ _IS_RAW_CONF = "_is_raw_conf"

 class ComponentParamBase(ABC):
     def __init__(self):
-        self.message_history_window_size = 22
+        self.message_history_window_size = 13
         self.inputs = {}
         self.outputs = {}
         self.description = ""
@@ -18,11 +18,8 @@ import logging
 import os
 import re
 from typing import Any, Generator

 import json_repair
-from copy import deepcopy
 from functools import partial

 from api.db import LLMType
 from api.db.services.llm_service import LLMBundle
 from api.db.services.tenant_llm_service import TenantLLMService
@@ -130,7 +127,7 @@ class LLM(ComponentBase):

         args = {}
         vars = self.get_input_elements() if not self._param.debug_inputs else self._param.debug_inputs
-        prompt = self._param.sys_prompt
+        sys_prompt = self._param.sys_prompt
         for k, o in vars.items():
             args[k] = o["value"]
             if not isinstance(args[k], str):
@@ -141,14 +138,18 @@
             self.set_input_value(k, args[k])

         msg = self._canvas.get_history(self._param.message_history_window_size)[:-1]
-        msg.extend(deepcopy(self._param.prompts))
-        prompt = self.string_format(prompt, args)
+        for p in self._param.prompts:
+            if msg and msg[-1]["role"] == p["role"]:
+                continue
+            msg.append(p)
+
+        sys_prompt = self.string_format(sys_prompt, args)
         for m in msg:
             m["content"] = self.string_format(m["content"], args)
         if self._param.cite and self._canvas.get_reference()["chunks"]:
-            prompt += citation_prompt()
+            sys_prompt += citation_prompt()

-        return prompt, msg
+        return sys_prompt, msg

     def _generate(self, msg:list[dict], **kwargs) -> str:
         if not self.imgs:
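The refactor above stops a configured prompt from being appended when the last history message already carries the same role, which avoids back-to-back messages of one role in the LLM payload. A standalone sketch of that merge rule, using plain data rather than the component API:

```python
history = [
    {"role": "user", "content": "What is RAGFlow?"},
]
configured_prompts = [
    {"role": "user", "content": "{sys.query}"},       # same role as last entry: skipped
    {"role": "assistant", "content": "Understood."},  # different role: appended
]

msg = list(history)
for p in configured_prompts:
    # Skip a configured prompt if it would duplicate the role of the last message.
    if msg and msg[-1]["role"] == p["role"]:
        continue
    msg.append(p)

print([m["role"] for m in msg])  # ['user', 'assistant']
```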
@@ -156,7 +156,7 @@ class CodeExec(ToolBase, ABC):
             self.set_output("_ERROR", "construct code request error: " + str(e))

         try:
-            resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=10)
+            resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60))
             logging.info(f"http://{settings.SANDBOX_HOST}:9385/run", code_req, resp.status_code)
             if resp.status_code != 200:
                 resp.raise_for_status()
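For reference, `requests` expects a numeric timeout while `os.environ.get` returns a string when the variable is set. A minimal, hedged sketch of an env-driven timeout that normalizes the value (the variable name is reused from the hunk, everything else is illustrative):

```python
import os
import requests

def post_with_env_timeout(url: str, payload: dict) -> requests.Response:
    # Fall back to 10 minutes; cast because environment values arrive as strings.
    timeout_s = float(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10 * 60))
    return requests.post(url, json=payload, timeout=timeout_s)

# Example (assumes a reachable sandbox endpoint):
# resp = post_with_env_timeout("http://localhost:9385/run", {"code": "print('hi')"})
```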
@@ -150,10 +150,10 @@ def update(tenant_id, chat_id):
     if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
         return get_error_data_result(message="You do not own the chat")
     req = request.json
-    ids = req.get("dataset_ids")
+    ids = req.get("dataset_ids", [])
     if "show_quotation" in req:
         req["do_refer"] = req.pop("show_quotation")
-    if ids is not None:
+    if ids:
         for kb_id in ids:
             kbs = KnowledgebaseService.accessible(kb_id=kb_id, user_id=tenant_id)
             if not kbs:
@@ -24,6 +24,7 @@ from api.db.services.llm_service import LLMBundle
 from api import settings
 from api.utils.api_utils import validate_request, build_error_result, apikey_required
 from rag.app.tag import label_question
+from api.db.services.dialog_service import meta_filter


 @manager.route('/dify/retrieval', methods=['POST'])  # noqa: F821
@@ -37,18 +38,23 @@ def retrieval(tenant_id):
     retrieval_setting = req.get("retrieval_setting", {})
     similarity_threshold = float(retrieval_setting.get("score_threshold", 0.0))
     top = int(retrieval_setting.get("top_k", 1024))
+    metadata_condition = req.get("metadata_condition",{})
+    metas = DocumentService.get_meta_by_kbs([kb_id])
+
+    doc_ids = []
     try:

         e, kb = KnowledgebaseService.get_by_id(kb_id)
         if not e:
             return build_error_result(message="Knowledgebase not found!", code=settings.RetCode.NOT_FOUND)

-        if kb.tenant_id != tenant_id:
-            return build_error_result(message="Knowledgebase not found!", code=settings.RetCode.NOT_FOUND)
-
         embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
+        print(metadata_condition)
+        print("after",convert_conditions(metadata_condition))
+        doc_ids.extend(meta_filter(metas, convert_conditions(metadata_condition)))
+        print("doc_ids",doc_ids)
+        if not doc_ids and metadata_condition is not None:
+            doc_ids = ['-999']
         ranks = settings.retrievaler.retrieval(
             question,
             embd_mdl,
@@ -59,6 +65,7 @@ def retrieval(tenant_id):
             similarity_threshold=similarity_threshold,
             vector_similarity_weight=0.3,
             top=top,
+            doc_ids=doc_ids,
             rank_feature=label_question(question, [kb])
         )
@@ -67,6 +74,7 @@ def retrieval(tenant_id):
                     [tenant_id],
                     [kb_id],
                     embd_mdl,
+                    doc_ids,
                     LLMBundle(kb.tenant_id, LLMType.CHAT))
                 if ck["content_with_weight"]:
                     ranks["chunks"].insert(0, ck)
@@ -93,3 +101,20 @@ def retrieval(tenant_id):
         )
         logging.exception(e)
         return build_error_result(message=str(e), code=settings.RetCode.SERVER_ERROR)
+
+
+def convert_conditions(metadata_condition):
+    if metadata_condition is None:
+        metadata_condition = {}
+    op_mapping = {
+        "is": "=",
+        "not is": "≠"
+    }
+    return [
+        {
+            "op": op_mapping.get(cond["comparison_operator"], cond["comparison_operator"]),
+            "key": cond["name"],
+            "value": cond["value"]
+        }
+        for cond in metadata_condition.get("conditions", [])
+    ]
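To make the new mapping concrete, here is a small self-contained illustration of how a Dify-style `metadata_condition` payload is translated before being handed to `meta_filter`. The function is copied from the hunk above so the demo runs on its own; the sample payload is invented for illustration.

```python
def convert_conditions(metadata_condition):
    # Same logic as the hunk above: map Dify comparison operators onto the
    # symbols meta_filter understands and flatten the condition list.
    if metadata_condition is None:
        metadata_condition = {}
    op_mapping = {"is": "=", "not is": "≠"}
    return [
        {
            "op": op_mapping.get(cond["comparison_operator"], cond["comparison_operator"]),
            "key": cond["name"],
            "value": cond["value"],
        }
        for cond in metadata_condition.get("conditions", [])
    ]

sample = {"conditions": [{"name": "author", "comparison_operator": "is", "value": "alice"}]}
print(convert_conditions(sample))
# [{'op': '=', 'key': 'author', 'value': 'alice'}]
```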
@@ -84,18 +84,10 @@ def create_agent_session(tenant_id, agent_id):
     session_id=get_uuid()
     canvas = Canvas(cvs.dsl, tenant_id, agent_id)
     canvas.reset()
-    conv = {
-        "id": session_id,
-        "dialog_id": cvs.id,
-        "user_id": user_id,
-        "message": [],
-        "source": "agent",
-        "dsl": cvs.dsl
-    }
-    API4ConversationService.save(**conv)
-
     cvs.dsl = json.loads(str(canvas))
     conv = {"id": session_id, "dialog_id": cvs.id, "user_id": user_id, "message": [{"role": "assistant", "content": canvas.get_prologue()}], "source": "agent", "dsl": cvs.dsl}
+    API4ConversationService.save(**conv)
     conv["agent_id"] = conv.pop("dialog_id")
     return get_result(data=conv)
@@ -450,37 +442,26 @@ def agents_completion_openai_compatibility(tenant_id, agent_id):
 def agent_completions(tenant_id, agent_id):
     req = request.json

-    ans = {}
     if req.get("stream", True):
-        def generate():
-            for answer in agent_completion(tenant_id=tenant_id, agent_id=agent_id, **req):
-                if isinstance(answer, str):
-                    try:
-                        ans = json.loads(answer[5:])  # remove "data:"
-                    except Exception:
-                        continue
-
-                    if ans.get("event") != "message":
-                        continue
-
-                    yield answer
-
-            yield "data:[DONE]\n\n"
-
-        resp = Response(generate(), mimetype="text/event-stream")
+        resp = Response(agent_completion(tenant_id=tenant_id, agent_id=agent_id, **req), mimetype="text/event-stream")
         resp.headers.add_header("Cache-control", "no-cache")
         resp.headers.add_header("Connection", "keep-alive")
         resp.headers.add_header("X-Accel-Buffering", "no")
         resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
         return resp
+    result = {}
     for answer in agent_completion(tenant_id=tenant_id, agent_id=agent_id, **req):
         try:
             ans = json.loads(answer[5:])  # remove "data:"
+            if not result:
+                result = ans.copy()
+            else:
+                result["data"]["answer"] += ans["data"]["answer"]
+                result["data"]["reference"] = ans["data"].get("reference", [])
         except Exception as e:
-            return get_result(data=f"**ERROR**: {str(e)}")
-    return get_result(data=ans)
+            return get_error_data_result(str(e))
+    return result


 @manager.route("/chats/<chat_id>/sessions", methods=["GET"])  # noqa: F821
@@ -581,6 +562,9 @@ def list_agent_session(tenant_id, agent_id):
                     "chunks" in conv["reference"][chunk_num]):
                 chunks = conv["reference"][chunk_num]["chunks"]
                 for chunk in chunks:
+                    # Ensure chunk is a dictionary before calling get method
+                    if not isinstance(chunk, dict):
+                        continue
                     new_chunk = {
                         "id": chunk.get("chunk_id", chunk.get("id")),
                         "content": chunk.get("content_with_weight", chunk.get("content")),
@@ -909,7 +893,7 @@ def ask_about_embedded():
     def stream():
         nonlocal req, uid
         try:
-            for ans in ask(req["question"], req["kb_ids"], uid, search_config):
+            for ans in ask(req["question"], req["kb_ids"], uid, search_config=search_config):
                 yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
         except Exception as e:
             yield "data:" + json.dumps({"code": 500, "message": str(e), "data": {"answer": "**ERROR**: " + str(e), "reference": []}}, ensure_ascii=False) + "\n\n"
@@ -134,6 +134,25 @@ class UserCanvasService(CommonService):
             return False
         return True
+
+
+def structure_answer(conv, ans, message_id, session_id):
+    if not conv:
+        return ans
+    content = ""
+    if ans["event"] == "message":
+        if ans["data"].get("start_to_think") is True:
+            content = "<think>"
+        elif ans["data"].get("end_to_think") is True:
+            content = "</think>"
+        else:
+            content = ans["data"]["content"]
+
+    reference = ans["data"].get("reference")
+    result = {"id": message_id, "session_id": session_id, "answer": content}
+    if reference:
+        result["reference"] = [reference]
+    return result


 def completion(tenant_id, agent_id, session_id=None, **kwargs):
     query = kwargs.get("query", "") or kwargs.get("question", "")
     files = kwargs.get("files", [])
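A quick standalone illustration of what the new `structure_answer` helper produces for a streamed event. The function body is copied from the hunk above so the snippet runs on its own; the event payload is invented for the example.

```python
def structure_answer(conv, ans, message_id, session_id):
    # Copied from the hunk above for a self-contained demo.
    if not conv:
        return ans
    content = ""
    if ans["event"] == "message":
        if ans["data"].get("start_to_think") is True:
            content = "<think>"
        elif ans["data"].get("end_to_think") is True:
            content = "</think>"
        else:
            content = ans["data"]["content"]
    reference = ans["data"].get("reference")
    result = {"id": message_id, "session_id": session_id, "answer": content}
    if reference:
        result["reference"] = [reference]
    return result

event = {"event": "message", "data": {"content": "Hello", "reference": {"chunks": []}}}
print(structure_answer(object(), event, "msg-1", "sess-1"))
# {'id': 'msg-1', 'session_id': 'sess-1', 'answer': 'Hello', 'reference': [{'chunks': []}]}
```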
@@ -163,7 +182,8 @@ def completion(tenant_id, agent_id, session_id=None, **kwargs):
             "user_id": user_id,
             "message": [],
             "source": "agent",
-            "dsl": cvs.dsl
+            "dsl": cvs.dsl,
+            "reference": []
         }
         API4ConversationService.save(**conv)
         conv = API4Conversation(**conv)
@@ -176,13 +196,14 @@ def completion(tenant_id, agent_id, session_id=None, **kwargs):
     })
     txt = ""
     for ans in canvas.run(query=query, files=files, user_id=user_id, inputs=inputs):
-        ans["session_id"] = session_id
-        if ans["event"] == "message":
-            txt += ans["data"]["content"]
-        yield "data:" + json.dumps(ans, ensure_ascii=False) + "\n\n"
+        ans = structure_answer(conv, ans, message_id, session_id)
+        txt += ans["answer"]
+        if ans.get("answer") or ans.get("reference"):
+            yield "data:" + json.dumps({"code": 0, "data": ans},
+                                       ensure_ascii=False) + "\n\n"

     conv.message.append({"role": "assistant", "content": txt, "created_at": time.time(), "id": message_id})
-    conv.reference = canvas.get_reference()
+    conv.reference.append(canvas.get_reference())
     conv.errors = canvas.error
     conv.dsl = str(canvas)
     conv = conv.to_dict()
@@ -211,11 +232,9 @@ def completionOpenAI(tenant_id, agent_id, question, session_id=None, stream=True
             except Exception as e:
                 logging.exception(f"Agent OpenAI-Compatible completionOpenAI parse answer failed: {e}")
                 continue
-
-            if ans.get("event") != "message":
+            if not ans["data"]["answer"]:
                 continue
-
-            content_piece = ans["data"]["content"]
+            content_piece = ans["data"]["answer"]
             completion_tokens += len(tiktokenenc.encode(content_piece))

             yield "data: " + json.dumps(
@@ -260,9 +279,9 @@ def completionOpenAI(tenant_id, agent_id, question, session_id=None, stream=True
         ):
             if isinstance(ans, str):
                 ans = json.loads(ans[5:])
-            if ans.get("event") != "message":
+            if not ans["data"]["answer"]:
                 continue
-            all_content += ans["data"]["content"]
+            all_content += ans["data"]["answer"]

             completion_tokens = len(tiktokenenc.encode(all_content))
@@ -256,10 +256,10 @@ def repair_bad_citation_formats(answer: str, kbinfos: dict, idx: set):


 def meta_filter(metas: dict, filters: list[dict]):
-    doc_ids = []
+    doc_ids = set([])

     def filter_out(v2docs, operator, value):
-        nonlocal doc_ids
+        ids = []
         for input, docids in v2docs.items():
             try:
                 input = float(input)
@@ -284,16 +284,24 @@ def meta_filter(metas: dict, filters: list[dict]):
             ]:
                 try:
                     if all(conds):
-                        doc_ids.extend(docids)
+                        ids.extend(docids)
+                        break
                 except Exception:
                     pass
+        return ids

     for k, v2docs in metas.items():
         for f in filters:
             if k != f["key"]:
                 continue
-            filter_out(v2docs, f["op"], f["value"])
-    return doc_ids
+            ids = filter_out(v2docs, f["op"], f["value"])
+            if not doc_ids:
+                doc_ids = set(ids)
+            else:
+                doc_ids = doc_ids & set(ids)
+    if not doc_ids:
+        return []
+    return list(doc_ids)


 def chat(dialog, messages, stream=True, **kwargs):
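The rewrite changes `meta_filter` from accumulating matches into a flat list to intersecting the matches of each filter, so a document must satisfy every filter to survive. A tiny standalone illustration of that AND semantics (the document ids are invented for the example):

```python
# Matches produced by two hypothetical filters over the same metadata.
matches_per_filter = [
    {"doc1", "doc2", "doc3"},   # documents matching filter 1
    {"doc2", "doc3", "doc4"},   # documents matching filter 2
]

doc_ids: set[str] = set()
for ids in matches_per_filter:
    # The first filter seeds the set; later filters narrow it (logical AND).
    doc_ids = set(ids) if not doc_ids else doc_ids & set(ids)

print(sorted(doc_ids))  # ['doc2', 'doc3']
```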
@@ -152,7 +152,7 @@ class LLMBundle(LLM4Tenant):

     def describe_with_prompt(self, image, prompt):
         if self.langfuse:
-            generation = self.language.start_generation(trace_context=self.trace_context, name="describe_with_prompt", metadata={"model": self.llm_name, "prompt": prompt})
+            generation = self.langfuse.start_generation(trace_context=self.trace_context, name="describe_with_prompt", metadata={"model": self.llm_name, "prompt": prompt})

         txt, used_tokens = self.mdl.describe_with_prompt(image, prompt)
         if not TenantLLMService.increase_usage(self.tenant_id, self.llm_type, used_tokens):
@@ -17,6 +17,7 @@ import asyncio
 import functools
 import json
 import logging
+import os
 import queue
 import random
 import threading
@@ -667,7 +668,10 @@ def timeout(seconds: float | int = None, attempts: int = 2, *, exception: Option

             for a in range(attempts):
                 try:
-                    result = result_queue.get(timeout=seconds)
+                    if os.environ.get("ENABLE_TIMEOUT_ASSERTION"):
+                        result = result_queue.get(timeout=seconds)
+                    else:
+                        result = result_queue.get()
                     if isinstance(result, Exception):
                         raise result
                     return result
@@ -682,7 +686,10 @@ def timeout(seconds: float | int = None, attempts: int = 2, *, exception: Option

             for a in range(attempts):
                 try:
-                    with trio.fail_after(seconds):
+                    if os.environ.get("ENABLE_TIMEOUT_ASSERTION"):
+                        with trio.fail_after(seconds):
+                            return await func(*args, **kwargs)
+                    else:
                         return await func(*args, **kwargs)
                 except trio.TooSlowError:
                     if a < attempts - 1:
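The pattern in both hunks is the same: the hard timeout only applies when `ENABLE_TIMEOUT_ASSERTION` is set in the environment; otherwise the call is allowed to run to completion. A minimal sketch of that toggle around a blocking queue read (the queue and values are invented for the example):

```python
import os
import queue

def get_result(result_queue: "queue.Queue", seconds: float | None):
    # Honor the timeout only when the assertion flag is switched on.
    if os.environ.get("ENABLE_TIMEOUT_ASSERTION"):
        return result_queue.get(timeout=seconds)
    return result_queue.get()

q = queue.Queue()
q.put("done")
print(get_result(q, seconds=1.0))  # "done" either way; only the waiting behavior differs
```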
@@ -14,13 +14,15 @@
 # limitations under the License.
 #

-from .pdf_parser import RAGFlowPdfParser as PdfParser, PlainParser
 from .docx_parser import RAGFlowDocxParser as DocxParser
 from .excel_parser import RAGFlowExcelParser as ExcelParser
-from .ppt_parser import RAGFlowPptParser as PptParser
 from .html_parser import RAGFlowHtmlParser as HtmlParser
 from .json_parser import RAGFlowJsonParser as JsonParser
+from .markdown_parser import MarkdownElementExtractor
 from .markdown_parser import RAGFlowMarkdownParser as MarkdownParser
+from .pdf_parser import PlainParser
+from .pdf_parser import RAGFlowPdfParser as PdfParser
+from .ppt_parser import RAGFlowPptParser as PptParser
 from .txt_parser import RAGFlowTxtParser as TxtParser

 __all__ = [
@@ -33,4 +35,6 @@ __all__ = [
     "JsonParser",
     "MarkdownParser",
     "TxtParser",
+    "MarkdownElementExtractor",
 ]
@@ -17,8 +17,10 @@

 import re

+import mistune
 from markdown import markdown


 class RAGFlowMarkdownParser:
     def __init__(self, chunk_token_num=128):
         self.chunk_token_num = int(chunk_token_num)
@@ -35,40 +37,44 @@ class RAGFlowMarkdownParser:
                 table_list.append(raw_table)
                 if separate_tables:
                     # Skip this match (i.e., remove it)
-                    new_text += working_text[last_end:match.start()] + "\n\n"
+                    new_text += working_text[last_end : match.start()] + "\n\n"
                 else:
                     # Replace with rendered HTML
-                    html_table = markdown(raw_table, extensions=['markdown.extensions.tables']) if render else raw_table
-                    new_text += working_text[last_end:match.start()] + html_table + "\n\n"
+                    html_table = markdown(raw_table, extensions=["markdown.extensions.tables"]) if render else raw_table
+                    new_text += working_text[last_end : match.start()] + html_table + "\n\n"
                 last_end = match.end()
             new_text += working_text[last_end:]
             return new_text

         if "|" in markdown_text:  # for optimize performance
             # Standard Markdown table
             border_table_pattern = re.compile(
-                r'''
+                r"""
                 (?:\n|^)
                 (?:\|.*?\|.*?\|.*?\n)
                 (?:\|(?:\s*[:-]+[-| :]*\s*)\|.*?\n)
                 (?:\|.*?\|.*?\|.*?\n)+
-                ''', re.VERBOSE)
+                """,
+                re.VERBOSE,
+            )
             working_text = replace_tables_with_rendered_html(border_table_pattern, tables)

             # Borderless Markdown table
             no_border_table_pattern = re.compile(
-                r'''
+                r"""
                 (?:\n|^)
                 (?:\S.*?\|.*?\n)
                 (?:(?:\s*[:-]+[-| :]*\s*).*?\n)
                 (?:\S.*?\|.*?\n)+
-                ''', re.VERBOSE)
+                """,
+                re.VERBOSE,
+            )
             working_text = replace_tables_with_rendered_html(no_border_table_pattern, tables)

         if "<table>" in working_text.lower():  # for optimize performance
-            #HTML table extraction - handle possible html/body wrapper tags
+            # HTML table extraction - handle possible html/body wrapper tags
             html_table_pattern = re.compile(
-                r'''
+                r"""
                 (?:\n|^)
                 \s*
                 (?:
@@ -83,9 +89,10 @@ class RAGFlowMarkdownParser:
                 )
                 \s*
                 (?=\n|$)
-                ''',
-                re.VERBOSE | re.DOTALL | re.IGNORECASE
+                """,
+                re.VERBOSE | re.DOTALL | re.IGNORECASE,
             )

             def replace_html_tables():
                 nonlocal working_text
                 new_text = ""
@@ -94,9 +101,9 @@ class RAGFlowMarkdownParser:
                     raw_table = match.group()
                     tables.append(raw_table)
                     if separate_tables:
-                        new_text += working_text[last_end:match.start()] + "\n\n"
+                        new_text += working_text[last_end : match.start()] + "\n\n"
                     else:
-                        new_text += working_text[last_end:match.start()] + raw_table + "\n\n"
+                        new_text += working_text[last_end : match.start()] + raw_table + "\n\n"
                     last_end = match.end()
                 new_text += working_text[last_end:]
                 working_text = new_text
@@ -104,3 +111,163 @@ class RAGFlowMarkdownParser:
             replace_html_tables()

         return working_text, tables
+
+
+class MarkdownElementExtractor:
+    def __init__(self, markdown_content):
+        self.markdown_content = markdown_content
+        self.lines = markdown_content.split("\n")
+        self.ast_parser = mistune.create_markdown(renderer="ast")
+        self.ast_nodes = self.ast_parser(markdown_content)
+
+    def extract_elements(self):
+        """Extract individual elements (headers, code blocks, lists, etc.)"""
+        sections = []
+
+        i = 0
+        while i < len(self.lines):
+            line = self.lines[i]
+
+            if re.match(r"^#{1,6}\s+.*$", line):
+                # header
+                element = self._extract_header(i)
+                sections.append(element["content"])
+                i = element["end_line"] + 1
+            elif line.strip().startswith("```"):
+                # code block
+                element = self._extract_code_block(i)
+                sections.append(element["content"])
+                i = element["end_line"] + 1
+            elif re.match(r"^\s*[-*+]\s+.*$", line) or re.match(r"^\s*\d+\.\s+.*$", line):
+                # list block
+                element = self._extract_list_block(i)
+                sections.append(element["content"])
+                i = element["end_line"] + 1
+            elif line.strip().startswith(">"):
+                # blockquote
+                element = self._extract_blockquote(i)
+                sections.append(element["content"])
+                i = element["end_line"] + 1
+            elif line.strip():
+                # text block (paragraphs and inline elements until next block element)
+                element = self._extract_text_block(i)
+                sections.append(element["content"])
+                i = element["end_line"] + 1
+            else:
+                i += 1
+
+        sections = [section for section in sections if section.strip()]
+        return sections
+
+    def _extract_header(self, start_pos):
+        return {
+            "type": "header",
+            "content": self.lines[start_pos],
+            "start_line": start_pos,
+            "end_line": start_pos,
+        }
+
+    def _extract_code_block(self, start_pos):
+        end_pos = start_pos
+        content_lines = [self.lines[start_pos]]
+
+        # Find the end of the code block
+        for i in range(start_pos + 1, len(self.lines)):
+            content_lines.append(self.lines[i])
+            end_pos = i
+            if self.lines[i].strip().startswith("```"):
+                break
+
+        return {
+            "type": "code_block",
+            "content": "\n".join(content_lines),
+            "start_line": start_pos,
+            "end_line": end_pos,
+        }
+
+    def _extract_list_block(self, start_pos):
+        end_pos = start_pos
+        content_lines = []
+
+        i = start_pos
+        while i < len(self.lines):
+            line = self.lines[i]
+            # check if this line is a list item or continuation of a list
+            if (
+                re.match(r"^\s*[-*+]\s+.*$", line)
+                or re.match(r"^\s*\d+\.\s+.*$", line)
+                or (i > start_pos and not line.strip())
+                or (i > start_pos and re.match(r"^\s{2,}[-*+]\s+.*$", line))
+                or (i > start_pos and re.match(r"^\s{2,}\d+\.\s+.*$", line))
+                or (i > start_pos and re.match(r"^\s+\w+.*$", line))
+            ):
+                content_lines.append(line)
+                end_pos = i
+                i += 1
+            else:
+                break
+
+        return {
+            "type": "list_block",
+            "content": "\n".join(content_lines),
+            "start_line": start_pos,
+            "end_line": end_pos,
+        }
+
+    def _extract_blockquote(self, start_pos):
+        end_pos = start_pos
+        content_lines = []
+
+        i = start_pos
+        while i < len(self.lines):
+            line = self.lines[i]
+            if line.strip().startswith(">") or (i > start_pos and not line.strip()):
+                content_lines.append(line)
+                end_pos = i
+                i += 1
+            else:
+                break
+
+        return {
+            "type": "blockquote",
+            "content": "\n".join(content_lines),
+            "start_line": start_pos,
+            "end_line": end_pos,
+        }
+
+    def _extract_text_block(self, start_pos):
+        """Extract a text block (paragraphs, inline elements) until next block element"""
+        end_pos = start_pos
+        content_lines = [self.lines[start_pos]]
+
+        i = start_pos + 1
+        while i < len(self.lines):
+            line = self.lines[i]
+            # stop if we encounter a block element
+            if re.match(r"^#{1,6}\s+.*$", line) or line.strip().startswith("```") or re.match(r"^\s*[-*+]\s+.*$", line) or re.match(r"^\s*\d+\.\s+.*$", line) or line.strip().startswith(">"):
+                break
+            elif not line.strip():
+                # check if the next line is a block element
+                if i + 1 < len(self.lines) and (
+                    re.match(r"^#{1,6}\s+.*$", self.lines[i + 1])
+                    or self.lines[i + 1].strip().startswith("```")
+                    or re.match(r"^\s*[-*+]\s+.*$", self.lines[i + 1])
+                    or re.match(r"^\s*\d+\.\s+.*$", self.lines[i + 1])
+                    or self.lines[i + 1].strip().startswith(">")
+                ):
+                    break
+                else:
+                    content_lines.append(line)
+                    end_pos = i
+                    i += 1
+            else:
+                content_lines.append(line)
+                end_pos = i
+                i += 1
+
+        return {
+            "type": "text_block",
+            "content": "\n".join(content_lines),
+            "start_line": start_pos,
+            "end_line": end_pos,
+        }
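A short usage sketch of the new extractor, assuming `mistune` is installed and the class is importable from the deepdoc parser package as the `__init__` hunk suggests (the sample markdown is invented for the example):

```python
from deepdoc.parser import MarkdownElementExtractor  # exposed via the updated __all__

sample = """# Title

Intro paragraph spanning
two lines.

- item one
- item two

> a quote
"""

extractor = MarkdownElementExtractor(sample)
for section in extractor.extract_elements():
    # Each element (header, list block, blockquote, text block) comes back
    # as its own string, ready to be chunked independently.
    print(repr(section))
```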
@@ -3501,7 +3501,7 @@ Failure:

 ### Generate related questions

-**POST** `/v1/sessions/related_questions`
+**POST** `/api/v1/sessions/related_questions`

 Generates five to ten alternative question strings from the user's original query to retrieve more relevant search results.

@@ -3516,7 +3516,7 @@ The chat model autonomously determines the number of questions to generate based
 #### Request

 - Method: POST
-- URL: `/v1/sessions/related_questions`
+- URL: `/api/v1/sessions/related_questions`
 - Headers:
   - `'content-Type: application/json'`
   - `'Authorization: Bearer <YOUR_LOGIN_TOKEN>'`
@@ -3528,7 +3528,7 @@ The chat model autonomously determines the number of questions to generate based

 ```bash
 curl --request POST \
-     --url http://{address}/v1/sessions/related_questions \
+     --url http://{address}/api/v1/sessions/related_questions \
      --header 'Content-Type: application/json' \
      --header 'Authorization: Bearer <YOUR_LOGIN_TOKEN>' \
      --data '
@@ -15,6 +15,7 @@
 #
 import logging
 import itertools
+import os
 import re
 from dataclasses import dataclass
 from typing import Any, Callable
@@ -106,7 +107,8 @@ class EntityResolution(Extractor):
             nonlocal remain_candidates_to_resolve, callback
             async with semaphore:
                 try:
-                    with trio.move_on_after(280) as cancel_scope:
+                    enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
+                    with trio.move_on_after(280 if enable_timeout_assertion else 1000000000) as cancel_scope:
                         await self._resolve_candidate(candidate_batch, result_set, result_lock)
                         remain_candidates_to_resolve = remain_candidates_to_resolve - len(candidate_batch[1])
                         callback(msg=f"Resolved {len(candidate_batch[1])} pairs, {remain_candidates_to_resolve} are remained to resolve. ")
@@ -169,7 +171,8 @@ class EntityResolution(Extractor):
         logging.info(f"Created resolution prompt {len(text)} bytes for {len(candidate_resolution_i[1])} entity pairs of type {candidate_resolution_i[0]}")
         async with chat_limiter:
             try:
-                with trio.move_on_after(240) as cancel_scope:
+                enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
+                with trio.move_on_after(280 if enable_timeout_assertion else 1000000000) as cancel_scope:
                     response = await trio.to_thread.run_sync(self._chat, text, [{"role": "user", "content": "Output:"}], {})
                 if cancel_scope.cancelled_caught:
                     logging.warning("_resolve_candidate._chat timeout, skipping...")
@@ -7,6 +7,7 @@ Reference:

 import logging
 import json
+import os
 import re
 from typing import Callable
 from dataclasses import dataclass
@@ -51,6 +52,7 @@ class CommunityReportsExtractor(Extractor):
         self._max_report_length = max_report_length or 1500

     async def __call__(self, graph: nx.Graph, callback: Callable | None = None):
+        enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
         for node_degree in graph.degree:
             graph.nodes[str(node_degree[0])]["rank"] = int(node_degree[1])

@@ -92,7 +94,7 @@ class CommunityReportsExtractor(Extractor):
             text = perform_variable_replacements(self._extraction_prompt, variables=prompt_variables)
             async with chat_limiter:
                 try:
-                    with trio.move_on_after(180) as cancel_scope:
+                    with trio.move_on_after(180 if enable_timeout_assertion else 1000000000) as cancel_scope:
                         response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], {})
                     if cancel_scope.cancelled_caught:
                         logging.warning("extract_community_report._chat timeout, skipping...")
@@ -47,7 +47,7 @@ class Extractor:
         self._language = language
         self._entity_types = entity_types or DEFAULT_ENTITY_TYPES

-    @timeout(60*5)
+    @timeout(60*20)
     def _chat(self, system, history, gen_conf={}):
         hist = deepcopy(history)
         conf = deepcopy(gen_conf)
@@ -15,6 +15,8 @@
 #
 import json
 import logging
+import os
+
 import networkx as nx
 import trio

@@ -49,6 +51,7 @@ async def run_graphrag(
     embedding_model,
     callback,
 ):
+    enable_timeout_assertion=os.environ.get("ENABLE_TIMEOUT_ASSERTION")
     start = trio.current_time()
     tenant_id, kb_id, doc_id = row["tenant_id"], str(row["kb_id"]), row["doc_id"]
     chunks = []
@@ -57,7 +60,7 @@ async def run_graphrag(
     ):
         chunks.append(d["content_with_weight"])

-    with trio.fail_after(max(120, len(chunks)*120)):
+    with trio.fail_after(max(120, len(chunks)*60*10) if enable_timeout_assertion else 10000000000):
         subgraph = await generate_subgraph(
             LightKGExt
             if "method" not in row["kb_parser_config"].get("graphrag", {}) or row["kb_parser_config"]["graphrag"]["method"] != "general"
@@ -130,7 +130,36 @@ Output:

 PROMPTS[
     "entiti_continue_extraction"
-] = """MANY entities were missed in the last extraction. Add them below using the same format:
+] = """
+MANY entities and relationships were missed in the last extraction. Please find only the missing entities and relationships from previous text.
+
+---Remember Steps---
+
+1. Identify all entities. For each identified entity, extract the following information:
+- entity_name: Name of the entity, use same language as input text. If English, capitalized the name
+- entity_type: One of the following types: [{entity_types}]
+- entity_description: Provide a comprehensive description of the entity's attributes and activities *based solely on the information present in the input text*. **Do not infer or hallucinate information not explicitly stated.** If the text provides insufficient information to create a comprehensive description, state "Description not available in text."
+Format each entity as ("entity"{tuple_delimiter}<entity_name>{tuple_delimiter}<entity_type>{tuple_delimiter}<entity_description>)
+
+2. From the entities identified in step 1, identify all pairs of (source_entity, target_entity) that are *clearly related* to each other.
+For each pair of related entities, extract the following information:
+- source_entity: name of the source entity, as identified in step 1
+- target_entity: name of the target entity, as identified in step 1
+- relationship_description: explanation as to why you think the source entity and the target entity are related to each other
+- relationship_strength: a numeric score indicating strength of the relationship between the source entity and target entity
+- relationship_keywords: one or more high-level key words that summarize the overarching nature of the relationship, focusing on concepts or themes rather than specific details
+Format each relationship as ("relationship"{tuple_delimiter}<source_entity>{tuple_delimiter}<target_entity>{tuple_delimiter}<relationship_description>{tuple_delimiter}<relationship_keywords>{tuple_delimiter}<relationship_strength>)
+
+3. Identify high-level key words that summarize the main concepts, themes, or topics of the entire text. These should capture the overarching ideas present in the document.
+Format the content-level key words as ("content_keywords"{tuple_delimiter}<high_level_keywords>)
+
+4. Return output in {language} as a single list of all the entities and relationships identified in steps 1 and 2. Use **{record_delimiter}** as the list delimiter.
+
+5. When finished, output {completion_delimiter}
+
+---Output---
+
+Add new entities and relations below using the same format, and do not include entities and relations that have been previously extracted. :
 """

 PROMPTS[
@@ -307,6 +307,7 @@ def chunk_id(chunk):

 async def graph_node_to_chunk(kb_id, embd_mdl, ent_name, meta, chunks):
     global chat_limiter
+    enable_timeout_assertion=os.environ.get("ENABLE_TIMEOUT_ASSERTION")
     chunk = {
         "id": get_uuid(),
         "important_kwd": [ent_name],
@@ -324,7 +325,7 @@ async def graph_node_to_chunk(kb_id, embd_mdl, ent_name, meta, chunks):
     ebd = get_embed_cache(embd_mdl.llm_name, ent_name)
     if ebd is None:
         async with chat_limiter:
-            with trio.fail_after(3):
+            with trio.fail_after(3 if enable_timeout_assertion else 30000000):
                 ebd, _ = await trio.to_thread.run_sync(lambda: embd_mdl.encode([ent_name]))
         ebd = ebd[0]
         set_embed_cache(embd_mdl.llm_name, ent_name, ebd)
@@ -362,6 +363,7 @@ def get_relation(tenant_id, kb_id, from_ent_name, to_ent_name, size=1):


 async def graph_edge_to_chunk(kb_id, embd_mdl, from_ent_name, to_ent_name, meta, chunks):
+    enable_timeout_assertion=os.environ.get("ENABLE_TIMEOUT_ASSERTION")
     chunk = {
         "id": get_uuid(),
         "from_entity_kwd": from_ent_name,
@@ -380,7 +382,7 @@ async def graph_edge_to_chunk(kb_id, embd_mdl, from_ent_name, to_ent_name, meta, chunks):
     ebd = get_embed_cache(embd_mdl.llm_name, txt)
     if ebd is None:
         async with chat_limiter:
-            with trio.fail_after(3):
+            with trio.fail_after(3 if enable_timeout_assertion else 300000000):
                 ebd, _ = await trio.to_thread.run_sync(lambda: embd_mdl.encode([txt+f": {meta['description']}"]))
         ebd = ebd[0]
         set_embed_cache(embd_mdl.llm_name, txt, ebd)
@@ -514,9 +516,10 @@ async def set_graph(tenant_id: str, kb_id: str, embd_mdl, graph: nx.Graph, chang
         callback(msg=f"set_graph converted graph change to {len(chunks)} chunks in {now - start:.2f}s.")
     start = now

+    enable_timeout_assertion=os.environ.get("ENABLE_TIMEOUT_ASSERTION")
     es_bulk_size = 4
     for b in range(0, len(chunks), es_bulk_size):
-        with trio.fail_after(3):
+        with trio.fail_after(3 if enable_timeout_assertion else 30000000):
             doc_store_result = await trio.to_thread.run_sync(lambda: settings.docStoreConn.insert(chunks[b:b + es_bulk_size], search.index_name(tenant_id), kb_id))
         if b % 100 == es_bulk_size and callback:
             callback(msg=f"Insert chunks: {b}/{len(chunks)}")
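The embedding lookups in these hunks follow a read-through cache pattern: check the cache, compute on a miss, then store the result. A minimal generic sketch of that pattern, where an in-memory dict and a fake encoder stand in for the project's cache and embedding model:

```python
_cache: dict[str, list[float]] = {}

def get_embed_cache(model: str, key: str):
    return _cache.get(f"{model}:{key}")

def set_embed_cache(model: str, key: str, value: list[float]) -> None:
    _cache[f"{model}:{key}"] = value

def embed(model: str, text: str) -> list[float]:
    ebd = get_embed_cache(model, text)
    if ebd is None:
        ebd = [float(len(text))]  # placeholder for the real encode() call
        set_embed_cache(model, text, ebd)
    return ebd

print(embed("demo-model", "RAGFlow"))  # computed once
print(embed("demo-model", "RAGFlow"))  # served from the cache
```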
@@ -16,6 +16,9 @@

 import json
 import logging
+import random
+import time
+from collections import OrderedDict
 from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
 from functools import wraps
@@ -53,6 +56,13 @@ JSON_RESPONSE = True


 class RAGFlowConnector:
+    _MAX_DATASET_CACHE = 32
+    _MAX_DOCUMENT_CACHE = 128
+    _CACHE_TTL = 300
+
+    _dataset_metadata_cache: OrderedDict[str, tuple[dict, float | int]] = OrderedDict()  # "dataset_id" -> (metadata, expiry_ts)
+    _document_metadata_cache: OrderedDict[str, tuple[list[tuple[str, dict]], float | int]] = OrderedDict()  # "dataset_id" -> ([(document_id, doc_metadata)], expiry_ts)
+
     def __init__(self, base_url: str, version="v1"):
         self.base_url = base_url
         self.version = version
@@ -72,6 +82,43 @@ class RAGFlowConnector:
         res = requests.get(url=self.api_url + path, params=params, headers=self.authorization_header, json=json)
         return res

+    def _is_cache_valid(self, ts):
+        return time.time() < ts
+
+    def _get_expiry_timestamp(self):
+        offset = random.randint(-30, 30)
+        return time.time() + self._CACHE_TTL + offset
+
+    def _get_cached_dataset_metadata(self, dataset_id):
+        entry = self._dataset_metadata_cache.get(dataset_id)
+        if entry:
+            data, ts = entry
+            if self._is_cache_valid(ts):
+                self._dataset_metadata_cache.move_to_end(dataset_id)
+                return data
+        return None
+
+    def _set_cached_dataset_metadata(self, dataset_id, metadata):
+        self._dataset_metadata_cache[dataset_id] = (metadata, self._get_expiry_timestamp())
+        self._dataset_metadata_cache.move_to_end(dataset_id)
+        if len(self._dataset_metadata_cache) > self._MAX_DATASET_CACHE:
+            self._dataset_metadata_cache.popitem(last=False)
+
+    def _get_cached_document_metadata_by_dataset(self, dataset_id):
+        entry = self._document_metadata_cache.get(dataset_id)
+        if entry:
+            data_list, ts = entry
+            if self._is_cache_valid(ts):
+                self._document_metadata_cache.move_to_end(dataset_id)
+                return {doc_id: doc_meta for doc_id, doc_meta in data_list}
+        return None
+
+    def _set_cached_document_metadata_by_dataset(self, dataset_id, doc_id_meta_list):
+        self._document_metadata_cache[dataset_id] = (doc_id_meta_list, self._get_expiry_timestamp())
+        self._document_metadata_cache.move_to_end(dataset_id)
+        if len(self._document_metadata_cache) > self._MAX_DOCUMENT_CACHE:
+            self._document_metadata_cache.popitem(last=False)
+
     def list_datasets(self, page: int = 1, page_size: int = 1000, orderby: str = "create_time", desc: bool = True, id: str | None = None, name: str | None = None):
         res = self._get("/datasets", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
         if not res:
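The cache helpers above combine an LRU bound (via `OrderedDict.move_to_end` and `popitem(last=False)`) with a jittered TTL so entries do not all expire at once. A compact standalone sketch of the same idea, with illustrative sizes:

```python
import random
import time
from collections import OrderedDict

class TTLLRUCache:
    def __init__(self, max_items: int = 32, ttl_s: int = 300):
        self.max_items = max_items
        self.ttl_s = ttl_s
        self._store: OrderedDict[str, tuple[object, float]] = OrderedDict()

    def get(self, key: str):
        entry = self._store.get(key)
        if entry:
            value, expiry = entry
            if time.time() < expiry:
                self._store.move_to_end(key)  # refresh the LRU position
                return value
        return None

    def set(self, key: str, value: object) -> None:
        # Jitter the expiry so a burst of inserts does not expire in lockstep.
        expiry = time.time() + self.ttl_s + random.randint(-30, 30)
        self._store[key] = (value, expiry)
        self._store.move_to_end(key)
        if len(self._store) > self.max_items:
            self._store.popitem(last=False)  # evict the least recently used entry

cache = TTLLRUCache(max_items=2)
cache.set("ds-1", {"name": "demo"})
print(cache.get("ds-1"))  # {'name': 'demo'}
```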
@@ -87,10 +134,38 @@ class RAGFlowConnector:
         return ""

     def retrieval(
-        self, dataset_ids, document_ids=None, question="", page=1, page_size=30, similarity_threshold=0.2, vector_similarity_weight=0.3, top_k=1024, rerank_id: str | None = None, keyword: bool = False
+        self,
+        dataset_ids,
+        document_ids=None,
+        question="",
+        page=1,
+        page_size=30,
+        similarity_threshold=0.2,
+        vector_similarity_weight=0.3,
+        top_k=1024,
+        rerank_id: str | None = None,
+        keyword: bool = False,
+        force_refresh: bool = False,
     ):
         if document_ids is None:
             document_ids = []

+        # If no dataset_ids provided or empty list, get all available dataset IDs
+        if not dataset_ids:
+            dataset_list_str = self.list_datasets()
+            dataset_ids = []
+
+            # Parse the dataset list to extract IDs
+            if dataset_list_str:
+                for line in dataset_list_str.strip().split('\n'):
+                    if line.strip():
+                        try:
+                            dataset_info = json.loads(line.strip())
+                            dataset_ids.append(dataset_info["id"])
+                        except (json.JSONDecodeError, KeyError):
+                            # Skip malformed lines
+                            continue
+
         data_json = {
             "page": page,
             "page_size": page_size,
@ -110,12 +185,127 @@ class RAGFlowConnector:
|
|||||||
|
|
||||||
res = res.json()
|
res = res.json()
|
||||||
if res.get("code") == 0:
|
if res.get("code") == 0:
|
||||||
|
data = res["data"]
|
||||||
chunks = []
|
chunks = []
|
||||||
for chunk_data in res["data"].get("chunks"):
|
|
||||||
chunks.append(json.dumps(chunk_data, ensure_ascii=False))
|
# Cache document metadata and dataset information
|
||||||
return [types.TextContent(type="text", text="\n".join(chunks))]
|
document_cache, dataset_cache = self._get_document_metadata_cache(dataset_ids, force_refresh=force_refresh)
|
||||||
|
|
||||||
|
# Process chunks with enhanced field mapping including per-chunk metadata
|
||||||
|
for chunk_data in data.get("chunks", []):
|
||||||
|
enhanced_chunk = self._map_chunk_fields(chunk_data, dataset_cache, document_cache)
|
||||||
|
chunks.append(enhanced_chunk)
|
||||||
|
|
||||||
|
# Build structured response (no longer need response-level document_metadata)
|
||||||
|
response = {
|
||||||
|
"chunks": chunks,
|
||||||
|
"pagination": {
|
||||||
|
"page": data.get("page", page),
|
||||||
|
"page_size": data.get("page_size", page_size),
|
||||||
|
"total_chunks": data.get("total", len(chunks)),
|
||||||
|
"total_pages": (data.get("total", len(chunks)) + page_size - 1) // page_size,
|
||||||
|
},
|
||||||
|
"query_info": {
|
||||||
|
"question": question,
|
||||||
|
"similarity_threshold": similarity_threshold,
|
||||||
|
"vector_weight": vector_similarity_weight,
|
||||||
|
"keyword_search": keyword,
|
||||||
|
"dataset_count": len(dataset_ids),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return [types.TextContent(type="text", text=json.dumps(response, ensure_ascii=False))]
|
||||||
|
|
||||||
raise Exception([types.TextContent(type="text", text=res.get("message"))])
|
raise Exception([types.TextContent(type="text", text=res.get("message"))])
|
||||||
|
|
||||||
|
def _get_document_metadata_cache(self, dataset_ids, force_refresh=False):
|
||||||
|
"""Cache document metadata for all documents in the specified datasets"""
|
||||||
|
document_cache = {}
|
||||||
|
dataset_cache = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
for dataset_id in dataset_ids:
|
||||||
|
dataset_meta = None if force_refresh else self._get_cached_dataset_metadata(dataset_id)
|
||||||
|
if not dataset_meta:
|
||||||
|
# First get dataset info for name
|
||||||
|
dataset_res = self._get("/datasets", {"id": dataset_id, "page_size": 1})
|
||||||
|
if dataset_res and dataset_res.status_code == 200:
|
||||||
|
dataset_data = dataset_res.json()
|
||||||
|
if dataset_data.get("code") == 0 and dataset_data.get("data"):
|
||||||
|
dataset_info = dataset_data["data"][0]
|
||||||
|
dataset_meta = {"name": dataset_info.get("name", "Unknown"), "description": dataset_info.get("description", "")}
|
||||||
|
self._set_cached_dataset_metadata(dataset_id, dataset_meta)
|
||||||
|
if dataset_meta:
|
||||||
|
dataset_cache[dataset_id] = dataset_meta
|
||||||
|
|
||||||
|
docs = None if force_refresh else self._get_cached_document_metadata_by_dataset(dataset_id)
|
||||||
|
if docs is None:
|
||||||
|
docs_res = self._get(f"/datasets/{dataset_id}/documents")
|
||||||
|
docs_data = docs_res.json()
|
||||||
|
if docs_data.get("code") == 0 and docs_data.get("data", {}).get("docs"):
|
||||||
|
doc_id_meta_list = []
|
||||||
|
docs = {}
|
||||||
|
for doc in docs_data["data"]["docs"]:
|
||||||
|
doc_id = doc.get("id")
|
||||||
|
if not doc_id:
|
||||||
|
continue
|
||||||
|
doc_meta = {
|
||||||
|
"document_id": doc_id,
|
||||||
|
"name": doc.get("name", ""),
|
||||||
|
"location": doc.get("location", ""),
|
||||||
|
"type": doc.get("type", ""),
|
||||||
|
"size": doc.get("size"),
|
||||||
|
"chunk_count": doc.get("chunk_count"),
|
||||||
|
# "chunk_method": doc.get("chunk_method", ""),
|
||||||
|
"create_date": doc.get("create_date", ""),
|
||||||
|
"update_date": doc.get("update_date", ""),
|
||||||
|
# "process_begin_at": doc.get("process_begin_at", ""),
|
||||||
|
# "process_duration": doc.get("process_duration"),
|
||||||
|
# "progress": doc.get("progress"),
|
||||||
|
# "progress_msg": doc.get("progress_msg", ""),
|
||||||
|
# "status": doc.get("status", ""),
|
||||||
|
# "run": doc.get("run", ""),
|
||||||
|
"token_count": doc.get("token_count"),
|
||||||
|
# "source_type": doc.get("source_type", ""),
|
||||||
|
"thumbnail": doc.get("thumbnail", ""),
|
||||||
|
"dataset_id": doc.get("dataset_id", dataset_id),
|
||||||
|
"meta_fields": doc.get("meta_fields", {}),
|
||||||
|
# "parser_config": doc.get("parser_config", {})
|
||||||
|
}
|
||||||
|
doc_id_meta_list.append((doc_id, doc_meta))
|
||||||
|
docs[doc_id] = doc_meta
|
||||||
|
self._set_cached_document_metadata_by_dataset(dataset_id, doc_id_meta_list)
|
||||||
|
if docs:
|
||||||
|
document_cache.update(docs)
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
# Gracefully handle metadata cache failures
|
||||||
|
pass
|
||||||
|
|
||||||
|
return document_cache, dataset_cache
|
||||||
|
|
||||||
|
def _map_chunk_fields(self, chunk_data, dataset_cache, document_cache):
|
||||||
|
"""Preserve all original API fields and add per-chunk document metadata"""
|
||||||
|
# Start with ALL raw data from API (preserve everything like original version)
|
||||||
|
mapped = dict(chunk_data)
|
||||||
|
|
||||||
|
# Add dataset name enhancement
|
||||||
|
dataset_id = chunk_data.get("dataset_id") or chunk_data.get("kb_id")
|
||||||
|
if dataset_id and dataset_id in dataset_cache:
|
||||||
|
mapped["dataset_name"] = dataset_cache[dataset_id]["name"]
|
||||||
|
else:
|
||||||
|
mapped["dataset_name"] = "Unknown"
|
||||||
|
|
||||||
|
# Add document name convenience field
|
||||||
|
mapped["document_name"] = chunk_data.get("document_keyword", "")
|
||||||
|
|
||||||
|
# Add per-chunk document metadata
|
||||||
|
document_id = chunk_data.get("document_id")
|
||||||
|
if document_id and document_id in document_cache:
|
||||||
|
mapped["document_metadata"] = document_cache[document_id]
|
||||||
|
|
||||||
|
return mapped
|
||||||
|
|
||||||
|
|
||||||
class RAGFlowCtx:
|
class RAGFlowCtx:
|
||||||
def __init__(self, connector: RAGFlowConnector):
|
def __init__(self, connector: RAGFlowConnector):
|
||||||
@ -195,7 +385,58 @@ async def list_tools(*, connector) -> list[types.Tool]:
|
|||||||
"items": {"type": "string"},
|
"items": {"type": "string"},
|
||||||
"description": "Optional array of document IDs to search within."
|
"description": "Optional array of document IDs to search within."
|
||||||
},
|
},
|
||||||
"question": {"type": "string", "description": "The question or query to search for."},
|
"question": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The question or query to search for."
|
||||||
|
},
|
||||||
|
"page": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Page number for pagination",
|
||||||
|
"default": 1,
|
||||||
|
"minimum": 1,
|
||||||
|
},
|
||||||
|
"page_size": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Number of results to return per page (default: 10, max recommended: 50 to avoid token limits)",
|
||||||
|
"default": 10,
|
||||||
|
"minimum": 1,
|
||||||
|
"maximum": 100,
|
||||||
|
},
|
||||||
|
"similarity_threshold": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "Minimum similarity threshold for results",
|
||||||
|
"default": 0.2,
|
||||||
|
"minimum": 0.0,
|
||||||
|
"maximum": 1.0,
|
||||||
|
},
|
||||||
|
"vector_similarity_weight": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "Weight for vector similarity vs term similarity",
|
||||||
|
"default": 0.3,
|
||||||
|
"minimum": 0.0,
|
||||||
|
"maximum": 1.0,
|
||||||
|
},
|
||||||
|
"keyword": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Enable keyword-based search",
|
||||||
|
"default": False,
|
||||||
|
},
|
||||||
|
"top_k": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum results to consider before ranking",
|
||||||
|
"default": 1024,
|
||||||
|
"minimum": 1,
|
||||||
|
"maximum": 1024,
|
||||||
|
},
|
||||||
|
"rerank_id": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional reranking model identifier",
|
||||||
|
},
|
||||||
|
"force_refresh": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Set to true only if fresh dataset and document metadata is explicitly required. Otherwise, cached metadata is used (default: false).",
|
||||||
|
"default": False,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
"required": ["question"],
|
"required": ["question"],
|
||||||
},
|
},
|
||||||
@ -209,6 +450,16 @@ async def call_tool(name: str, arguments: dict, *, connector) -> list[types.Text
|
|||||||
if name == "ragflow_retrieval":
|
if name == "ragflow_retrieval":
|
||||||
document_ids = arguments.get("document_ids", [])
|
document_ids = arguments.get("document_ids", [])
|
||||||
dataset_ids = arguments.get("dataset_ids", [])
|
dataset_ids = arguments.get("dataset_ids", [])
|
||||||
|
question = arguments.get("question", "")
|
||||||
|
page = arguments.get("page", 1)
|
||||||
|
page_size = arguments.get("page_size", 10)
|
||||||
|
similarity_threshold = arguments.get("similarity_threshold", 0.2)
|
||||||
|
vector_similarity_weight = arguments.get("vector_similarity_weight", 0.3)
|
||||||
|
keyword = arguments.get("keyword", False)
|
||||||
|
top_k = arguments.get("top_k", 1024)
|
||||||
|
rerank_id = arguments.get("rerank_id")
|
||||||
|
force_refresh = arguments.get("force_refresh", False)
|
||||||
|
|
||||||
|
|
||||||
# If no dataset_ids provided or empty list, get all available dataset IDs
|
# If no dataset_ids provided or empty list, get all available dataset IDs
|
||||||
if not dataset_ids:
|
if not dataset_ids:
|
||||||
@ -229,7 +480,15 @@ async def call_tool(name: str, arguments: dict, *, connector) -> list[types.Text
|
|||||||
return connector.retrieval(
|
return connector.retrieval(
|
||||||
dataset_ids=dataset_ids,
|
dataset_ids=dataset_ids,
|
||||||
document_ids=document_ids,
|
document_ids=document_ids,
|
||||||
question=arguments["question"],
|
question=question,
|
||||||
|
page=page,
|
||||||
|
page_size=page_size,
|
||||||
|
similarity_threshold=similarity_threshold,
|
||||||
|
vector_similarity_weight=vector_similarity_weight,
|
||||||
|
keyword=keyword,
|
||||||
|
top_k=top_k,
|
||||||
|
rerank_id=rerank_id,
|
||||||
|
force_refresh=force_refresh,
|
||||||
)
|
)
|
||||||
raise ValueError(f"Tool not found: {name}")
|
raise ValueError(f"Tool not found: {name}")
|
||||||
|
|
||||||
|
|||||||
@ -45,7 +45,7 @@ dependencies = [
|
|||||||
"html-text==0.6.2",
|
"html-text==0.6.2",
|
||||||
"httpx[socks]==0.27.2",
|
"httpx[socks]==0.27.2",
|
||||||
"huggingface-hub>=0.25.0,<0.26.0",
|
"huggingface-hub>=0.25.0,<0.26.0",
|
||||||
"infinity-sdk==0.6.0-dev4",
|
"infinity-sdk==0.6.0.dev5",
|
||||||
"infinity-emb>=0.0.66,<0.0.67",
|
"infinity-emb>=0.0.66,<0.0.67",
|
||||||
"itsdangerous==2.1.2",
|
"itsdangerous==2.1.2",
|
||||||
"json-repair==0.35.0",
|
"json-repair==0.35.0",
|
||||||
|
|||||||
@ -30,7 +30,7 @@ from tika import parser
|
|||||||
|
|
||||||
from api.db import LLMType
|
from api.db import LLMType
|
||||||
from api.db.services.llm_service import LLMBundle
|
from api.db.services.llm_service import LLMBundle
|
||||||
from deepdoc.parser import DocxParser, ExcelParser, HtmlParser, JsonParser, MarkdownParser, PdfParser, TxtParser
|
from deepdoc.parser import DocxParser, ExcelParser, HtmlParser, JsonParser, MarkdownElementExtractor, MarkdownParser, PdfParser, TxtParser
|
||||||
from deepdoc.parser.figure_parser import VisionFigureParser, vision_figure_parser_figure_data_wrapper
|
from deepdoc.parser.figure_parser import VisionFigureParser, vision_figure_parser_figure_data_wrapper
|
||||||
from deepdoc.parser.pdf_parser import PlainParser, VisionParser
|
from deepdoc.parser.pdf_parser import PlainParser, VisionParser
|
||||||
from rag.nlp import concat_img, find_codec, naive_merge, naive_merge_with_images, naive_merge_docx, rag_tokenizer, tokenize_chunks, tokenize_chunks_with_images, tokenize_table
|
from rag.nlp import concat_img, find_codec, naive_merge, naive_merge_with_images, naive_merge_docx, rag_tokenizer, tokenize_chunks, tokenize_chunks_with_images, tokenize_table
|
||||||
@ -289,7 +289,7 @@ class Pdf(PdfParser):
|
|||||||
return [(b["text"], self._line_tag(b, zoomin)) for b in self.boxes], tbls, figures
|
return [(b["text"], self._line_tag(b, zoomin)) for b in self.boxes], tbls, figures
|
||||||
else:
|
else:
|
||||||
tbls = self._extract_table_figure(True, zoomin, True, True)
|
tbls = self._extract_table_figure(True, zoomin, True, True)
|
||||||
# self._naive_vertical_merge()
|
self._naive_vertical_merge()
|
||||||
self._concat_downward()
|
self._concat_downward()
|
||||||
# self._filter_forpages()
|
# self._filter_forpages()
|
||||||
logging.info("layouts cost: {}s".format(timer() - first_start))
|
logging.info("layouts cost: {}s".format(timer() - first_start))
|
||||||
@ -350,17 +350,14 @@ class Markdown(MarkdownParser):
|
|||||||
else:
|
else:
|
||||||
with open(filename, "r") as f:
|
with open(filename, "r") as f:
|
||||||
txt = f.read()
|
txt = f.read()
|
||||||
|
|
||||||
remainder, tables = self.extract_tables_and_remainder(f'{txt}\n', separate_tables=separate_tables)
|
remainder, tables = self.extract_tables_and_remainder(f'{txt}\n', separate_tables=separate_tables)
|
||||||
sections = []
|
|
||||||
|
extractor = MarkdownElementExtractor(txt)
|
||||||
|
element_sections = extractor.extract_elements()
|
||||||
|
sections = [(element, "") for element in element_sections]
|
||||||
|
|
||||||
tbls = []
|
tbls = []
|
||||||
for sec in remainder.split("\n"):
|
|
||||||
if sec.strip().find("#") == 0:
|
|
||||||
sections.append((sec, ""))
|
|
||||||
elif sections and sections[-1][0].strip().find("#") == 0:
|
|
||||||
sec_, _ = sections.pop(-1)
|
|
||||||
sections.append((sec_ + "\n" + sec, ""))
|
|
||||||
else:
|
|
||||||
sections.append((sec, ""))
|
|
||||||
for table in tables:
|
for table in tables:
|
||||||
tbls.append(((None, markdown(table, extensions=['markdown.extensions.tables'])), ""))
|
tbls.append(((None, markdown(table, extensions=['markdown.extensions.tables'])), ""))
|
||||||
return sections, tbls
|
return sections, tbls
|
||||||
|
|||||||
@ -36,6 +36,7 @@ class SupportedLiteLLMProvider(StrEnum):
|
|||||||
Nvidia = "NVIDIA"
|
Nvidia = "NVIDIA"
|
||||||
TogetherAI = "TogetherAI"
|
TogetherAI = "TogetherAI"
|
||||||
Anthropic = "Anthropic"
|
Anthropic = "Anthropic"
|
||||||
|
Ollama = "Ollama"
|
||||||
|
|
||||||
|
|
||||||
FACTORY_DEFAULT_BASE_URL = {
|
FACTORY_DEFAULT_BASE_URL = {
|
||||||
@ -59,6 +60,7 @@ LITELLM_PROVIDER_PREFIX = {
|
|||||||
SupportedLiteLLMProvider.Nvidia: "nvidia_nim/",
|
SupportedLiteLLMProvider.Nvidia: "nvidia_nim/",
|
||||||
SupportedLiteLLMProvider.TogetherAI: "together_ai/",
|
SupportedLiteLLMProvider.TogetherAI: "together_ai/",
|
||||||
SupportedLiteLLMProvider.Anthropic: "", # don't need a prefix
|
SupportedLiteLLMProvider.Anthropic: "", # don't need a prefix
|
||||||
|
SupportedLiteLLMProvider.Ollama: "ollama_chat/",
|
||||||
}
|
}
|
||||||
|
|
||||||
ChatModel = globals().get("ChatModel", {})
|
ChatModel = globals().get("ChatModel", {})
|
||||||
|
|||||||
@ -29,7 +29,6 @@ import json_repair
|
|||||||
import litellm
|
import litellm
|
||||||
import openai
|
import openai
|
||||||
import requests
|
import requests
|
||||||
from ollama import Client
|
|
||||||
from openai import OpenAI
|
from openai import OpenAI
|
||||||
from openai.lib.azure import AzureOpenAI
|
from openai.lib.azure import AzureOpenAI
|
||||||
from strenum import StrEnum
|
from strenum import StrEnum
|
||||||
@ -112,6 +111,32 @@ class Base(ABC):
|
|||||||
def _clean_conf(self, gen_conf):
|
def _clean_conf(self, gen_conf):
|
||||||
if "max_tokens" in gen_conf:
|
if "max_tokens" in gen_conf:
|
||||||
del gen_conf["max_tokens"]
|
del gen_conf["max_tokens"]
|
||||||
|
|
||||||
|
allowed_conf = {
|
||||||
|
"temperature",
|
||||||
|
"max_completion_tokens",
|
||||||
|
"top_p",
|
||||||
|
"stream",
|
||||||
|
"stream_options",
|
||||||
|
"stop",
|
||||||
|
"n",
|
||||||
|
"presence_penalty",
|
||||||
|
"frequency_penalty",
|
||||||
|
"functions",
|
||||||
|
"function_call",
|
||||||
|
"logit_bias",
|
||||||
|
"user",
|
||||||
|
"response_format",
|
||||||
|
"seed",
|
||||||
|
"tools",
|
||||||
|
"tool_choice",
|
||||||
|
"logprobs",
|
||||||
|
"top_logprobs",
|
||||||
|
"extra_headers",
|
||||||
|
}
|
||||||
|
|
||||||
|
gen_conf = {k: v for k, v in gen_conf.items() if k in allowed_conf}
|
||||||
|
|
||||||
return gen_conf
|
return gen_conf
|
||||||
|
|
||||||
def _chat(self, history, gen_conf, **kwargs):
|
def _chat(self, history, gen_conf, **kwargs):
|
||||||
@ -657,73 +682,6 @@ class ZhipuChat(Base):
|
|||||||
return super().chat_streamly_with_tools(system, history, gen_conf)
|
return super().chat_streamly_with_tools(system, history, gen_conf)
|
||||||
|
|
||||||
|
|
||||||
class OllamaChat(Base):
|
|
||||||
_FACTORY_NAME = "Ollama"
|
|
||||||
|
|
||||||
def __init__(self, key, model_name, base_url=None, **kwargs):
|
|
||||||
super().__init__(key, model_name, base_url=base_url, **kwargs)
|
|
||||||
|
|
||||||
self.client = Client(host=base_url) if not key or key == "x" else Client(host=base_url, headers={"Authorization": f"Bearer {key}"})
|
|
||||||
self.model_name = model_name
|
|
||||||
self.keep_alive = kwargs.get("ollama_keep_alive", int(os.environ.get("OLLAMA_KEEP_ALIVE", -1)))
|
|
||||||
|
|
||||||
def _clean_conf(self, gen_conf):
|
|
||||||
options = {}
|
|
||||||
if "max_tokens" in gen_conf:
|
|
||||||
options["num_predict"] = gen_conf["max_tokens"]
|
|
||||||
for k in ["temperature", "top_p", "presence_penalty", "frequency_penalty"]:
|
|
||||||
if k not in gen_conf:
|
|
||||||
continue
|
|
||||||
options[k] = gen_conf[k]
|
|
||||||
return options
|
|
||||||
|
|
||||||
def _chat(self, history, gen_conf={}, **kwargs):
|
|
||||||
# Calculate context size
|
|
||||||
ctx_size = self._calculate_dynamic_ctx(history)
|
|
||||||
|
|
||||||
gen_conf["num_ctx"] = ctx_size
|
|
||||||
response = self.client.chat(model=self.model_name, messages=history, options=gen_conf, keep_alive=self.keep_alive)
|
|
||||||
ans = response["message"]["content"].strip()
|
|
||||||
token_count = response.get("eval_count", 0) + response.get("prompt_eval_count", 0)
|
|
||||||
return ans, token_count
|
|
||||||
|
|
||||||
def chat_streamly(self, system, history, gen_conf={}, **kwargs):
|
|
||||||
if system:
|
|
||||||
history.insert(0, {"role": "system", "content": system})
|
|
||||||
if "max_tokens" in gen_conf:
|
|
||||||
del gen_conf["max_tokens"]
|
|
||||||
try:
|
|
||||||
# Calculate context size
|
|
||||||
ctx_size = self._calculate_dynamic_ctx(history)
|
|
||||||
options = {"num_ctx": ctx_size}
|
|
||||||
if "temperature" in gen_conf:
|
|
||||||
options["temperature"] = gen_conf["temperature"]
|
|
||||||
if "max_tokens" in gen_conf:
|
|
||||||
options["num_predict"] = gen_conf["max_tokens"]
|
|
||||||
if "top_p" in gen_conf:
|
|
||||||
options["top_p"] = gen_conf["top_p"]
|
|
||||||
if "presence_penalty" in gen_conf:
|
|
||||||
options["presence_penalty"] = gen_conf["presence_penalty"]
|
|
||||||
if "frequency_penalty" in gen_conf:
|
|
||||||
options["frequency_penalty"] = gen_conf["frequency_penalty"]
|
|
||||||
|
|
||||||
ans = ""
|
|
||||||
try:
|
|
||||||
response = self.client.chat(model=self.model_name, messages=history, stream=True, options=options, keep_alive=self.keep_alive)
|
|
||||||
for resp in response:
|
|
||||||
if resp["done"]:
|
|
||||||
token_count = resp.get("prompt_eval_count", 0) + resp.get("eval_count", 0)
|
|
||||||
yield token_count
|
|
||||||
ans = resp["message"]["content"]
|
|
||||||
yield ans
|
|
||||||
except Exception as e:
|
|
||||||
yield ans + "\n**ERROR**: " + str(e)
|
|
||||||
yield 0
|
|
||||||
except Exception as e:
|
|
||||||
yield "**ERROR**: " + str(e)
|
|
||||||
yield 0
|
|
||||||
|
|
||||||
|
|
||||||
class LocalAIChat(Base):
|
class LocalAIChat(Base):
|
||||||
_FACTORY_NAME = "LocalAI"
|
_FACTORY_NAME = "LocalAI"
|
||||||
|
|
||||||
@ -1396,7 +1354,7 @@ class Ai302Chat(Base):
|
|||||||
|
|
||||||
|
|
||||||
class LiteLLMBase(ABC):
|
class LiteLLMBase(ABC):
|
||||||
_FACTORY_NAME = ["Tongyi-Qianwen", "Bedrock", "Moonshot", "xAI", "DeepInfra", "Groq", "Cohere", "Gemini", "DeepSeek", "NVIDIA", "TogetherAI", "Anthropic"]
|
_FACTORY_NAME = ["Tongyi-Qianwen", "Bedrock", "Moonshot", "xAI", "DeepInfra", "Groq", "Cohere", "Gemini", "DeepSeek", "NVIDIA", "TogetherAI", "Anthropic", "Ollama"]
|
||||||
|
|
||||||
def __init__(self, key, model_name, base_url=None, **kwargs):
|
def __init__(self, key, model_name, base_url=None, **kwargs):
|
||||||
self.timeout = int(os.environ.get("LM_TIMEOUT_SECONDS", 600))
|
self.timeout = int(os.environ.get("LM_TIMEOUT_SECONDS", 600))
|
||||||
|
|||||||
@ -114,6 +114,8 @@ def kb_prompt(kbinfos, max_tokens, hash_id=False):
|
|||||||
docs = {d.id: d.meta_fields for d in docs}
|
docs = {d.id: d.meta_fields for d in docs}
|
||||||
|
|
||||||
def draw_node(k, line):
|
def draw_node(k, line):
|
||||||
|
if line is not None and not isinstance(line, str):
|
||||||
|
line = str(line)
|
||||||
if not line:
|
if not line:
|
||||||
return ""
|
return ""
|
||||||
return f"\n├── {k}: " + re.sub(r"\n+", " ", line, flags=re.DOTALL)
|
return f"\n├── {k}: " + re.sub(r"\n+", " ", line, flags=re.DOTALL)
|
||||||
|
|||||||
@ -42,9 +42,12 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
|
|||||||
self._prompt = prompt
|
self._prompt = prompt
|
||||||
self._max_token = max_token
|
self._max_token = max_token
|
||||||
|
|
||||||
@timeout(60)
|
@timeout(60*20)
|
||||||
async def _chat(self, system, history, gen_conf):
|
async def _chat(self, system, history, gen_conf):
|
||||||
response = get_llm_cache(self._llm_model.llm_name, system, history, gen_conf)
|
response = await trio.to_thread.run_sync(
|
||||||
|
lambda: get_llm_cache(self._llm_model.llm_name, system, history, gen_conf)
|
||||||
|
)
|
||||||
|
|
||||||
if response:
|
if response:
|
||||||
return response
|
return response
|
||||||
response = await trio.to_thread.run_sync(
|
response = await trio.to_thread.run_sync(
|
||||||
@ -53,19 +56,23 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
|
|||||||
response = re.sub(r"^.*</think>", "", response, flags=re.DOTALL)
|
response = re.sub(r"^.*</think>", "", response, flags=re.DOTALL)
|
||||||
if response.find("**ERROR**") >= 0:
|
if response.find("**ERROR**") >= 0:
|
||||||
raise Exception(response)
|
raise Exception(response)
|
||||||
set_llm_cache(self._llm_model.llm_name, system, response, history, gen_conf)
|
await trio.to_thread.run_sync(
|
||||||
|
lambda: set_llm_cache(self._llm_model.llm_name, system, response, history, gen_conf)
|
||||||
|
)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
@timeout(2)
|
@timeout(20)
|
||||||
async def _embedding_encode(self, txt):
|
async def _embedding_encode(self, txt):
|
||||||
response = get_embed_cache(self._embd_model.llm_name, txt)
|
response = await trio.to_thread.run_sync(
|
||||||
|
lambda: get_embed_cache(self._embd_model.llm_name, txt)
|
||||||
|
)
|
||||||
if response is not None:
|
if response is not None:
|
||||||
return response
|
return response
|
||||||
embds, _ = await trio.to_thread.run_sync(lambda: self._embd_model.encode([txt]))
|
embds, _ = await trio.to_thread.run_sync(lambda: self._embd_model.encode([txt]))
|
||||||
if len(embds) < 1 or len(embds[0]) < 1:
|
if len(embds) < 1 or len(embds[0]) < 1:
|
||||||
raise Exception("Embedding error: ")
|
raise Exception("Embedding error: ")
|
||||||
embds = embds[0]
|
embds = embds[0]
|
||||||
set_embed_cache(self._embd_model.llm_name, txt, embds)
|
await trio.to_thread.run_sync(lambda: set_embed_cache(self._embd_model.llm_name, txt, embds))
|
||||||
return embds
|
return embds
|
||||||
|
|
||||||
def _get_optimal_clusters(self, embeddings: np.ndarray, random_state: int):
|
def _get_optimal_clusters(self, embeddings: np.ndarray, random_state: int):
|
||||||
@ -86,7 +93,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
|
|||||||
layers = [(0, len(chunks))]
|
layers = [(0, len(chunks))]
|
||||||
start, end = 0, len(chunks)
|
start, end = 0, len(chunks)
|
||||||
|
|
||||||
@timeout(60)
|
@timeout(60*20)
|
||||||
async def summarize(ck_idx: list[int]):
|
async def summarize(ck_idx: list[int]):
|
||||||
nonlocal chunks
|
nonlocal chunks
|
||||||
texts = [chunks[i][0] for i in ck_idx]
|
texts = [chunks[i][0] for i in ck_idx]
|
||||||
|
|||||||
@ -21,7 +21,7 @@ import sys
|
|||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from api.utils.api_utils import timeout, is_strong_enough
|
from api.utils.api_utils import timeout
|
||||||
from api.utils.log_utils import init_root_logger, get_project_base_directory
|
from api.utils.log_utils import init_root_logger, get_project_base_directory
|
||||||
from graphrag.general.index import run_graphrag
|
from graphrag.general.index import run_graphrag
|
||||||
from graphrag.utils import get_llm_cache, set_llm_cache, get_tags_from_cache, set_tags_to_cache
|
from graphrag.utils import get_llm_cache, set_llm_cache, get_tags_from_cache, set_tags_to_cache
|
||||||
@ -478,8 +478,6 @@ async def embedding(docs, mdl, parser_config=None, callback=None):
|
|||||||
|
|
||||||
@timeout(3600)
|
@timeout(3600)
|
||||||
async def run_raptor(row, chat_mdl, embd_mdl, vector_size, callback=None):
|
async def run_raptor(row, chat_mdl, embd_mdl, vector_size, callback=None):
|
||||||
# Pressure test for GraphRAG task
|
|
||||||
await is_strong_enough(chat_mdl, embd_mdl)
|
|
||||||
chunks = []
|
chunks = []
|
||||||
vctr_nm = "q_%d_vec"%vector_size
|
vctr_nm = "q_%d_vec"%vector_size
|
||||||
for d in settings.retrievaler.chunk_list(row["doc_id"], row["tenant_id"], [str(row["kb_id"])],
|
for d in settings.retrievaler.chunk_list(row["doc_id"], row["tenant_id"], [str(row["kb_id"])],
|
||||||
@ -553,7 +551,6 @@ async def do_handle_task(task):
|
|||||||
try:
|
try:
|
||||||
# bind embedding model
|
# bind embedding model
|
||||||
embedding_model = LLMBundle(task_tenant_id, LLMType.EMBEDDING, llm_name=task_embedding_id, lang=task_language)
|
embedding_model = LLMBundle(task_tenant_id, LLMType.EMBEDDING, llm_name=task_embedding_id, lang=task_language)
|
||||||
await is_strong_enough(None, embedding_model)
|
|
||||||
vts, _ = embedding_model.encode(["ok"])
|
vts, _ = embedding_model.encode(["ok"])
|
||||||
vector_size = len(vts[0])
|
vector_size = len(vts[0])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -568,7 +565,6 @@ async def do_handle_task(task):
|
|||||||
if task.get("task_type", "") == "raptor":
|
if task.get("task_type", "") == "raptor":
|
||||||
# bind LLM for raptor
|
# bind LLM for raptor
|
||||||
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
|
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
|
||||||
await is_strong_enough(chat_model, None)
|
|
||||||
# run RAPTOR
|
# run RAPTOR
|
||||||
async with kg_limiter:
|
async with kg_limiter:
|
||||||
chunks, token_count = await run_raptor(task, chat_model, embedding_model, vector_size, progress_callback)
|
chunks, token_count = await run_raptor(task, chat_model, embedding_model, vector_size, progress_callback)
|
||||||
@ -580,7 +576,6 @@ async def do_handle_task(task):
|
|||||||
graphrag_conf = task["kb_parser_config"].get("graphrag", {})
|
graphrag_conf = task["kb_parser_config"].get("graphrag", {})
|
||||||
start_ts = timer()
|
start_ts = timer()
|
||||||
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
|
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
|
||||||
await is_strong_enough(chat_model, None)
|
|
||||||
with_resolution = graphrag_conf.get("resolution", False)
|
with_resolution = graphrag_conf.get("resolution", False)
|
||||||
with_community = graphrag_conf.get("community", False)
|
with_community = graphrag_conf.get("community", False)
|
||||||
async with kg_limiter:
|
async with kg_limiter:
|
||||||
|
|||||||
@ -22,7 +22,7 @@ from util import format_timeout_duration, parse_timeout_duration
|
|||||||
from core.container import init_containers, teardown_containers
|
from core.container import init_containers, teardown_containers
|
||||||
from core.logger import logger
|
from core.logger import logger
|
||||||
|
|
||||||
TIMEOUT = 10
|
TIMEOUT = parse_timeout_duration(os.getenv("SANDBOX_TIMEOUT", "10s"))
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
@ -39,6 +39,5 @@ async def _lifespan(app: FastAPI):
|
|||||||
|
|
||||||
|
|
||||||
def init():
|
def init():
|
||||||
TIMEOUT = parse_timeout_duration(os.getenv("SANDBOX_TIMEOUT"))
|
|
||||||
logger.info(f"Global timeout: {format_timeout_duration(TIMEOUT)}")
|
logger.info(f"Global timeout: {format_timeout_duration(TIMEOUT)}")
|
||||||
return _lifespan
|
return _lifespan
|
||||||
|
|||||||
6
uv.lock
generated
6
uv.lock
generated
@ -2603,7 +2603,7 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "infinity-sdk"
|
name = "infinity-sdk"
|
||||||
version = "0.6.0.dev4"
|
version = "0.6.0.dev5"
|
||||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "numpy" },
|
{ name = "numpy" },
|
||||||
@ -2620,7 +2620,7 @@ dependencies = [
|
|||||||
{ name = "thrift" },
|
{ name = "thrift" },
|
||||||
]
|
]
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d4/cc/645ed8de15952940c7308a788036376583a5fc29fdcf3e4bc75b5ad0c881/infinity_sdk-0.6.0.dev4-py3-none-any.whl", hash = "sha256:f8f4bd8a44e3fae7b4228b5c9e9a16559b4905f50d2d7d0a3d18f39974613e7a" },
|
{ url = "https://mirrors.aliyun.com/pypi/packages/fe/a4/6079bf9790f16badc01e7b79a28c90bec407cfcaa8a2ed37e4a68120f87a/infinity_sdk-0.6.0.dev5-py3-none-any.whl", hash = "sha256:510ac408d5cd9d3d4df33c7c0877f55c5ae8a6019e465190c86d58012a319179" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -5471,7 +5471,7 @@ requires-dist = [
|
|||||||
{ name = "httpx", extras = ["socks"], specifier = "==0.27.2" },
|
{ name = "httpx", extras = ["socks"], specifier = "==0.27.2" },
|
||||||
{ name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
|
{ name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
|
||||||
{ name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
|
{ name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
|
||||||
{ name = "infinity-sdk", specifier = "==0.6.0.dev4" },
|
{ name = "infinity-sdk", specifier = "==0.6.0.dev5" },
|
||||||
{ name = "itsdangerous", specifier = "==2.1.2" },
|
{ name = "itsdangerous", specifier = "==2.1.2" },
|
||||||
{ name = "json-repair", specifier = "==0.35.0" },
|
{ name = "json-repair", specifier = "==0.35.0" },
|
||||||
{ name = "langfuse", specifier = ">=2.60.0" },
|
{ name = "langfuse", specifier = ">=2.60.0" },
|
||||||
|
|||||||
@ -14,7 +14,7 @@ module.exports = {
|
|||||||
'error',
|
'error',
|
||||||
{
|
{
|
||||||
'**/*.{jsx,tsx}': 'KEBAB_CASE',
|
'**/*.{jsx,tsx}': 'KEBAB_CASE',
|
||||||
'**/*.{js,ts}': 'KEBAB_CASE',
|
'**/*.{js,ts}': '[a-z0-9.-]*',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
'check-file/folder-naming-convention': [
|
'check-file/folder-naming-convention': [
|
||||||
|
|||||||
@ -2,7 +2,6 @@ import { Toaster as Sonner } from '@/components/ui/sonner';
|
|||||||
import { Toaster } from '@/components/ui/toaster';
|
import { Toaster } from '@/components/ui/toaster';
|
||||||
import i18n from '@/locales/config';
|
import i18n from '@/locales/config';
|
||||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||||
import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
|
|
||||||
import { App, ConfigProvider, ConfigProviderProps, theme } from 'antd';
|
import { App, ConfigProvider, ConfigProviderProps, theme } from 'antd';
|
||||||
import pt_BR from 'antd/lib/locale/pt_BR';
|
import pt_BR from 'antd/lib/locale/pt_BR';
|
||||||
import deDE from 'antd/locale/de_DE';
|
import deDE from 'antd/locale/de_DE';
|
||||||
@ -85,7 +84,7 @@ function Root({ children }: React.PropsWithChildren) {
|
|||||||
<Sonner position={'top-right'} expand richColors closeButton></Sonner>
|
<Sonner position={'top-right'} expand richColors closeButton></Sonner>
|
||||||
<Toaster />
|
<Toaster />
|
||||||
</ConfigProvider>
|
</ConfigProvider>
|
||||||
<ReactQueryDevtools buttonPosition={'top-left'} />
|
{/* <ReactQueryDevtools buttonPosition={'top-left'} initialIsOpen={false} /> */}
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -8,47 +8,93 @@ import {
|
|||||||
} from '@/components/ui/dialog';
|
} from '@/components/ui/dialog';
|
||||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
|
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
|
||||||
import { IModalProps } from '@/interfaces/common';
|
import { IModalProps } from '@/interfaces/common';
|
||||||
import { Dispatch, SetStateAction, useCallback, useState } from 'react';
|
import { zodResolver } from '@hookform/resolvers/zod';
|
||||||
|
import { TFunction } from 'i18next';
|
||||||
|
import { useForm } from 'react-hook-form';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { z } from 'zod';
|
||||||
import { FileUploader } from '../file-uploader';
|
import { FileUploader } from '../file-uploader';
|
||||||
|
import { RAGFlowFormItem } from '../ragflow-form';
|
||||||
|
import { Form } from '../ui/form';
|
||||||
|
import { Switch } from '../ui/switch';
|
||||||
|
|
||||||
type UploaderTabsProps = {
|
function buildUploadFormSchema(t: TFunction) {
|
||||||
setFiles: Dispatch<SetStateAction<File[]>>;
|
const FormSchema = z.object({
|
||||||
|
parseOnCreation: z.boolean().optional(),
|
||||||
|
fileList: z
|
||||||
|
.array(z.instanceof(File))
|
||||||
|
.min(1, { message: t('fileManager.pleaseUploadAtLeastOneFile') }),
|
||||||
|
});
|
||||||
|
|
||||||
|
return FormSchema;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type UploadFormSchemaType = z.infer<
|
||||||
|
ReturnType<typeof buildUploadFormSchema>
|
||||||
|
>;
|
||||||
|
|
||||||
|
const UploadFormId = 'UploadFormId';
|
||||||
|
|
||||||
|
type UploadFormProps = {
|
||||||
|
submit: (values?: UploadFormSchemaType) => void;
|
||||||
|
showParseOnCreation?: boolean;
|
||||||
};
|
};
|
||||||
|
function UploadForm({ submit, showParseOnCreation }: UploadFormProps) {
|
||||||
export function UploaderTabs({ setFiles }: UploaderTabsProps) {
|
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
|
const FormSchema = buildUploadFormSchema(t);
|
||||||
|
|
||||||
|
type UploadFormSchemaType = z.infer<typeof FormSchema>;
|
||||||
|
const form = useForm<UploadFormSchemaType>({
|
||||||
|
resolver: zodResolver(FormSchema),
|
||||||
|
defaultValues: {
|
||||||
|
parseOnCreation: false,
|
||||||
|
fileList: [],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Tabs defaultValue="account">
|
<Form {...form}>
|
||||||
<TabsList className="grid w-full grid-cols-2 mb-4">
|
<form
|
||||||
<TabsTrigger value="account">{t('fileManager.local')}</TabsTrigger>
|
onSubmit={form.handleSubmit(submit)}
|
||||||
<TabsTrigger value="password">{t('fileManager.s3')}</TabsTrigger>
|
id={UploadFormId}
|
||||||
</TabsList>
|
className="space-y-4"
|
||||||
<TabsContent value="account">
|
>
|
||||||
<FileUploader
|
{showParseOnCreation && (
|
||||||
maxFileCount={8}
|
<RAGFlowFormItem
|
||||||
maxSize={8 * 1024 * 1024}
|
name="parseOnCreation"
|
||||||
onValueChange={setFiles}
|
label={t('fileManager.parseOnCreation')}
|
||||||
accept={{ '*': [] }}
|
>
|
||||||
/>
|
{(field) => (
|
||||||
</TabsContent>
|
<Switch
|
||||||
<TabsContent value="password">{t('common.comingSoon')}</TabsContent>
|
onCheckedChange={field.onChange}
|
||||||
</Tabs>
|
checked={field.value}
|
||||||
|
></Switch>
|
||||||
|
)}
|
||||||
|
</RAGFlowFormItem>
|
||||||
|
)}
|
||||||
|
<RAGFlowFormItem name="fileList" label={t('fileManager.file')}>
|
||||||
|
{(field) => (
|
||||||
|
<FileUploader
|
||||||
|
value={field.value}
|
||||||
|
onValueChange={field.onChange}
|
||||||
|
accept={{ '*': [] }}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</RAGFlowFormItem>
|
||||||
|
</form>
|
||||||
|
</Form>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type FileUploadDialogProps = IModalProps<UploadFormSchemaType> &
|
||||||
|
Pick<UploadFormProps, 'showParseOnCreation'>;
|
||||||
export function FileUploadDialog({
|
export function FileUploadDialog({
|
||||||
hideModal,
|
hideModal,
|
||||||
onOk,
|
onOk,
|
||||||
loading,
|
loading,
|
||||||
}: IModalProps<File[]>) {
|
showParseOnCreation = false,
|
||||||
|
}: FileUploadDialogProps) {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const [files, setFiles] = useState<File[]>([]);
|
|
||||||
|
|
||||||
const handleOk = useCallback(() => {
|
|
||||||
onOk?.(files);
|
|
||||||
}, [files, onOk]);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Dialog open onOpenChange={hideModal}>
|
<Dialog open onOpenChange={hideModal}>
|
||||||
@ -56,9 +102,21 @@ export function FileUploadDialog({
|
|||||||
<DialogHeader>
|
<DialogHeader>
|
||||||
<DialogTitle>{t('fileManager.uploadFile')}</DialogTitle>
|
<DialogTitle>{t('fileManager.uploadFile')}</DialogTitle>
|
||||||
</DialogHeader>
|
</DialogHeader>
|
||||||
<UploaderTabs setFiles={setFiles}></UploaderTabs>
|
<Tabs defaultValue="account">
|
||||||
|
<TabsList className="grid w-full grid-cols-2 mb-4">
|
||||||
|
<TabsTrigger value="account">{t('fileManager.local')}</TabsTrigger>
|
||||||
|
<TabsTrigger value="password">{t('fileManager.s3')}</TabsTrigger>
|
||||||
|
</TabsList>
|
||||||
|
<TabsContent value="account">
|
||||||
|
<UploadForm
|
||||||
|
submit={onOk!}
|
||||||
|
showParseOnCreation={showParseOnCreation}
|
||||||
|
></UploadForm>
|
||||||
|
</TabsContent>
|
||||||
|
<TabsContent value="password">{t('common.comingSoon')}</TabsContent>
|
||||||
|
</Tabs>
|
||||||
<DialogFooter>
|
<DialogFooter>
|
||||||
<ButtonLoading type="submit" onClick={handleOk} loading={loading}>
|
<ButtonLoading type="submit" loading={loading} form={UploadFormId}>
|
||||||
{t('common.save')}
|
{t('common.save')}
|
||||||
</ButtonLoading>
|
</ButtonLoading>
|
||||||
</DialogFooter>
|
</DialogFooter>
|
||||||
|
|||||||
@ -15,6 +15,7 @@ import { Progress } from '@/components/ui/progress';
|
|||||||
import { ScrollArea } from '@/components/ui/scroll-area';
|
import { ScrollArea } from '@/components/ui/scroll-area';
|
||||||
import { useControllableState } from '@/hooks/use-controllable-state';
|
import { useControllableState } from '@/hooks/use-controllable-state';
|
||||||
import { cn, formatBytes } from '@/lib/utils';
|
import { cn, formatBytes } from '@/lib/utils';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
function isFileWithPreview(file: File): file is File & { preview: string } {
|
function isFileWithPreview(file: File): file is File & { preview: string } {
|
||||||
return 'preview' in file && typeof file.preview === 'string';
|
return 'preview' in file && typeof file.preview === 'string';
|
||||||
@ -168,14 +169,14 @@ export function FileUploader(props: FileUploaderProps) {
|
|||||||
accept = {
|
accept = {
|
||||||
'image/*': [],
|
'image/*': [],
|
||||||
},
|
},
|
||||||
maxSize = 1024 * 1024 * 2,
|
maxSize = 1024 * 1024 * 10000000,
|
||||||
maxFileCount = 1,
|
maxFileCount = 100000000000,
|
||||||
multiple = false,
|
multiple = false,
|
||||||
disabled = false,
|
disabled = false,
|
||||||
className,
|
className,
|
||||||
...dropzoneProps
|
...dropzoneProps
|
||||||
} = props;
|
} = props;
|
||||||
|
const { t } = useTranslation();
|
||||||
const [files, setFiles] = useControllableState({
|
const [files, setFiles] = useControllableState({
|
||||||
prop: valueProp,
|
prop: valueProp,
|
||||||
onChange: onValueChange,
|
onChange: onValueChange,
|
||||||
@ -267,7 +268,7 @@ export function FileUploader(props: FileUploaderProps) {
|
|||||||
<div
|
<div
|
||||||
{...getRootProps()}
|
{...getRootProps()}
|
||||||
className={cn(
|
className={cn(
|
||||||
'group relative grid h-52 w-full cursor-pointer place-items-center rounded-lg border-2 border-dashed border-muted-foreground/25 px-5 py-2.5 text-center transition hover:bg-muted/25',
|
'group relative grid h-72 w-full cursor-pointer place-items-center rounded-lg border-2 border-dashed border-muted-foreground/25 px-5 py-2.5 text-center transition hover:bg-muted/25',
|
||||||
'ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
|
'ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
|
||||||
isDragActive && 'border-muted-foreground/50',
|
isDragActive && 'border-muted-foreground/50',
|
||||||
isDisabled && 'pointer-events-none opacity-60',
|
isDisabled && 'pointer-events-none opacity-60',
|
||||||
@ -298,14 +299,15 @@ export function FileUploader(props: FileUploaderProps) {
|
|||||||
</div>
|
</div>
|
||||||
<div className="flex flex-col gap-px">
|
<div className="flex flex-col gap-px">
|
||||||
<p className="font-medium text-muted-foreground">
|
<p className="font-medium text-muted-foreground">
|
||||||
Drag {`'n'`} drop files here, or click to select files
|
{t('knowledgeDetails.uploadTitle')}
|
||||||
</p>
|
</p>
|
||||||
<p className="text-sm text-muted-foreground/70">
|
<p className="text-sm text-muted-foreground/70">
|
||||||
You can upload
|
{t('knowledgeDetails.uploadDescription')}
|
||||||
|
{/* You can upload
|
||||||
{maxFileCount > 1
|
{maxFileCount > 1
|
||||||
? ` ${maxFileCount === Infinity ? 'multiple' : maxFileCount}
|
? ` ${maxFileCount === Infinity ? 'multiple' : maxFileCount}
|
||||||
files (up to ${formatBytes(maxSize)} each)`
|
files (up to ${formatBytes(maxSize)} each)`
|
||||||
: ` a file with ${formatBytes(maxSize)}`}
|
: ` a file with ${formatBytes(maxSize)}`} */}
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -1,6 +1,7 @@
|
|||||||
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
|
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
|
||||||
import { Card, CardContent } from '@/components/ui/card';
|
import { Card, CardContent } from '@/components/ui/card';
|
||||||
import { formatDate } from '@/utils/date';
|
import { formatDate } from '@/utils/date';
|
||||||
|
import { ReactNode } from 'react';
|
||||||
|
|
||||||
interface IProps {
|
interface IProps {
|
||||||
data: {
|
data: {
|
||||||
@ -11,8 +12,9 @@ interface IProps {
|
|||||||
};
|
};
|
||||||
onClick?: () => void;
|
onClick?: () => void;
|
||||||
moreDropdown: React.ReactNode;
|
moreDropdown: React.ReactNode;
|
||||||
|
sharedBadge?: ReactNode;
|
||||||
}
|
}
|
||||||
export function HomeCard({ data, onClick, moreDropdown }: IProps) {
|
export function HomeCard({ data, onClick, moreDropdown, sharedBadge }: IProps) {
|
||||||
return (
|
return (
|
||||||
<Card
|
<Card
|
||||||
className="bg-bg-card border-colors-outline-neutral-standard"
|
className="bg-bg-card border-colors-outline-neutral-standard"
|
||||||
@ -31,7 +33,7 @@ export function HomeCard({ data, onClick, moreDropdown }: IProps) {
|
|||||||
</div>
|
</div>
|
||||||
<div className="flex flex-col justify-between gap-1 flex-1 h-full w-[calc(100%-50px)]">
|
<div className="flex flex-col justify-between gap-1 flex-1 h-full w-[calc(100%-50px)]">
|
||||||
<section className="flex justify-between">
|
<section className="flex justify-between">
|
||||||
<div className="text-[20px] font-bold w-80% leading-5">
|
<div className="text-[20px] font-bold w-80% leading-5 text-ellipsis overflow-hidden">
|
||||||
{data.name}
|
{data.name}
|
||||||
</div>
|
</div>
|
||||||
{moreDropdown}
|
{moreDropdown}
|
||||||
@ -41,10 +43,11 @@ export function HomeCard({ data, onClick, moreDropdown }: IProps) {
|
|||||||
<div className="whitespace-nowrap overflow-hidden text-ellipsis">
|
<div className="whitespace-nowrap overflow-hidden text-ellipsis">
|
||||||
{data.description}
|
{data.description}
|
||||||
</div>
|
</div>
|
||||||
<div>
|
<div className="flex justify-between items-center">
|
||||||
<p className="text-sm opacity-80">
|
<p className="text-sm opacity-80">
|
||||||
{formatDate(data.update_time)}
|
{formatDate(data.update_time)}
|
||||||
</p>
|
</p>
|
||||||
|
{sharedBadge}
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -68,7 +68,7 @@ export function LayoutRecognizeFormField() {
|
|||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
<FormLabel
|
<FormLabel
|
||||||
tooltip={t('layoutRecognizeTip')}
|
tooltip={t('layoutRecognizeTip')}
|
||||||
className="text-sm text-muted-foreground whitespace-nowrap w-1/4"
|
className="text-sm text-muted-foreground whitespace-wrap w-1/4"
|
||||||
>
|
>
|
||||||
{t('layoutRecognize')}
|
{t('layoutRecognize')}
|
||||||
</FormLabel>
|
</FormLabel>
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
import { ChevronDown } from 'lucide-react';
|
import { Funnel } from 'lucide-react';
|
||||||
import React, {
|
import React, {
|
||||||
ChangeEventHandler,
|
ChangeEventHandler,
|
||||||
PropsWithChildren,
|
PropsWithChildren,
|
||||||
@ -25,20 +25,20 @@ export const FilterButton = React.forwardRef<
|
|||||||
>(({ count = 0, ...props }, ref) => {
|
>(({ count = 0, ...props }, ref) => {
|
||||||
return (
|
return (
|
||||||
<Button variant="secondary" {...props} ref={ref}>
|
<Button variant="secondary" {...props} ref={ref}>
|
||||||
<span
|
{/* <span
|
||||||
className={cn({
|
className={cn({
|
||||||
'text-text-primary': count > 0,
|
'text-text-primary': count > 0,
|
||||||
'text-text-sub-title-invert': count === 0,
|
'text-text-sub-title-invert': count === 0,
|
||||||
})}
|
})}
|
||||||
>
|
>
|
||||||
Filter
|
Filter
|
||||||
</span>
|
</span> */}
|
||||||
{count > 0 && (
|
{count > 0 && (
|
||||||
<span className="rounded-full bg-text-badge px-1 text-xs ">
|
<span className="rounded-full bg-text-badge px-1 text-xs ">
|
||||||
{count}
|
{count}
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
<ChevronDown />
|
<Funnel />
|
||||||
</Button>
|
</Button>
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@ -2,6 +2,7 @@ import { LlmModelType } from '@/constants/knowledge';
|
|||||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||||
import * as SelectPrimitive from '@radix-ui/react-select';
|
import * as SelectPrimitive from '@radix-ui/react-select';
|
||||||
import { forwardRef, memo, useMemo, useState } from 'react';
|
import { forwardRef, memo, useMemo, useState } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
import { LlmSettingFieldItems } from '../llm-setting-items/next';
|
import { LlmSettingFieldItems } from '../llm-setting-items/next';
|
||||||
import { Popover, PopoverContent, PopoverTrigger } from '../ui/popover';
|
import { Popover, PopoverContent, PopoverTrigger } from '../ui/popover';
|
||||||
import { Select, SelectTrigger, SelectValue } from '../ui/select';
|
import { Select, SelectTrigger, SelectValue } from '../ui/select';
|
||||||
@ -20,6 +21,7 @@ const NextInnerLLMSelect = forwardRef<
|
|||||||
React.ElementRef<typeof SelectPrimitive.Trigger>,
|
React.ElementRef<typeof SelectPrimitive.Trigger>,
|
||||||
NextInnerLLMSelectProps
|
NextInnerLLMSelectProps
|
||||||
>(({ value, disabled, filter, showSpeech2TextModel = false }, ref) => {
|
>(({ value, disabled, filter, showSpeech2TextModel = false }, ref) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
|
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
|
||||||
|
|
||||||
const ttsModel = useMemo(() => {
|
const ttsModel = useMemo(() => {
|
||||||
@ -49,7 +51,7 @@ const NextInnerLLMSelect = forwardRef<
|
|||||||
}}
|
}}
|
||||||
ref={ref}
|
ref={ref}
|
||||||
>
|
>
|
||||||
<SelectValue>
|
<SelectValue placeholder={t('common.pleaseSelect')}>
|
||||||
{
|
{
|
||||||
modelOptions
|
modelOptions
|
||||||
.flatMap((x) => x.options)
|
.flatMap((x) => x.options)
|
||||||
|
|||||||
@ -19,6 +19,7 @@ type SliderInputSwitchFormFieldProps = {
|
|||||||
name: string;
|
name: string;
|
||||||
label: string;
|
label: string;
|
||||||
defaultValue?: number;
|
defaultValue?: number;
|
||||||
|
onChange?: (value: number) => void;
|
||||||
className?: string;
|
className?: string;
|
||||||
checkName: string;
|
checkName: string;
|
||||||
};
|
};
|
||||||
@ -30,6 +31,7 @@ export function SliderInputSwitchFormField({
|
|||||||
label,
|
label,
|
||||||
name,
|
name,
|
||||||
defaultValue,
|
defaultValue,
|
||||||
|
onChange,
|
||||||
className,
|
className,
|
||||||
checkName,
|
checkName,
|
||||||
}: SliderInputSwitchFormFieldProps) {
|
}: SliderInputSwitchFormFieldProps) {
|
||||||
@ -66,6 +68,10 @@ export function SliderInputSwitchFormField({
|
|||||||
<FormControl>
|
<FormControl>
|
||||||
<SingleFormSlider
|
<SingleFormSlider
|
||||||
{...field}
|
{...field}
|
||||||
|
onChange={(value: number) => {
|
||||||
|
onChange?.(value);
|
||||||
|
field.onChange(value);
|
||||||
|
}}
|
||||||
max={max}
|
max={max}
|
||||||
min={min}
|
min={min}
|
||||||
step={step}
|
step={step}
|
||||||
@ -80,6 +86,10 @@ export function SliderInputSwitchFormField({
|
|||||||
min={min}
|
min={min}
|
||||||
step={step}
|
step={step}
|
||||||
{...field}
|
{...field}
|
||||||
|
onChange={(value: number) => {
|
||||||
|
onChange?.(value);
|
||||||
|
field.onChange(value);
|
||||||
|
}}
|
||||||
></NumberInput>
|
></NumberInput>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -58,7 +58,10 @@ export function MetadataFilter({ prefix = '' }: MetadataFilterProps) {
|
|||||||
name={methodName}
|
name={methodName}
|
||||||
tooltip={t('metadataTip')}
|
tooltip={t('metadataTip')}
|
||||||
>
|
>
|
||||||
<SelectWithSearch options={MetadataOptions} />
|
<SelectWithSearch
|
||||||
|
options={MetadataOptions}
|
||||||
|
triggerClassName="!bg-bg-input"
|
||||||
|
/>
|
||||||
</RAGFlowFormItem>
|
</RAGFlowFormItem>
|
||||||
)}
|
)}
|
||||||
{hasKnowledge && metadata === DatasetMetadata.Manual && (
|
{hasKnowledge && metadata === DatasetMetadata.Manual && (
|
||||||
|
|||||||
@ -28,6 +28,7 @@ import {
|
|||||||
PopoverTrigger,
|
PopoverTrigger,
|
||||||
} from '@/components/ui/popover';
|
} from '@/components/ui/popover';
|
||||||
import { cn } from '@/lib/utils';
|
import { cn } from '@/lib/utils';
|
||||||
|
import { t } from 'i18next';
|
||||||
import { RAGFlowSelectOptionType } from '../ui/select';
|
import { RAGFlowSelectOptionType } from '../ui/select';
|
||||||
import { Separator } from '../ui/separator';
|
import { Separator } from '../ui/separator';
|
||||||
|
|
||||||
@ -114,7 +115,9 @@ export const SelectWithSearch = forwardRef<
|
|||||||
<span className="leading-none truncate">{selectLabel}</span>
|
<span className="leading-none truncate">{selectLabel}</span>
|
||||||
</span>
|
</span>
|
||||||
) : (
|
) : (
|
||||||
<span className="text-muted-foreground">Select value</span>
|
<span className="text-muted-foreground">
|
||||||
|
{t('common.selectPlaceholder')}
|
||||||
|
</span>
|
||||||
)}
|
)}
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
{value && allowClear && (
|
{value && allowClear && (
|
||||||
|
|||||||
@ -3,7 +3,7 @@ import { DocumentParserType } from '@/constants/knowledge';
|
|||||||
import { useTranslate } from '@/hooks/common-hooks';
|
import { useTranslate } from '@/hooks/common-hooks';
|
||||||
import random from 'lodash/random';
|
import random from 'lodash/random';
|
||||||
import { Plus } from 'lucide-react';
|
import { Plus } from 'lucide-react';
|
||||||
import { useCallback, useEffect } from 'react';
|
import { useCallback } from 'react';
|
||||||
import { useFormContext, useWatch } from 'react-hook-form';
|
import { useFormContext, useWatch } from 'react-hook-form';
|
||||||
import { SliderInputFormField } from '../slider-input-form-field';
|
import { SliderInputFormField } from '../slider-input-form-field';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
@ -57,15 +57,19 @@ const RaptorFormFields = () => {
|
|||||||
const form = useFormContext();
|
const form = useFormContext();
|
||||||
const { t } = useTranslate('knowledgeConfiguration');
|
const { t } = useTranslate('knowledgeConfiguration');
|
||||||
const useRaptor = useWatch({ name: UseRaptorField });
|
const useRaptor = useWatch({ name: UseRaptorField });
|
||||||
useEffect(() => {
|
|
||||||
if (useRaptor) {
|
const changeRaptor = useCallback(
|
||||||
form.setValue(MaxTokenField, 256);
|
(isUseRaptor: boolean) => {
|
||||||
form.setValue(ThresholdField, 0.1);
|
if (isUseRaptor) {
|
||||||
form.setValue(MaxCluster, 64);
|
form.setValue(MaxTokenField, 256);
|
||||||
form.setValue(RandomSeedField, 0);
|
form.setValue(ThresholdField, 0.1);
|
||||||
form.setValue(Prompt, t('promptText'));
|
form.setValue(MaxCluster, 64);
|
||||||
}
|
form.setValue(RandomSeedField, 0);
|
||||||
}, [form, useRaptor, t]);
|
form.setValue(Prompt, t('promptText'));
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[form],
|
||||||
|
);
|
||||||
|
|
||||||
const handleGenerate = useCallback(() => {
|
const handleGenerate = useCallback(() => {
|
||||||
form.setValue(RandomSeedField, random(10000));
|
form.setValue(RandomSeedField, random(10000));
|
||||||
@ -97,7 +101,10 @@ const RaptorFormFields = () => {
|
|||||||
<FormControl>
|
<FormControl>
|
||||||
<Switch
|
<Switch
|
||||||
checked={field.value}
|
checked={field.value}
|
||||||
onCheckedChange={field.onChange}
|
onCheckedChange={(e) => {
|
||||||
|
changeRaptor(e);
|
||||||
|
field.onChange(e);
|
||||||
|
}}
|
||||||
></Switch>
|
></Switch>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
</div>
|
</div>
|
||||||
@ -127,7 +134,13 @@ const RaptorFormFields = () => {
|
|||||||
</FormLabel>
|
</FormLabel>
|
||||||
<div className="w-3/4">
|
<div className="w-3/4">
|
||||||
<FormControl>
|
<FormControl>
|
||||||
<Textarea {...field} rows={8} />
|
<Textarea
|
||||||
|
{...field}
|
||||||
|
rows={8}
|
||||||
|
onChange={(e) => {
|
||||||
|
field.onChange(e?.target?.value);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -5,6 +5,7 @@ import {
   FormLabel,
   FormMessage,
 } from '@/components/ui/form';
+import { cn } from '@/lib/utils';
 import { ReactNode, cloneElement, isValidElement } from 'react';
 import { ControllerRenderProps, useFormContext } from 'react-hook-form';

@@ -13,6 +14,7 @@ type RAGFlowFormItemProps = {
   label: ReactNode;
   tooltip?: ReactNode;
   children: ReactNode | ((field: ControllerRenderProps) => ReactNode);
+  horizontal?: boolean;
 };

 export function RAGFlowFormItem({
@@ -20,6 +22,7 @@ export function RAGFlowFormItem({
   label,
   tooltip,
   children,
+  horizontal = false,
 }: RAGFlowFormItemProps) {
   const form = useFormContext();
   return (
@@ -27,8 +30,14 @@ export function RAGFlowFormItem({
       control={form.control}
       name={name}
       render={({ field }) => (
-        <FormItem>
-          <FormLabel tooltip={tooltip}>{label}</FormLabel>
+        <FormItem
+          className={cn({
+            'flex items-center': horizontal,
+          })}
+        >
+          <FormLabel tooltip={tooltip} className={cn({ 'w-1/4': horizontal })}>
+            {label}
+          </FormLabel>
           <FormControl>
             {typeof children === 'function'
               ? children(field)
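For orientation, a minimal usage sketch of the new `horizontal` layout added above; the import path, field name, label, and `Input` child are illustrative assumptions, not taken from the diff:

// Hypothetical call site: with `horizontal`, the label sits in a 1/4-width
// column next to the control instead of being stacked above it.
import { Input } from '@/components/ui/input';
import { RAGFlowFormItem } from '@/components/ragflow-form'; // assumed path

export function ApiKeyField() {
  return (
    <RAGFlowFormItem name="api_key" label="API key" horizontal>
      {(field) => <Input {...field} />}
    </RAGFlowFormItem>
  );
}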
@@ -8,9 +8,5 @@ export function SharedBadge({ children }: PropsWithChildren) {
     return null;
   }

-  return (
-    <span className="bg-text-secondary rounded-sm px-1 text-bg-base text-xs">
-      {children}
-    </span>
-  );
+  return <span className="bg-bg-card rounded-sm px-1 text-xs">{children}</span>;
 }
@@ -17,7 +17,7 @@ const buttonVariants = cva(
         outline:
           'border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50',
         secondary:
-          'bg-secondary text-secondary-foreground shadow-xs hover:bg-secondary/80',
+          'bg-bg-input text-secondary-foreground shadow-xs hover:bg-bg-input/80',
         ghost:
           'hover:bg-accent hover:text-accent-foreground dark:hover:bg-accent/50',
         link: 'text-primary underline-offset-4 hover:underline',
@@ -116,7 +116,10 @@ export { ExpandedInput, Input, SearchInput };

 type NumberInputProps = { onChange?(value: number): void } & InputProps;

-export const NumberInput = ({ onChange, ...props }: NumberInputProps) => {
+export const NumberInput = React.forwardRef<
+  HTMLInputElement,
+  NumberInputProps & { value: Value; onChange(value: Value): void }
+>(function NumberInput({ onChange, ...props }, ref) {
   return (
     <Input
       type="number"
@@ -125,6 +128,7 @@ export const NumberInput = ({ onChange, ...props }: NumberInputProps) => {
         onChange?.(value === '' ? 0 : Number(value)); // convert to number
       }}
       {...props}
+      ref={ref}
     ></Input>
   );
-};
+});
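The `forwardRef` change above mainly matters for form libraries that attach a ref to their controls. A sketch of the kind of call site it enables; the import path and field name are assumptions, not shown in the diff:

import { NumberInput } from '@/components/ui/input'; // assumed export path
import { Controller, useForm } from 'react-hook-form';

export function MaxTokenField() {
  const { control } = useForm<{ maxToken: number }>({
    defaultValues: { maxToken: 256 },
  });
  return (
    <Controller
      control={control}
      name="maxToken"
      render={({ field }) => (
        // Because NumberInput now forwards its ref, react-hook-form can
        // register and focus the underlying <input> element via field.ref.
        <NumberInput {...field} />
      )}
    />
  );
}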
@@ -209,8 +209,16 @@ export const MultiSelect = React.forwardRef<
     const [isAnimating, setIsAnimating] = React.useState(false);

     React.useEffect(() => {
-      setSelectedValues(defaultValue);
-    }, [defaultValue]);
+      if (!selectedValues && props.value) {
+        setSelectedValues(props.value as string[]);
+      }
+    }, [props.value, selectedValues]);
+
+    React.useEffect(() => {
+      if (!selectedValues && !props.value && defaultValue) {
+        setSelectedValues(defaultValue);
+      }
+    }, [defaultValue, props.value, selectedValues]);

     const flatOptions = React.useMemo(() => {
       return options.flatMap((option) =>
@@ -291,15 +299,18 @@ export const MultiSelect = React.forwardRef<
                       variant="secondary"
                       className={cn(
                         isAnimating ? 'animate-bounce' : '',
+                        'px-1',
                         multiSelectVariants({ variant }),
                       )}
                       style={{ animationDuration: `${animation}s` }}
                     >
-                      <div className="flex items-center gap-1">
+                      <div className="flex justify-between items-center gap-1">
                         {IconComponent && (
                           <IconComponent className="h-4 w-4" />
                         )}
-                        <div>{option?.label}</div>
+                        <div className="max-w-28 text-ellipsis overflow-hidden">
+                          {option?.label}
+                        </div>
                         <XCircle
                           className="h-4 w-4 cursor-pointer"
                           onClick={(event) => {
@@ -12,13 +12,13 @@ const Progress = React.forwardRef<
   <ProgressPrimitive.Root
     ref={ref}
     className={cn(
-      'relative h-4 w-full overflow-hidden rounded-full bg-secondary',
+      'relative h-4 w-full overflow-hidden rounded-full bg-bg-accent',
       className,
     )}
     {...props}
   >
     <ProgressPrimitive.Indicator
-      className="h-full w-full flex-1 bg-primary transition-all"
+      className="h-full w-full flex-1 bg-accent-primary transition-all"
      style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
     />
   </ProgressPrimitive.Root>
@@ -23,6 +23,7 @@ export interface SegmentedProps
   prefixCls?: string;
   direction?: 'ltr' | 'rtl';
   motionName?: string;
+  activeClassName?: string;
 }

 export function Segmented({
@@ -30,6 +31,7 @@ export function Segmented({
   value,
   onChange,
   className,
+  activeClassName,
 }: SegmentedProps) {
   const [selectedValue, setSelectedValue] = React.useState<
     SegmentedValue | undefined
@@ -57,9 +59,12 @@ export function Segmented({
           className={cn(
             'inline-flex items-center px-6 py-2 text-base font-normal rounded-3xl cursor-pointer',
             {
-              'bg-text-primary': selectedValue === actualValue,
-              'text-bg-base': selectedValue === actualValue,
+              'text-bg-base bg-metallic-gradient border-b-[#00BEB4] border-b-2':
+                selectedValue === actualValue,
             },
+            activeClassName && selectedValue === actualValue
+              ? activeClassName
+              : '',
           )}
           onClick={() => handleOnChange(actualValue)}
         >
@@ -54,7 +54,7 @@ const Textarea = forwardRef<HTMLTextAreaElement, TextareaProps>(
     return (
       <textarea
         className={cn(
-          'flex min-h-[80px] w-full bg-bg-card rounded-md border border-input px-3 py-2 text-base ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm overflow-hidden',
+          'flex min-h-[80px] w-full bg-bg-input rounded-md border border-input px-3 py-2 text-base ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm overflow-hidden',
           className,
         )}
         rows={autoSize?.minRows ?? props.rows ?? undefined}
web/src/constants/permission.ts (new file, +4 lines)
@@ -0,0 +1,4 @@
+export enum PermissionRole {
+  Me = 'me',
+  Team = 'team',
+}
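A possible consumer of the new enum, shown only as a hedged sketch; the helper name and semantics are illustrative assumptions, not part of the diff:

import { PermissionRole } from '@/constants/permission';

// Hypothetical helper: 'team' makes a dataset visible to teammates,
// while 'me' keeps it private to the owner.
export function isTeamShared(permission: string): boolean {
  return permission === PermissionRole.Team;
}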
@@ -1,4 +1,5 @@
 import { useHandleFilterSubmit } from '@/components/list-filter-bar/use-handle-filter-submit';
+import { ResponseType } from '@/interfaces/database/base';
 import {
   IDocumentInfo,
   IDocumentInfoFilter,
@@ -45,9 +46,9 @@ export const useUploadNextDocument = () => {
     data,
     isPending: loading,
     mutateAsync,
-  } = useMutation({
+  } = useMutation<ResponseType<IDocumentInfo[]>, Error, File[]>({
     mutationKey: [DocumentApiAction.UploadDocument],
-    mutationFn: async (fileList: File[]) => {
+    mutationFn: async (fileList) => {
       const formData = new FormData();
       formData.append('kb_id', id!);
       fileList.forEach((file: any) => {
@@ -8,9 +8,13 @@
 } from '@/interfaces/database/llm';
 import { buildLlmUuid } from '@/utils/llm-util';

+export const enum LLMApiAction {
+  LlmList = 'llmList',
+}
+
 export const useFetchLlmList = (modelType?: LlmModelType) => {
   const { data } = useQuery<IThirdAiModelCollection>({
-    queryKey: ['llmList'],
+    queryKey: [LLMApiAction.LlmList],
     initialData: {},
     queryFn: async () => {
       const { data } = await userService.llm_list({ model_type: modelType });
web/src/hooks/use-user-setting-request.tsx (new file, +464 lines)
@@ -0,0 +1,464 @@
+import message from '@/components/ui/message';
+import { LanguageTranslationMap } from '@/constants/common';
+import { ResponseGetType } from '@/interfaces/database/base';
+import { IToken } from '@/interfaces/database/chat';
+import { ITenantInfo } from '@/interfaces/database/knowledge';
+import { ILangfuseConfig } from '@/interfaces/database/system';
+import {
+  ISystemStatus,
+  ITenant,
+  ITenantUser,
+  IUserInfo,
+} from '@/interfaces/database/user-setting';
+import { ISetLangfuseConfigRequestBody } from '@/interfaces/request/system';
+import userService, {
+  addTenantUser,
+  agreeTenant,
+  deleteTenantUser,
+  listTenant,
+  listTenantUser,
+} from '@/services/user-service';
+import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
+import { Modal } from 'antd';
+import DOMPurify from 'dompurify';
+import { isEmpty } from 'lodash';
+import { useCallback, useMemo, useState } from 'react';
+import { useTranslation } from 'react-i18next';
+import { history } from 'umi';
+
+export const enum UserSettingApiAction {
+  UserInfo = 'userInfo',
+  TenantInfo = 'tenantInfo',
+  SaveSetting = 'saveSetting',
+  FetchManualSystemTokenList = 'fetchManualSystemTokenList',
+  FetchSystemTokenList = 'fetchSystemTokenList',
+  RemoveSystemToken = 'removeSystemToken',
+  CreateSystemToken = 'createSystemToken',
+  ListTenantUser = 'listTenantUser',
+  AddTenantUser = 'addTenantUser',
+  DeleteTenantUser = 'deleteTenantUser',
+  ListTenant = 'listTenant',
+  AgreeTenant = 'agreeTenant',
+  SetLangfuseConfig = 'setLangfuseConfig',
+  DeleteLangfuseConfig = 'deleteLangfuseConfig',
+  FetchLangfuseConfig = 'fetchLangfuseConfig',
+}
+
+export const useFetchUserInfo = (): ResponseGetType<IUserInfo> => {
+  const { i18n } = useTranslation();
+
+  const { data, isFetching: loading } = useQuery({
+    queryKey: [UserSettingApiAction.UserInfo],
+    initialData: {},
+    gcTime: 0,
+    queryFn: async () => {
+      const { data } = await userService.user_info();
+      if (data.code === 0) {
+        i18n.changeLanguage(
+          LanguageTranslationMap[
+            data.data.language as keyof typeof LanguageTranslationMap
+          ],
+        );
+      }
+      return data?.data ?? {};
+    },
+  });
+
+  return { data, loading };
+};
+
+export const useFetchTenantInfo = (
+  showEmptyModelWarn = false,
+): ResponseGetType<ITenantInfo> => {
+  const { t } = useTranslation();
+  const { data, isFetching: loading } = useQuery({
+    queryKey: [UserSettingApiAction.TenantInfo],
+    initialData: {},
+    gcTime: 0,
+    queryFn: async () => {
+      const { data: res } = await userService.get_tenant_info();
+      if (res.code === 0) {
+        // llm_id is chat_id
+        // asr_id is speech2txt
+        const { data } = res;
+        if (
+          showEmptyModelWarn &&
+          (isEmpty(data.embd_id) || isEmpty(data.llm_id))
+        ) {
+          Modal.warning({
+            title: t('common.warn'),
+            content: (
+              <div
+                dangerouslySetInnerHTML={{
+                  __html: DOMPurify.sanitize(t('setting.modelProvidersWarn')),
+                }}
+              ></div>
+            ),
+            onOk() {
+              history.push('/user-setting/model');
+            },
+          });
+        }
+        data.chat_id = data.llm_id;
+        data.speech2text_id = data.asr_id;
+
+        return data;
+      }
+
+      return res;
+    },
+  });
+
+  return { data, loading };
+};
+
+export const useSelectParserList = (): Array<{
+  value: string;
+  label: string;
+}> => {
+  const { data: tenantInfo } = useFetchTenantInfo(true);
+
+  const parserList = useMemo(() => {
+    const parserArray: Array<string> = tenantInfo?.parser_ids?.split(',') ?? [];
+    return parserArray.map((x) => {
+      const arr = x.split(':');
+      return { value: arr[0], label: arr[1] };
+    });
+  }, [tenantInfo]);
+
+  return parserList;
+};
+
+export const useSaveSetting = () => {
+  const queryClient = useQueryClient();
+  const { t } = useTranslation();
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.SaveSetting],
+    mutationFn: async (
+      userInfo: { new_password: string } | Partial<IUserInfo>,
+    ) => {
+      const { data } = await userService.setting(userInfo);
+      if (data.code === 0) {
+        message.success(t('message.modified'));
+        queryClient.invalidateQueries({ queryKey: ['userInfo'] });
+      }
+      return data?.code;
+    },
+  });
+
+  return { data, loading, saveSetting: mutateAsync };
+};
+
+export const useFetchSystemVersion = () => {
+  const [version, setVersion] = useState('');
+  const [loading, setLoading] = useState(false);
+
+  const fetchSystemVersion = useCallback(async () => {
+    try {
+      setLoading(true);
+      const { data } = await userService.getSystemVersion();
+      if (data.code === 0) {
+        setVersion(data.data);
+        setLoading(false);
+      }
+    } catch (error) {
+      setLoading(false);
+    }
+  }, []);
+
+  return { fetchSystemVersion, version, loading };
+};
+
+export const useFetchSystemStatus = () => {
+  const [systemStatus, setSystemStatus] = useState<ISystemStatus>(
+    {} as ISystemStatus,
+  );
+  const [loading, setLoading] = useState(false);
+
+  const fetchSystemStatus = useCallback(async () => {
+    setLoading(true);
+    const { data } = await userService.getSystemStatus();
+    if (data.code === 0) {
+      setSystemStatus(data.data);
+      setLoading(false);
+    }
+  }, []);
+
+  return {
+    systemStatus,
+    fetchSystemStatus,
+    loading,
+  };
+};
+
+export const useFetchManualSystemTokenList = () => {
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.FetchManualSystemTokenList],
+    mutationFn: async () => {
+      const { data } = await userService.listToken();
+
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, fetchSystemTokenList: mutateAsync };
+};
+
+export const useFetchSystemTokenList = () => {
+  const {
+    data,
+    isFetching: loading,
+    refetch,
+  } = useQuery<IToken[]>({
+    queryKey: [UserSettingApiAction.FetchSystemTokenList],
+    initialData: [],
+    gcTime: 0,
+    queryFn: async () => {
+      const { data } = await userService.listToken();
+
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, refetch };
+};
+
+export const useRemoveSystemToken = () => {
+  const queryClient = useQueryClient();
+  const { t } = useTranslation();
+
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.RemoveSystemToken],
+    mutationFn: async (token: string) => {
+      const { data } = await userService.removeToken({}, token);
+      if (data.code === 0) {
+        message.success(t('message.deleted'));
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.FetchSystemTokenList],
+        });
+      }
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, removeToken: mutateAsync };
+};
+
+export const useCreateSystemToken = () => {
+  const queryClient = useQueryClient();
+
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.CreateSystemToken],
+    mutationFn: async (params: Record<string, any>) => {
+      const { data } = await userService.createToken(params);
+      if (data.code === 0) {
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.FetchSystemTokenList],
+        });
+      }
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, createToken: mutateAsync };
+};
+
+export const useListTenantUser = () => {
+  const { data: tenantInfo } = useFetchTenantInfo();
+  const tenantId = tenantInfo.tenant_id;
+  const {
+    data,
+    isFetching: loading,
+    refetch,
+  } = useQuery<ITenantUser[]>({
+    queryKey: [UserSettingApiAction.ListTenantUser, tenantId],
+    initialData: [],
+    gcTime: 0,
+    enabled: !!tenantId,
+    queryFn: async () => {
+      const { data } = await listTenantUser(tenantId);
+
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, refetch };
+};
+
+export const useAddTenantUser = () => {
+  const { data: tenantInfo } = useFetchTenantInfo();
+  const queryClient = useQueryClient();
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.AddTenantUser],
+    mutationFn: async (email: string) => {
+      const { data } = await addTenantUser(tenantInfo.tenant_id, email);
+      if (data.code === 0) {
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.ListTenantUser],
+        });
+      }
+      return data?.code;
+    },
+  });
+
+  return { data, loading, addTenantUser: mutateAsync };
+};
+
+export const useDeleteTenantUser = () => {
+  const { data: tenantInfo } = useFetchTenantInfo();
+  const queryClient = useQueryClient();
+  const { t } = useTranslation();
+
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.DeleteTenantUser],
+    mutationFn: async ({
+      userId,
+      tenantId,
+    }: {
+      userId: string;
+      tenantId?: string;
+    }) => {
+      const { data } = await deleteTenantUser({
+        tenantId: tenantId ?? tenantInfo.tenant_id,
+        userId,
+      });
+      if (data.code === 0) {
+        message.success(t('message.deleted'));
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.ListTenantUser],
+        });
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.ListTenant],
+        });
+      }
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, deleteTenantUser: mutateAsync };
+};
+
+export const useListTenant = () => {
+  const { data: tenantInfo } = useFetchTenantInfo();
+  const tenantId = tenantInfo.tenant_id;
+  const {
+    data,
+    isFetching: loading,
+    refetch,
+  } = useQuery<ITenant[]>({
+    queryKey: [UserSettingApiAction.ListTenant, tenantId],
+    initialData: [],
+    gcTime: 0,
+    enabled: !!tenantId,
+    queryFn: async () => {
+      const { data } = await listTenant();
+
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, refetch };
+};
+
+export const useAgreeTenant = () => {
+  const queryClient = useQueryClient();
+  const { t } = useTranslation();
+
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.AgreeTenant],
+    mutationFn: async (tenantId: string) => {
+      const { data } = await agreeTenant(tenantId);
+      if (data.code === 0) {
+        message.success(t('message.operated'));
+        queryClient.invalidateQueries({
+          queryKey: [UserSettingApiAction.ListTenant],
+        });
+      }
+      return data?.data ?? [];
+    },
+  });
+
+  return { data, loading, agreeTenant: mutateAsync };
+};
+
+export const useSetLangfuseConfig = () => {
+  const { t } = useTranslation();
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.SetLangfuseConfig],
+    mutationFn: async (params: ISetLangfuseConfigRequestBody) => {
+      const { data } = await userService.setLangfuseConfig(params);
+      if (data.code === 0) {
+        message.success(t('message.operated'));
+      }
+      return data?.code;
+    },
+  });
+
+  return { data, loading, setLangfuseConfig: mutateAsync };
+};
+
+export const useDeleteLangfuseConfig = () => {
+  const { t } = useTranslation();
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: [UserSettingApiAction.DeleteLangfuseConfig],
+    mutationFn: async () => {
+      const { data } = await userService.deleteLangfuseConfig();
+      if (data.code === 0) {
+        message.success(t('message.deleted'));
+      }
+      return data?.code;
+    },
+  });
+
+  return { data, loading, deleteLangfuseConfig: mutateAsync };
+};
+
+export const useFetchLangfuseConfig = () => {
+  const { data, isFetching: loading } = useQuery<ILangfuseConfig>({
+    queryKey: [UserSettingApiAction.FetchLangfuseConfig],
+    gcTime: 0,
+    queryFn: async () => {
+      const { data } = await userService.getLangfuseConfig();
+
+      return data?.data;
+    },
+  });
+
+  return { data, loading };
+};
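The new file above centralizes the user-setting requests behind TanStack Query hooks. A minimal consumer sketch, kept deliberately hedged: the component name, JSX, and the `token` field on IToken are assumptions for illustration, not taken from the diff:

import {
  useFetchSystemTokenList,
  useRemoveSystemToken,
} from '@/hooks/use-user-setting-request';

export function ApiTokenList() {
  // data defaults to [] via initialData; loading reflects the in-flight query.
  const { data, loading } = useFetchSystemTokenList();
  const { removeToken } = useRemoveSystemToken();

  if (loading) return <span>Loading…</span>;

  return (
    <ul>
      {data.map((x) => (
        <li key={x.token}>
          {x.token}
          {/* removeToken invalidates FetchSystemTokenList on success, so the list refreshes. */}
          <button onClick={() => removeToken(x.token)}>Delete</button>
        </li>
      ))}
    </ul>
  );
}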
web/src/layouts/bell-button.tsx (new file, +28 lines)
@@ -0,0 +1,28 @@
+import { Button } from '@/components/ui/button';
+import { useNavigateWithFromState } from '@/hooks/route-hook';
+import { useListTenant } from '@/hooks/use-user-setting-request';
+import { TenantRole } from '@/pages/user-setting/constants';
+import { BellRing } from 'lucide-react';
+import { useCallback, useMemo } from 'react';
+
+export function BellButton() {
+  const { data } = useListTenant();
+  const navigate = useNavigateWithFromState();
+
+  const showBell = useMemo(() => {
+    return data.some((x) => x.role === TenantRole.Invite);
+  }, [data]);
+
+  const handleBellClick = useCallback(() => {
+    navigate('/user-setting/team');
+  }, [navigate]);
+
+  return showBell ? (
+    <Button variant={'ghost'} onClick={handleBellClick}>
+      <div className="relative">
+        <BellRing className="size-4 " />
+        <span className="absolute size-1 rounded -right-1 -top-1 bg-red-600"></span>
+      </div>
+    </Button>
+  ) : null;
+}
@@ -31,6 +31,7 @@ import {
 import React, { useCallback, useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useLocation } from 'umi';
+import { BellButton } from './bell-button';

 const handleDocHelpCLick = () => {
   window.open('https://ragflow.io/docs/dev/category/guides', 'target');
@@ -53,12 +54,6 @@ export function Header() {
     changeLanguage(key);
   };

-  // const { data } = useListTenant();
-
-  // const showBell = useMemo(() => {
-  //   return data.some((x) => x.role === TenantRole.Invite);
-  // }, [data]);
-
   const items = LanguageList.map((x) => ({
     key: x,
     label: <span>{LanguageMap[x as keyof typeof LanguageMap]}</span>,
@@ -68,10 +63,6 @@ export function Header() {
     setTheme(theme === ThemeEnum.Dark ? ThemeEnum.Light : ThemeEnum.Dark);
   }, [setTheme, theme]);

-  // const handleBellClick = useCallback(() => {
-  //   navigate('/user-setting/team');
-  // }, [navigate]);
-
   const tagsData = useMemo(
     () => [
       { path: Routes.Root, name: t('header.Root'), icon: House },
@@ -160,6 +151,7 @@ export function Header() {
         <Button variant={'ghost'} onClick={onThemeClick}>
           {theme === 'light' ? <Sun /> : <Moon />}
         </Button>
+        <BellButton></BellButton>
         <div className="relative">
           <RAGFlowAvatar
             name={nickname}
@@ -1,6 +1,7 @@
 export default {
   translation: {
     common: {
+      selectPlaceholder: 'select value',
       delete: 'Delete',
       deleteModalTitle: 'Are you sure to delete this item?',
       ok: 'Yes',
@@ -70,7 +71,7 @@ export default {
      review: 'from 500+ reviews',
     },
     header: {
-      knowledgeBase: 'Knowledge Base',
+      knowledgeBase: 'Dataset',
       chat: 'Chat',
       register: 'Register',
       signin: 'Sign in',
@@ -86,7 +87,7 @@ export default {
     knowledgeList: {
       welcome: 'Welcome back',
       description: 'Which knowledge bases will you use today?',
-      createKnowledgeBase: 'Create knowledge base',
+      createKnowledgeBase: 'Create Dataset',
       name: 'Name',
       namePlaceholder: 'Please input name!',
       doc: 'Docs',
@@ -94,6 +95,16 @@ export default {
      noMoreData: `That's all. Nothing more.`,
     },
     knowledgeDetails: {
+      created: 'Created',
+      learnMore: 'Learn More',
+      general: 'General',
+      chunkMethodTab: 'Chunk Method',
+      testResults: 'Test Results',
+      testSetting: 'Test Setting',
+      retrievalTesting: 'Retrieval Testing',
+      retrievalTestingDescription:
+        'Conduct a retrieval test to check if RAGFlow can recover the intended content for the LLM.',
+      Parse: 'Parse',
       dataset: 'Dataset',
       testing: 'Retrieval testing',
       files: 'files',
@@ -479,6 +490,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
       improvise: 'Improvise',
       precise: 'Precise',
       balance: 'Balance',
+      custom: 'Custom',
      freedomTip: `A shortcut to 'Temperature', 'Top P', 'Presence penalty', and 'Frequency penalty' settings, indicating the freedom level of the model. This parameter has three options: Select 'Improvise' to produce more creative responses; select 'Precise' (default) to produce more conservative responses; 'Balance' is a middle ground between 'Improvise' and 'Precise'.`,
       temperature: 'Temperature',
       temperatureMessage: 'Temperature is required',
@@ -845,6 +857,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
       uploadLimit:
         'Each file must not exceed 10MB, and the total number of files must not exceed 128.',
       destinationFolder: 'Destination folder',
+      pleaseUploadAtLeastOneFile: 'Please upload at least one file',
     },
     flow: {
       cite: 'Cite',
@@ -1441,6 +1454,7 @@ This delimiter is used to split the input text into several text pieces echo of
       showQueryMindmap: 'Show Query Mindmap',
       embedApp: 'Embed App',
       relatedSearch: 'Related Search',
+      descriptionValue: 'You are an intelligent assistant.',
       okText: 'Save',
       cancelText: 'Cancel',
     },
@@ -240,9 +240,8 @@ export default {
       promptTip:
         'Décrivez la tâche attendue du LLM, ses réponses, ses exigences, etc. Utilisez `/` pour afficher les variables disponibles.',
       promptMessage: 'Le prompt est requis',
-      promptText: `Veuillez résumer les paragraphes suivants. Attention aux chiffres, ne pas inventer. Paragraphes suivants : {cluster_content
-}
-Le contenu à résumer est ci-dessus.`,
+      promptText: `Veuillez résumer les paragraphes suivants. Attention aux chiffres, ne pas inventer. Paragraphes suivants : {cluster_content}
+Le contenu à résumer est ci-dessus.`,
       maxToken: 'Nombre maximal de tokens',
       maxTokenTip: 'Nombre maximal de tokens générés par résumé.',
       maxTokenMessage: 'Nombre maximal de tokens requis',
@@ -454,6 +454,7 @@ export default {
       improvise: '即興創作',
       precise: '精確',
       balance: '平衡',
+      custom: '自定義',
      freedomTip: `“精確”意味著法學碩士會保守並謹慎地回答你的問題。“即興發揮”意味著你希望法學碩士能夠自由地暢所欲言。“平衡”是謹慎與自由之間的平衡。`,
       temperature: '溫度',
       temperatureMessage: '溫度是必填項',
@@ -1,6 +1,7 @@
 export default {
   translation: {
     common: {
+      selectPlaceholder: '请选择',
       delete: '删除',
       deleteModalTitle: '确定删除吗?',
       ok: '是',
@@ -86,6 +87,16 @@ export default {
       noMoreData: '没有更多数据了',
     },
     knowledgeDetails: {
+      created: '创建于',
+      learnMore: '了解更多',
+      general: '通用',
+      chunkMethodTab: '切片方法',
+      testResults: '测试结果',
+      testSetting: '测试设置',
+      retrievalTesting: '知识检索测试',
+      retrievalTestingDescription:
+        '进行检索测试,检查 RAGFlow 是否能够为大语言模型(LLM)恢复预期的内容。',
+      Parse: '解析',
       dataset: '数据集',
       testing: '检索测试',
       configuration: '配置',
@@ -477,6 +488,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
       improvise: '即兴创作',
       precise: '精确',
       balance: '平衡',
+      custom: '自定义',
      freedomTip: `“精确”意味着大语言模型会保守并谨慎地回答你的问题。 “即兴发挥”意味着你希望大语言模型能够自由地畅所欲言。 “平衡”是谨慎与自由之间的平衡。`,
       temperature: '温度',
       temperatureMessage: '温度是必填项',
@@ -799,6 +811,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
       fileError: '文件错误',
       uploadLimit: '文件大小不能超过10M,文件总数不超过128个',
       destinationFolder: '目标文件夹',
+      pleaseUploadAtLeastOneFile: '请上传至少一个文件',
     },
     flow: {
       flow: '工作流',
@@ -1344,6 +1357,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
       showQueryMindmap: '显示查询思维导图',
       embedApp: '嵌入网站',
       relatedSearch: '相关搜索',
+      descriptionValue: '你是一位智能助手。',
       okText: '保存',
       cancelText: '返回',
     },
@@ -4,6 +4,7 @@ import {
   useHandleMessageInputChange,
   useSelectDerivedMessages,
 } from '@/hooks/logic-hooks';
+import { useFetchAgent } from '@/hooks/use-agent-request';
 import {
   IEventList,
   IInputEvent,
@@ -188,11 +189,7 @@ export const useSendAgentMessage = (
     return answerList[0]?.message_id;
   }, [answerList]);

-  useEffect(() => {
-    if (answerList[0]?.session_id) {
-      setSessionId(answerList[0]?.session_id);
-    }
-  }, [answerList]);
+  const { refetch } = useFetchAgent();

   const { findReferenceByMessageId } = useFindMessageReference(answerList);
   const prologue = useGetBeginNodePrologue();
@@ -250,7 +247,7 @@ export const useSendAgentMessage = (
           setValue(message.content);
           removeLatestMessage();
         } else {
-          // refetch(); // pull the message list after sending the message successfully
+          refetch(); // pull the message list after sending the message successfully
         }
       } catch (error) {
        console.log('🚀 ~ useSendAgentMessage ~ error:', error);
@@ -258,28 +255,30 @@ export const useSendAgentMessage = (
     },
     [
       agentId,
-      sessionId,
-      send,
-      clearUploadResponseList,
       inputs,
       beginParams,
       uploadResponseList,
+      sessionId,
+      send,
+      clearUploadResponseList,
       setValue,
       removeLatestMessage,
+      refetch,
     ],
   );

   const sendFormMessage = useCallback(
-    (body: { id?: string; inputs: Record<string, BeginQuery> }) => {
-      send({ ...body, session_id: sessionId });
+    async (body: { id?: string; inputs: Record<string, BeginQuery> }) => {
       addNewestOneQuestion({
         content: Object.entries(body.inputs)
          .map(([key, val]) => `${key}: ${val.value}`)
          .join('<br/>'),
         role: MessageType.User,
       });
+      await send({ ...body, session_id: sessionId });
+      refetch();
     },
-    [addNewestOneQuestion, send, sessionId],
+    [addNewestOneQuestion, refetch, send, sessionId],
   );

   // reset session
@@ -346,6 +345,12 @@ export const useSendAgentMessage = (
     }
   }, [addEventList, answerList, addEventListFun, messageId]);

+  useEffect(() => {
+    if (answerList[0]?.session_id) {
+      setSessionId(answerList[0]?.session_id);
+    }
+  }, [answerList]);
+
   return {
     value,
     sendLoading: !done,
@@ -210,7 +210,9 @@ export default function Agent() {
         ></EmbedDialog>
       )}
       {versionDialogVisible && (
-        <VersionDialog hideModal={hideVersionDialog}></VersionDialog>
+        <DropdownProvider>
+          <VersionDialog hideModal={hideVersionDialog}></VersionDialog>
+        </DropdownProvider>
       )}
       {settingDialogVisible && (
         <SettingDialog hideModal={hideSettingDialog}></SettingDialog>
@@ -63,7 +63,6 @@ export function UploadAgentForm({ hideModal, onOk }: IModalProps<any>) {
                   value={field.value}
                   onValueChange={field.onChange}
                   maxFileCount={1}
-                  maxSize={4 * 1024 * 1024}
                   accept={{ '*.json': [FileMimeType.Json] }}
                 />
               </FormControl>
@@ -35,7 +35,7 @@

 .documentPreview {
   // width: 40%;
-  height: calc(100vh - 130px);
+  height: calc(100vh - 180px);
   overflow: auto;
 }

@@ -3,15 +3,15 @@ import { RunningStatus } from '@/constants/knowledge';
 export const RunningStatusMap = {
   [RunningStatus.UNSTART]: {
     label: 'UNSTART',
-    color: 'cyan',
+    color: 'var(--accent-primary)',
   },
   [RunningStatus.RUNNING]: {
     label: 'Parsing',
-    color: 'blue',
+    color: 'var(--team-member)',
   },
-  [RunningStatus.CANCEL]: { label: 'CANCEL', color: 'orange' },
-  [RunningStatus.DONE]: { label: 'SUCCESS', color: 'blue' },
-  [RunningStatus.FAIL]: { label: 'FAIL', color: 'red' },
+  [RunningStatus.CANCEL]: { label: 'CANCEL', color: 'var(--state-warning)' },
+  [RunningStatus.DONE]: { label: 'SUCCESS', color: 'var(--state-success)' },
+  [RunningStatus.FAIL]: { label: 'FAIL', color: 'var(--state-error)' },
 };

 export * from '@/constants/knowledge';
@@ -11,7 +11,7 @@ import { IDocumentInfo } from '@/interfaces/database/document';
 import { formatFileSize } from '@/utils/common-util';
 import { formatDate } from '@/utils/date';
 import { downloadDocument } from '@/utils/file-util';
-import { ArrowDownToLine, FolderPen, ScrollText, Trash2 } from 'lucide-react';
+import { Download, Eye, PenLine, Trash2 } from 'lucide-react';
 import { useCallback } from 'react';
 import { UseRenameDocumentShowType } from './use-rename-document';
 import { isParserRunning } from './utils';
@@ -57,12 +57,12 @@ export function DatasetActionCell({
         disabled={isRunning}
         onClick={handleRename}
       >
-        <FolderPen />
+        <PenLine />
       </Button>
       <HoverCard>
         <HoverCardTrigger>
           <Button variant="ghost" disabled={isRunning} size={'sm'}>
-            <ScrollText />
+            <Eye />
           </Button>
         </HoverCardTrigger>
         <HoverCardContent className="w-[40vw] max-h-[40vh] overflow-auto">
@@ -93,7 +93,7 @@ export function DatasetActionCell({
           disabled={isRunning}
           size={'sm'}
         >
-          <ArrowDownToLine />
+          <Download />
         </Button>
       )}
       <ConfirmDeleteDialog onOk={handleRemove}>
@@ -164,7 +164,7 @@ export function DatasetTable({
           )}
         </TableBody>
       </Table>
-      <div className="flex items-center justify-end py-4">
+      <div className="flex items-center justify-end py-4 absolute bottom-3 right-3">
         <div className="space-x-2">
           <RAGFlowPagination
             {...pick(pagination, 'current', 'pageSize')}
@@ -111,6 +111,7 @@ export default function Dataset() {
           hideModal={hideDocumentUploadModal}
           onOk={onDocumentUploadOk}
           loading={documentUploadLoading}
+          showParseOnCreation
         ></FileUploadDialog>
       )}
       {createVisible && (
@@ -17,7 +17,7 @@ function Dot({ run }: { run: RunningStatus }) {
   const runningStatus = RunningStatusMap[run];
   return (
     <span
-      className={'size-2 inline-block rounded'}
+      className={'size-1 inline-block rounded'}
       style={{ backgroundColor: runningStatus.color }}
     ></span>
   );
@@ -89,7 +89,7 @@ export function ParsingCard({ record }: IProps) {
   return (
     <HoverCard>
       <HoverCardTrigger asChild>
-        <Button variant={'ghost'} size={'sm'}>
+        <Button variant={'transparent'} className="border-none" size={'sm'}>
           <Dot run={record.run}></Dot>
         </Button>
       </HoverCardTrigger>
@@ -14,7 +14,7 @@ import {
 import { Progress } from '@/components/ui/progress';
 import { Separator } from '@/components/ui/separator';
 import { IDocumentInfo } from '@/interfaces/database/document';
-import { CircleX, Play, RefreshCw } from 'lucide-react';
+import { CircleX, RefreshCw } from 'lucide-react';
 import { useCallback } from 'react';
 import { useTranslation } from 'react-i18next';
 import { RunningStatus } from './constant';
@@ -24,11 +24,13 @@ import { useHandleRunDocumentByIds } from './use-run-document';
 import { UseSaveMetaShowType } from './use-save-meta';
 import { isParserRunning } from './utils';
 const IconMap = {
-  [RunningStatus.UNSTART]: <Play />,
-  [RunningStatus.RUNNING]: <CircleX />,
-  [RunningStatus.CANCEL]: <RefreshCw />,
-  [RunningStatus.DONE]: <RefreshCw />,
-  [RunningStatus.FAIL]: <RefreshCw />,
+  [RunningStatus.UNSTART]: (
+    <div className="w-0 h-0 border-l-[10px] border-l-accent-primary border-t-8 border-r-4 border-b-8 border-transparent"></div>
+  ),
+  [RunningStatus.RUNNING]: <CircleX size={14} color="var(--state-error)" />,
+  [RunningStatus.CANCEL]: <RefreshCw size={14} color="var(--accent-primary)" />,
+  [RunningStatus.DONE]: <RefreshCw size={14} color="var(--accent-primary)" />,
+  [RunningStatus.FAIL]: <RefreshCw size={14} color="var(--accent-primary)" />,
 };

 export function ParsingStatusCell({
@@ -60,11 +62,11 @@ export function ParsingStatusCell({
   }, [record, showSetMetaModal]);

   return (
-    <section className="flex gap-2 items-center">
-      <div className="w-28 flex items-center justify-between">
+    <section className="flex gap-8 items-center">
+      <div className="w-fit flex items-center justify-between">
         <DropdownMenu>
           <DropdownMenuTrigger asChild>
-            <Button variant={'ghost'} size={'sm'}>
+            <Button variant={'transparent'} className="border-none" size={'sm'}>
               {parser_id === 'naive' ? 'general' : parser_id}
             </Button>
           </DropdownMenuTrigger>
@@ -77,7 +79,6 @@ export function ParsingStatusCell({
             </DropdownMenuItem>
           </DropdownMenuContent>
         </DropdownMenu>
-        <Separator orientation="vertical" className="h-2.5" />
       </div>
       <ConfirmDeleteDialog
         title={t(`knowledgeDetails.redo`, { chunkNum: chunk_num })}
@@ -85,17 +86,17 @@ export function ParsingStatusCell({
         onOk={handleOperationIconClick(true)}
         onCancel={handleOperationIconClick(false)}
       >
-        <Button
-          variant={'ghost'}
-          size={'sm'}
+        <div
+          className="cursor-pointer flex items-center gap-3"
           onClick={
             isZeroChunk || isRunning
               ? handleOperationIconClick(false)
               : () => {}
           }
         >
+          <Separator orientation="vertical" className="h-2.5" />
           {operationIcon}
-        </Button>
+        </div>
       </ConfirmDeleteDialog>
       {isParserRunning(run) ? (
         <HoverCard>
@ -65,7 +65,8 @@ export function useDatasetTableColumns({
|
|||||||
header: ({ column }) => {
|
header: ({ column }) => {
|
||||||
return (
|
return (
|
||||||
<Button
|
<Button
|
||||||
variant="ghost"
|
variant="transparent"
|
||||||
|
className="border-none"
|
||||||
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
||||||
>
|
>
|
||||||
{t('name')}
|
{t('name')}
|
||||||
@ -103,7 +104,8 @@ export function useDatasetTableColumns({
|
|||||||
header: ({ column }) => {
|
header: ({ column }) => {
|
||||||
return (
|
return (
|
||||||
<Button
|
<Button
|
||||||
variant="ghost"
|
variant="transparent"
|
||||||
|
className="border-none"
|
||||||
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
|
||||||
>
|
>
|
||||||
{t('uploadDate')}
|
{t('uploadDate')}
|
||||||
@ -141,7 +143,7 @@ export function useDatasetTableColumns({
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
accessorKey: 'run',
|
accessorKey: 'run',
|
||||||
header: t('parsingStatus'),
|
header: t('Parse'),
|
||||||
// meta: { cellClassName: 'min-w-[20vw]' },
|
// meta: { cellClassName: 'min-w-[20vw]' },
|
||||||
cell: ({ row }) => {
|
cell: ({ row }) => {
|
||||||
return (
|
return (
|
||||||
@@ -1,5 +1,9 @@
+import { UploadFormSchemaType } from '@/components/file-upload-dialog';
 import { useSetModalState } from '@/hooks/common-hooks';
-import { useUploadNextDocument } from '@/hooks/use-document-request';
+import {
+  useRunDocument,
+  useUploadNextDocument,
+} from '@/hooks/use-document-request';
 import { getUnSupportedFilesCount } from '@/utils/document-util';
 import { useCallback } from 'react';

@@ -10,14 +14,24 @@ export const useHandleUploadDocument = () => {
     showModal: showDocumentUploadModal,
   } = useSetModalState();
   const { uploadDocument, loading } = useUploadNextDocument();
+  const { runDocumentByIds } = useRunDocument();

   const onDocumentUploadOk = useCallback(
-    async (fileList: File[]): Promise<number | undefined> => {
+    async ({ fileList, parseOnCreation }: UploadFormSchemaType) => {
       if (fileList.length > 0) {
-        const ret: any = await uploadDocument(fileList);
+        const ret = await uploadDocument(fileList);
         if (typeof ret?.message !== 'string') {
           return;
         }
+
+        if (ret.code === 0 && parseOnCreation) {
+          runDocumentByIds({
+            documentIds: ret.data.map((x) => x.id),
+            run: 1,
+            shouldDelete: false,
+          });
+        }
+
         const count = getUnSupportedFilesCount(ret?.message);
         /// 500 error code indicates that some file types are not supported
         let code = ret?.code;
@@ -31,7 +45,7 @@ export const useHandleUploadDocument = () => {
         return code;
       }
     },
-    [uploadDocument, hideDocumentUploadModal],
+    [uploadDocument, runDocumentByIds, hideDocumentUploadModal],
   );

   return {
@@ -19,7 +19,7 @@ export default function DatasetWrapper() {
   const { data } = useFetchKnowledgeBaseConfiguration();

   return (
-    <section>
+    <section className="flex h-full flex-col w-full">
       <PageHeader>
         <Breadcrumb>
           <BreadcrumbList>
@@ -30,12 +30,14 @@ export default function DatasetWrapper() {
             </BreadcrumbItem>
             <BreadcrumbSeparator />
             <BreadcrumbItem>
-              <BreadcrumbPage>{data.name}</BreadcrumbPage>
+              <BreadcrumbPage className="w-28 whitespace-nowrap text-ellipsis overflow-hidden">
+                {data.name}
+              </BreadcrumbPage>
             </BreadcrumbItem>
           </BreadcrumbList>
         </Breadcrumb>
       </PageHeader>
-      <div className="flex flex-1">
+      <div className="flex flex-1 min-h-0">
         <SideBar></SideBar>
         <div className="flex-1">
           <Outlet />
|
|||||||
@ -66,10 +66,10 @@ export function ChunkMethodForm() {
|
|||||||
}, [finalParserId]);
|
}, [finalParserId]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<section className="h-full flex flex-col">
|
||||||
<section className="overflow-auto max-h-[76vh]">
|
<div className="overflow-auto flex-1 min-h-0">
|
||||||
<ConfigurationComponent></ConfigurationComponent>
|
<ConfigurationComponent></ConfigurationComponent>
|
||||||
</section>
|
</div>
|
||||||
<div className="text-right pt-4 flex justify-end gap-3">
|
<div className="text-right pt-4 flex justify-end gap-3">
|
||||||
<Button
|
<Button
|
||||||
type="reset"
|
type="reset"
|
||||||
@ -112,6 +112,6 @@ export function ChunkMethodForm() {
|
|||||||
{t('knowledgeConfiguration.save')}
|
{t('knowledgeConfiguration.save')}
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</>
|
</section>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,4 +1,6 @@
|
|||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { cn } from '@/lib/utils';
|
||||||
|
import { t } from 'i18next';
|
||||||
import { X } from 'lucide-react';
|
import { X } from 'lucide-react';
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import CategoryPanel from './category-panel';
|
import CategoryPanel from './category-panel';
|
||||||
@ -14,20 +16,22 @@ export default ({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
style={{
|
className={cn('hidden flex-1', {
|
||||||
display: tab === 'chunkMethodForm' ? 'block' : 'none',
|
'flex flex-col': tab === 'chunkMethodForm',
|
||||||
}}
|
})}
|
||||||
>
|
>
|
||||||
<Button
|
<div>
|
||||||
variant="outline"
|
<Button
|
||||||
onClick={() => {
|
variant="outline"
|
||||||
setVisible(!visible);
|
onClick={() => {
|
||||||
}}
|
setVisible(!visible);
|
||||||
>
|
}}
|
||||||
Learn More
|
>
|
||||||
</Button>
|
{t('knowledgeDetails.learnMore')}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
<div
|
<div
|
||||||
className="bg-[#FFF]/10 p-[20px] rounded-[12px] mt-[10px] relative"
|
className="bg-[#FFF]/10 p-[20px] rounded-[12px] mt-[10px] relative flex-1 overflow-auto"
|
||||||
style={{ display: visible ? 'block' : 'none' }}
|
style={{ display: visible ? 'block' : 'none' }}
|
||||||
>
|
>
|
||||||
<CategoryPanel chunkMethod={parserId}></CategoryPanel>
|
<CategoryPanel chunkMethod={parserId}></CategoryPanel>
|
||||||
|
|||||||
@ -29,7 +29,7 @@ export function ChunkMethodItem() {
|
|||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
<FormLabel
|
<FormLabel
|
||||||
tooltip={t('chunkMethodTip')}
|
tooltip={t('chunkMethodTip')}
|
||||||
className="text-sm text-muted-foreground whitespace-nowrap w-1/4"
|
className="text-sm text-muted-foreground whitespace-wrap w-1/4"
|
||||||
>
|
>
|
||||||
{t('chunkMethod')}
|
{t('chunkMethod')}
|
||||||
</FormLabel>
|
</FormLabel>
|
||||||
@ -69,7 +69,7 @@ export function EmbeddingModelItem() {
|
|||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
<FormLabel
|
<FormLabel
|
||||||
tooltip={t('embeddingModelTip')}
|
tooltip={t('embeddingModelTip')}
|
||||||
className="text-sm text-muted-foreground whitespace-nowrap w-1/4"
|
className="text-sm text-muted-foreground whitespace-wrap w-1/4"
|
||||||
>
|
>
|
||||||
{t('embeddingModel')}
|
{t('embeddingModel')}
|
||||||
</FormLabel>
|
</FormLabel>
|
||||||
|
|||||||
@ -1,6 +1,8 @@
|
|||||||
import { FormContainer } from '@/components/form-container';
|
import { FormContainer } from '@/components/form-container';
|
||||||
|
import { SelectWithSearch } from '@/components/originui/select-with-search';
|
||||||
|
import { RAGFlowFormItem } from '@/components/ragflow-form';
|
||||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar';
|
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar';
|
||||||
import { Button } from '@/components/ui/button';
|
import { Button, ButtonLoading } from '@/components/ui/button';
|
||||||
import {
|
import {
|
||||||
FormControl,
|
FormControl,
|
||||||
FormField,
|
FormField,
|
||||||
@ -9,9 +11,10 @@ import {
|
|||||||
FormMessage,
|
FormMessage,
|
||||||
} from '@/components/ui/form';
|
} from '@/components/ui/form';
|
||||||
import { Input } from '@/components/ui/input';
|
import { Input } from '@/components/ui/input';
|
||||||
|
import { PermissionRole } from '@/constants/permission';
|
||||||
import { useUpdateKnowledge } from '@/hooks/knowledge-hooks';
|
import { useUpdateKnowledge } from '@/hooks/knowledge-hooks';
|
||||||
import { transformFile2Base64 } from '@/utils/file-util';
|
import { transformFile2Base64 } from '@/utils/file-util';
|
||||||
import { Loader2Icon, Pencil, Upload } from 'lucide-react';
|
import { Pencil, Upload } from 'lucide-react';
|
||||||
import { useEffect, useMemo, useState } from 'react';
|
import { useEffect, useMemo, useState } from 'react';
|
||||||
import { useFormContext } from 'react-hook-form';
|
import { useFormContext } from 'react-hook-form';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
@ -33,6 +36,13 @@ export function GeneralForm() {
|
|||||||
const parser_id = defaultValues['parser_id'];
|
const parser_id = defaultValues['parser_id'];
|
||||||
const { id: kb_id } = useParams();
|
const { id: kb_id } = useParams();
|
||||||
|
|
||||||
|
const teamOptions = useMemo(() => {
|
||||||
|
return Object.values(PermissionRole).map((x) => ({
|
||||||
|
label: t('knowledgeConfiguration.' + x),
|
||||||
|
value: x,
|
||||||
|
}));
|
||||||
|
}, [t]);
|
||||||
|
|
||||||
// init avatar file if it exists in defaultValues
|
// init avatar file if it exists in defaultValues
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!avatarFile) {
|
if (!avatarFile) {
|
||||||
@ -171,24 +181,35 @@ export function GeneralForm() {
|
|||||||
);
|
);
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
<RAGFlowFormItem
|
||||||
|
name="permission"
|
||||||
|
label={t('knowledgeConfiguration.permissions')}
|
||||||
|
tooltip={t('knowledgeConfiguration.permissionsTip')}
|
||||||
|
horizontal
|
||||||
|
>
|
||||||
|
<SelectWithSearch
|
||||||
|
options={teamOptions}
|
||||||
|
triggerClassName="w-3/4"
|
||||||
|
></SelectWithSearch>
|
||||||
|
</RAGFlowFormItem>
|
||||||
</FormContainer>
|
</FormContainer>
|
||||||
<div className="text-right pt-4 flex justify-end gap-3">
|
<div className="text-right pt-4 flex justify-end gap-3">
|
||||||
<Button
|
<Button
|
||||||
type="reset"
|
type="reset"
|
||||||
className="bg-transparent text-color-white hover:bg-transparent border-gray-500 border-[1px]"
|
variant={'outline'}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
form.reset();
|
form.reset();
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{t('knowledgeConfiguration.cancel')}
|
{t('knowledgeConfiguration.cancel')}
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
<ButtonLoading
|
||||||
type="button"
|
type="button"
|
||||||
disabled={submitLoading}
|
loading={submitLoading}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
(async () => {
|
(async () => {
|
||||||
let isValidate = await form.formControl.trigger('name');
|
let isValidate = await form.trigger('name');
|
||||||
const { name, description } = form.formState.values;
|
const { name, description, permission } = form.getValues();
|
||||||
const avatar = avatarBase64Str;
|
const avatar = avatarBase64Str;
|
||||||
|
|
||||||
if (isValidate) {
|
if (isValidate) {
|
||||||
@ -198,14 +219,14 @@ export function GeneralForm() {
|
|||||||
name,
|
name,
|
||||||
description,
|
description,
|
||||||
avatar,
|
avatar,
|
||||||
|
permission,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
})();
|
})();
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{submitLoading && <Loader2Icon className="animate-spin" />}
|
|
||||||
{t('knowledgeConfiguration.save')}
|
{t('knowledgeConfiguration.save')}
|
||||||
</Button>
|
</ButtonLoading>
|
||||||
</div>
|
</div>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|||||||
@ -6,6 +6,7 @@ import {
|
|||||||
TabsTrigger,
|
TabsTrigger,
|
||||||
} from '@/components/ui/tabs-underlined';
|
} from '@/components/ui/tabs-underlined';
|
||||||
import { DocumentParserType } from '@/constants/knowledge';
|
import { DocumentParserType } from '@/constants/knowledge';
|
||||||
|
import { PermissionRole } from '@/constants/permission';
|
||||||
import { zodResolver } from '@hookform/resolvers/zod';
|
import { zodResolver } from '@hookform/resolvers/zod';
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { useForm, useWatch } from 'react-hook-form';
|
import { useForm, useWatch } from 'react-hook-form';
|
||||||
@ -43,7 +44,7 @@ export default function DatasetSettings() {
|
|||||||
defaultValues: {
|
defaultValues: {
|
||||||
name: '',
|
name: '',
|
||||||
parser_id: DocumentParserType.Naive,
|
parser_id: DocumentParserType.Naive,
|
||||||
permission: 'me',
|
permission: PermissionRole.Me,
|
||||||
parser_config: {
|
parser_config: {
|
||||||
layout_recognize: DocumentType.DeepDOC,
|
layout_recognize: DocumentType.DeepDOC,
|
||||||
chunk_token_num: 512,
|
chunk_token_num: 512,
|
||||||
@ -81,22 +82,23 @@ export default function DatasetSettings() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<section className="p-5 ">
|
<section className="p-5 h-full flex flex-col">
|
||||||
<TopTitle
|
<TopTitle
|
||||||
title={t('knowledgeDetails.configuration')}
|
title={t('knowledgeDetails.configuration')}
|
||||||
description={t('knowledgeConfiguration.titleDescription')}
|
description={t('knowledgeConfiguration.titleDescription')}
|
||||||
></TopTitle>
|
></TopTitle>
|
||||||
<div className="flex gap-14">
|
<div className="flex gap-14 flex-1 min-h-0">
|
||||||
<Form {...form}>
|
<Form {...form}>
|
||||||
<form
|
<form
|
||||||
onSubmit={form.handleSubmit(onSubmit)}
|
onSubmit={form.handleSubmit(onSubmit)}
|
||||||
className="space-y-6 basis-full min-w-[1000px] max-w-[1000px]"
|
className="space-y-6 flex-1"
|
||||||
>
|
>
|
||||||
<Tabs
|
<Tabs
|
||||||
defaultValue="generalForm"
|
defaultValue="generalForm"
|
||||||
onValueChange={(val) => {
|
onValueChange={(val) => {
|
||||||
setCurrentTab(val);
|
setCurrentTab(val);
|
||||||
}}
|
}}
|
||||||
|
className="h-full flex flex-col"
|
||||||
>
|
>
|
||||||
<TabsList className="grid bg-transparent grid-cols-2 rounded-none text-foreground">
|
<TabsList className="grid bg-transparent grid-cols-2 rounded-none text-foreground">
|
||||||
<TabsTrigger
|
<TabsTrigger
|
||||||
@ -105,7 +107,7 @@ export default function DatasetSettings() {
|
|||||||
>
|
>
|
||||||
<div className="flex w-full h-full justify-center items-center">
|
<div className="flex w-full h-full justify-center items-center">
|
||||||
<span className="h-full group-data-[state=active]:border-b-2 border-foreground ">
|
<span className="h-full group-data-[state=active]:border-b-2 border-foreground ">
|
||||||
General
|
{t('knowledgeDetails.general')}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</TabsTrigger>
|
</TabsTrigger>
|
||||||
@ -115,15 +117,15 @@ export default function DatasetSettings() {
|
|||||||
>
|
>
|
||||||
<div className="flex w-full h-full justify-center items-center">
|
<div className="flex w-full h-full justify-center items-center">
|
||||||
<span className="h-full group-data-[state=active]:border-b-2 border-foreground ">
|
<span className="h-full group-data-[state=active]:border-b-2 border-foreground ">
|
||||||
Chunk Method
|
{t('knowledgeDetails.chunkMethodTab')}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</TabsTrigger>
|
</TabsTrigger>
|
||||||
</TabsList>
|
</TabsList>
|
||||||
<TabsContent value="generalForm">
|
<TabsContent value="generalForm" className="flex-1 min-h-0">
|
||||||
<GeneralForm></GeneralForm>
|
<GeneralForm></GeneralForm>
|
||||||
</TabsContent>
|
</TabsContent>
|
||||||
<TabsContent value="chunkMethodForm">
|
<TabsContent value="chunkMethodForm" className="flex-1 min-h-0">
|
||||||
<ChunkMethodForm></ChunkMethodForm>
|
<ChunkMethodForm></ChunkMethodForm>
|
||||||
</TabsContent>
|
</TabsContent>
|
||||||
</Tabs>
|
</Tabs>
|
||||||
|
|||||||
@ -62,15 +62,19 @@ export function SideBar({ refreshCount }: PropType) {
|
|||||||
name={data.name}
|
name={data.name}
|
||||||
className="size-16"
|
className="size-16"
|
||||||
></RAGFlowAvatar>
|
></RAGFlowAvatar>
|
||||||
<div className=" text-text-secondary text-xs space-y-1">
|
<div className=" text-text-secondary text-xs space-y-1 overflow-hidden">
|
||||||
<h3 className="text-lg font-semibold line-clamp-1 text-text-primary">
|
<h3 className="text-lg font-semibold line-clamp-1 text-text-primary text-ellipsis overflow-hidden">
|
||||||
{data.name}
|
{data.name}
|
||||||
</h3>
|
</h3>
|
||||||
<div className="flex justify-between">
|
<div className="flex justify-between">
|
||||||
<span>{data.doc_num} files</span>
|
<span>
|
||||||
|
{data.doc_num} {t('knowledgeDetails.files')}
|
||||||
|
</span>
|
||||||
<span>{formatBytes(data.size)}</span>
|
<span>{formatBytes(data.size)}</span>
|
||||||
</div>
|
</div>
|
||||||
<div>Created {formatPureDate(data.create_time)}</div>
|
<div>
|
||||||
|
{t('knowledgeDetails.created')} {formatPureDate(data.create_time)}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
import { useTestRetrieval } from '@/hooks/use-knowledge-request';
|
import { useTestRetrieval } from '@/hooks/use-knowledge-request';
|
||||||
|
import { t } from 'i18next';
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { TopTitle } from '../dataset-title';
|
import { TopTitle } from '../dataset-title';
|
||||||
import TestingForm from './testing-form';
|
import TestingForm from './testing-form';
|
||||||
@ -23,8 +24,8 @@ export default function RetrievalTesting() {
|
|||||||
<div className="p-5">
|
<div className="p-5">
|
||||||
<section className="flex justify-between items-center">
|
<section className="flex justify-between items-center">
|
||||||
<TopTitle
|
<TopTitle
|
||||||
title={'Retrieval testing'}
|
title={t('knowledgeDetails.retrievalTesting')}
|
||||||
description={`Conduct a retrieval test to check if RAGFlow can recover the intended content for the LLM.`}
|
description={t('knowledgeDetails.retrievalTestingDescription')}
|
||||||
></TopTitle>
|
></TopTitle>
|
||||||
{/* <Button>Save as Preset</Button> */}
|
{/* <Button>Save as Preset</Button> */}
|
||||||
</section>
|
</section>
|
||||||
@ -33,7 +34,7 @@ export default function RetrievalTesting() {
|
|||||||
<div className="p-4 flex-1">
|
<div className="p-4 flex-1">
|
||||||
<div className="flex justify-between pb-2.5">
|
<div className="flex justify-between pb-2.5">
|
||||||
<span className="text-text-primary font-semibold text-2xl">
|
<span className="text-text-primary font-semibold text-2xl">
|
||||||
Test setting
|
{t('knowledgeDetails.testSetting')}
|
||||||
</span>
|
</span>
|
||||||
{/* <Button variant={'outline'} onClick={addCount}>
|
{/* <Button variant={'outline'} onClick={addCount}>
|
||||||
<Plus /> Add New Test
|
<Plus /> Add New Test
|
||||||
|
|||||||
@ -6,6 +6,7 @@ import { RAGFlowPagination } from '@/components/ui/ragflow-pagination';
|
|||||||
import { useTranslate } from '@/hooks/common-hooks';
|
import { useTranslate } from '@/hooks/common-hooks';
|
||||||
import { useTestRetrieval } from '@/hooks/use-knowledge-request';
|
import { useTestRetrieval } from '@/hooks/use-knowledge-request';
|
||||||
import { ITestingChunk } from '@/interfaces/database/knowledge';
|
import { ITestingChunk } from '@/interfaces/database/knowledge';
|
||||||
|
import { t } from 'i18next';
|
||||||
import camelCase from 'lodash/camelCase';
|
import camelCase from 'lodash/camelCase';
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
|
|
||||||
@ -66,7 +67,7 @@ export function TestingResult({
|
|||||||
<div className="p-4 flex-1">
|
<div className="p-4 flex-1">
|
||||||
<div className="flex justify-between pb-2.5">
|
<div className="flex justify-between pb-2.5">
|
||||||
<span className="text-text-primary font-semibold text-2xl">
|
<span className="text-text-primary font-semibold text-2xl">
|
||||||
Test results
|
{t('knowledgeDetails.testResults')}
|
||||||
</span>
|
</span>
|
||||||
<FilterPopover
|
<FilterPopover
|
||||||
filters={filters}
|
filters={filters}
|
||||||
|
|||||||
@ -1,11 +1,11 @@
|
|||||||
import { HomeCard } from '@/components/home-card';
|
import { HomeCard } from '@/components/home-card';
|
||||||
import { MoreButton } from '@/components/more-button';
|
import { MoreButton } from '@/components/more-button';
|
||||||
|
import { SharedBadge } from '@/components/shared-badge';
|
||||||
import { Card, CardContent } from '@/components/ui/card';
|
import { Card, CardContent } from '@/components/ui/card';
|
||||||
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
|
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
|
||||||
import { IKnowledge } from '@/interfaces/database/knowledge';
|
import { IKnowledge } from '@/interfaces/database/knowledge';
|
||||||
import { ChevronRight } from 'lucide-react';
|
import { ChevronRight } from 'lucide-react';
|
||||||
import { DatasetDropdown } from './dataset-dropdown';
|
import { DatasetDropdown } from './dataset-dropdown';
|
||||||
import { useDisplayOwnerName } from './use-display-owner';
|
|
||||||
import { useRenameDataset } from './use-rename-dataset';
|
import { useRenameDataset } from './use-rename-dataset';
|
||||||
|
|
||||||
export type DatasetCardProps = {
|
export type DatasetCardProps = {
|
||||||
@ -17,9 +17,6 @@ export function DatasetCard({
|
|||||||
showDatasetRenameModal,
|
showDatasetRenameModal,
|
||||||
}: DatasetCardProps) {
|
}: DatasetCardProps) {
|
||||||
const { navigateToDataset } = useNavigatePage();
|
const { navigateToDataset } = useNavigatePage();
|
||||||
const displayOwnerName = useDisplayOwnerName();
|
|
||||||
|
|
||||||
const owner = displayOwnerName(dataset.tenant_id, dataset.nickname);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<HomeCard
|
<HomeCard
|
||||||
@ -32,6 +29,7 @@ export function DatasetCard({
|
|||||||
<MoreButton></MoreButton>
|
<MoreButton></MoreButton>
|
||||||
</DatasetDropdown>
|
</DatasetDropdown>
|
||||||
}
|
}
|
||||||
|
sharedBadge={<SharedBadge>{dataset.nickname}</SharedBadge>}
|
||||||
onClick={navigateToDataset(dataset.id)}
|
onClick={navigateToDataset(dataset.id)}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
@ -41,7 +39,7 @@ export function SeeAllCard() {
|
|||||||
const { navigateToDatasetList } = useNavigatePage();
|
const { navigateToDatasetList } = useNavigatePage();
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Card className="w-40" onClick={navigateToDatasetList}>
|
<Card className="w-40 flex-none h-full" onClick={navigateToDatasetList}>
|
||||||
<CardContent className="p-2.5 pt-1 w-full h-full flex items-center justify-center gap-1.5 text-text-secondary">
|
<CardContent className="p-2.5 pt-1 w-full h-full flex items-center justify-center gap-1.5 text-text-secondary">
|
||||||
See All <ChevronRight className="size-4" />
|
See All <ChevronRight className="size-4" />
|
||||||
</CardContent>
|
</CardContent>
|
||||||
|
|||||||
@ -1,4 +1,4 @@
|
|||||||
import { Button } from '@/components/ui/button';
|
import { ButtonLoading } from '@/components/ui/button';
|
||||||
import {
|
import {
|
||||||
Dialog,
|
Dialog,
|
||||||
DialogContent,
|
DialogContent,
|
||||||
@ -74,7 +74,11 @@ export function InputForm({ onOk }: IModalProps<any>) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function DatasetCreatingDialog({ hideModal, onOk }: IModalProps<any>) {
|
export function DatasetCreatingDialog({
|
||||||
|
hideModal,
|
||||||
|
onOk,
|
||||||
|
loading,
|
||||||
|
}: IModalProps<any>) {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -85,9 +89,9 @@ export function DatasetCreatingDialog({ hideModal, onOk }: IModalProps<any>) {
|
|||||||
</DialogHeader>
|
</DialogHeader>
|
||||||
<InputForm onOk={onOk}></InputForm>
|
<InputForm onOk={onOk}></InputForm>
|
||||||
<DialogFooter>
|
<DialogFooter>
|
||||||
<Button type="submit" form={FormId}>
|
<ButtonLoading type="submit" form={FormId} loading={loading}>
|
||||||
{t('common.save')}
|
{t('common.save')}
|
||||||
</Button>
|
</ButtonLoading>
|
||||||
</DialogFooter>
|
</DialogFooter>
|
||||||
</DialogContent>
|
</DialogContent>
|
||||||
</Dialog>
|
</Dialog>
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import { UploadFormSchemaType } from '@/components/file-upload-dialog';
|
||||||
import { useSetModalState } from '@/hooks/common-hooks';
|
import { useSetModalState } from '@/hooks/common-hooks';
|
||||||
import { useUploadFile } from '@/hooks/use-file-request';
|
import { useUploadFile } from '@/hooks/use-file-request';
|
||||||
import { useCallback } from 'react';
|
import { useCallback } from 'react';
|
||||||
@ -13,7 +14,7 @@ export const useHandleUploadFile = () => {
|
|||||||
const id = useGetFolderId();
|
const id = useGetFolderId();
|
||||||
|
|
||||||
const onFileUploadOk = useCallback(
|
const onFileUploadOk = useCallback(
|
||||||
async (fileList: File[]): Promise<number | undefined> => {
|
async ({ fileList }: UploadFormSchemaType): Promise<number | undefined> => {
|
||||||
if (fileList.length > 0) {
|
if (fileList.length > 0) {
|
||||||
const ret: number = await uploadFile({ fileList, parentId: id });
|
const ret: number = await uploadFile({ fileList, parentId: id });
|
||||||
if (ret === 0) {
|
if (ret === 0) {
|
||||||
|
|||||||
@ -51,7 +51,8 @@ export function Applications() {
|
|||||||
options={options}
|
options={options}
|
||||||
value={val}
|
value={val}
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
className="bg-transparent"
|
className="bg-bg-card border border-border-button rounded-full"
|
||||||
|
activeClassName="bg-text-primary border-none"
|
||||||
></Segmented>
|
></Segmented>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex flex-wrap gap-4">
|
<div className="flex flex-wrap gap-4">
|
||||||
|
|||||||
@ -30,17 +30,21 @@ export function Datasets() {
|
|||||||
<CardSkeleton />
|
<CardSkeleton />
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<div className="flex gap-4 flex-1">
|
<div className="grid gap-6 sm:grid-cols-1 md:grid-cols-2 lg:grid-cols-4 xl:grid-cols-6 2xl:grid-cols-8 max-h-[78vh] overflow-auto">
|
||||||
{kbs.slice(0, 6).map((dataset) => (
|
{kbs
|
||||||
<DatasetCard
|
?.slice(0, 6)
|
||||||
key={dataset.id}
|
.map((dataset) => (
|
||||||
dataset={dataset}
|
<DatasetCard
|
||||||
showDatasetRenameModal={showDatasetRenameModal}
|
key={dataset.id}
|
||||||
></DatasetCard>
|
dataset={dataset}
|
||||||
))}
|
showDatasetRenameModal={showDatasetRenameModal}
|
||||||
|
></DatasetCard>
|
||||||
|
))}
|
||||||
|
<div className="min-h-24">
|
||||||
|
<SeeAllCard></SeeAllCard>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
<SeeAllCard></SeeAllCard>
|
|
||||||
</div>
|
</div>
|
||||||
{datasetRenameVisible && (
|
{datasetRenameVisible && (
|
||||||
<RenameDialog
|
<RenameDialog
|
||||||
|
|||||||
@ -14,6 +14,7 @@ import {
|
|||||||
} from '@/components/ui/form';
|
} from '@/components/ui/form';
|
||||||
import { Input } from '@/components/ui/input';
|
import { Input } from '@/components/ui/input';
|
||||||
import { Textarea } from '@/components/ui/textarea';
|
import { Textarea } from '@/components/ui/textarea';
|
||||||
|
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
|
||||||
import { useTranslate } from '@/hooks/common-hooks';
|
import { useTranslate } from '@/hooks/common-hooks';
|
||||||
import { useFormContext } from 'react-hook-form';
|
import { useFormContext } from 'react-hook-form';
|
||||||
|
|
||||||
@ -21,6 +22,17 @@ export default function ChatBasicSetting() {
|
|||||||
const { t } = useTranslate('chat');
|
const { t } = useTranslate('chat');
|
||||||
const form = useFormContext();
|
const form = useFormContext();
|
||||||
|
|
||||||
|
const languageOptions = [
|
||||||
|
{ value: 'English', label: 'English' },
|
||||||
|
{ value: 'Chinese', label: 'Chinese' },
|
||||||
|
{ value: 'Spanish', label: 'Spanish' },
|
||||||
|
{ value: 'French', label: 'French' },
|
||||||
|
{ value: 'German', label: 'German' },
|
||||||
|
{ value: 'Japanese', label: 'Japanese' },
|
||||||
|
{ value: 'Korean', label: 'Korean' },
|
||||||
|
{ value: 'Vietnamese', label: 'Vietnamese' },
|
||||||
|
];
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-8">
|
<div className="space-y-8">
|
||||||
<FormField
|
<FormField
|
||||||
@ -35,7 +47,6 @@ export default function ChatBasicSetting() {
|
|||||||
value={field.value}
|
value={field.value}
|
||||||
onValueChange={field.onChange}
|
onValueChange={field.onChange}
|
||||||
maxFileCount={1}
|
maxFileCount={1}
|
||||||
maxSize={4 * 1024 * 1024}
|
|
||||||
/>
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
<FormMessage />
|
<FormMessage />
|
||||||
@ -56,6 +67,30 @@ export default function ChatBasicSetting() {
|
|||||||
</FormItem>
|
</FormItem>
|
||||||
)}
|
)}
|
||||||
/>
|
/>
|
||||||
|
<FormField
|
||||||
|
control={form.control}
|
||||||
|
name="language"
|
||||||
|
render={({ field }) => (
|
||||||
|
<FormItem>
|
||||||
|
<FormLabel>{t('language')}</FormLabel>
|
||||||
|
<Select onValueChange={field.onChange} defaultValue={field.value}>
|
||||||
|
<FormControl>
|
||||||
|
<SelectTrigger>
|
||||||
|
<SelectValue placeholder={t('common.languagePlaceholder')} />
|
||||||
|
</SelectTrigger>
|
||||||
|
</FormControl>
|
||||||
|
<SelectContent>
|
||||||
|
{languageOptions.map((option) => (
|
||||||
|
<SelectItem key={option.value} value={option.value}>
|
||||||
|
{option.label}
|
||||||
|
</SelectItem>
|
||||||
|
))}
|
||||||
|
</SelectContent>
|
||||||
|
</Select>
|
||||||
|
<FormMessage />
|
||||||
|
</FormItem>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
<FormField
|
<FormField
|
||||||
control={form.control}
|
control={form.control}
|
||||||
name="description"
|
name="description"
|
||||||
|
|||||||
@ -9,6 +9,7 @@ import {
|
|||||||
setLLMSettingEnabledValues,
|
setLLMSettingEnabledValues,
|
||||||
} from '@/utils/form';
|
} from '@/utils/form';
|
||||||
import { zodResolver } from '@hookform/resolvers/zod';
|
import { zodResolver } from '@hookform/resolvers/zod';
|
||||||
|
import { omit } from 'lodash';
|
||||||
import { X } from 'lucide-react';
|
import { X } from 'lucide-react';
|
||||||
import { useEffect } from 'react';
|
import { useEffect } from 'react';
|
||||||
import { useForm } from 'react-hook-form';
|
import { useForm } from 'react-hook-form';
|
||||||
@ -35,13 +36,18 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
|
|||||||
shouldUnregister: true,
|
shouldUnregister: true,
|
||||||
defaultValues: {
|
defaultValues: {
|
||||||
name: '',
|
name: '',
|
||||||
|
icon: [],
|
||||||
language: 'English',
|
language: 'English',
|
||||||
|
description: '',
|
||||||
|
kb_ids: [],
|
||||||
prompt_config: {
|
prompt_config: {
|
||||||
quote: true,
|
quote: true,
|
||||||
keyword: false,
|
keyword: false,
|
||||||
tts: false,
|
tts: false,
|
||||||
use_kg: false,
|
use_kg: false,
|
||||||
refine_multiturn: true,
|
refine_multiturn: true,
|
||||||
|
system: '',
|
||||||
|
parameters: [],
|
||||||
},
|
},
|
||||||
top_n: 8,
|
top_n: 8,
|
||||||
vector_similarity_weight: 0.2,
|
vector_similarity_weight: 0.2,
|
||||||
@ -64,7 +70,7 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
|
|||||||
? await transformFile2Base64(icon[0])
|
? await transformFile2Base64(icon[0])
|
||||||
: '';
|
: '';
|
||||||
setDialog({
|
setDialog({
|
||||||
...data,
|
...omit(data, 'operator_permission'),
|
||||||
...nextValues,
|
...nextValues,
|
||||||
icon: avatar,
|
icon: avatar,
|
||||||
dialog_id: id,
|
dialog_id: id,
|
||||||
@ -89,25 +95,28 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
|
|||||||
}, [data, form]);
|
}, [data, form]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<section className="p-5 w-[440px] border-l">
|
<section className="p-5 w-[440px] border-l flex flex-col">
|
||||||
<div className="flex justify-between items-center text-base pb-2">
|
<div className="flex justify-between items-center text-base pb-2">
|
||||||
{t('chat.chatSetting')}
|
{t('chat.chatSetting')}
|
||||||
<X className="size-4 cursor-pointer" onClick={switchSettingVisible} />
|
<X className="size-4 cursor-pointer" onClick={switchSettingVisible} />
|
||||||
</div>
|
</div>
|
||||||
<Form {...form}>
|
<Form {...form}>
|
||||||
<form onSubmit={form.handleSubmit(onSubmit, onInvalid)}>
|
<form
|
||||||
<section className="space-y-6 overflow-auto max-h-[82vh] pr-4">
|
onSubmit={form.handleSubmit(onSubmit, onInvalid)}
|
||||||
|
className="flex-1 flex flex-col min-h-0"
|
||||||
|
>
|
||||||
|
<section className="space-y-6 overflow-auto flex-1 pr-4 min-h-0">
|
||||||
<ChatBasicSetting></ChatBasicSetting>
|
<ChatBasicSetting></ChatBasicSetting>
|
||||||
<Separator />
|
<Separator />
|
||||||
<ChatPromptEngine></ChatPromptEngine>
|
<ChatPromptEngine></ChatPromptEngine>
|
||||||
<Separator />
|
<Separator />
|
||||||
<ChatModelSettings></ChatModelSettings>
|
<ChatModelSettings></ChatModelSettings>
|
||||||
</section>
|
</section>
|
||||||
<div className="space-x-5 text-right">
|
<div className="space-x-5 text-right pt-4">
|
||||||
<Button variant={'outline'} onClick={switchSettingVisible}>
|
<Button variant={'outline'} onClick={switchSettingVisible}>
|
||||||
{t('chat.cancel')}
|
{t('chat.cancel')}
|
||||||
</Button>
|
</Button>
|
||||||
<ButtonLoading className=" my-4" type="submit" loading={loading}>
|
<ButtonLoading type="submit" loading={loading}>
|
||||||
{t('common.save')}
|
{t('common.save')}
|
||||||
</ButtonLoading>
|
</ButtonLoading>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -34,11 +34,11 @@ export function useChatSettingSchema() {
|
|||||||
name: z.string().min(1, { message: t('assistantNameMessage') }),
|
name: z.string().min(1, { message: t('assistantNameMessage') }),
|
||||||
icon: z.array(z.instanceof(File)),
|
icon: z.array(z.instanceof(File)),
|
||||||
language: z.string().min(1, {
|
language: z.string().min(1, {
|
||||||
message: 'Username must be at least 2 characters.',
|
message: t('languageMessage'),
|
||||||
}),
|
}),
|
||||||
description: z.string(),
|
description: z.string().optional(),
|
||||||
kb_ids: z.array(z.string()).min(0, {
|
kb_ids: z.array(z.string()).min(0, {
|
||||||
message: 'Username must be at least 1 characters.',
|
message: t('knowledgeBasesMessage'),
|
||||||
}),
|
}),
|
||||||
prompt_config: promptConfigSchema,
|
prompt_config: promptConfigSchema,
|
||||||
...rerankFormSchema,
|
...rerankFormSchema,
|
||||||
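Illustrative note, not part of the diff: the schema hunk above swaps hard-coded English messages for translation keys and makes `description` optional. A minimal zod sketch of that pattern, assuming any i18n function that returns a string (the key names are taken from the hunk):

```ts
// Minimal sketch of the validation pattern used above (zod + i18n messages).
import { z } from 'zod';

const t = (key: string) => key; // stand-in for i18next's t()

const chatSettingSchema = z.object({
  name: z.string().min(1, { message: t('assistantNameMessage') }),
  language: z.string().min(1, { message: t('languageMessage') }),
  description: z.string().optional(), // optional: empty descriptions no longer fail validation
  kb_ids: z.array(z.string()).min(0, { message: t('knowledgeBasesMessage') }),
});

// Example: parsing succeeds even without a description field.
const parsed = chatSettingSchema.safeParse({
  name: 'My assistant',
  language: 'English',
  kb_ids: [],
});
console.log(parsed.success); // true
```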
@@ -2,6 +2,8 @@ import { LargeModelFormFieldWithoutFilter } from '@/components/large-model-form-
 import { LlmSettingSchema } from '@/components/llm-setting-items/next';
 import { NextMessageInput } from '@/components/message-input/next';
 import MessageItem from '@/components/message-item';
+import PdfDrawer from '@/components/pdf-drawer';
+import { useClickDrawer } from '@/components/pdf-drawer/hooks';
 import { Button } from '@/components/ui/button';
 import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
 import { Form } from '@/components/ui/form';
@@ -54,7 +56,8 @@ type ChatCardProps = {
 } & Pick<
  MultipleChatBoxProps,
  'controller' | 'removeChatBox' | 'addChatBox' | 'chatBoxIds'
->;
+> &
+  Pick<ReturnType<typeof useClickDrawer>, 'clickDocumentButton'>;
 
 const ChatCard = forwardRef(function ChatCard(
  {
@@ -66,6 +69,7 @@ const ChatCard = forwardRef(function ChatCard(
    chatBoxIds,
    derivedMessages,
    sendLoading,
+    clickDocumentButton,
  }: ChatCardProps,
  ref,
 ) {
@@ -178,6 +182,7 @@ const ChatCard = forwardRef(function ChatCard(
                removeMessageById={removeMessageById}
                regenerateMessage={regenerateMessage}
                sendLoading={sendLoading}
+                clickDocumentButton={clickDocumentButton}
              ></MessageItem>
            );
          })}
@@ -211,6 +216,8 @@ export function MultipleChatBox({
  const { conversationId } = useGetChatSearchParams();
  const disabled = useGetSendButtonDisabled();
  const sendDisabled = useSendButtonDisabled(value);
+  const { visible, hideModal, documentId, selectedChunk, clickDocumentButton } =
+    useClickDrawer();
 
  return (
    <section className="h-full flex flex-col px-5">
@@ -227,6 +234,7 @@ export function MultipleChatBox({
            derivedMessages={messageRecord[id]}
            ref={setFormRef(id)}
            sendLoading={sendLoading}
+            clickDocumentButton={clickDocumentButton}
          ></ChatCard>
        ))}
      </div>
@@ -246,6 +254,14 @@ export function MultipleChatBox({
          onUpload={handleUploadFile}
        />
      </div>
+      {visible && (
+        <PdfDrawer
+          visible={visible}
+          hideModal={hideModal}
+          documentId={documentId}
+          chunk={selectedChunk}
+        ></PdfDrawer>
+      )}
    </section>
  );
 }
@@ -1,5 +1,7 @@
 import { NextMessageInput } from '@/components/message-input/next';
 import MessageItem from '@/components/message-item';
+import PdfDrawer from '@/components/pdf-drawer';
+import { useClickDrawer } from '@/components/pdf-drawer/hooks';
 import { MessageType } from '@/constants/chat';
 import {
  useFetchConversation,
@@ -43,6 +45,8 @@ export function SingleChatBox({ controller }: IProps) {
  const { data: conversation } = useFetchConversation();
  const disabled = useGetSendButtonDisabled();
  const sendDisabled = useSendButtonDisabled(value);
+  const { visible, hideModal, documentId, selectedChunk, clickDocumentButton } =
+    useClickDrawer();
 
  return (
    <section className="flex flex-col p-5 h-full">
@@ -68,7 +72,7 @@ export function SingleChatBox({ controller }: IProps) {
              },
              message,
            )}
-            // clickDocumentButton={clickDocumentButton}
+            clickDocumentButton={clickDocumentButton}
            index={i}
            removeMessageById={removeMessageById}
            regenerateMessage={regenerateMessage}
@@ -94,6 +98,14 @@ export function SingleChatBox({ controller }: IProps) {
        onUpload={handleUploadFile}
        isUploading={isUploading}
      />
+      {visible && (
+        <PdfDrawer
+          visible={visible}
+          hideModal={hideModal}
+          documentId={documentId}
+          chunk={selectedChunk}
+        ></PdfDrawer>
+      )}
    </section>
  );
 }
@@ -109,13 +109,12 @@ export default function Chat() {
 
      <Card className="flex-1 min-w-0 bg-transparent border h-full">
        <CardContent className="flex p-0 h-full">
-          <Card className="flex flex-col flex-1 bg-transparent">
+          <Card className="flex flex-col flex-1 bg-transparent min-w-0">
            <CardHeader
              className={cn('p-5', { 'border-b': hasSingleChatBox })}
            >
              <CardTitle className="flex justify-between items-center text-base">
-                <div>{conversation.name}</div>
-
+                <div className="truncate">{conversation.name}</div>
                <Button
                  variant={'ghost'}
                  onClick={switchDebugMode}
@@ -90,8 +90,8 @@ export function Sessions({
              'bg-bg-card': conversationId === x.id,
            })}
          >
-            <CardContent className="px-3 py-2 flex justify-between items-center group">
-              {x.name}
+            <CardContent className="px-3 py-2 flex justify-between items-center group gap-1">
+              <div className="truncate">{x.name}</div>
              <ConversationDropdown conversation={x}>
                <MoreButton></MoreButton>
              </ConversationDropdown>
@@ -1,5 +1,6 @@
 import { useSetModalState } from '@/hooks/common-hooks';
 import { useSetDialog } from '@/hooks/use-chat-request';
+import { useFetchTenantInfo } from '@/hooks/use-user-setting-request';
 import { IDialog } from '@/interfaces/database/chat';
 import { isEmpty, omit } from 'lodash';
 import { useCallback, useMemo, useState } from 'react';
@@ -14,6 +15,7 @@ export const useRenameChat = () => {
  } = useSetModalState();
  const { setDialog, loading } = useSetDialog();
  const { t } = useTranslation();
+  const tenantInfo = useFetchTenantInfo();
 
  const InitialData = useMemo(
    () => ({
@@ -32,13 +34,13 @@ export const useRenameChat = () => {
        reasoning: false,
        parameters: [{ key: 'knowledge', optional: false }],
      },
-      llm_id: '',
+      llm_id: tenantInfo.data.llm_id,
      llm_setting: {},
      similarity_threshold: 0.2,
      vector_similarity_weight: 0.30000000000000004,
      top_n: 8,
    }),
-    [t],
+    [t, tenantInfo.data.llm_id],
  );
 
  const onChatRenameOk = useCallback(
@@ -46,7 +48,10 @@ export const useRenameChat = () => {
      const nextChat = {
        ...(isEmpty(chat)
          ? InitialData
-          : { ...omit(chat, 'nickname', 'tenant_avatar'), dialog_id: chat.id }),
+          : {
+              ...omit(chat, 'nickname', 'tenant_avatar', 'operator_permission'),
+              dialog_id: chat.id,
+            }),
        name,
      };
      const ret = await setDialog(nextChat);
@@ -1,3 +1,4 @@
+import showMessage from '@/components/ui/message';
 import { MessageType } from '@/constants/chat';
 import {
  useHandleMessageInputChange,
@@ -159,7 +160,7 @@ export function useSendMultipleChatMessage(
      if (res && (res?.response.status !== 200 || res?.data?.code !== 0)) {
        // cancel loading
        setValue(message.content);
-        console.info('removeLatestMessage111');
+        showMessage.error(res.data.message);
        removeLatestMessage(chatBoxId);
      }
    },
@@ -128,7 +128,7 @@ export default function SearchPage() {
      </div>
      <div className="absolute right-5 top-4 ">
        <Button
-          className="bg-text-primary text-bg-base border-b-[#00BEB4] border-b-2"
+          className="bg-text-primary text-bg-base border-b-accent-primary border-b-2"
          onClick={() => {
            handleOperate().then((res) => {
              console.log(res, 'res');
@@ -27,7 +27,7 @@ export default function SearchPage({
      <div className="relative z-10 px-8 pt-8 flex text-transparent flex-col justify-center items-center w-[780px]">
        <h1
          className={cn(
-            'text-4xl font-bold bg-gradient-to-r from-sky-600 from-30% via-sky-500 via-60% to-emerald-500 bg-clip-text',
+            'text-4xl font-bold bg-gradient-to-l from-[#40EBE3] to-[#4A51FF] bg-clip-text',
          )}
        >
          RAGFlow
@@ -21,7 +21,7 @@ import {
 } from '@/constants/knowledge';
 import { useTranslate } from '@/hooks/common-hooks';
 import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
-import { camelCase } from 'lodash';
+import { camelCase, isEqual } from 'lodash';
 import { useCallback } from 'react';
 import { useFormContext } from 'react-hook-form';
 import { z } from 'zod';
@@ -61,20 +61,15 @@ export function LlmSettingFieldItems({
 
  const handleChange = useCallback(
    (parameter: string) => {
-      const currentValues = { ...form.getValues() };
-      console.log('currentValues', currentValues);
      const values =
        settledModelVariableMap[
          parameter as keyof typeof settledModelVariableMap
        ];
      const enabledKeys = Object.keys(LlmSettingEnableSchema);
 
-      // const nextValues = { ...currentValues, ...values };
-
      for (const key in values) {
        if (Object.prototype.hasOwnProperty.call(values, key)) {
-          const element = values[key];
+          const element = values[key as keyof typeof values];
 
          form.setValue(`${prefix}.${key}`, element);
        }
      }
@@ -90,7 +85,11 @@ export function LlmSettingFieldItems({
  const parameterOptions = Object.values(ModelVariableType).map((x) => ({
    label: t(camelCase(x)),
    value: x,
-  }));
+  })) as unknown as { label: string; value: ModelVariableType | 'Custom' }[];
+  parameterOptions.push({
+    label: t(camelCase('Custom')),
+    value: 'Custom',
+  });
 
  const getFieldWithPrefix = useCallback(
    (name: string) => {
@@ -99,6 +98,35 @@ export function LlmSettingFieldItems({
    [prefix],
  );
 
+  const checkParameterIsEquel = () => {
+    const [
+      parameter,
+      topPValue,
+      frequencyPenaltyValue,
+      temperatureValue,
+      presencePenaltyValue,
+    ] = form.getValues([
+      getFieldWithPrefix('parameter'),
+      getFieldWithPrefix('temperature'),
+      getFieldWithPrefix('top_p'),
+      getFieldWithPrefix('frequency_penalty'),
+      getFieldWithPrefix('presence_penalty'),
+    ]);
+    if (parameter && parameter !== 'Custom') {
+      const parameterValue =
+        settledModelVariableMap[parameter as keyof typeof ModelVariableType];
+      const parameterRealValue = {
+        top_p: topPValue,
+        temperature: temperatureValue,
+        frequency_penalty: frequencyPenaltyValue,
+        presence_penalty: presencePenaltyValue,
+      };
+      if (!isEqual(parameterValue, parameterRealValue)) {
+        form.setValue(getFieldWithPrefix('parameter'), 'Custom');
+      }
+    }
+  };
+
  return (
    <div className="space-y-5">
      <FormField
@@ -113,7 +141,7 @@ export function LlmSettingFieldItems({
            <FormControl>
              <SelectWithSearch
                options={options || modelOptions}
-                triggerClassName="bg-bg-card"
+                triggerClassName="!bg-bg-input"
                {...field}
              ></SelectWithSearch>
            </FormControl>
@@ -159,6 +187,9 @@ export function LlmSettingFieldItems({
        label="temperature"
        max={1}
        step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
      ></SliderInputSwitchFormField>
      <SliderInputSwitchFormField
        name={getFieldWithPrefix('top_p')}
@@ -166,6 +197,9 @@ export function LlmSettingFieldItems({
        label="topP"
        max={1}
        step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
      ></SliderInputSwitchFormField>
      <SliderInputSwitchFormField
        name={getFieldWithPrefix('presence_penalty')}
@@ -173,6 +207,9 @@ export function LlmSettingFieldItems({
        label="presencePenalty"
        max={1}
        step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
      ></SliderInputSwitchFormField>
      <SliderInputSwitchFormField
        name={getFieldWithPrefix('frequency_penalty')}
@@ -180,6 +217,9 @@ export function LlmSettingFieldItems({
        label="frequencyPenalty"
        max={1}
        step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
      ></SliderInputSwitchFormField>
      {/* <SliderInputSwitchFormField
        name={getFieldWithPrefix('max_tokens')}
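Illustrative note, not part of the diff: the `checkParameterIsEquel` helper added above compares the current slider values against the selected preset and flips the selector to 'Custom' when they diverge. A small standalone sketch of that check, using lodash `isEqual`; the preset table below is a made-up stand-in for `settledModelVariableMap`, not the real values.

```ts
// Sketch of the "fall back to Custom when sliders drift from the preset" check.
import { isEqual } from 'lodash';

type LlmParams = {
  temperature: number;
  top_p: number;
  presence_penalty: number;
  frequency_penalty: number;
};

// Hypothetical presets, only for illustration.
const presets: Record<string, LlmParams> = {
  Precise: { temperature: 0.1, top_p: 0.3, presence_penalty: 0.4, frequency_penalty: 0.7 },
  Balance: { temperature: 0.5, top_p: 0.5, presence_penalty: 0.4, frequency_penalty: 0.7 },
};

function resolveParameterLabel(selected: string, current: LlmParams): string {
  const preset = presets[selected];
  // Unknown selection, or values no longer match the preset -> 'Custom'.
  if (!preset || !isEqual(preset, current)) {
    return 'Custom';
  }
  return selected;
}

// Example: the user nudged temperature away from the 'Precise' preset.
console.log(
  resolveParameterLabel('Precise', {
    temperature: 0.2,
    top_p: 0.3,
    presence_penalty: 0.4,
    frequency_penalty: 0.7,
  }),
); // 'Custom'
```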
@@ -114,8 +114,8 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
  const [avatarBase64Str, setAvatarBase64Str] = useState(''); // Avatar Image base64
  const [datasetList, setDatasetList] = useState<MultiSelectOptionType[]>([]);
  const [datasetSelectEmbdId, setDatasetSelectEmbdId] = useState('');
-  const descriptionDefaultValue = 'You are an intelligent assistant.';
  const { t } = useTranslation();
+  const descriptionDefaultValue = t('search.descriptionValue');
  const resetForm = useCallback(() => {
    formMethods.reset({
      search_id: data?.id,
@@ -136,9 +136,9 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
      use_rerank: search_config?.rerank_id ? true : false,
      top_k: search_config?.top_k || 1024,
      summary: search_config?.summary || false,
-      chat_id: '',
+      chat_id: search_config?.chat_id || '',
      llm_setting: {
-        llm_id: llm_setting?.llm_id || '',
+        llm_id: search_config?.chat_id || '',
        parameter: llm_setting?.parameter,
        temperature: llm_setting?.temperature,
        top_p: llm_setting?.top_p,
@@ -159,7 +159,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
        meta_data_filter: search_config?.meta_data_filter,
      },
    });
-  }, [data, search_config, llm_setting, formMethods]);
+  }, [data, search_config, llm_setting, formMethods, descriptionDefaultValue]);
 
  useEffect(() => {
    resetForm();
@@ -255,7 +255,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
      ...other_config
    } = search_config;
    const llmSetting = {
-      llm_id: llm_setting.llm_id,
+      // llm_id: llm_setting.llm_id,
      parameter: llm_setting.parameter,
      temperature: llm_setting.temperature,
      top_p: llm_setting.top_p,
@@ -263,22 +263,11 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
      presence_penalty: llm_setting.presence_penalty,
    } as IllmSettingProps;
 
-    if (!llm_setting.frequencyPenaltyEnabled) {
-      delete llmSetting.frequency_penalty;
-    }
-    if (!llm_setting.presencePenaltyEnabled) {
-      delete llmSetting.presence_penalty;
-    }
-    if (!llm_setting.temperatureEnabled) {
-      delete llmSetting.temperature;
-    }
-    if (!llm_setting.topPEnabled) {
-      delete llmSetting.top_p;
-    }
    await updateSearch({
      ...other_formdata,
      search_config: {
        ...other_config,
+        chat_id: llm_setting.llm_id,
        vector_similarity_weight: 1 - vector_similarity_weight,
        rerank_id: use_rerank ? rerank_id : '',
        llm_setting: { ...llmSetting },
@@ -415,7 +404,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
              <FormLabel>{t('search.description')}</FormLabel>
              <FormControl>
                <Textarea
-                  placeholder="You are an intelligent assistant."
+                  placeholder={descriptionDefaultValue}
                  {...field}
                  onFocus={() => {
                    if (field.value === descriptionDefaultValue) {
@@ -444,7 +433,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
                <span className="text-destructive mr-1"> *</span>
                {t('search.datasets')}
              </FormLabel>
-              <FormControl>
+              <FormControl className="bg-bg-input">
                <MultiSelect
                  options={datasetList}
                  onValueChange={(value) => {
@@ -452,7 +441,6 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
                  }}
                  showSelectAll={false}
                  placeholder={t('chat.knowledgeBasesMessage')}
-                  variant="inverted"
                  maxCount={10}
                  defaultValue={field.value}
                  {...field}
@@ -568,6 +556,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
                <RAGFlowSelect
                  {...field}
                  options={rerankModelOptions}
+                  triggerClassName={'bg-bg-input'}
                  // disabled={disabled}
                  placeholder={'model'}
                />
@@ -83,7 +83,7 @@ export default function SearchingView({
    >
      <h1
        className={cn(
-          'text-4xl font-bold bg-gradient-to-r from-sky-600 from-30% via-sky-500 via-60% to-emerald-500 bg-clip-text cursor-pointer',
+          'text-4xl font-bold bg-gradient-to-l from-[#40EBE3] to-[#4A51FF] bg-clip-text cursor-pointer',
        )}
        onClick={() => {
          setIsSearching?.(false);
Some files were not shown because too many files have changed in this diff.