Compare commits

...

7 Commits

Author SHA1 Message Date
a0d630365c Refactor: Improve VoyageRerank empty-texts handling (#9539)
### What problem does this PR solve?

Improve VoyageRerank's handling of an empty `texts` input

### Type of change

- [x] Refactoring
2025-08-19 10:31:04 +08:00
b5b8032a56 Feat: Support metadata auto filter for Search. (#9524)
### What problem does this PR solve?

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
2025-08-19 10:27:24 +08:00
ccb9f0b0d7 Feature (agent): Allow the Retrieval kb_ids param to use kb_id, and allow a list of kb_name or kb_id (#9531)
### What problem does this PR solve?

Allow the Retrieval kb_ids param to use kb_id, and allow a list of kb_name or kb_id.
- Check whether the knowledge base name is a list, and support batch
queries
- When the knowledge base name does not exist, try using the ID for
querying
- If both query methods fail, throw an exception

### Type of change
- [x] New Feature (non-breaking change which adds functionality)
2025-08-19 09:42:39 +08:00
a0ab619aeb Fix: ensure update_progress loop always waits between iterations (#9528)
Move stop_event.wait(6) into finally block so that even when an
exception occurs, the loop still sleeps before retrying. This prevents
busy looping and excessive error logs when Redis connection fails.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
2025-08-19 09:42:15 +08:00
32349481ef Feat: Allow agent operators to select speech-to-text models #3221 (#9534)
### What problem does this PR solve?

Feat: Allow agent operators to select speech-to-text models #3221
### Type of change


- [x] New Feature (non-breaking change which adds functionality)
2025-08-19 09:40:01 +08:00
2b9ed935f3 feat(search): Optimized search functionality and user interface #3221 (#9535)
### What problem does this PR solve?

feat(search): Optimized search functionality and user interface #3221
### Type of change
- Added similarity threshold adjustment function
- Optimized mind map display logic
- Adjusted search settings interface layout
- Fixed related search and document viewing functions
- Optimized time display and node selection logic

- [x] Bug Fix (non-breaking change which fixes an issue)
2025-08-19 09:39:48 +08:00
188c0f614b Refa: refine search app (#9536)
### What problem does this PR solve?

Refine search app.

### Type of change

- [x] Refactoring
2025-08-19 09:33:33 +08:00
42 changed files with 551 additions and 196 deletions

View File

@ -484,7 +484,7 @@ class Canvas:
threads.append(exe.submit(FileService.parse, file["name"], FileService.get_blob(file["created_by"], file["id"]), True, file["created_by"]))
return [th.result() for th in threads]
def tool_use_callback(self, agent_id: str, func_name: str, params: dict, result: Any):
def tool_use_callback(self, agent_id: str, func_name: str, params: dict, result: Any, elapsed_time=None):
agent_ids = agent_id.split("-->")
agent_name = self.get_component_name(agent_ids[0])
path = agent_name if len(agent_ids) < 2 else agent_name+"-->"+"-->".join(agent_ids[1:])
@ -493,16 +493,16 @@ class Canvas:
if bin:
obj = json.loads(bin.encode("utf-8"))
if obj[-1]["component_id"] == agent_ids[0]:
obj[-1]["trace"].append({"path": path, "tool_name": func_name, "arguments": params, "result": result})
obj[-1]["trace"].append({"path": path, "tool_name": func_name, "arguments": params, "result": result, "elapsed_time": elapsed_time})
else:
obj.append({
"component_id": agent_ids[0],
"trace": [{"path": path, "tool_name": func_name, "arguments": params, "result": result}]
"trace": [{"path": path, "tool_name": func_name, "arguments": params, "result": result, "elapsed_time": elapsed_time}]
})
else:
obj = [{
"component_id": agent_ids[0],
"trace": [{"path": path, "tool_name": func_name, "arguments": params, "result": result}]
"trace": [{"path": path, "tool_name": func_name, "arguments": params, "result": result, "elapsed_time": elapsed_time}]
}]
REDIS_CONN.set_obj(f"{self.task_id}-{self.message_id}-logs", obj, 60*10)
except Exception as e:

View File

@ -22,7 +22,7 @@ from functools import partial
from typing import Any
import json_repair
from timeit import default_timer as timer
from agent.tools.base import LLMToolPluginCallSession, ToolParamBase, ToolBase, ToolMeta
from api.db.services.llm_service import LLMBundle
from api.db.services.tenant_llm_service import TenantLLMService
@ -215,8 +215,9 @@ class Agent(LLM, ToolBase):
hist = deepcopy(history)
last_calling = ""
if len(hist) > 3:
st = timer()
user_request = full_question(messages=history, chat_mdl=self.chat_mdl)
self.callback("Multi-turn conversation optimization", {}, user_request)
self.callback("Multi-turn conversation optimization", {}, user_request, elapsed_time=timer()-st)
else:
user_request = history[-1]["content"]
@ -263,12 +264,13 @@ class Agent(LLM, ToolBase):
if not need2cite or cited:
return
st = timer()
txt = ""
for delta_ans in self._gen_citations(entire_txt):
yield delta_ans, 0
txt += delta_ans
self.callback("gen_citations", {}, txt)
self.callback("gen_citations", {}, txt, elapsed_time=timer()-st)
def append_user_content(hist, content):
if hist[-1]["role"] == "user":
@ -276,8 +278,9 @@ class Agent(LLM, ToolBase):
else:
hist.append({"role": "user", "content": content})
st = timer()
task_desc = analyze_task(self.chat_mdl, prompt, user_request, tool_metas)
self.callback("analyze_task", {}, task_desc)
self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
for _ in range(self._param.max_rounds + 1):
response, tk = next_step(self.chat_mdl, hist, tool_metas, task_desc)
# self.callback("next_step", {}, str(response)[:256]+"...")
@ -303,9 +306,10 @@ class Agent(LLM, ToolBase):
thr.append(executor.submit(use_tool, name, args))
st = timer()
reflection = reflect(self.chat_mdl, hist, [th.result() for th in thr])
append_user_content(hist, reflection)
self.callback("reflection", {}, str(reflection))
self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)
except Exception as e:
logging.exception(msg=f"Wrong JSON argument format in LLM ReAct response: {e}")

View File

@ -24,6 +24,7 @@ from api.utils import hash_str2int
from rag.llm.chat_model import ToolCallSession
from rag.prompts.prompts import kb_prompt
from rag.utils.mcp_tool_call_conn import MCPToolCallSession
from timeit import default_timer as timer
class ToolParameter(TypedDict):
@ -49,12 +50,13 @@ class LLMToolPluginCallSession(ToolCallSession):
def tool_call(self, name: str, arguments: dict[str, Any]) -> Any:
assert name in self.tools_map, f"LLM tool {name} does not exist"
st = timer()
if isinstance(self.tools_map[name], MCPToolCallSession):
resp = self.tools_map[name].tool_call(name, arguments, 60)
else:
resp = self.tools_map[name].invoke(**arguments)
self.callback(name, arguments, resp)
self.callback(name, arguments, resp, elapsed_time=timer()-st)
return resp
def get_tool_obj(self, name):

View File

@ -79,6 +79,17 @@ class ExeSQL(ToolBase, ABC):
@timeout(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60))
def _invoke(self, **kwargs):
def convert_decimals(obj):
from decimal import Decimal
if isinstance(obj, Decimal):
return float(obj) # 或 str(obj)
elif isinstance(obj, dict):
return {k: convert_decimals(v) for k, v in obj.items()}
elif isinstance(obj, list):
return [convert_decimals(item) for item in obj]
return obj
sql = kwargs.get("sql")
if not sql:
raise Exception("SQL for `ExeSQL` MUST not be empty.")
@ -122,7 +133,11 @@ class ExeSQL(ToolBase, ABC):
single_res = pd.DataFrame([i for i in cursor.fetchmany(self._param.max_records)])
single_res.columns = [i[0] for i in cursor.description]
sql_res.append(single_res.to_dict(orient='records'))
for col in single_res.columns:
if pd.api.types.is_datetime64_any_dtype(single_res[col]):
single_res[col] = single_res[col].dt.strftime('%Y-%m-%d')
sql_res.append(convert_decimals(single_res.to_dict(orient='records')))
formalized_content.append(single_res.to_markdown(index=False, floatfmt=".6f"))
self.set_output("json", sql_res)
@ -130,4 +145,4 @@ class ExeSQL(ToolBase, ABC):
return self.output("formalized_content")
def thoughts(self) -> str:
return "Query sent—waiting for the data."
return "Query sent—waiting for the data."

View File

@ -86,10 +86,16 @@ class Retrieval(ToolBase, ABC):
kb_ids.append(id)
continue
kb_nm = self._canvas.get_variable_value(id)
e, kb = KnowledgebaseService.get_by_name(kb_nm, self._canvas._tenant_id)
if not e:
raise Exception(f"Dataset({kb_nm}) does not exist.")
kb_ids.append(kb.id)
# if kb_nm is a list
kb_nm_list = kb_nm if isinstance(kb_nm, list) else [kb_nm]
for nm_or_id in kb_nm_list:
e, kb = KnowledgebaseService.get_by_name(nm_or_id,
self._canvas._tenant_id)
if not e:
e, kb = KnowledgebaseService.get_by_id(nm_or_id)
if not e:
raise Exception(f"Dataset({nm_or_id}) does not exist.")
kb_ids.append(kb.id)
filtered_kb_ids: list[str] = list(set([kb_id for kb_id in kb_ids if kb_id]))

View File

@ -29,6 +29,7 @@ from api.db.services.conversation_service import ConversationService, structure_
from api.db.services.dialog_service import DialogService, ask, chat
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMBundle
from api.db.services.search_service import SearchService
from api.db.services.tenant_llm_service import TenantLLMService
from api.db.services.user_service import TenantService, UserTenantService
from api.utils.api_utils import get_data_error_result, get_json_result, server_error_response, validate_request
@ -344,10 +345,18 @@ def ask_about():
req = request.json
uid = current_user.id
search_id = req.get("search_id", "")
search_app = None
search_config = {}
if search_id:
search_app = SearchService.get_detail(search_id)
if search_app:
search_config = search_app.get("search_config", {})
def stream():
nonlocal req, uid
try:
for ans in ask(req["question"], req["kb_ids"], uid):
for ans in ask(req["question"], req["kb_ids"], uid, search_config=search_config):
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
except Exception as e:
yield "data:" + json.dumps({"code": 500, "message": str(e), "data": {"answer": "**ERROR**: " + str(e), "reference": []}}, ensure_ascii=False) + "\n\n"
@ -366,15 +375,68 @@ def ask_about():
@validate_request("question", "kb_ids")
def mindmap():
req = request.json
search_id = req.get("search_id", "")
search_app = None
search_config = {}
if search_id:
search_app = SearchService.get_detail(search_id)
if search_app:
search_config = search_app.get("search_config", {})
kb_ids = req["kb_ids"]
if search_config.get("kb_ids", []):
kb_ids = search_config.get("kb_ids", [])
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
if not e:
return get_data_error_result(message="Knowledgebase not found!")
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING, llm_name=kb.embd_id)
chat_mdl = LLMBundle(current_user.id, LLMType.CHAT)
chat_id = ""
similarity_threshold = 0.3,
vector_similarity_weight = 0.3,
top = 1024,
doc_ids = []
rerank_id = ""
rerank_mdl = None
if search_config:
if search_config.get("chat_id", ""):
chat_id = search_config.get("chat_id", "")
if search_config.get("similarity_threshold", 0.2):
similarity_threshold = search_config.get("similarity_threshold", 0.2)
if search_config.get("vector_similarity_weight", 0.3):
vector_similarity_weight = search_config.get("vector_similarity_weight", 0.3)
if search_config.get("top_k", 1024):
top = search_config.get("top_k", 1024)
if search_config.get("doc_ids", []):
doc_ids = search_config.get("doc_ids", [])
if search_config.get("rerank_id", ""):
rerank_id = search_config.get("rerank_id", "")
tenant_id = kb.tenant_id
if search_app and search_app.get("tenant_id", ""):
tenant_id = search_app.get("tenant_id", "")
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, llm_name=kb.embd_id)
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT, llm_name=chat_id)
if rerank_id:
rerank_mdl = LLMBundle(tenant_id, LLMType.RERANK, rerank_id)
question = req["question"]
ranks = settings.retrievaler.retrieval(question, embd_mdl, kb.tenant_id, kb_ids, 1, 12, 0.3, 0.3, aggs=False, rank_feature=label_question(question, [kb]))
ranks = settings.retrievaler.retrieval(
question=question,
embd_mdl=embd_mdl,
tenant_ids=tenant_id,
kb_ids=kb_ids,
page=1,
page_size=12,
similarity_threshold=similarity_threshold,
vector_similarity_weight=vector_similarity_weight,
top=top,
doc_ids=doc_ids,
aggs=False,
rerank_mdl=rerank_mdl,
rank_feature=label_question(question, [kb]),
)
mindmap = MindMapExtractor(chat_mdl)
mind_map = trio.run(mindmap, [c["content_with_weight"] for c in ranks["chunks"]])
mind_map = mind_map.output
@ -388,8 +450,19 @@ def mindmap():
@validate_request("question")
def related_questions():
req = request.json
search_id = req.get("search_id", "")
search_config = {}
if search_id:
if search_app := SearchService.get_detail(search_id):
search_config = search_app.get("search_config", {})
question = req["question"]
chat_mdl = LLMBundle(current_user.id, LLMType.CHAT)
chat_id = search_config.get("chat_id", "")
chat_mdl = LLMBundle(current_user.id, LLMType.CHAT, chat_id)
gen_conf = search_config.get("llm_setting", {"temperature": 0.9})
prompt = load_prompt("related_question")
ans = chat_mdl.chat(
prompt,
@ -402,6 +475,6 @@ Related search terms:
""",
}
],
{"temperature": 0.9},
gen_conf,
)
return get_json_result(data=[re.sub(r"^[0-9]\. ", "", a) for a in ans.split("\n") if re.match(r"^[0-9]\. ", a)])

View File

@ -902,10 +902,16 @@ def ask_about_embedded():
req = request.json
uid = objs[0].tenant_id
search_id = req.get("search_id", "")
search_config = {}
if search_id:
if search_app := SearchService.get_detail(search_id):
search_config = search_app.get("search_config", {})
def stream():
nonlocal req, uid
try:
for ans in ask(req["question"], req["kb_ids"], uid):
for ans in ask(req["question"], req["kb_ids"], uid, search_config):
yield "data:" + json.dumps({"code": 0, "message": "", "data": ans}, ensure_ascii=False) + "\n\n"
except Exception as e:
yield "data:" + json.dumps({"code": 500, "message": str(e), "data": {"answer": "**ERROR**: " + str(e), "reference": []}}, ensure_ascii=False) + "\n\n"
@ -1021,8 +1027,19 @@ def related_questions_embedded():
tenant_id = objs[0].tenant_id
if not tenant_id:
return get_error_data_result(message="permission denined.")
search_id = req.get("search_id", "")
search_config = {}
if search_id:
if search_app := SearchService.get_detail(search_id):
search_config = search_app.get("search_config", {})
question = req["question"]
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT)
chat_id = search_config.get("chat_id", "")
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT, chat_id)
gen_conf = search_config.get("llm_setting", {"temperature": 0.9})
prompt = load_prompt("related_question")
ans = chat_mdl.chat(
prompt,
@ -1035,7 +1052,7 @@ Related search terms:
""",
}
],
{"temperature": 0.9},
gen_conf,
)
return get_json_result(data=[re.sub(r"^[0-9]\. ", "", a) for a in ans.split("\n") if re.match(r"^[0-9]\. ", a)])
@ -1083,15 +1100,62 @@ def mindmap():
tenant_id = objs[0].tenant_id
req = request.json
search_id = req.get("search_id", "")
search_config = {}
if search_id:
if search_app := SearchService.get_detail(search_id):
search_config = search_app.get("search_config", {})
kb_ids = req["kb_ids"]
if search_config.get("kb_ids", []):
kb_ids = search_config.get("kb_ids", [])
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
if not e:
return get_error_data_result(message="Knowledgebase not found!")
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING, llm_name=kb.embd_id)
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT)
chat_id = ""
similarity_threshold = 0.3,
vector_similarity_weight = 0.3,
top = 1024,
doc_ids = []
rerank_id = ""
rerank_mdl = None
if search_config:
if search_config.get("chat_id", ""):
chat_id = search_config.get("chat_id", "")
if search_config.get("similarity_threshold", 0.2):
similarity_threshold = search_config.get("similarity_threshold", 0.2)
if search_config.get("vector_similarity_weight", 0.3):
vector_similarity_weight = search_config.get("vector_similarity_weight", 0.3)
if search_config.get("top_k", 1024):
top = search_config.get("top_k", 1024)
if search_config.get("doc_ids", []):
doc_ids = search_config.get("doc_ids", [])
if search_config.get("rerank_id", ""):
rerank_id = search_config.get("rerank_id", "")
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, llm_name=kb.embd_id)
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT, llm_name=chat_id)
if rerank_id:
rerank_mdl = LLMBundle(tenant_id, LLMType.RERANK, rerank_id)
question = req["question"]
ranks = settings.retrievaler.retrieval(question, embd_mdl, kb.tenant_id, kb_ids, 1, 12, 0.3, 0.3, aggs=False, rank_feature=label_question(question, [kb]))
ranks = settings.retrievaler.retrieval(
question=question,
embd_mdl=embd_mdl,
tenant_ids=tenant_id,
kb_ids=kb_ids,
page=1,
page_size=12,
similarity_threshold=similarity_threshold,
vector_similarity_weight=vector_similarity_weight,
top=top,
doc_ids=doc_ids,
aggs=False,
rerank_mdl=rerank_mdl,
rank_feature=label_question(question, [kb]),
)
mindmap = MindMapExtractor(chat_mdl)
mind_map = trio.run(mindmap, [c["content_with_weight"] for c in ranks["chunks"]])
mind_map = mind_map.output

View File

@ -872,7 +872,7 @@ class Search(DataBaseModel):
default={
"kb_ids": [],
"doc_ids": [],
"similarity_threshold": 0.0,
"similarity_threshold": 0.2,
"vector_similarity_weight": 0.3,
"use_kg": False,
# rerank settings

View File

@ -40,7 +40,7 @@ from rag.app.resume import forbidden_select_fields4resume
from rag.app.tag import label_question
from rag.nlp.search import index_name
from rag.prompts import chunks_format, citation_prompt, cross_languages, full_question, kb_prompt, keyword_extraction, message_fit_in
from rag.prompts.prompts import gen_meta_filter
from rag.prompts.prompts import gen_meta_filter, PROMPT_JINJA_ENV, ASK_SUMMARY
from rag.utils import num_tokens_from_string, rmSpace
from rag.utils.tavily_conn import Tavily
@ -687,7 +687,30 @@ def tts(tts_mdl, text):
return binascii.hexlify(bin).decode("utf-8")
def ask(question, kb_ids, tenant_id, chat_llm_name=None):
def ask(question, kb_ids, tenant_id, chat_llm_name=None, search_config={}):
similarity_threshold = 0.1,
vector_similarity_weight = 0.3,
top = 1024,
doc_ids = []
rerank_id = ""
rerank_mdl = None
if search_config:
if search_config.get("kb_ids", []):
kb_ids = search_config.get("kb_ids", [])
if search_config.get("chat_id", ""):
chat_llm_name = search_config.get("chat_id", "")
if search_config.get("similarity_threshold", 0.1):
similarity_threshold = search_config.get("similarity_threshold", 0.1)
if search_config.get("vector_similarity_weight", 0.3):
vector_similarity_weight = search_config.get("vector_similarity_weight", 0.3)
if search_config.get("top_k", 1024):
top = search_config.get("top_k", 1024)
if search_config.get("doc_ids", []):
doc_ids = search_config.get("doc_ids", [])
if search_config.get("rerank_id", ""):
rerank_id = search_config.get("rerank_id", "")
kbs = KnowledgebaseService.get_by_ids(kb_ids)
embedding_list = list(set([kb.embd_id for kb in kbs]))
@ -696,30 +719,34 @@ def ask(question, kb_ids, tenant_id, chat_llm_name=None):
embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, embedding_list[0])
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT, chat_llm_name)
if rerank_id:
rerank_mdl = LLMBundle(tenant_id, LLMType.RERANK, rerank_id)
max_tokens = chat_mdl.max_length
tenant_ids = list(set([kb.tenant_id for kb in kbs]))
kbinfos = retriever.retrieval(question, embd_mdl, tenant_ids, kb_ids, 1, 12, 0.1, 0.3, aggs=False, rank_feature=label_question(question, kbs))
kbinfos = retriever.retrieval(
question = question,
embd_mdl=embd_mdl,
tenant_ids=tenant_ids,
kb_ids=kb_ids,
page=1,
page_size=12,
similarity_threshold=similarity_threshold,
vector_similarity_weight=vector_similarity_weight,
top=top,
doc_ids=doc_ids,
aggs=False,
rerank_mdl=rerank_mdl,
rank_feature=label_question(question, kbs)
)
knowledges = kb_prompt(kbinfos, max_tokens)
prompt = """
Role: You're a smart assistant. Your name is Miss R.
Task: Summarize the information from knowledge bases and answer user's question.
Requirements and restriction:
- DO NOT make things up, especially for numbers.
- If the information from knowledge is irrelevant with user's question, JUST SAY: Sorry, no relevant information provided.
- Answer with markdown format text.
- Answer in language of user's question.
- DO NOT make things up, especially for numbers.
sys_prompt = PROMPT_JINJA_ENV.from_string(ASK_SUMMARY).render(knowledge="\n".join(knowledges))
### Information from knowledge bases
%s
The above is information from knowledge bases.
""" % "\n".join(knowledges)
msg = [{"role": "user", "content": question}]
def decorate_answer(answer):
nonlocal knowledges, kbinfos, prompt
nonlocal knowledges, kbinfos, sys_prompt
answer, idx = retriever.insert_citations(answer, [ck["content_ltks"] for ck in kbinfos["chunks"]], [ck["vector"] for ck in kbinfos["chunks"]], embd_mdl, tkweight=0.7, vtweight=0.3)
idx = set([kbinfos["chunks"][int(i)]["doc_id"] for i in idx])
recall_docs = [d for d in kbinfos["doc_aggs"] if d["doc_id"] in idx]
@ -737,7 +764,7 @@ def ask(question, kb_ids, tenant_id, chat_llm_name=None):
return {"answer": answer, "reference": refs}
answer = ""
for ans in chat_mdl.chat_streamly(prompt, msg, {"temperature": 0.1}):
for ans in chat_mdl.chat_streamly(sys_prompt, msg, {"temperature": 0.1}):
answer = ans
yield {"answer": answer, "reference": {}}
yield decorate_answer(answer)

View File

@ -59,11 +59,14 @@ def update_progress():
if redis_lock.acquire():
DocumentService.update_progress()
redis_lock.release()
stop_event.wait(6)
except Exception:
logging.exception("update_progress exception")
finally:
redis_lock.release()
try:
redis_lock.release()
except Exception:
logging.exception("update_progress exception")
stop_event.wait(6)
def signal_handler(sig, frame):
logging.info("Received interrupt signal, shutting down...")

View File

@ -482,9 +482,10 @@ class VoyageRerank(Base):
self.model_name = model_name
def similarity(self, query: str, texts: list):
rank = np.zeros(len(texts), dtype=float)
if not texts:
return rank, 0
return np.array([]), 0
rank = np.zeros(len(texts), dtype=float)
res = self.client.rerank(query=query, documents=texts, model=self.model_name, top_k=len(texts))
try:
for r in res.results:

View File

@ -611,10 +611,6 @@ def naive_merge_with_images(texts, images, chunk_token_num=128, delimiter="\n。
if re.match(f"^{dels}$", sub_sec):
continue
add_chunk(sub_sec, image)
for img in images:
if isinstance(img, Image.Image):
img.close()
return cks, result_images

View File

@ -0,0 +1,14 @@
Role: You're a smart assistant. Your name is Miss R.
Task: Summarize the information from knowledge bases and answer user's question.
Requirements and restriction:
- DO NOT make things up, especially for numbers.
- If the information from knowledge is irrelevant with user's question, JUST SAY: Sorry, no relevant information provided.
- Answer with markdown format text.
- Answer in language of user's question.
- DO NOT make things up, especially for numbers.
### Information from knowledge bases
{{ knowledge }}
The above is information from knowledge bases.

View File

@ -150,6 +150,7 @@ REFLECT = load_prompt("reflect")
SUMMARY4MEMORY = load_prompt("summary4memory")
RANK_MEMORY = load_prompt("rank_memory")
META_FILTER = load_prompt("meta_filter")
ASK_SUMMARY = load_prompt("ask_summary")
PROMPT_JINJA_ENV = jinja2.Environment(autoescape=False, trim_blocks=True, lstrip_blocks=True)

View File

@ -38,9 +38,12 @@ export const LargeModelFilterFormSchema = {
llm_filter: z.string().optional(),
};
type LargeModelFormFieldProps = Pick<NextInnerLLMSelectProps, 'showTTSModel'>;
type LargeModelFormFieldProps = Pick<
NextInnerLLMSelectProps,
'showSpeech2TextModel'
>;
export function LargeModelFormField({
showTTSModel,
showSpeech2TextModel: showTTSModel,
}: LargeModelFormFieldProps) {
const form = useFormContext();
const { t } = useTranslation();
@ -91,7 +94,7 @@ export function LargeModelFormField({
<NextLLMSelect
{...field}
filter={filter}
showTTSModel={showTTSModel}
showSpeech2TextModel={showTTSModel}
/>
</FormControl>
</section>

View File

@ -13,18 +13,18 @@ export interface NextInnerLLMSelectProps {
onChange?: (value: string) => void;
disabled?: boolean;
filter?: string;
showTTSModel?: boolean;
showSpeech2TextModel?: boolean;
}
const NextInnerLLMSelect = forwardRef<
React.ElementRef<typeof SelectPrimitive.Trigger>,
NextInnerLLMSelectProps
>(({ value, disabled, filter, showTTSModel = false }, ref) => {
>(({ value, disabled, filter, showSpeech2TextModel = false }, ref) => {
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
const ttsModel = useMemo(() => {
return showTTSModel ? [LlmModelType.TTS] : [];
}, [showTTSModel]);
return showSpeech2TextModel ? [LlmModelType.Speech2text] : [];
}, [showSpeech2TextModel]);
const modelTypes = useMemo(() => {
if (filter === LlmModelType.Chat) {

View File

@ -24,7 +24,7 @@
.messageText {
.chunkText();
.messageTextBase();
background-color: #e6f4ff;
// background-color: #e6f4ff;
word-break: break-word;
}
.messageTextDark {

View File

@ -9,6 +9,7 @@ import {
useFetchDocumentThumbnailsByIds,
} from '@/hooks/document-hooks';
import { IRegenerateMessage, IRemoveMessageById } from '@/hooks/logic-hooks';
import { cn } from '@/lib/utils';
import { IMessage } from '@/pages/chat/interface';
import MarkdownContent from '@/pages/chat/markdown-content';
import { Avatar, Flex, Space } from 'antd';
@ -129,13 +130,14 @@ const MessageItem = ({
{/* <b>{isAssistant ? '' : nickname}</b> */}
</Space>
<div
className={
className={cn(
isAssistant
? theme === 'dark'
? styles.messageTextDark
: styles.messageText
: styles.messageUserText
}
: styles.messageUserText,
{ '!bg-bg-card': !isAssistant },
)}
>
<MarkdownContent
loading={loading}

View File

@ -369,22 +369,28 @@ export const useScrollToBottom = (
return () => container.removeEventListener('scroll', handleScroll);
}, [containerRef, checkIfUserAtBottom]);
// Imperative scroll function
const scrollToBottom = useCallback(() => {
if (containerRef?.current) {
const container = containerRef.current;
container.scrollTo({
top: container.scrollHeight - container.clientHeight,
behavior: 'smooth',
});
}
}, [containerRef]);
useEffect(() => {
if (!messages) return;
if (!containerRef?.current) return;
requestAnimationFrame(() => {
setTimeout(() => {
if (isAtBottomRef.current) {
ref.current?.scrollIntoView({ behavior: 'smooth' });
scrollToBottom();
}
}, 30);
}, 100);
});
}, [messages, containerRef]);
// Imperative scroll function
const scrollToBottom = useCallback(() => {
ref.current?.scrollIntoView({ behavior: 'smooth' });
}, []);
}, [messages, containerRef, scrollToBottom]);
return { scrollRef: ref, isAtBottom, scrollToBottom };
};

View File

@ -7,4 +7,5 @@ export interface IFeedbackRequestBody {
export interface IAskRequestBody {
question: string;
kb_ids: string[];
search_id?: string;
}

View File

@ -1,6 +1,5 @@
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import { useTheme } from '@/components/theme-provider';
import { Badge } from '@/components/ui/badge';
import { Button } from '@/components/ui/button';
import {
DropdownMenu,
@ -163,9 +162,10 @@ export function Header() {
className="size-8 cursor-pointer"
onClick={navigateToProfile}
></RAGFlowAvatar>
<Badge className="h-5 w-8 absolute font-normal p-0 justify-center -right-8 -top-2 text-bg-base bg-gradient-to-l from-[#42D7E7] to-[#478AF5]">
{/* Temporarily hidden */}
{/* <Badge className="h-5 w-8 absolute font-normal p-0 justify-center -right-8 -top-2 text-bg-base bg-gradient-to-l from-[#42D7E7] to-[#478AF5]">
Pro
</Badge>
</Badge> */}
</div>
</div>
</section>

View File

@ -5,6 +5,7 @@ export default {
deleteModalTitle: 'Are you sure to delete this item?',
ok: 'Yes',
cancel: 'No',
no: 'No',
total: 'Total',
rename: 'Rename',
name: 'Name',
@ -575,6 +576,8 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
automatic: 'Automatic',
manual: 'Manual',
},
cancel: 'Cancel',
chatSetting: 'Chat setting',
},
setting: {
profile: 'Profile',
@ -1437,6 +1440,8 @@ This delimiter is used to split the input text into several text pieces echo of
showQueryMindmap: 'Show Query Mindmap',
embedApp: 'Embed App',
relatedSearch: 'Related Search',
okText: 'Save',
cancelText: 'Cancel',
},
},
};

View File

@ -569,6 +569,8 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
automatic: '自动',
manual: '手动',
},
cancel: '取消',
chatSetting: '聊天设置',
},
setting: {
profile: '概要',
@ -1341,6 +1343,8 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
showQueryMindmap: '显示查询思维导图',
embedApp: '嵌入网站',
relatedSearch: '相关搜索',
okText: '保存',
cancelText: '返回',
},
},
};

View File

@ -242,7 +242,7 @@ export function InnerNextStepDropdown({
}}
onClick={(e) => e.stopPropagation()}
>
<div className="w-[300px] font-semibold bg-white border border-border rounded-md shadow-lg">
<div className="w-[300px] font-semibold bg-bg-base border border-border rounded-md shadow-lg">
<div className="px-3 py-2 border-b border-border">
<div className="text-sm font-medium">Next Step</div>
</div>

View File

@ -128,7 +128,7 @@ function AgentForm({ node }: INextOperatorForm) {
<FormWrapper>
<FormContainer>
{isSubAgent && <DescriptionField></DescriptionField>}
<LargeModelFormField showTTSModel></LargeModelFormField>
<LargeModelFormField showSpeech2TextModel></LargeModelFormField>
{findLlmByUuid(llmId)?.model_type === LlmModelType.Image2text && (
<QueryVariable
name="visual_files_var"

View File

@ -158,8 +158,9 @@ const ToolTimelineItem = ({
</span>
)}
<span className="text-text-secondary text-xs">
{/* 0:00
{x.data.elapsed_time?.toString().slice(0, 6)} */}
{/* 0:00*/}
{tool.elapsed_time?.toString().slice(0, 6) || ''}
{tool.elapsed_time ? 's' : ''}
</span>
<span
className={cn(

View File

@ -153,6 +153,22 @@ export const WorkFlowTimeline = ({
}, []);
}, [currentEventListWithoutMessage, sendLoading]);
const getElapsedTime = (nodeId: string) => {
if (nodeId === 'begin') {
return '';
}
const data = currentEventListWithoutMessage?.find((x) => {
return (
x.data.component_id === nodeId &&
x.event === MessageEventType.NodeFinished
);
});
if (!data || data?.data.elapsed_time < 0.000001) {
return '';
}
return data?.data.elapsed_time || '';
};
const hasTrace = useCallback(
(componentId: string) => {
if (Array.isArray(traceData)) {
@ -272,7 +288,10 @@ export const WorkFlowTimeline = ({
nodeLabel)}
</span>
<span className="text-text-secondary text-xs">
{x.data.elapsed_time?.toString().slice(0, 6)}
{getElapsedTime(x.data.component_id)
.toString()
.slice(0, 6)}
{getElapsedTime(x.data.component_id) ? 's' : ''}
</span>
<span
className={cn(

View File

@ -1,4 +1,4 @@
import { ButtonLoading } from '@/components/ui/button';
import { Button, ButtonLoading } from '@/components/ui/button';
import { Form } from '@/components/ui/form';
import { Separator } from '@/components/ui/separator';
import { useFetchDialog, useSetDialog } from '@/hooks/use-chat-request';
@ -11,6 +11,7 @@ import { zodResolver } from '@hookform/resolvers/zod';
import { X } from 'lucide-react';
import { useEffect } from 'react';
import { useForm } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import { useParams } from 'umi';
import { z } from 'zod';
import { DatasetMetadata } from '../../constants';
@ -25,6 +26,7 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
const { data } = useFetchDialog();
const { setDialog, loading } = useSetDialog();
const { id } = useParams();
const { t } = useTranslation();
type FormSchemaType = z.infer<typeof formSchema>;
@ -89,25 +91,26 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
return (
<section className="p-5 w-[440px] border-l">
<div className="flex justify-between items-center text-base pb-2">
Chat Settings
{t('chat.chatSetting')}
<X className="size-4 cursor-pointer" onClick={switchSettingVisible} />
</div>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit, onInvalid)}>
<section className="space-y-6 overflow-auto max-h-[85vh] pr-4">
<section className="space-y-6 overflow-auto max-h-[82vh] pr-4">
<ChatBasicSetting></ChatBasicSetting>
<Separator />
<ChatPromptEngine></ChatPromptEngine>
<Separator />
<ChatModelSettings></ChatModelSettings>
</section>
<ButtonLoading
className="w-full my-4"
type="submit"
loading={loading}
>
Update
</ButtonLoading>
<div className="space-x-5 text-right">
<Button variant={'outline'} onClick={switchSettingVisible}>
{t('chat.cancel')}
</Button>
<ButtonLoading className=" my-4" type="submit" loading={loading}>
{t('common.save')}
</ButtonLoading>
</div>
</form>
</Form>
</section>

View File

@ -23,7 +23,7 @@ interface IProps {
export function SingleChatBox({ controller }: IProps) {
const {
value,
// scrollRef,
scrollRef,
messageContainerRef,
sendLoading,
derivedMessages,
@ -47,7 +47,7 @@ export function SingleChatBox({ controller }: IProps) {
return (
<section className="flex flex-col p-5 h-full">
<div ref={messageContainerRef} className="flex-1 overflow-auto min-h-0">
<div className="w-full">
<div className="w-full pr-5">
{derivedMessages?.map((message, i) => {
return (
<MessageItem
@ -77,7 +77,7 @@ export function SingleChatBox({ controller }: IProps) {
);
})}
</div>
{/* <div ref={scrollRef} /> */}
<div ref={scrollRef} />
</div>
<NextMessageInput
disabled={disabled}

View File

@ -100,7 +100,7 @@ export default function Chat() {
{t('common.embedIntoSite')}
</Button>
</PageHeader>
<div className="flex flex-1 min-h-0">
<div className="flex flex-1 min-h-0 pb-9">
<Sessions
hasSingleChatBox={hasSingleChatBox}
handleConversationCardClick={handleConversationCardClick}

View File

@ -11,6 +11,7 @@ import {
import { cn } from '@/lib/utils';
import { PanelLeftClose, PanelRightClose, Plus } from 'lucide-react';
import { useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { useHandleClickConversationCard } from '../hooks/use-click-card';
import { useSelectDerivedConversationList } from '../hooks/use-select-conversation-list';
import { ConversationDropdown } from './conversation-dropdown';
@ -24,6 +25,7 @@ export function Sessions({
handleConversationCardClick,
switchSettingVisible,
}: SessionProps) {
const { t } = useTranslation();
const {
list: conversationList,
addTemporaryConversation,
@ -102,8 +104,9 @@ export function Sessions({
className="w-full"
onClick={switchSettingVisible}
disabled={!hasSingleChatBox}
variant={'outline'}
>
Chat Settings
{t('chat.chatSetting')}
</Button>
</div>
</section>

View File

@ -1,4 +1,3 @@
import { useFetchTokenListBeforeOtherStep } from '@/components/embed-dialog/use-show-embed-dialog';
import HightLightMarkdown from '@/components/highlight-markdown';
import { Modal } from '@/components/ui/modal/modal';
import { RAGFlowSelect } from '@/components/ui/select';
@ -9,7 +8,7 @@ import {
} from '@/constants/common';
import { useTranslate } from '@/hooks/common-hooks';
import { message } from 'antd';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useMemo, useState } from 'react';
type IEmbedAppModalProps = {
open: any;
@ -18,17 +17,13 @@ type IEmbedAppModalProps = {
from: string;
setOpen: (e: any) => void;
tenantId: string;
beta?: string;
};
const EmbedAppModal = (props: IEmbedAppModalProps) => {
const { t } = useTranslate('search');
const { open, setOpen, token = '', from, url, tenantId } = props;
const { beta, handleOperate } = useFetchTokenListBeforeOtherStep();
useEffect(() => {
if (open && !beta) {
handleOperate();
}
}, [handleOperate, open, beta]);
const { open, setOpen, token = '', from, url, tenantId, beta = '' } = props;
const [hideAvatar, setHideAvatar] = useState(false);
const [locale, setLocale] = useState('');

View File

@ -234,7 +234,10 @@ export const useTestRetrieval = (
setSelectedDocumentIds,
};
};
export const useFetchRelatedQuestions = (tenantId?: string) => {
export const useFetchRelatedQuestions = (
tenantId?: string,
searchId?: string,
) => {
const [searchParams] = useSearchParams();
const shared_id = searchParams.get('shared_id');
const retrievalTestFunc = shared_id
@ -251,6 +254,7 @@ export const useFetchRelatedQuestions = (tenantId?: string) => {
const { data } = await retrievalTestFunc({
question,
tenant_id: tenantId,
search_id: searchId,
});
return data?.data ?? [];
@ -260,7 +264,12 @@ export const useFetchRelatedQuestions = (tenantId?: string) => {
return { data, loading, fetchRelatedQuestions: mutateAsync };
};
export const useSendQuestion = (kbIds: string[], tenantId?: string) => {
export const useSendQuestion = (
kbIds: string[],
tenantId?: string,
searchId: string = '',
related_search: boolean = false,
) => {
const { sharedId } = useGetSharedSearchParams();
const { send, answer, done, stopOutputMessage } = useSendMessageWithSse(
sharedId ? api.askShare : api.ask,
@ -271,7 +280,7 @@ export const useSendQuestion = (kbIds: string[], tenantId?: string) => {
const [sendingLoading, setSendingLoading] = useState(false);
const [currentAnswer, setCurrentAnswer] = useState({} as IAnswer);
const { fetchRelatedQuestions, data: relatedQuestions } =
useFetchRelatedQuestions(tenantId);
useFetchRelatedQuestions(tenantId, searchId);
const [searchStr, setSearchStr] = useState<string>('');
const [isFirstRender, setIsFirstRender] = useState(true);
const [selectedDocumentIds, setSelectedDocumentIds] = useState<string[]>([]);
@ -286,7 +295,7 @@ export const useSendQuestion = (kbIds: string[], tenantId?: string) => {
setIsFirstRender(false);
setCurrentAnswer({} as IAnswer);
setSendingLoading(true);
send({ kb_ids: kbIds, question: q, tenantId });
send({ kb_ids: kbIds, question: q, tenantId, search_id: searchId });
testChunk({
kb_id: kbIds,
highlight: true,
@ -295,7 +304,9 @@ export const useSendQuestion = (kbIds: string[], tenantId?: string) => {
size: pagination.pageSize,
});
fetchRelatedQuestions(q);
if (related_search) {
fetchRelatedQuestions(q);
}
},
[
send,
@ -305,6 +316,8 @@ export const useSendQuestion = (kbIds: string[], tenantId?: string) => {
setPagination,
pagination.pageSize,
tenantId,
searchId,
related_search,
],
);
@ -408,7 +421,12 @@ export const useSearching = ({
isSearchStrEmpty,
setSearchStr,
stopOutputMessage,
} = useSendQuestion(searchData.search_config.kb_ids, tenantId as string);
} = useSendQuestion(
searchData.search_config.kb_ids,
tenantId as string,
searchData.id,
searchData.search_config.related_search,
);
const handleSearchStrChange = useCallback(
(value: string) => {
@ -435,15 +453,20 @@ export const useSearching = ({
showMindMapModal,
mindMapLoading,
mindMap,
} = useShowMindMapDrawer(searchData.search_config.kb_ids, searchStr);
} = useShowMindMapDrawer(
searchData.search_config.kb_ids,
searchStr,
searchData.id,
);
const { chunks, total } = useSelectTestingResult();
const handleSearch = useCallback(
(value: string) => {
sendQuestion(value);
setSearchStr?.(value);
hideMindMapModal();
},
[setSearchStr, sendQuestion],
[setSearchStr, sendQuestion, hideMindMapModal],
);
const { pagination, setPagination } = useGetPaginationWithRouter();

View File

@ -1,3 +1,4 @@
import { useFetchTokenListBeforeOtherStep } from '@/components/embed-dialog/use-show-embed-dialog';
import { PageHeader } from '@/components/page-header';
import {
Breadcrumb,
@ -10,7 +11,10 @@ import {
import { Button } from '@/components/ui/button';
import { SharedFrom } from '@/constants/chat';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { useFetchTenantInfo } from '@/hooks/user-setting-hooks';
import {
useFetchTenantInfo,
useFetchUserInfo,
} from '@/hooks/user-setting-hooks';
import { Send, Settings } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
@ -29,11 +33,13 @@ export default function SearchPage() {
const { navigateToSearchList } = useNavigatePage();
const [isSearching, setIsSearching] = useState(false);
const { data: SearchData } = useFetchSearchDetail();
const { beta, handleOperate } = useFetchTokenListBeforeOtherStep();
const [openSetting, setOpenSetting] = useState(false);
const [openEmbed, setOpenEmbed] = useState(false);
const [searchText, setSearchText] = useState('');
const { data: tenantInfo } = useFetchTenantInfo();
const { data: userInfo } = useFetchUserInfo();
const tenantId = tenantInfo.tenant_id;
const { t } = useTranslation();
const { openSetting: checkOpenSetting } = useCheckSettings(
@ -75,6 +81,7 @@ export default function SearchPage() {
isSearching={isSearching}
searchText={searchText}
setSearchText={setSearchText}
userInfo={userInfo}
/>
</div>
)}
@ -105,6 +112,7 @@ export default function SearchPage() {
token={SearchData?.id as string}
from={SharedFrom.Search}
tenantId={tenantId}
beta={beta}
/>
}
{
@ -121,7 +129,14 @@ export default function SearchPage() {
<div className="absolute right-5 top-12 ">
<Button
className="bg-text-primary text-bg-base border-b-[#00BEB4] border-b-2"
onClick={() => setOpenEmbed(!openEmbed)}
onClick={() => {
handleOperate().then((res) => {
console.log(res, 'res');
if (res) {
setOpenEmbed(!openEmbed);
}
});
}}
>
<Send />
<div>{t('search.embedApp')}</div>

View File

@ -4,7 +4,7 @@ import { IReference, IReferenceChunk } from '@/interfaces/database/chat';
import { getExtension } from '@/utils/document-util';
import { InfoCircleOutlined } from '@ant-design/icons';
import DOMPurify from 'dompurify';
import { useCallback, useEffect, useMemo } from 'react';
import { memo, useCallback, useEffect, useMemo } from 'react';
import Markdown from 'react-markdown';
import reactStringReplace from 'react-string-replace';
import SyntaxHighlighter from 'react-syntax-highlighter';
@ -82,18 +82,18 @@ const MarkdownContent = ({
(
documentId: string,
chunk: IReferenceChunk,
isPdf: boolean,
documentUrl?: string,
// isPdf: boolean,
// documentUrl?: string,
) =>
() => {
if (!isPdf) {
if (!documentUrl) {
return;
}
window.open(documentUrl, '_blank');
} else {
clickDocumentButton?.(documentId, chunk);
}
// if (!isPdf) {
// if (!documentUrl) {
// return;
// }
// window.open(documentUrl, '_blank');
// } else {
clickDocumentButton?.(documentId, chunk);
// }
},
[clickDocumentButton],
);
@ -144,7 +144,6 @@ const MarkdownContent = ({
const getPopoverContent = useCallback(
(chunkIndex: number) => {
const {
documentUrl,
fileThumbnail,
fileExtension,
imageId,
@ -198,8 +197,8 @@ const MarkdownContent = ({
onClick={handleDocumentButtonClick(
documentId,
chunkItem,
fileExtension === 'pdf',
documentUrl,
// fileExtension === 'pdf',
// documentUrl,
)}
>
{document?.doc_name}
@ -218,8 +217,7 @@ const MarkdownContent = ({
let replacedText = reactStringReplace(text, currentReg, (match, i) => {
const chunkIndex = getChunkIndex(match);
const { documentUrl, fileExtension, imageId, chunkItem, documentId } =
getReferenceInfo(chunkIndex);
const { imageId, chunkItem, documentId } = getReferenceInfo(chunkIndex);
const docType = chunkItem?.doc_type;
@ -232,8 +230,8 @@ const MarkdownContent = ({
? handleDocumentButtonClick(
documentId,
chunkItem,
fileExtension === 'pdf',
documentUrl,
// fileExtension === 'pdf',
// documentUrl,
)
: () => {}
}
@ -243,7 +241,9 @@ const MarkdownContent = ({
<PopoverTrigger>
<InfoCircleOutlined className={styles.referenceIcon} />
</PopoverTrigger>
<PopoverContent>{getPopoverContent(chunkIndex)}</PopoverContent>
<PopoverContent className="!w-fit">
{getPopoverContent(chunkIndex)}
</PopoverContent>
</Popover>
);
});
@ -292,4 +292,4 @@ const MarkdownContent = ({
);
};
export default MarkdownContent;
export default memo(MarkdownContent);

View File

@ -27,7 +27,7 @@ const MindMapDrawer = ({ data, hideModal, visible, loading }: IProps) => {
/>
</div>
{loading && (
<div className="absolute top-48">
<div className=" rounded-lg p-4 w-full h-full">
<Progress value={percent} className="h-1 flex-1 min-w-10" />
</div>
)}

View File

@ -1,5 +1,5 @@
import { Input } from '@/components/originui/input';
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
import { IUserInfo } from '@/interfaces/database/user-setting';
import { cn } from '@/lib/utils';
import { Search } from 'lucide-react';
import { Dispatch, SetStateAction } from 'react';
@ -12,13 +12,15 @@ export default function SearchPage({
setIsSearching,
searchText,
setSearchText,
userInfo,
}: {
isSearching: boolean;
setIsSearching: Dispatch<SetStateAction<boolean>>;
searchText: string;
setSearchText: Dispatch<SetStateAction<string>>;
userInfo?: IUserInfo;
}) {
const { data: userInfo } = useFetchUserInfo();
// const { data: userInfo } = useFetchUserInfo();
const { t } = useTranslation();
return (
<section className="relative w-full flex transition-all justify-center items-center mt-32">
@ -38,7 +40,11 @@ export default function SearchPage({
<>
<p className="mb-4 transition-opacity">👋 Hi there</p>
<p className="mb-10 transition-opacity">
{t('search.welcomeBack')}, {userInfo?.nickname}
{userInfo && (
<>
{t('search.welcomeBack')}, {userInfo.nickname}
</>
)}
</p>
</>
)}

View File

@ -18,6 +18,7 @@ import {
} from '@/components/ui/multi-select';
import { RAGFlowSelect } from '@/components/ui/select';
import { Switch } from '@/components/ui/switch';
import { Textarea } from '@/components/ui/textarea';
import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
import {
useComposeLlmOptionsByModelTypes,
@ -64,7 +65,7 @@ const SearchSettingFormSchema = z
description: z.string().optional(),
search_config: z.object({
kb_ids: z.array(z.string()).min(1, 'At least one dataset is required'),
vector_similarity_weight: z.number().min(0).max(100),
vector_similarity_weight: z.number().min(0).max(1),
web_search: z.boolean(),
similarity_threshold: z.number(),
use_kg: z.boolean(),
@ -128,7 +129,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
: 0.3) || 0.3,
web_search: search_config?.web_search || false,
doc_ids: [],
similarity_threshold: 0.0,
similarity_threshold: search_config?.similarity_threshold || 0.2,
use_kg: false,
rerank_id: search_config?.rerank_id || '',
use_rerank: search_config?.rerank_id ? true : false,
@ -417,7 +418,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
<FormItem>
<FormLabel>{t('search.description')}</FormLabel>
<FormControl>
<Input
<Textarea
placeholder="You are an intelligent assistant."
{...field}
onFocus={() => {
@ -466,7 +467,41 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
</FormItem>
)}
/>
<FormField
control={formMethods.control}
name="search_config.similarity_threshold"
render={({ field }) => (
<FormItem>
<FormLabel>Similarity Threshold</FormLabel>
<div
className={cn(
'flex items-center gap-4 justify-between',
className,
)}
>
<FormControl>
<SingleFormSlider
{...field}
max={1}
min={0}
step={0.01}
></SingleFormSlider>
</FormControl>
<FormControl>
<Input
type={'number'}
className="h-7 w-20 bg-bg-card"
max={1}
min={0}
step={0.01}
{...field}
></Input>
</FormControl>
</div>
<FormMessage />
</FormItem>
)}
/>
{/* Keyword Similarity Weight */}
<FormField
control={formMethods.control}
@ -474,7 +509,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
render={({ field }) => (
<FormItem>
<FormLabel>
<span className="text-destructive mr-1"> *</span>Keyword
<span className="text-destructive mr-1"> *</span>Vector
Similarity Weight
</FormLabel>
<div
@ -608,7 +643,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
)}
{/* Feature Controls */}
<FormField
{/* <FormField
control={formMethods.control}
name="search_config.web_search"
render={({ field }) => (
@ -622,7 +657,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
<FormLabel>{t('search.enableWebSearch')}</FormLabel>
</FormItem>
)}
/>
/> */}
<FormField
control={formMethods.control}
@ -666,9 +701,9 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
setOpen(false);
}}
>
{t('modal.cancelText')}
{t('search.cancelText')}
</Button>
<Button type="submit">{t('modal.okText')}</Button>
<Button type="submit">{t('search.okText')}</Button>
</div>
</form>
</Form>

View File

@ -276,7 +276,7 @@ export default function SearchingView({
</div>
{mindMapVisible && (
<div className="flex-1 h-[88dvh] z-30 ml-8 mt-5">
<div className="flex-1 h-[88dvh] z-30 ml-32 mt-5">
<MindMapDrawer
visible={mindMapVisible}
hideModal={hideMindMapModal}

View File

@ -1,26 +1,28 @@
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import i18n from '@/locales/config';
import { useEffect } from 'react';
import { useEffect, useState } from 'react';
import {
ISearchAppDetailProps,
useFetchSearchDetail,
} from '../../next-searches/hooks';
import { useGetSharedSearchParams, useSearching } from '../hooks';
import '../index.less';
import SearchingView from '../search-view';
export default function SearchingPage() {
import SearchHome from '../search-home';
import SearchingPage from '../searching';
export default function ShareSeachPage() {
const { tenantId, locale, visibleAvatar } = useGetSharedSearchParams();
const {
data: searchData = {
search_config: { kb_ids: [] },
} as unknown as ISearchAppDetailProps,
} = useFetchSearchDetail(tenantId as string);
const [isSearching, setIsSearching] = useState(false);
const [searchText, setSearchText] = useState('');
const searchingParam = useSearching({
data: searchData,
});
useEffect(() => {
console.log('locale', locale, i18n.language);
if (locale && i18n.language !== locale) {
i18n.changeLanguage(locale);
}
@ -28,15 +30,36 @@ export default function SearchingPage() {
return (
<>
{visibleAvatar && (
<div className="flex justify-start items-center gap-1 mx-6 mt-6 text-text-primary">
<div className="flex justify-start items-center gap-2 mx-6 mt-6 text-text-primary">
<RAGFlowAvatar
className="size-6"
avatar={searchData.avatar}
name={searchData.name}
></RAGFlowAvatar>
<div>{searchData.name}</div>
</div>
)}
<SearchingView {...searchingParam} searchData={searchData} />;
{/* <SearchingView {...searchingParam} searchData={searchData} />; */}
{!isSearching && (
<div className="animate-fade-in-down">
<SearchHome
setIsSearching={setIsSearching}
isSearching={isSearching}
searchText={searchText}
setSearchText={setSearchText}
/>
</div>
)}
{isSearching && (
<div className="animate-fade-in-up">
<SearchingPage
setIsSearching={setIsSearching}
searchText={searchText}
setSearchText={setSearchText}
data={searchData as ISearchAppDetailProps}
/>
</div>
)}
</>
);
}

View File

@ -6,7 +6,6 @@ import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useParams, useSearchParams } from 'umi';
interface CreateSearchProps {
name: string;
description?: string;
@ -122,40 +121,6 @@ interface DeleteSearchResponse {
message: string;
}
export const useDeleteSearch = () => {
const { t } = useTranslation();
const {
data,
isError,
mutateAsync: deleteSearchMutation,
} = useMutation<DeleteSearchResponse, Error, DeleteSearchProps>({
mutationKey: ['deleteSearch'],
mutationFn: async (props) => {
const response = await searchService.deleteSearch(props);
if (response.code !== 0) {
throw new Error(response.message || 'Failed to delete search');
}
return response;
},
onSuccess: () => {
message.success(t('message.deleted'));
},
onError: (error) => {
message.error(t('message.error', { error: error.message }));
},
});
const deleteSearch = useCallback(
(props: DeleteSearchProps) => {
return deleteSearchMutation(props);
},
[deleteSearchMutation],
);
return { data, isError, deleteSearch };
};
export interface IllmSettingProps {
llm_id: string;
parameter: string;
@ -237,6 +202,42 @@ export const useFetchSearchDetail = (tenantId?: string) => {
return { data: data?.data, isLoading, isError };
};
export const useDeleteSearch = () => {
const { t } = useTranslation();
const queryClient = useQueryClient();
const {
data,
isError,
mutateAsync: deleteSearchMutation,
} = useMutation<DeleteSearchResponse, Error, DeleteSearchProps>({
mutationKey: ['deleteSearch'],
mutationFn: async (props) => {
const { data: response } = await searchService.deleteSearch(props);
if (response.code !== 0) {
throw new Error(response.message || 'Failed to delete search');
}
queryClient.invalidateQueries({ queryKey: ['searchList'] });
return response;
},
onSuccess: () => {
message.success(t('message.deleted'));
},
onError: (error) => {
message.error(t('message.error', { error: error.message }));
},
});
const deleteSearch = useCallback(
(props: DeleteSearchProps) => {
return deleteSearchMutation(props);
},
[deleteSearchMutation],
);
return { data, isError, deleteSearch };
};
export type IUpdateSearchProps = Omit<ISearchAppDetailProps, 'id'> & {
search_id: string;
};

View File

@ -217,7 +217,11 @@ export const useTestRetrieval = (
};
};
export const useShowMindMapDrawer = (kbIds: string[], question: string) => {
export const useShowMindMapDrawer = (
kbIds: string[],
question: string,
searchId = '',
) => {
const { visible, showModal, hideModal } = useSetModalState();
const ref = useRef<any>();
@ -228,7 +232,7 @@ export const useShowMindMapDrawer = (kbIds: string[], question: string) => {
} = useSearchFetchMindMap();
const handleShowModal = useCallback(() => {
const searchParams = { question: trim(question), kb_ids: kbIds };
const searchParams = { question: trim(question), kb_ids: kbIds, searchId };
if (
!isEmpty(searchParams.question) &&
!isEqual(searchParams, ref.current)
@ -237,7 +241,7 @@ export const useShowMindMapDrawer = (kbIds: string[], question: string) => {
fetchMindMap(searchParams);
}
showModal();
}, [fetchMindMap, showModal, question, kbIds]);
}, [fetchMindMap, showModal, question, kbIds, searchId]);
return {
mindMap,