Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-02-05 18:15:06 +08:00)

Compare commits: a95f22fa88 ... 8e6ddd7c1b (12 commits)

- 8e6ddd7c1b
- d1bc7ad2ee
- 321474fb97
- ea89e4e0c6
- 9e31631d8f
- 712d537d66
- bd4eb19393
- 02efab7c11
- 8ce129bc51
- d5a44e913d
- 1444de981c
- bd76b8ff1a

.github/workflows/tests.yml (vendored, 12 changes)
@@ -209,7 +209,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
+source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api | tee es_sdk_test.log

 - name: Run frontend api tests against Elasticsearch
 run: |
@@ -218,7 +218,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py
+source .venv/bin/activate && pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py | tee es_api_test.log

 - name: Run http api tests against Elasticsearch
 run: |
@@ -227,7 +227,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
+source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api | tee es_http_api_test.log

 - name: Stop ragflow:nightly
 if: always() # always run this step even if previous steps failed
@@ -247,7 +247,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api > infinity_sdk_test.log
+source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api | tee infinity_sdk_test.log

 - name: Run frontend api tests against Infinity
 run: |
@@ -256,7 +256,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py > infinity_api_test.log
+source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py | tee infinity_api_test.log

 - name: Run http api tests against Infinity
 run: |
@@ -265,7 +265,7 @@ jobs:
 echo "Waiting for service to be available..."
 sleep 5
 done
-source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api > infinity_http_api_test.log
+source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api | tee infinity_http_api_test.log

 - name: Stop ragflow:nightly
 if: always() # always run this step even if previous steps failed
@@ -540,6 +540,8 @@ class Canvas(Graph):
 cite = re.search(r"\[ID:[ 0-9]+\]", cpn_obj.output("content"))

 message_end = {}
+if cpn_obj.get_param("status"):
+message_end["status"] = cpn_obj.get_param("status")
 if isinstance(cpn_obj.output("attachment"), dict):
 message_end["attachment"] = cpn_obj.output("attachment")
 if cite:
@@ -29,8 +29,8 @@ from api.db.services.llm_service import LLMBundle
 from api.db.services.tenant_llm_service import TenantLLMService
 from api.db.services.mcp_server_service import MCPServerService
 from common.connection_utils import timeout
-from rag.prompts.generator import next_step_async, COMPLETE_TASK, analyze_task_async, \
-citation_prompt, reflect_async, kb_prompt, citation_plus, full_question, message_fit_in, structured_output_prompt
+from rag.prompts.generator import next_step_async, COMPLETE_TASK, \
+citation_prompt, kb_prompt, citation_plus, full_question, message_fit_in, structured_output_prompt
 from common.mcp_tool_call_conn import MCPToolCallSession, mcp_tool_metadata_to_openai_tool
 from agent.component.llm import LLMParam, LLM

@@ -84,9 +84,10 @@ class Agent(LLM, ToolBase):
 def __init__(self, canvas, id, param: LLMParam):
 LLM.__init__(self, canvas, id, param)
 self.tools = {}
-for cpn in self._param.tools:
+for idx, cpn in enumerate(self._param.tools):
 cpn = self._load_tool_obj(cpn)
-self.tools[cpn.get_meta()["function"]["name"]] = cpn
+name = cpn.get_meta()["function"]["name"]
+self.tools[f"{name}_{idx}"] = cpn

 self.chat_mdl = LLMBundle(self._canvas.get_tenant_id(), TenantLLMService.llm_id2llm_type(self._param.llm_id), self._param.llm_id,
 max_retries=self._param.max_retries,
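For reference (not part of this changeset), a minimal Python sketch of the enumerate-based naming used in the hunk above: suffixing each tool name with its index keeps two tools that share a name from overwriting each other in the registry dict. The tool metadata values are invented for the example.

    tool_metas = [
        {"function": {"name": "search"}},
        {"function": {"name": "search"}},      # duplicate name
        {"function": {"name": "calculator"}},
    ]

    registry = {}
    for idx, meta in enumerate(tool_metas):
        name = meta["function"]["name"]
        registry[f"{name}_{idx}"] = meta

    print(list(registry))  # ['search_0', 'search_1', 'calculator_2']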
@@ -202,7 +203,7 @@ class Agent(LLM, ToolBase):
 _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
 use_tools = []
 ans = ""
-async for delta_ans, _tk in self._react_with_tools_streamly_async(prompt, msg, use_tools, user_defined_prompt,schema_prompt=schema_prompt):
+async for delta_ans, _tk in self._react_with_tools_streamly_async_simple(prompt, msg, use_tools, user_defined_prompt,schema_prompt=schema_prompt):
 if self.check_if_canceled("Agent processing"):
 return
 ans += delta_ans
@@ -246,7 +247,7 @@ class Agent(LLM, ToolBase):
 _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
 answer_without_toolcall = ""
 use_tools = []
-async for delta_ans, _ in self._react_with_tools_streamly_async(prompt, msg, use_tools, user_defined_prompt):
+async for delta_ans, _ in self._react_with_tools_streamly_async_simple(prompt, msg, use_tools, user_defined_prompt):
 if self.check_if_canceled("Agent streaming"):
 return

@@ -264,7 +265,7 @@ class Agent(LLM, ToolBase):
 if use_tools:
 self.set_output("use_tools", use_tools)

-async def _react_with_tools_streamly_async(self, prompt, history: list[dict], use_tools, user_defined_prompt={}, schema_prompt: str = ""):
+async def _react_with_tools_streamly_async_simple(self, prompt, history: list[dict], use_tools, user_defined_prompt={}, schema_prompt: str = ""):
 token_count = 0
 tool_metas = self.tool_meta
 hist = deepcopy(history)
@@ -276,6 +277,28 @@
 else:
 user_request = history[-1]["content"]

+def build_task_desc(prompt: str, user_request: str, tool_metas: list[dict], user_defined_prompt: dict | None = None) -> str:
+"""Build a minimal task_desc by concatenating prompt, query, and tool schemas."""
+user_defined_prompt = user_defined_prompt or {}
+
+tools_json = json.dumps(tool_metas, ensure_ascii=False, indent=2)
+
+task_desc = (
+"### Agent Prompt\n"
+f"{prompt}\n\n"
+"### User Request\n"
+f"{user_request}\n\n"
+"### Tools (schemas)\n"
+f"{tools_json}\n"
+)
+
+if user_defined_prompt:
+udp_json = json.dumps(user_defined_prompt, ensure_ascii=False, indent=2)
+task_desc += "\n### User Defined Prompts\n" + udp_json + "\n"
+
+return task_desc
+
+
 async def use_tool_async(name, args):
 nonlocal hist, use_tools, last_calling
 logging.info(f"{last_calling=} == {name=}")
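Illustrative only: the string the build_task_desc helper added above would return for a small input. The prompt, request, and tool schema below are invented, the assembly mirrors the added code, and the optional user-defined prompts branch is omitted.

    import json

    prompt = "You are a helpful agent."                               # example value
    user_request = "Summarize the latest sales report."                # example value
    tool_metas = [{"function": {"name": "search", "parameters": {}}}]  # example schema

    tools_json = json.dumps(tool_metas, ensure_ascii=False, indent=2)
    task_desc = (
        "### Agent Prompt\n"
        f"{prompt}\n\n"
        "### User Request\n"
        f"{user_request}\n\n"
        "### Tools (schemas)\n"
        f"{tools_json}\n"
    )
    print(task_desc)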
@@ -286,9 +309,6 @@
 "arguments": args,
 "results": tool_response
 })
-# self.callback("add_memory", {}, "...")
-#self.add_memory(hist[-2]["content"], hist[-1]["content"], name, args, str(tool_response), user_defined_prompt)

 return name, tool_response

 async def complete():
@@ -326,6 +346,21 @@

 self.callback("gen_citations", {}, txt, elapsed_time=timer()-st)

+def build_observation(tool_call_res: list[tuple]) -> str:
+"""
+Build a Observation from tool call results.
+No LLM involved.
+"""
+if not tool_call_res:
+return ""
+
+lines = ["Observation:"]
+for name, result in tool_call_res:
+lines.append(f"[{name} result]")
+lines.append(str(result))
+
+return "\n".join(lines)
+
 def append_user_content(hist, content):
 if hist[-1]["role"] == "user":
 hist[-1]["content"] += content
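Illustrative only: the Observation text the build_observation helper added above produces, shown here for two invented tool results.

    results = [("search", {"hits": 3}), ("calculator", 42)]  # invented results

    lines = ["Observation:"]
    for name, result in results:
        lines.append(f"[{name} result]")
        lines.append(str(result))

    print("\n".join(lines))
    # Observation:
    # [search result]
    # {'hits': 3}
    # [calculator result]
    # 42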
@@ -333,7 +368,7 @@
 hist.append({"role": "user", "content": content})

 st = timer()
-task_desc = await analyze_task_async(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
+task_desc = build_task_desc(prompt, user_request, tool_metas, user_defined_prompt)
 self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
 for _ in range(self._param.max_rounds + 1):
 if self.check_if_canceled("Agent streaming"):
@@ -364,7 +399,7 @@

 results = await asyncio.gather(*tool_tasks) if tool_tasks else []
 st = timer()
-reflection = await reflect_async(self.chat_mdl, hist, results, user_defined_prompt)
+reflection = build_observation(results)
 append_user_content(hist, reflection)
 self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)

@@ -393,6 +428,135 @@ Respond immediately with your final comprehensive answer.
 async for txt, tkcnt in complete():
 yield txt, tkcnt

+# async def _react_with_tools_streamly_async(self, prompt, history: list[dict], use_tools, user_defined_prompt={}, schema_prompt: str = ""):
+# token_count = 0
+# tool_metas = self.tool_meta
+# hist = deepcopy(history)
+# last_calling = ""
+# if len(hist) > 3:
+# st = timer()
+# user_request = await full_question(messages=history, chat_mdl=self.chat_mdl)
+# self.callback("Multi-turn conversation optimization", {}, user_request, elapsed_time=timer()-st)
+# else:
+# user_request = history[-1]["content"]
+
+# async def use_tool_async(name, args):
+# nonlocal hist, use_tools, last_calling
+# logging.info(f"{last_calling=} == {name=}")
+# last_calling = name
+# tool_response = await self.toolcall_session.tool_call_async(name, args)
+# use_tools.append({
+# "name": name,
+# "arguments": args,
+# "results": tool_response
+# })
+# # self.callback("add_memory", {}, "...")
+# #self.add_memory(hist[-2]["content"], hist[-1]["content"], name, args, str(tool_response), user_defined_prompt)
+
+# return name, tool_response
+
+# async def complete():
+# nonlocal hist
+# need2cite = self._param.cite and self._canvas.get_reference()["chunks"] and self._id.find("-->") < 0
+# if schema_prompt:
+# need2cite = False
+# cited = False
+# if hist and hist[0]["role"] == "system":
+# if schema_prompt:
+# hist[0]["content"] += "\n" + schema_prompt
+# if need2cite and len(hist) < 7:
+# hist[0]["content"] += citation_prompt()
+# cited = True
+# yield "", token_count
+
+# _hist = hist
+# if len(hist) > 12:
+# _hist = [hist[0], hist[1], *hist[-10:]]
+# entire_txt = ""
+# async for delta_ans in self._generate_streamly(_hist):
+# if not need2cite or cited:
+# yield delta_ans, 0
+# entire_txt += delta_ans
+# if not need2cite or cited:
+# return
+
+# st = timer()
+# txt = ""
+# async for delta_ans in self._gen_citations_async(entire_txt):
+# if self.check_if_canceled("Agent streaming"):
+# return
+# yield delta_ans, 0
+# txt += delta_ans
+
+# self.callback("gen_citations", {}, txt, elapsed_time=timer()-st)
+
+# def append_user_content(hist, content):
+# if hist[-1]["role"] == "user":
+# hist[-1]["content"] += content
+# else:
+# hist.append({"role": "user", "content": content})
+
+# st = timer()
+# task_desc = await analyze_task_async(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
+# self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
+# for _ in range(self._param.max_rounds + 1):
+# if self.check_if_canceled("Agent streaming"):
+# return
+# response, tk = await next_step_async(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
+# # self.callback("next_step", {}, str(response)[:256]+"...")
+# token_count += tk or 0
+# hist.append({"role": "assistant", "content": response})
+# try:
+# functions = json_repair.loads(re.sub(r"```.*", "", response))
+# if not isinstance(functions, list):
+# raise TypeError(f"List should be returned, but `{functions}`")
+# for f in functions:
+# if not isinstance(f, dict):
+# raise TypeError(f"An object type should be returned, but `{f}`")
+
+# tool_tasks = []
+# for func in functions:
+# name = func["name"]
+# args = func["arguments"]
+# if name == COMPLETE_TASK:
+# append_user_content(hist, f"Respond with a formal answer. FORGET(DO NOT mention) about `{COMPLETE_TASK}`. The language for the response MUST be as the same as the first user request.\n")
+# async for txt, tkcnt in complete():
+# yield txt, tkcnt
+# return
+
+# tool_tasks.append(asyncio.create_task(use_tool_async(name, args)))
+
+# results = await asyncio.gather(*tool_tasks) if tool_tasks else []
+# st = timer()
+# reflection = await reflect_async(self.chat_mdl, hist, results, user_defined_prompt)
+# append_user_content(hist, reflection)
+# self.callback("reflection", {}, str(reflection), elapsed_time=timer()-st)
+
+# except Exception as e:
+# logging.exception(msg=f"Wrong JSON argument format in LLM ReAct response: {e}")
+# e = f"\nTool call error, please correct the input parameter of response format and call it again.\n *** Exception ***\n{e}"
+# append_user_content(hist, str(e))
+
+# logging.warning( f"Exceed max rounds: {self._param.max_rounds}")
+# final_instruction = f"""
+# {user_request}
+# IMPORTANT: You have reached the conversation limit. Based on ALL the information and research you have gathered so far, please provide a DIRECT and COMPREHENSIVE final answer to the original request.
+# Instructions:
+# 1. SYNTHESIZE all information collected during this conversation
+# 2. Provide a COMPLETE response using existing data - do not suggest additional research
+# 3. Structure your response as a FINAL DELIVERABLE, not a plan
+# 4. If information is incomplete, state what you found and provide the best analysis possible with available data
+# 5. DO NOT mention conversation limits or suggest further steps
+# 6. Focus on delivering VALUE with the information already gathered
+# Respond immediately with your final comprehensive answer.
+# """
+# if self.check_if_canceled("Agent final instruction"):
+# return
+# append_user_content(hist, final_instruction)
+
+# async for txt, tkcnt in complete():
+# yield txt, tkcnt

 async def _gen_citations_async(self, text):
 retrievals = self._canvas.get_reference()
 retrievals = {"chunks": list(retrievals["chunks"].values()), "doc_aggs": list(retrievals["doc_aggs"].values())}
@@ -76,6 +76,7 @@ async def list_chunk():
 "image_id": sres.field[id].get("img_id", ""),
 "available_int": int(sres.field[id].get("available_int", 1)),
 "positions": sres.field[id].get("position_int", []),
+"doc_type_kwd": sres.field[id].get("doc_type_kwd")
 }
 assert isinstance(d["positions"], list)
 assert len(d["positions"]) == 0 or (isinstance(d["positions"][0], list) and len(d["positions"][0]) == 5)
@@ -176,10 +177,9 @@ async def set():
 settings.docStoreConn.update({"id": req["chunk_id"]}, _d, search.index_name(tenant_id), doc.kb_id)

 # update image
-image_id = req.get("img_id")
-bkt, name = image_id.split("-")
 image_base64 = req.get("image_base64", None)
 if image_base64:
+bkt, name = req.get("img_id", "-").split("-")
 image_binary = base64.b64decode(image_base64)
 settings.STORAGE_IMPL.put(bkt, name, image_binary)
 return get_json_result(data=True)
@@ -150,6 +150,21 @@ async def update():
 return server_error_response(e)


+@manager.route('/update_metadata_setting', methods=['post']) # noqa: F821
+@login_required
+@validate_request("kb_id", "metadata")
+async def update_metadata_setting():
+req = await get_request_json()
+e, kb = KnowledgebaseService.get_by_id(req["kb_id"])
+if not e:
+return get_data_error_result(
+message="Database error (Knowledgebase rename)!")
+kb = kb.to_dict()
+kb["parser_config"]["metadata"] = req["metadata"]
+KnowledgebaseService.update_by_id(kb["id"], kb)
+return get_json_result(data=kb)
+
+
 @manager.route('/detail', methods=['GET']) # noqa: F821
 @login_required
 def detail():
@@ -326,7 +326,6 @@ async def webhook(agent_id: str):
 secret = jwt_cfg.get("secret")
 if not secret:
 raise Exception("JWT secret not configured")
-required_claims = jwt_cfg.get("required_claims", [])

 auth_header = request.headers.get("Authorization", "")
 if not auth_header.startswith("Bearer "):
@@ -750,7 +749,7 @@ async def webhook(agent_id: str):
 async def sse():
 nonlocal canvas
 contents: list[str] = []
+status = 200
 try:
 async for ans in canvas.run(
 query="",
@@ -765,6 +764,8 @@ async def webhook(agent_id: str):
 content = "</think>"
 if content:
 contents.append(content)
+if ans["event"] == "message_end":
+status = int(ans["data"].get("status", status))
 if is_test:
 append_webhook_trace(
 agent_id,
@@ -782,7 +783,11 @@ async def webhook(agent_id: str):
 }
 )
 final_content = "".join(contents)
-yield json.dumps(final_content, ensure_ascii=False)
+return {
+"message": final_content,
+"success": True,
+"code": status,
+}

 except Exception as e:
 if is_test:
@@ -804,10 +809,14 @@ async def webhook(agent_id: str):
 "success": False,
 }
 )
-yield json.dumps({"code": 500, "message": str(e)}, ensure_ascii=False)
+return {"code": 400, "message": str(e),"success":False}

-resp = Response(sse(), mimetype="application/json")
-return resp
+result = await sse()
+return Response(
+json.dumps(result),
+status=result["code"],
+mimetype="application/json",
+)


 @manager.route("/webhook_trace/<agent_id>", methods=["GET"]) # noqa: F821
@@ -411,6 +411,8 @@ class KnowledgebaseService(CommonService):
 ok, _t = TenantService.get_by_id(tenant_id)
 if not ok:
 return False, get_data_error_result(message="Tenant not found.")
+if kwargs.get("parser_config") and isinstance(kwargs["parser_config"], dict) and not kwargs["parser_config"].get("llm_id"):
+kwargs["parser_config"]["llm_id"] = _t.llm_id

 # Build payload
 kb_id = get_uuid()
@@ -31,6 +31,7 @@
 "entity_type_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
 "source_id": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
 "n_hop_with_weight": {"type": "varchar", "default": ""},
+"mom_with_weight": {"type": "varchar", "default": ""},
 "removed_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
 "doc_type_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
 "toc_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
@@ -762,6 +762,13 @@
 "status": "1",
 "rank": "940",
 "llm": [
+{
+"llm_name": "glm-4.7",
+"tags": "LLM,CHAT,128K",
+"max_tokens": 128000,
+"model_type": "chat",
+"is_tools": true
+},
 {
 "llm_name": "glm-4.5",
 "tags": "LLM,CHAT,128K",
@@ -38,8 +38,8 @@ def vision_figure_parser_figure_data_wrapper(figures_data_without_positions):


 def vision_figure_parser_docx_wrapper(sections, tbls, callback=None,**kwargs):
-if not tbls:
-return []
+if not sections:
+return tbls
 try:
 vision_model = LLMBundle(kwargs["tenant_id"], LLMType.IMAGE2TEXT)
 callback(0.7, "Visual model detected. Attempting to enhance figure extraction...")
@@ -23,8 +23,8 @@ def get_urls(use_china_mirrors=False) -> list[Union[str, list[str]]]:
 return [
 "http://mirrors.tuna.tsinghua.edu.cn/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_amd64.deb",
 "http://mirrors.tuna.tsinghua.edu.cn/ubuntu-ports/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_arm64.deb",
-"https://repo.huaweicloud.com/repository/maven/org/apache/tika/tika-server-standard/3.0.0/tika-server-standard-3.0.0.jar",
-"https://repo.huaweicloud.com/repository/maven/org/apache/tika/tika-server-standard/3.0.0/tika-server-standard-3.0.0.jar.md5",
+"https://repo.huaweicloud.com/repository/maven/org/apache/tika/tika-server-standard/3.2.3/tika-server-standard-3.2.3.jar",
+"https://repo.huaweicloud.com/repository/maven/org/apache/tika/tika-server-standard/3.2.3/tika-server-standard-3.2.3.jar.md5",
 "https://openaipublic.blob.core.windows.net/encodings/cl100k_base.tiktoken",
 ["https://registry.npmmirror.com/-/binary/chrome-for-testing/121.0.6167.85/linux64/chrome-linux64.zip", "chrome-linux64-121-0-6167-85"],
 ["https://registry.npmmirror.com/-/binary/chrome-for-testing/121.0.6167.85/linux64/chromedriver-linux64.zip", "chromedriver-linux64-121-0-6167-85"],
@@ -34,8 +34,8 @@ def get_urls(use_china_mirrors=False) -> list[Union[str, list[str]]]:
 return [
 "http://archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_amd64.deb",
 "http://ports.ubuntu.com/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_arm64.deb",
-"https://repo1.maven.org/maven2/org/apache/tika/tika-server-standard/3.0.0/tika-server-standard-3.0.0.jar",
-"https://repo1.maven.org/maven2/org/apache/tika/tika-server-standard/3.0.0/tika-server-standard-3.0.0.jar.md5",
+"https://repo1.maven.org/maven2/org/apache/tika/tika-server-standard/3.2.3/tika-server-standard-3.2.3.jar",
+"https://repo1.maven.org/maven2/org/apache/tika/tika-server-standard/3.2.3/tika-server-standard-3.2.3.jar.md5",
 "https://openaipublic.blob.core.windows.net/encodings/cl100k_base.tiktoken",
 ["https://storage.googleapis.com/chrome-for-testing-public/121.0.6167.85/linux64/chrome-linux64.zip", "chrome-linux64-121-0-6167-85"],
 ["https://storage.googleapis.com/chrome-for-testing-public/121.0.6167.85/linux64/chromedriver-linux64.zip", "chromedriver-linux64-121-0-6167-85"],
@@ -91,7 +91,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
 filename, binary=binary, from_page=from_page, to_page=to_page)
 remove_contents_table(sections, eng=is_english(
 random_choices([t for t, _ in sections], k=200)))
-tbls=vision_figure_parser_docx_wrapper(sections=sections,tbls=tbls,callback=callback,**kwargs)
+tbls = vision_figure_parser_docx_wrapper(sections=sections,tbls=tbls,callback=callback,**kwargs)
 # tbls = [((None, lns), None) for lns in tbls]
 sections=[(item[0],item[1] if item[1] is not None else "") for item in sections if not isinstance(item[1], Image.Image)]
 callback(0.8, "Finish parsing.")
@@ -147,9 +147,16 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,

 elif re.search(r"\.doc$", filename, re.IGNORECASE):
 callback(0.1, "Start to parse.")
-with BytesIO(binary) as binary:
-binary = BytesIO(binary)
-doc_parsed = parser.from_buffer(binary)
+try:
+from tika import parser as tika_parser
+except Exception as e:
+callback(0.8, f"tika not available: {e}. Unsupported .doc parsing.")
+logging.warning(f"tika not available: {e}. Unsupported .doc parsing for {filename}.")
+return []
+
+binary = BytesIO(binary)
+doc_parsed = tika_parser.from_buffer(binary)
+if doc_parsed.get('content', None) is not None:
 sections = doc_parsed['content'].split('\n')
 sections = [(line, "") for line in sections if line]
 remove_contents_table(sections, eng=is_english(
@@ -312,7 +312,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
 tk_cnt = num_tokens_from_string(txt)
 if sec_id > -1:
 last_sid = sec_id
-tbls=vision_figure_parser_pdf_wrapper(tbls=tbls,callback=callback,**kwargs)
+tbls = vision_figure_parser_pdf_wrapper(tbls=tbls,callback=callback,**kwargs)
 res = tokenize_table(tbls, doc, eng)
 res.extend(tokenize_chunks(chunks, doc, eng, pdf_parser))
 table_ctx = max(0, int(parser_config.get("table_context_size", 0) or 0))
@@ -325,7 +325,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
 docx_parser = Docx()
 ti_list, tbls = docx_parser(filename, binary,
 from_page=0, to_page=10000, callback=callback)
-tbls=vision_figure_parser_docx_wrapper(sections=ti_list,tbls=tbls,callback=callback,**kwargs)
+tbls = vision_figure_parser_docx_wrapper(sections=ti_list,tbls=tbls,callback=callback,**kwargs)
 res = tokenize_table(tbls, doc, eng)
 for text, image in ti_list:
 d = copy.deepcopy(doc)
@@ -76,7 +76,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
 if re.search(r"\.docx$", filename, re.IGNORECASE):
 callback(0.1, "Start to parse.")
 sections, tbls = naive.Docx()(filename, binary)
-tbls=vision_figure_parser_docx_wrapper(sections=sections,tbls=tbls,callback=callback,**kwargs)
+tbls = vision_figure_parser_docx_wrapper(sections=sections, tbls=tbls, callback=callback, **kwargs)
 sections = [s for s, _ in sections if s]
 for (_, html), _ in tbls:
 sections.append(html)
@@ -142,10 +142,18 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,

 elif re.search(r"\.doc$", filename, re.IGNORECASE):
 callback(0.1, "Start to parse.")
+try:
+from tika import parser as tika_parser
+except Exception as e:
+callback(0.8, f"tika not available: {e}. Unsupported .doc parsing.")
+logging.warning(f"tika not available: {e}. Unsupported .doc parsing for {filename}.")
+return []
+
 binary = BytesIO(binary)
-doc_parsed = parser.from_buffer(binary)
-sections = doc_parsed['content'].split('\n')
-sections = [s for s in sections if s]
+doc_parsed = tika_parser.from_buffer(binary)
+if doc_parsed.get('content', None) is not None:
+sections = doc_parsed['content'].split('\n')
+sections = [s for s in sections if s]
 callback(0.8, "Finish parsing.")

 else:
@@ -650,7 +650,7 @@ class Parser(ProcessBase):
 tmpf.flush()
 tmp_path = os.path.abspath(tmpf.name)

-seq2txt_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.SPEECH2TEXT)
+seq2txt_mdl = LLMBundle(self._canvas.get_tenant_id(), LLMType.SPEECH2TEXT, llm_name=conf["llm_id"])
 txt = seq2txt_mdl.transcription(tmp_path)

 self.set_output("text", txt)
@@ -324,8 +324,9 @@ def tool_schema(tools_description: list[dict], complete_task=False):
 }
 }
 }
-for tool in tools_description:
-desc[tool["function"]["name"]] = tool
+for idx, tool in enumerate(tools_description):
+name = tool["function"]["name"]
+desc[f"{name}_{idx}"] = tool

 return "\n\n".join([f"## {i+1}. {fnm}\n{json.dumps(des, ensure_ascii=False, indent=4)}" for i, (fnm, des) in enumerate(desc.items())])

@@ -374,7 +374,7 @@ async def build_chunks(task, progress_callback):
 chat_mdl = LLMBundle(task["tenant_id"], LLMType.CHAT, llm_name=task["llm_id"], lang=task["language"])

 async def gen_metadata_task(chat_mdl, d):
-cached = get_llm_cache(chat_mdl.llm_name, d["content_with_weight"], "metadata")
+cached = get_llm_cache(chat_mdl.llm_name, d["content_with_weight"], "metadata", {})
 if not cached:
 async with chat_limiter:
 cached = await gen_metadata(chat_mdl,
@@ -852,7 +852,7 @@ async def do_handle_task(task):
 task_tenant_id = task["tenant_id"]
 task_embedding_id = task["embd_id"]
 task_language = task["language"]
-task_llm_id = task["llm_id"]
+task_llm_id = task["parser_config"].get("llm_id") or task["llm_id"]
 task_dataset_id = task["kb_id"]
 task_doc_id = task["doc_id"]
 task_document_name = task["name"]
@@ -1024,33 +1024,65 @@ async def do_handle_task(task):

 chunk_count = len(set([chunk["id"] for chunk in chunks]))
 start_ts = timer()
-e = await insert_es(task_id, task_tenant_id, task_dataset_id, chunks, progress_callback)
-if not e:
-return
-
-logging.info("Indexing doc({}), page({}-{}), chunks({}), elapsed: {:.2f}".format(task_document_name, task_from_page,
-task_to_page, len(chunks),
-timer() - start_ts))
-
-DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, token_count, chunk_count, 0)
-
-time_cost = timer() - start_ts
-progress_callback(msg="Indexing done ({:.2f}s).".format(time_cost))
-if toc_thread:
-d = toc_thread.result()
-if d:
-e = await insert_es(task_id, task_tenant_id, task_dataset_id, [d], progress_callback)
-if not e:
-return
-DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, 0, 1, 0)
-
-task_time_cost = timer() - task_start_ts
-progress_callback(prog=1.0, msg="Task done ({:.2f}s)".format(task_time_cost))
-logging.info(
-"Chunk doc({}), page({}-{}), chunks({}), token({}), elapsed:{:.2f}".format(task_document_name, task_from_page,
-task_to_page, len(chunks),
-token_count, task_time_cost))
+async def _maybe_insert_es(_chunks):
+if has_canceled(task_id):
+return True
+e = await insert_es(task_id, task_tenant_id, task_dataset_id, _chunks, progress_callback)
+return bool(e)
+
+try:
+if not await _maybe_insert_es(chunks):
+return
+
+logging.info(
+"Indexing doc({}), page({}-{}), chunks({}), elapsed: {:.2f}".format(
+task_document_name, task_from_page, task_to_page, len(chunks), timer() - start_ts
+)
+)
+
+DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, token_count, chunk_count, 0)
+
+progress_callback(msg="Indexing done ({:.2f}s).".format(timer() - start_ts))
+
+if toc_thread:
+d = toc_thread.result()
+if d:
+if not await _maybe_insert_es([d]):
+return
+DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, 0, 1, 0)
+
+if has_canceled(task_id):
+progress_callback(-1, msg="Task has been canceled.")
+return
+
+task_time_cost = timer() - task_start_ts
+progress_callback(prog=1.0, msg="Task done ({:.2f}s)".format(task_time_cost))
+logging.info(
+"Chunk doc({}), page({}-{}), chunks({}), token({}), elapsed:{:.2f}".format(
+task_document_name, task_from_page, task_to_page, len(chunks), token_count, task_time_cost
+)
+)
+
+finally:
+if has_canceled(task_id):
+try:
+exists = await asyncio.to_thread(
+settings.docStoreConn.indexExist,
+search.index_name(task_tenant_id),
+task_dataset_id,
+)
+if exists:
+await asyncio.to_thread(
+settings.docStoreConn.delete,
+{"doc_id": task_doc_id},
+search.index_name(task_tenant_id),
+task_dataset_id,
+)
+except Exception:
+logging.exception(
+f"Remove doc({task_doc_id}) from docStore failed when task({task_id}) canceled."
+)

 async def handle_task():
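A minimal sketch, not RAGFlow code, of the indexing pattern the hunk above moves to; every name below (index_with_cleanup, insert, is_canceled, delete_doc) is hypothetical. Inserts are skipped once the task is canceled, and a finally block removes chunks already written for a canceled task.

    async def index_with_cleanup(task_id, chunks, insert, is_canceled, delete_doc):
        # insert(batch) writes a chunk batch and returns truthy on success (hypothetical callable);
        # is_canceled(task_id) reports cancellation; delete_doc() drops already written chunks.
        async def maybe_insert(batch):
            if is_canceled(task_id):
                return True  # canceled: report success and skip further writes
            return bool(await insert(batch))

        try:
            return await maybe_insert(chunks)
        finally:
            if is_canceled(task_id):
                await delete_doc()  # remove partially indexed chunks for the canceled task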

web/src/assets/svg/home-icon/memory-bri.svg (new file, 10 lines)

@@ -0,0 +1,10 @@
+<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M22 12H2M22 12V18C22 18.5304 21.7893 19.0391 21.4142 19.4142C21.0391 19.7893 20.5304 20 20 20H4C3.46957 20 2.96086 19.7893 2.58579 19.4142C2.21071 19.0391 2 18.5304 2 18V12M22 12L18.55 5.11C18.3844 4.77679 18.1292 4.49637 17.813 4.30028C17.4967 4.10419 17.1321 4.0002 16.76 4H7.24C6.86792 4.0002 6.50326 4.10419 6.18704 4.30028C5.87083 4.49637 5.61558 4.77679 5.45 5.11L2 12" stroke="url(#paint0_linear_1415_84974)" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M6 16H6.01M10 16H10.01" stroke="#00BEB4" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<defs>
+<linearGradient id="paint0_linear_1415_84974" x1="12.5556" y1="4" x2="12.5556" y2="20" gradientUnits="userSpaceOnUse">
+<stop stop-color="#161618"/>
+<stop offset="1" stop-color="#666666"/>
+</linearGradient>
+</defs>
+</svg>
@@ -118,7 +118,7 @@ export const FilterField = memo(
 setShowAll(!showAll);
 }}
 >
-<FormLabel className="text-text-primary">
+<FormLabel className="text-text-secondary text-sm">
 {item.label}
 </FormLabel>
 {showAll ? (
@@ -33,6 +33,7 @@ export type CheckboxFormMultipleProps = {
 onChange?: FilterChange;
 onOpenChange?: (open: boolean) => void;
 setOpen(open: boolean): void;
+filterGroup?: Record<string, string[]>;
 };

 function CheckboxFormMultiple({
@@ -40,6 +41,7 @@ function CheckboxFormMultiple({
 value,
 onChange,
 setOpen,
+filterGroup,
 }: CheckboxFormMultipleProps) {
 const [resolvedFilters, setResolvedFilters] =
 useState<FilterCollection[]>(filters);
@@ -120,6 +122,20 @@ function CheckboxFormMultiple({
 }
 }, [form, value, resolvedFilters, fieldsDict]);

+const filterList = useMemo(() => {
+const filterSet = filterGroup
+? Object.values(filterGroup).reduce<Set<string>>((pre, cur) => {
+cur.forEach((item) => pre.add(item));
+return pre;
+}, new Set())
+: new Set();
+return [...filterSet];
+}, [filterGroup]);
+
+const notInfilterGroup = useMemo(() => {
+return filters.filter((x) => !filterList.includes(x.field));
+}, [filterList, filters]);
+
 return (
 <Form {...form}>
 <form
@@ -127,34 +143,70 @@ function CheckboxFormMultiple({
 className="space-y-8 px-5 py-2.5"
 onReset={() => form.reset()}
 >
-{filters.map((x) => (
-<FormField
-key={x.field}
-control={form.control}
-name={x.field}
-render={() => (
-<FormItem className="space-y-4">
-<div>
-<FormLabel className="text-text-primary">{x.label}</FormLabel>
-</div>
-{x.list.map((item) => {
-return (
-<FilterField
-key={item.id}
-item={{ ...item }}
-parent={{
-...x,
-id: x.field,
-// field: `${x.field}${item.field ? '.' + item.field : ''}`,
-}}
-/>
-);
-})}
-<FormMessage />
-</FormItem>
-)}
-/>
-))}
+<div className="space-y-4">
+{filterGroup &&
+Object.keys(filterGroup).map((key) => {
+const filterKeys = filterGroup[key];
+const thisFilters = filters.filter((x) =>
+filterKeys.includes(x.field),
+);
+return (
+<div
+key={key}
+className="flex flex-col space-y-4 border-b border-border-button pb-4"
+>
+<div className="text-text-primary text-sm">{key}</div>
+<div className="flex flex-col space-y-4">
+{thisFilters.map((x) => (
+<FilterField
+key={x.field}
+item={{ ...x, id: x.field }}
+parent={{
+...x,
+id: x.field,
+field: ``,
+}}
+/>
+))}
+</div>
+</div>
+);
+})}
+{notInfilterGroup &&
+notInfilterGroup.map((x) => {
+return (
+<FormField
+key={x.field}
+control={form.control}
+name={x.field}
+render={() => (
+<FormItem className="space-y-4">
+<div>
+<FormLabel className="text-text-primary text-sm">
+{x.label}
+</FormLabel>
+</div>
+{x.list.map((item) => {
+return (
+<FilterField
+key={item.id}
+item={{ ...item }}
+parent={{
+...x,
+id: x.field,
+// field: `${x.field}${item.field ? '.' + item.field : ''}`,
+}}
+/>
+);
+})}
+<FormMessage />
+</FormItem>
+)}
+/>
+);
+})}
+</div>

 <div className="flex justify-end gap-5">
 <Button
 type="button"
@@ -185,6 +237,7 @@ export function FilterPopover({
 onChange,
 onOpenChange,
 filters,
+filterGroup,
 }: PropsWithChildren & Omit<CheckboxFormMultipleProps, 'setOpen'>) {
 const [open, setOpen] = useState(false);
 const onOpenChangeFun = useCallback(
@@ -203,6 +256,7 @@ export function FilterPopover({
 value={value}
 filters={filters}
 setOpen={setOpen}
+filterGroup={filterGroup}
 ></CheckboxFormMultiple>
 </PopoverContent>
 </Popover>
@@ -58,9 +58,11 @@ export default function ListFilterBar({
 filters,
 className,
 icon,
+filterGroup,
 }: PropsWithChildren<IProps & Omit<CheckboxFormMultipleProps, 'setOpen'>> & {
 className?: string;
 icon?: ReactNode;
+filterGroup?: Record<string, string[]>;
 }) {
 const filterCount = useMemo(() => {
 return typeof value === 'object' && value !== null
@@ -99,6 +101,7 @@ export default function ListFilterBar({
 value={value}
 onChange={onChange}
 filters={filters}
+filterGroup={filterGroup}
 onOpenChange={onOpenChange}
 >
 <FilterButton count={filterCount}></FilterButton>

web/src/components/memories-form-field.tsx (new file, 33 lines)

@@ -0,0 +1,33 @@
+import { useFetchAllMemoryList } from '@/hooks/use-memory-request';
+import { useTranslation } from 'react-i18next';
+import { RAGFlowFormItem } from './ragflow-form';
+import { MultiSelect } from './ui/multi-select';
+
+type MemoriesFormFieldProps = {
+label: string;
+};
+
+export function MemoriesFormField({ label }: MemoriesFormFieldProps) {
+const { t } = useTranslation();
+const memoryList = useFetchAllMemoryList();
+
+const options = memoryList.data?.map((memory) => ({
+label: memory.name,
+value: memory.id,
+}));
+
+return (
+<RAGFlowFormItem name="memory_ids" label={label}>
+{(field) => (
+<MultiSelect
+options={options || []}
+placeholder={t('common.pleaseSelect')}
+maxCount={100}
+onValueChange={field.onChange}
+defaultValue={field.value}
+modalPopover
+/>
+)}
+</RAGFlowFormItem>
+);
+}
@@ -1,6 +1,7 @@
 import { DocumentParserType } from '@/constants/knowledge';
 import { useTranslate } from '@/hooks/common-hooks';
 import { cn } from '@/lib/utils';
+import { LLMModelItem } from '@/pages/dataset/dataset-setting/configuration/common-item';
 import {
 GenerateLogButton,
 GenerateType,
@@ -136,6 +137,10 @@ const GraphRagItems = ({

 return (
 <FormContainer className={cn({ 'mb-4': marginBottom }, className)}>
+<LLMModelItem
+label={t('globalIndexModel')}
+name={'parser_config.llm_id'}
+/>
 <UseGraphRagFormField
 data={data}
 onDelete={onDelete}
@@ -122,9 +122,9 @@ export const useFetchDocumentList = () => {
 page: pagination.current,
 },
 {
-suffix: filterValue.type,
-run_status: filterValue.run,
-metadata: filterValue.metadata,
+suffix: filterValue.type as string[],
+run_status: filterValue.run as string[],
+metadata: filterValue.metadata as Record<string, string[]>,
 },
 );
 if (ret.data.code === 0) {

web/src/hooks/use-memory-request.ts (new file, 30 lines)

@@ -0,0 +1,30 @@
+import { IMemory } from '@/interfaces/database/memory';
+import memoryService from '@/services/memory-service';
+import { useQuery } from '@tanstack/react-query';
+
+export const enum MemoryApiAction {
+FetchMemoryList = 'fetchMemoryList',
+}
+
+export const useFetchAllMemoryList = () => {
+const { data, isLoading, isError, refetch } = useQuery<IMemory[], Error>({
+queryKey: [MemoryApiAction.FetchMemoryList],
+queryFn: async () => {
+const { data: response } = await memoryService.getMemoryList(
+{
+params: { page_size: 100000000, page: 1 },
+data: {},
+},
+true,
+);
+return response.data.memory_list ?? [];
+},
+});
+
+return {
+data,
+isLoading,
+isError,
+refetch,
+};
+};
11  web/src/interfaces/database/memory.ts  Normal file
@@ -0,0 +1,11 @@
+export interface IMemory {
+  avatar: null;
+  description: null;
+  id: string;
+  memory_type: string[];
+  name: string;
+  owner_name: string;
+  permissions: string;
+  storage_type: string;
+  tenant_id: string;
+}
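Note (editor's sketch, not part of this change set): a minimal example of how a component might consume the new useFetchAllMemoryList hook and IMemory interface shown above. The component name and rendering are hypothetical; only the hook and the interface come from the files in this diff.

import { IMemory } from '@/interfaces/database/memory';
import { useFetchAllMemoryList } from '@/hooks/use-memory-request';

// Hypothetical consumer: renders memory names once the query resolves.
export function MemoryNameList() {
  const { data, isLoading, isError } = useFetchAllMemoryList();

  if (isLoading) return <div>Loading...</div>;
  if (isError) return <div>Failed to load memories</div>;

  return (
    <ul>
      {(data ?? []).map((memory: IMemory) => (
        <li key={memory.id}>{memory.name}</li>
      ))}
    </ul>
  );
}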
@@ -33,4 +33,5 @@ export interface IFetchKnowledgeListRequestParams {
 export interface IFetchDocumentListRequestBody {
   suffix?: string[];
   run_status?: string[];
+  metadata?: Record<string, string[]>;
 }
@@ -176,6 +176,7 @@ Procedural Memory: Learned skills, habits, and automated procedures.`,
     },
     knowledgeDetails: {
       metadata: {
+        fieldSetting: 'Field settings',
         changesAffectNewParses: 'Changes affect new parses only.',
         editMetadataForDataset: 'View and edit metadata for ',
         restrictDefinedValues: 'Restrict to defined values',
@@ -190,6 +191,8 @@ Procedural Memory: Learned skills, habits, and automated procedures.`,
         fieldName: 'Field name',
         editMetadata: 'Edit metadata',
       },
+      metadataField: 'Metadata field',
+      systemAttribute: 'System attribute',
       localUpload: 'Local upload',
       fileSize: 'File size',
       fileType: 'File type',
@@ -363,7 +366,11 @@ Procedural Memory: Learned skills, habits, and automated procedures.`,
       reRankModelWaring: 'Re-rank model is very time consuming.',
     },
     knowledgeConfiguration: {
+      globalIndexModelTip:
+        'Used to generate Knowledge graphs, RAPTOR, auto-metadata, auto-keyword and auto-question. Model performance will affects generation quality.',
+      globalIndexModel: 'Indexing model',
       settings: 'Settings',
+      autoMetadataTip: `Automatically generate metadata. Applies to new files during parsing. Existing files require re-parsing to update (chunks remain preserved). Be aware that extra tokens will be consumed by the indexing model specified in 'Configuration'.`,
       imageContextWindow: 'Image context window',
       imageContextWindowTip:
         'Captures N tokens of text above and below the image to provide richer background context for the image chunk.',
@@ -2120,9 +2127,11 @@ Important structured information may include: names, dates, locations, events, k
       queryParameters: 'Query parameters',
       headerParameters: 'Header parameters',
       requestBodyParameters: 'Request body parameters',
-      streaming: 'Accepted response',
-      immediately: 'Final response',
+      immediately: 'Accepted response',
+      streaming: 'Final response',
     },
+    saveToMemory: 'Save to memory',
+    memory: 'Memory',
   },
   llmTools: {
     bad_calculator: {
@@ -93,6 +93,67 @@ export default {
     search: '搜索',
     welcome: '欢迎来到',
     dataset: '知识库',
+    Memories: '记忆',
+  },
+  memories: {
+    llmTooltip: '分析对话内容,提取关键信息,并生成结构化的记忆摘要。',
+    embeddingModelTooltip:
+      '将文本转换为数值向量,用于语义相似度搜索和记忆检索。',
+    embeddingModelError: '记忆类型为必填项,且"原始"类型不可删除。',
+    memoryTypeTooltip: `原始: 用户与代理之间的原始对话内容(默认必需)。
+语义记忆: 关于用户和世界的通用知识和事实。
+情景记忆: 带时间戳的特定事件和经历记录。
+程序记忆: 学习的技能、习惯和自动化程序。`,
+    editName: '编辑名称',
+    memory: '记忆',
+    createMemory: '创建记忆',
+    name: '名称',
+    memoryNamePlaceholder: '记忆名称',
+    memoryType: '记忆类型',
+    embeddingModel: '嵌入模型',
+    selectModel: '选择模型',
+    llm: '大语言模型',
+    delMemoryWarn: `删除后,此记忆中的所有消息都将被删除,代理将无法检索。`,
+  },
+  memory: {
+    messages: {
+      messageDescription:
+        '记忆检索已在高级设置中配置相似度阈值、关键词相似度权重和前N个结果。',
+      copied: '已复制!',
+      contentEmbed: '内容嵌入',
+      content: '内容',
+      delMessageWarn: `遗忘后,代理将无法检索此消息。`,
+      forgetMessage: '遗忘消息',
+      sessionId: '会话ID',
+      agent: '代理',
+      type: '类型',
+      validDate: '有效日期',
+      forgetAt: '遗忘于',
+      source: '来源',
+      enable: '启用',
+      action: '操作',
+    },
+    config: {
+      avatar: '头像',
+      description: '描述',
+      memorySize: '记忆大小',
+      advancedSettings: '高级设置',
+      permission: '权限',
+      onlyMe: '仅自己',
+      team: '团队',
+      storageType: '存储类型',
+      storageTypePlaceholder: '请选择存储类型',
+      forgetPolicy: '遗忘策略',
+      temperature: '温度',
+      systemPrompt: '系统提示词',
+      systemPromptPlaceholder: '请输入系统提示词',
+      userPrompt: '用户提示词',
+      userPromptPlaceholder: '请输入用户提示词',
+    },
+    sideBar: {
+      messages: '消息',
+      configuration: '配置',
+    },
   },
   knowledgeList: {
     welcome: '欢迎回来',
@@ -106,6 +167,22 @@ export default {
     parserRequired: '分块方法必填',
   },
   knowledgeDetails: {
+    metadata: {
+      fieldSetting: '字段设置',
+      changesAffectNewParses: '更改仅影响新解析。',
+      editMetadataForDataset: '查看和编辑元数据于 ',
+      restrictDefinedValues: '限制为已定义值',
+      metadataGenerationSettings: '元数据生成设置',
+      manageMetadataForDataset: '管理此数据集的元数据',
+      manageMetadata: '管理元数据',
+      metadata: '元数据',
+      values: '值',
+      action: '操作',
+      field: '字段',
+      description: '描述',
+      fieldName: '字段名',
+      editMetadata: '编辑元数据',
+    },
     localUpload: '本地上传',
     fileSize: '文件大小',
     fileType: '文件类型',
@@ -1863,6 +1940,8 @@ Tokenizer 会根据所选方式将内容存储为对应的数据结构。`,
       headerParameters: '请求头参数',
       requestBodyParameters: '请求体参数',
     },
+    saveToMemory: '保存到Memory',
+    memory: 'Memory',
   },
   footer: {
     profile: 'All rights reserved @ React',
@@ -7,7 +7,6 @@ import {
 import {
   IAttachment,
   IEventList,
-  IInputEvent,
   IMessageEndData,
   IMessageEndEvent,
   IMessageEvent,
@@ -93,7 +92,7 @@ export function findMessageFromList(eventList: IEventList) {
 export function findInputFromList(eventList: IEventList) {
   const inputEvent = eventList.find(
     (x) => x.event === MessageEventType.UserInputs,
-  ) as IInputEvent;
+  );

   if (!inputEvent) {
     return {};
@@ -1,4 +1,4 @@
-import { FormContainer } from '@/components/form-container';
+import { MemoriesFormField } from '@/components/memories-form-field';
 import { BlockButton, Button } from '@/components/ui/button';
 import {
   Form,
@@ -41,6 +41,7 @@ function MessageForm({ node }: INextOperatorForm) {
     output_format: z.string().optional(),
     auto_play: z.boolean().optional(),
     status: z.number().optional(),
+    memory_ids: z.array(z.string()).optional(),
   });

   const form = useForm({
@@ -67,99 +68,100 @@ function MessageForm({ node }: INextOperatorForm) {
         {showWebhookResponseStatus && (
           <WebHookResponseStatusFormField name="status"></WebHookResponseStatusFormField>
         )}
-        <FormContainer>
-          <FormItem>
-            <FormLabel tooltip={t('flow.msgTip')}>{t('flow.msg')}</FormLabel>
-            <div className="space-y-4">
-              {fields.map((field, index) => (
-                <div key={field.id} className="flex items-start gap-2">
-                  <FormField
-                    control={form.control}
-                    name={`content.${index}.value`}
-                    render={({ field }) => (
-                      <FormItem className="flex-1">
-                        <FormControl>
-                          <PromptEditor
-                            {...field}
-                            placeholder={t('flow.messagePlaceholder')}
-                          ></PromptEditor>
-                        </FormControl>
-                      </FormItem>
-                    )}
-                  />
-                  {fields.length > 1 && (
-                    <Button
-                      type="button"
-                      variant={'ghost'}
-                      onClick={() => remove(index)}
-                    >
-                      <X />
-                    </Button>
+        <FormItem>
+          <FormLabel tooltip={t('flow.msgTip')}>{t('flow.msg')}</FormLabel>
+          <div className="space-y-4">
+            {fields.map((field, index) => (
+              <div key={field.id} className="flex items-start gap-2">
+                <FormField
+                  control={form.control}
+                  name={`content.${index}.value`}
+                  render={({ field }) => (
+                    <FormItem className="flex-1">
+                      <FormControl>
+                        <PromptEditor
+                          {...field}
+                          placeholder={t('flow.messagePlaceholder')}
+                        ></PromptEditor>
+                      </FormControl>
+                    </FormItem>
                   )}
-                </div>
-              ))}
+                />
+                {fields.length > 1 && (
+                  <Button
+                    type="button"
+                    variant={'ghost'}
+                    onClick={() => remove(index)}
+                  >
+                    <X />
+                  </Button>
+                )}
+              </div>
+            ))}

             <BlockButton
               type="button"
               onClick={() => append({ value: '' })} // "" will cause the inability to add, refer to: https://github.com/orgs/react-hook-form/discussions/8485#discussioncomment-2961861
             >
               {t('flow.addMessage')}
             </BlockButton>
           </div>
           <FormMessage />
         </FormItem>
-        </FormContainer>
-        <FormContainer>
+        {!showWebhookResponseStatus && (
+          <>
           <FormItem>
             <FormLabel tooltip={t('flow.downloadFileTypeTip')}>
               {t('flow.downloadFileType')}
             </FormLabel>
             <FormField
               control={form.control}
               name={`output_format`}
               render={({ field }) => (
                 <FormItem className="flex-1">
                   <FormControl>
                     <RAGFlowSelect
                       options={Object.keys(ExportFileType).map(
                         (key: string) => {
                           return {
                             value:
                               ExportFileType[
                                 key as keyof typeof ExportFileType
                               ],
                             label: key,
                           };
                         },
                       )}
                       {...field}
                       onValueChange={field.onChange}
                       placeholder={t('common.selectPlaceholder')}
                       allowClear
                     ></RAGFlowSelect>
                   </FormControl>
                 </FormItem>
               )}
             />
           </FormItem>
           <FormItem>
             <FormLabel>{t('flow.autoPlay')}</FormLabel>
             <FormField
               control={form.control}
               name={`auto_play`}
               render={({ field }) => (
                 <FormItem className="flex-1">
                   <FormControl>
                     <Switch
                       checked={field.value}
                       onCheckedChange={field.onChange}
                     />
                   </FormControl>
                 </FormItem>
               )}
             />
           </FormItem>
-        </FormContainer>
+          </>
+        )}
+        <MemoriesFormField label={t('flow.saveToMemory')}></MemoriesFormField>
       </FormWrapper>
     </Form>
   );
@@ -15,16 +15,16 @@ export function useShowWebhookResponseStatus(form: UseFormReturn<any>) {
   const showWebhookResponseStatus = useMemo(() => {
     const formData: BeginFormSchemaType = getNode(BeginId)?.data.form;
     return (
-      formData.mode === AgentDialogueMode.Webhook &&
+      formData?.mode === AgentDialogueMode.Webhook &&
       formData.execution_mode === WebhookExecutionMode.Streaming
     );
-  }, []);
+  }, [getNode]);

   useEffect(() => {
     if (showWebhookResponseStatus && isEmpty(form.getValues('status'))) {
       form.setValue('status', 200, { shouldValidate: true, shouldDirty: true });
     }
-  }, []);
+  }, [form, showWebhookResponseStatus]);

   return showWebhookResponseStatus;
 }
@@ -2,6 +2,7 @@ import { Collapse } from '@/components/collapse';
 import { CrossLanguageFormField } from '@/components/cross-language-form-field';
 import { FormContainer } from '@/components/form-container';
 import { KnowledgeBaseFormField } from '@/components/knowledge-base-item';
+import { MemoriesFormField } from '@/components/memories-form-field';
 import {
   MetadataFilter,
   MetadataFilterSchema,
@@ -46,6 +47,7 @@ export const RetrievalPartialSchema = {
   use_kg: z.boolean(),
   toc_enhance: z.boolean(),
   ...MetadataFilterSchema,
+  memory_ids: z.array(z.string()).optional(),
 };

 export const FormSchema = z.object({
@@ -109,12 +111,11 @@ function RetrievalForm({ node }: INextOperatorForm) {
   return (
     <Form {...form}>
       <FormWrapper>
-        <FormContainer>
-          <RAGFlowFormItem name="query" label={t('flow.query')}>
-            <PromptEditor></PromptEditor>
-          </RAGFlowFormItem>
-          <KnowledgeBaseFormField showVariable></KnowledgeBaseFormField>
-        </FormContainer>
+        <RAGFlowFormItem name="query" label={t('flow.query')}>
+          <PromptEditor></PromptEditor>
+        </RAGFlowFormItem>
+        <KnowledgeBaseFormField showVariable></KnowledgeBaseFormField>
+        <MemoriesFormField label={t('flow.memory')}></MemoriesFormField>
         <Collapse title={<div>{t('flow.advancedSettings')}</div>}>
           <FormContainer>
             <SimilaritySliderFormField
@@ -2,6 +2,7 @@ import { Collapse } from '@/components/collapse';
 import { CrossLanguageFormField } from '@/components/cross-language-form-field';
 import { FormContainer } from '@/components/form-container';
 import { KnowledgeBaseFormField } from '@/components/knowledge-base-item';
+import { MemoriesFormField } from '@/components/memories-form-field';
 import { MetadataFilter } from '@/components/metadata-filter';
 import { RerankFormFields } from '@/components/rerank';
 import { SimilaritySliderFormField } from '@/components/similarity-slider';
@@ -40,10 +41,9 @@ const RetrievalForm = () => {
   return (
     <Form {...form}>
       <FormWrapper>
-        <FormContainer>
-          <DescriptionField></DescriptionField>
-          <KnowledgeBaseFormField showVariable></KnowledgeBaseFormField>
-        </FormContainer>
+        <DescriptionField></DescriptionField>
+        <KnowledgeBaseFormField showVariable></KnowledgeBaseFormField>
+        <MemoriesFormField label={t('flow.memory')}></MemoriesFormField>
         <Collapse title={<div>{t('flow.advancedSettings')}</div>}>
           <FormContainer>
             <SimilaritySliderFormField
@@ -1,7 +1,7 @@
 import message from '@/components/ui/message';
 import { useSetModalState } from '@/hooks/common-hooks';
 import { useSetDocumentMeta } from '@/hooks/use-document-request';
-import {
+import kbService, {
   getMetaDataService,
   updateMetaData,
 } from '@/services/knowledge-service';
@@ -198,20 +198,24 @@ export const useManageMetaDataModal = (
   const { setDocumentMeta } = useSetDocumentMeta();

   useEffect(() => {
-    if (data) {
-      setTableData(data);
-    } else {
-      setTableData([]);
+    if (type === MetadataType.Manage) {
+      if (data) {
+        setTableData(data);
+      } else {
+        setTableData([]);
+      }
     }
-  }, [data]);
+  }, [data, type]);

   useEffect(() => {
-    if (metaData) {
-      setTableData(metaData);
-    } else {
-      setTableData([]);
+    if (type !== MetadataType.Manage) {
+      if (metaData) {
+        setTableData(metaData);
+      } else {
+        setTableData([]);
+      }
     }
-  }, [metaData]);
+  }, [metaData, type]);

   const handleDeleteSingleValue = useCallback(
     (field: string, value: string) => {
@@ -255,7 +259,7 @@ export const useManageMetaDataModal = (
         data: operations,
       });
       if (res.code === 0) {
-        message.success(t('message.success'));
+        message.success(t('message.operated'));
         callback();
       }
     },
@@ -282,11 +286,18 @@ export const useManageMetaDataModal = (
   const handleSaveSettings = useCallback(
     async (callback: () => void) => {
       const data = util.tableDataToMetaDataSettingJSON(tableData);
-      callback();
+      const { data: res } = await kbService.kbUpdateMetaData({
+        kb_id: id,
+        metadata: data,
+      });
+      if (res.code === 0) {
+        message.success(t('message.operated'));
+        callback?.();
+      }

       return data;
     },
-    [tableData],
+    [tableData, id],
   );

   const handleSave = useCallback(
@@ -27,7 +27,7 @@ import {
 import { Plus, Settings, Trash2 } from 'lucide-react';
 import { useCallback, useMemo, useState } from 'react';
 import { useTranslation } from 'react-i18next';
-import { useManageMetaDataModal } from './hook';
+import { MetadataType, useManageMetaDataModal } from './hook';
 import { IManageModalProps, IMetaDataTableData } from './interface';
 import { ManageValuesModal } from './manage-values-modal';
 export const ManageMetadataModal = (props: IManageModalProps) => {
@@ -54,7 +54,6 @@ export const ManageMetadataModal = (props: IManageModalProps) => {
     values: [],
   });

-  const [currentValueIndex, setCurrentValueIndex] = useState<number>(0);
   const [deleteDialogContent, setDeleteDialogContent] = useState({
     visible: false,
     title: '',
@@ -95,12 +94,12 @@ export const ManageMetadataModal = (props: IManageModalProps) => {
       description: '',
       values: [],
     });
-    setCurrentValueIndex(tableData.length || 0);
+    // setCurrentValueIndex(tableData.length || 0);
     showManageValuesModal();
   };
   const handleEditValueRow = useCallback(
-    (data: IMetaDataTableData, index: number) => {
-      setCurrentValueIndex(index);
+    (data: IMetaDataTableData) => {
+      // setCurrentValueIndex(index);
       setValueData(data);
       showManageValuesModal();
     },
@@ -186,7 +185,7 @@ export const ManageMetadataModal = (props: IManageModalProps) => {
             variant={'ghost'}
             className="bg-transparent px-1 py-0"
             onClick={() => {
-              handleEditValueRow(row.original, row.index);
+              handleEditValueRow(row.original);
             }}
           >
             <Settings />
@@ -244,16 +243,32 @@ export const ManageMetadataModal = (props: IManageModalProps) => {

   const handleSaveValues = (data: IMetaDataTableData) => {
     setTableData((prev) => {
-      if (currentValueIndex >= prev.length) {
-        return [...prev, data];
+      //If the keys are the same, they need to be merged.
+      const fieldMap = new Map<string, any>();
+
+      prev.forEach((item) => {
+        if (fieldMap.has(item.field)) {
+          const existingItem = fieldMap.get(item.field);
+          const mergedValues = [
+            ...new Set([...existingItem.values, ...item.values]),
+          ];
+          fieldMap.set(item.field, { ...existingItem, values: mergedValues });
+        } else {
+          fieldMap.set(item.field, item);
+        }
+      });
+
+      if (fieldMap.has(data.field)) {
+        const existingItem = fieldMap.get(data.field);
+        const mergedValues = [
+          ...new Set([...existingItem.values, ...data.values]),
+        ];
+        fieldMap.set(data.field, { ...existingItem, values: mergedValues });
       } else {
-        return prev.map((item, index) => {
-          if (index === currentValueIndex) {
-            return data;
-          }
-          return item;
-        });
+        fieldMap.set(data.field, data);
       }
+
+      return Array.from(fieldMap.values());
     });
   };

@@ -335,7 +350,13 @@ export const ManageMetadataModal = (props: IManageModalProps) => {
         </Modal>
         {manageValuesVisible && (
           <ManageValuesModal
-            title={<div>{t('knowledgeDetails.metadata.editMetadata')}</div>}
+            title={
+              <div>
+                {metadataType === MetadataType.Setting
+                  ? t('knowledgeDetails.metadata.fieldSetting')
+                  : t('knowledgeDetails.metadata.editMetadata')}
+              </div>
+            }
             visible={manageValuesVisible}
             hideModal={hideManageValuesModal}
             data={valueData}
@@ -110,8 +110,8 @@ export const ManageValuesModal = (props: IManageValuesProps) => {

   // Handle blur event, synchronize to main state
   const handleValueBlur = useCallback(() => {
-    addUpdateValue(metaData.field, [...tempValues]);
-    handleChange('values', [...tempValues]);
+    addUpdateValue(metaData.field, [...new Set([...tempValues])]);
+    handleChange('values', [...new Set([...tempValues])]);
   }, [handleChange, tempValues, metaData, addUpdateValue]);

   // Handle delete operation
@@ -139,12 +139,12 @@ export const ManageValuesModal = (props: IManageValuesProps) => {

   // Handle adding new value
   const handleAddValue = useCallback(() => {
-    setTempValues((prev) => [...prev, '']);
+    setTempValues((prev) => [...new Set([...prev, ''])]);

     // Synchronize to main state
     setMetaData((prev) => ({
       ...prev,
-      values: [...prev.values, ''],
+      values: [...new Set([...prev.values, ''])],
     }));
   }, []);

@@ -154,7 +154,7 @@ export const ManageValuesModal = (props: IManageValuesProps) => {
       open={visible}
       onCancel={handleHideModal}
       className="!w-[460px]"
-      okText={t('common.save')}
+      okText={t('common.confirm')}
       onOk={handleSave}
       maskClosable={false}
       footer={null}
@@ -3,11 +3,13 @@ import {
   AutoQuestionsFormField,
 } from '@/components/auto-keywords-form-field';
 import { ConfigurationFormContainer } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function AudioConfiguration() {
   return (
     <ConfigurationFormContainer>
       <>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </>
@@ -7,6 +7,7 @@ import {
   ConfigurationFormContainer,
   MainContainer,
 } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function BookConfiguration() {
   return (
@@ -16,6 +17,7 @@ export function BookConfiguration() {
       </ConfigurationFormContainer>

       <ConfigurationFormContainer>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </ConfigurationFormContainer>
@@ -3,7 +3,10 @@ import {
   FormFieldType,
   RenderField,
 } from '@/components/dynamic-form';
-import { SelectWithSearch } from '@/components/originui/select-with-search';
+import {
+  SelectWithSearch,
+  SelectWithSearchFlagOptionType,
+} from '@/components/originui/select-with-search';
 import { SliderInputFormField } from '@/components/slider-input-form-field';
 import { Button } from '@/components/ui/button';
 import {
@@ -16,7 +19,9 @@ import {
 import { Radio } from '@/components/ui/radio';
 import { Spin } from '@/components/ui/spin';
 import { Switch } from '@/components/ui/switch';
+import { LlmModelType } from '@/constants/knowledge';
 import { useTranslate } from '@/hooks/common-hooks';
+import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
 import { cn } from '@/lib/utils';
 import { t } from 'i18next';
 import { Settings } from 'lucide-react';
@@ -41,6 +46,8 @@ import {
 interface IProps {
   line?: 1 | 2;
   isEdit?: boolean;
+  label?: string;
+  name?: string;
 }
 export function ChunkMethodItem(props: IProps) {
   const { line } = props;
@@ -368,7 +375,7 @@ export function AutoMetadata() {
       type: FormFieldType.Custom,
       horizontal: true,
       defaultValue: true,
+      tooltip: t('knowledgeConfiguration.autoMetadataTip'),
       render: (fieldProps: ControllerRenderProps) => (
         <div className="flex items-center justify-between">
           <Button
@@ -432,3 +439,80 @@ export function AutoMetadata() {
     </>
   );
 }
+
+export const LLMSelect = ({
+  isEdit,
+  field,
+  disabled = false,
+}: {
+  isEdit: boolean;
+  field: FieldValues;
+  name?: string;
+  disabled?: boolean;
+}) => {
+  const { t } = useTranslate('knowledgeConfiguration');
+  const modelOptions = useComposeLlmOptionsByModelTypes([
+    LlmModelType.Chat,
+    LlmModelType.Image2text,
+  ]);
+  return (
+    <SelectWithSearch
+      onChange={async (value) => {
+        field.onChange(value);
+      }}
+      disabled={disabled && !isEdit}
+      value={field.value}
+      options={modelOptions as SelectWithSearchFlagOptionType[]}
+      placeholder={t('embeddingModelPlaceholder')}
+    />
+  );
+};
+
+export function LLMModelItem({ line = 1, isEdit, label, name }: IProps) {
+  const { t } = useTranslate('knowledgeConfiguration');
+  const form = useFormContext();
+  const disabled = useHasParsedDocument(isEdit);
+  return (
+    <>
+      <FormField
+        control={form.control}
+        name={name ?? 'llm_id'}
+        render={({ field }) => (
+          <FormItem className={cn(' items-center space-y-0 ')}>
+            <div
+              className={cn('flex', {
+                ' items-center': line === 1,
+                'flex-col gap-1': line === 2,
+              })}
+            >
+              <FormLabel
+                required
+                tooltip={t('globalIndexModelTip')}
+                className={cn('text-sm whitespace-wrap ', {
+                  'w-1/4': line === 1,
+                })}
+              >
+                {label ?? t('llmModel')}
+              </FormLabel>
+              <div
+                className={cn('text-text-secondary', { 'w-3/4': line === 1 })}
+              >
+                <FormControl>
+                  <LLMSelect
+                    isEdit={!!isEdit}
+                    field={field}
+                    disabled={disabled}
+                  ></LLMSelect>
+                </FormControl>
+              </div>
+            </div>
+            <div className="flex pt-1">
+              <div className={line === 1 ? 'w-1/4' : ''}></div>
+              <FormMessage />
+            </div>
+          </FormItem>
+        )}
+      />
+    </>
+  );
+}
@@ -3,11 +3,13 @@ import {
   AutoQuestionsFormField,
 } from '@/components/auto-keywords-form-field';
 import { ConfigurationFormContainer } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function EmailConfiguration() {
   return (
     <ConfigurationFormContainer>
       <>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </>
@@ -7,6 +7,7 @@ import {
   ConfigurationFormContainer,
   MainContainer,
 } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function LawsConfiguration() {
   return (
@@ -16,6 +17,7 @@ export function LawsConfiguration() {
       </ConfigurationFormContainer>

       <ConfigurationFormContainer>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </ConfigurationFormContainer>
@@ -7,6 +7,7 @@ import {
   ConfigurationFormContainer,
   MainContainer,
 } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function ManualConfiguration() {
   return (
@@ -16,6 +17,7 @@ export function ManualConfiguration() {
       </ConfigurationFormContainer>

       <ConfigurationFormContainer>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </ConfigurationFormContainer>
@@ -4,12 +4,14 @@ import {
 } from '@/components/auto-keywords-form-field';
 import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
 import { ConfigurationFormContainer } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function OneConfiguration() {
   return (
     <ConfigurationFormContainer>
       <LayoutRecognizeFormField></LayoutRecognizeFormField>
       <>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </>
@@ -7,6 +7,7 @@ import {
   ConfigurationFormContainer,
   MainContainer,
 } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function PaperConfiguration() {
   return (
@@ -16,6 +17,7 @@ export function PaperConfiguration() {
       </ConfigurationFormContainer>

       <ConfigurationFormContainer>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </ConfigurationFormContainer>
@@ -3,11 +3,13 @@ import {
   AutoQuestionsFormField,
 } from '@/components/auto-keywords-form-field';
 import { ConfigurationFormContainer } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function PictureConfiguration() {
   return (
     <ConfigurationFormContainer>
       <>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </>
@@ -7,6 +7,7 @@ import {
   ConfigurationFormContainer,
   MainContainer,
 } from '../configuration-form-container';
+import { AutoMetadata } from './common-item';

 export function PresentationConfiguration() {
   return (
@@ -16,6 +17,7 @@ export function PresentationConfiguration() {
       </ConfigurationFormContainer>

       <ConfigurationFormContainer>
+        <AutoMetadata />
         <AutoKeywordsFormField></AutoKeywordsFormField>
         <AutoQuestionsFormField></AutoQuestionsFormField>
       </ConfigurationFormContainer>
@@ -96,6 +96,7 @@ export const formSchema = z
         )
         .optional(),
       enable_metadata: z.boolean().optional(),
+      llm_id: z.string().optional(),
     })
     .optional(),
   pagerank: z.number(),
@@ -93,6 +93,7 @@ export default function DatasetSettings() {
       },
       metadata: [],
      enable_metadata: false,
+      llm_id: '',
     },
     pipeline_id: '',
     parseType: 1,
@@ -16,7 +16,7 @@ import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-reques
 import { Pen, Upload } from 'lucide-react';
 import { useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
-import { useManageMetadata } from '../components/metedata/hook';
+import { MetadataType, useManageMetadata } from '../components/metedata/hook';
 import { ManageMetadataModal } from '../components/metedata/manage-modal';
 import { DatasetTable } from './dataset-table';
 import Generate from './generate-button/generate';
@@ -53,7 +53,7 @@ export default function Dataset() {
   const { data: dataSetData } = useFetchKnowledgeBaseConfiguration({
     refreshCount,
   });
-  const { filters, onOpenChange } = useSelectDatasetFilters();
+  const { filters, onOpenChange, filterGroup } = useSelectDatasetFilters();

   const {
     createLoading,
@@ -90,6 +90,7 @@ export default function Dataset() {
         onSearchChange={handleInputChange}
         searchString={searchString}
         value={filterValue}
+        filterGroup={filterGroup}
         onChange={handleFilterSubmit}
         onOpenChange={onOpenChange}
         filters={filters}
@@ -105,7 +106,25 @@ export default function Dataset() {
           <Button
             variant={'ghost'}
             className="border border-border-button"
-            onClick={() => showManageMetadataModal()}
+            onClick={() =>
+              showManageMetadataModal({
+                type: MetadataType.Manage,
+                isCanAdd: false,
+                isEditField: true,
+                title: (
+                  <div className="flex flex-col gap-2">
+                    <div className="text-base font-normal">
+                      {t('knowledgeDetails.metadata.manageMetadata')}
+                    </div>
+                    <div className="text-sm text-text-secondary">
+                      {t(
+                        'knowledgeDetails.metadata.manageMetadataForDataset',
+                      )}
+                    </div>
+                  </div>
+                ),
+              })
+            }
           >
             <div className="flex gap-1 items-center">
               <Pen size={14} />
@@ -179,6 +198,7 @@ export default function Dataset() {
           // selectedRowKeys={selectedRowKeys}
           tableData={tableData}
           isCanAdd={metadataConfig.isCanAdd}
+          isEditField={metadataConfig.isEditField}
           isDeleteSingleValue={metadataConfig.isDeleteSingleValue}
           type={metadataConfig.type}
           otherData={metadataConfig.record}
@@ -49,9 +49,13 @@ export function useSelectDatasetFilters() {
     return [
       { field: 'type', label: 'File Type', list: fileTypes },
       { field: 'run', label: 'Status', list: fileStatus },
-      { field: 'metadata', label: 'metadata', list: metaDataList },
+      { field: 'metadata', label: 'Metadata field', list: metaDataList },
     ] as FilterCollection[];
   }, [fileStatus, fileTypes, metaDataList]);

-  return { filters, onOpenChange };
+  const filterGroup = {
+    [t('systemAttribute')]: ['type', 'run'],
+    // [t('metadataField')]: ['metadata'],
+  };
+  return { filters, onOpenChange, filterGroup };
 }
@@ -1,5 +1,4 @@
 import { DynamicForm } from '@/components/dynamic-form';
-import { useModelOptions } from '@/components/llm-setting-items/llm-form-field';
 import { HomeIcon } from '@/components/svg-icon';
 import { Modal } from '@/components/ui/modal/modal';
 import { memo, useMemo } from 'react';
@@ -18,7 +17,7 @@ type IProps = {
 export const AddOrEditModal = memo((props: IProps) => {
   const { open, onClose, onSubmit, initialMemory, isCreate } = props;
   const { t } = useTranslation();
-  const { modelOptions } = useModelOptions();
+  // const { modelOptions } = useModelOptions();

   const fields = useMemo(() => {
     if (!isCreate) {
@@ -26,21 +25,22 @@ export const AddOrEditModal = memo((props: IProps) => {
         (field: any) => field.name === 'name',
       );
     } else {
-      const tempFields = createMemoryFields(t).map((field: any) => {
-        if (field.name === 'llm_id') {
-          return {
-            ...field,
-            options: modelOptions,
-          };
-        } else {
-          return {
-            ...field,
-          };
-        }
-      });
-      return tempFields;
+      // const tempFields = createMemoryFields(t).map((field: any) => {
+      //   if (field.name === 'llm_id') {
+      //     return {
+      //       ...field,
+      //       options: modelOptions,
+      //     };
+      //   } else {
+      //     return {
+      //       ...field,
+      //     };
+      //   }
+      // });
+      // return tempFields;
+      return createMemoryFields(t);
     }
-  }, [modelOptions, isCreate]);
+  }, [isCreate, t]);

   return (
     <Modal
@@ -1,5 +1,8 @@
 import { FormFieldConfig, FormFieldType } from '@/components/dynamic-form';
-import { EmbeddingSelect } from '@/pages/dataset/dataset-setting/configuration/common-item';
+import {
+  EmbeddingSelect,
+  LLMSelect,
+} from '@/pages/dataset/dataset-setting/configuration/common-item';
 import { TFunction } from 'i18next';
 export enum MemoryType {
   Raw = 'raw',
@@ -52,6 +55,7 @@ export const createMemoryFields = (t: TFunction) =>
       required: true,
       type: FormFieldType.Select,
       tooltip: t('memories.llmTooltip'),
+      render: (field) => <LLMSelect field={field} isEdit={false} />,
     },
   ] as FormFieldConfig[];
@@ -47,6 +47,7 @@ const {
   runRaptor,
   traceRaptor,
   check_embedding,
+  kbUpdateMetaData,
 } = api;

 const methods = {
@@ -215,6 +216,10 @@ const methods = {
     url: check_embedding,
     method: 'post',
   },
+  kbUpdateMetaData: {
+    url: kbUpdateMetaData,
+    method: 'post',
+  },
   // getMetaData: {
   //   url: getMetaData,
   //   method: 'get',
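Note (editor's sketch, not part of this change set): the kbUpdateMetaData method registered above is invoked from the metadata modal hook earlier in this diff; a minimal standalone call could look roughly like the following, assuming the backend replies with { code: 0 } on success as checked in useManageMetaDataModal.

import kbService from '@/services/knowledge-service';

// Persist dataset-level metadata settings for a knowledge base.
export async function saveMetadataSettings(
  kbId: string,
  metadata: Record<string, unknown>,
) {
  const { data: res } = await kbService.kbUpdateMetaData({
    kb_id: kbId,
    metadata,
  });
  return res.code === 0;
}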
@@ -1,6 +1,6 @@
 import api from '@/utils/api';
+import request from '@/utils/next-request';
 import { registerNextServer } from '@/utils/register-server';
-import request from '@/utils/request';

 const {
   createMemory,
@@ -79,6 +79,7 @@ export default {
   pipelineRerun: `${api_host}/canvas/rerun`,
   getMetaData: `${api_host}/document/metadata/summary`,
   updateMetaData: `${api_host}/document/metadata/update`,
+  kbUpdateMetaData: `${api_host}/kb/update_metadata_setting`,

   // tags
   listTag: (knowledgeId: string) => `${api_host}/kb/${knowledgeId}/tags`,