Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-01-04 03:25:30 +08:00)

Compare commits: 22 commits, v0.20.3...0d7e52338e

Commits: 0d7e52338e, 4110f7f5ce, 0af57ff772, 0bd58038a8, 0cbcfcfedf, fbdde0259a, d482173c9b, 929dc97509, 30005c0203, 382458ace7, 4080f6a54a, 09570c7eef, 312f1a0477, 1ca226e43b, 830cda6a3a, c66dbbe433, 3b218b2dc0, d58ef6127f, 55173c7201, f860bdf0ad, 997627861a, 9f9d32d2cd
@ -131,7 +131,16 @@ class Canvas:
        self.path = self.dsl["path"]
        self.history = self.dsl["history"]
        self.globals = self.dsl["globals"]
        if "globals" in self.dsl:
            self.globals = self.dsl["globals"]
        else:
            self.globals = {
                "sys.query": "",
                "sys.user_id": "",
                "sys.conversation_turns": 0,
                "sys.files": []
            }

        self.retrieval = self.dsl["retrieval"]
        self.memory = self.dsl.get("memory", [])

@ -417,7 +426,7 @@ class Canvas:
        convs = []
        if window_size <= 0:
            return convs
        for role, obj in self.history[window_size * -1:]:
        for role, obj in self.history[window_size * -2:]:
            if isinstance(obj, dict):
                convs.append({"role": role, "content": obj.get("content", "")})
            else:
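In the second hunk the history slice changes from window_size * -1 to window_size * -2, which treats the window as a number of conversation turns rather than raw history entries (each turn contributes a user and an assistant record). A minimal standalone sketch of the effect, using a hypothetical history list:

# Hypothetical history: every conversation turn appends two entries.
history = [
    ("user", {"content": "q1"}), ("assistant", {"content": "a1"}),
    ("user", {"content": "q2"}), ("assistant", {"content": "a2"}),
    ("user", {"content": "q3"}), ("assistant", {"content": "a3"}),
]
window_size = 2
print(history[window_size * -1:])  # old slice: only the last 2 entries (one turn)
print(history[window_size * -2:])  # new slice: the last 4 entries (two full turns)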
@ -36,7 +36,7 @@ _IS_RAW_CONF = "_is_raw_conf"


class ComponentParamBase(ABC):
    def __init__(self):
        self.message_history_window_size = 22
        self.message_history_window_size = 13
        self.inputs = {}
        self.outputs = {}
        self.description = ""
@ -18,11 +18,8 @@ import logging
import os
import re
from typing import Any, Generator

import json_repair
from copy import deepcopy
from functools import partial

from api.db import LLMType
from api.db.services.llm_service import LLMBundle
from api.db.services.tenant_llm_service import TenantLLMService

@ -130,7 +127,7 @@ class LLM(ComponentBase):
        args = {}
        vars = self.get_input_elements() if not self._param.debug_inputs else self._param.debug_inputs
        prompt = self._param.sys_prompt
        sys_prompt = self._param.sys_prompt
        for k, o in vars.items():
            args[k] = o["value"]
            if not isinstance(args[k], str):

@ -141,14 +138,18 @@
            self.set_input_value(k, args[k])

        msg = self._canvas.get_history(self._param.message_history_window_size)[:-1]
        msg.extend(deepcopy(self._param.prompts))
        prompt = self.string_format(prompt, args)
        for p in self._param.prompts:
            if msg and msg[-1]["role"] == p["role"]:
                continue
            msg.append(p)

        sys_prompt = self.string_format(sys_prompt, args)
        for m in msg:
            m["content"] = self.string_format(m["content"], args)
        if self._param.cite and self._canvas.get_reference()["chunks"]:
            prompt += citation_prompt()
            sys_prompt += citation_prompt()

        return prompt, msg
        return sys_prompt, msg

    def _generate(self, msg:list[dict], **kwargs) -> str:
        if not self.imgs:
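The second hunk above replaces the blanket msg.extend(deepcopy(self._param.prompts)) with a loop that skips a configured prompt whenever the last message already carries the same role, so adjacent same-role messages are not produced. A minimal standalone sketch of that merge rule (function and variable names here are illustrative, not the component's API):

from copy import deepcopy

def merge_prompts(history, prompts):
    # Append configured prompts, skipping any whose role repeats the last message's role.
    msg = deepcopy(history)
    for p in prompts:
        if msg and msg[-1]["role"] == p["role"]:
            continue
        msg.append(p)
    return msg

history = [{"role": "user", "content": "What is RAGFlow?"}]
prompts = [{"role": "user", "content": "{sys.query}"}]
print(merge_prompts(history, prompts))  # the configured user prompt is skipped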
@ -150,10 +150,10 @@ def update(tenant_id, chat_id):
    if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
        return get_error_data_result(message="You do not own the chat")
    req = request.json
    ids = req.get("dataset_ids")
    ids = req.get("dataset_ids", [])
    if "show_quotation" in req:
        req["do_refer"] = req.pop("show_quotation")
    if ids is not None:
    if ids:
        for kb_id in ids:
            kbs = KnowledgebaseService.accessible(kb_id=kb_id, user_id=tenant_id)
            if not kbs:

@ -44,9 +44,6 @@ def retrieval(tenant_id):
        if not e:
            return build_error_result(message="Knowledgebase not found!", code=settings.RetCode.NOT_FOUND)

        if kb.tenant_id != tenant_id:
            return build_error_result(message="Knowledgebase not found!", code=settings.RetCode.NOT_FOUND)

        embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)

        ranks = settings.retrievaler.retrieval(
@ -14,13 +14,15 @@
# limitations under the License.
#

from .pdf_parser import RAGFlowPdfParser as PdfParser, PlainParser
from .docx_parser import RAGFlowDocxParser as DocxParser
from .excel_parser import RAGFlowExcelParser as ExcelParser
from .ppt_parser import RAGFlowPptParser as PptParser
from .html_parser import RAGFlowHtmlParser as HtmlParser
from .json_parser import RAGFlowJsonParser as JsonParser
from .markdown_parser import MarkdownElementExtractor
from .markdown_parser import RAGFlowMarkdownParser as MarkdownParser
from .pdf_parser import PlainParser
from .pdf_parser import RAGFlowPdfParser as PdfParser
from .ppt_parser import RAGFlowPptParser as PptParser
from .txt_parser import RAGFlowTxtParser as TxtParser

__all__ = [

@ -33,4 +35,6 @@ __all__ = [
    "JsonParser",
    "MarkdownParser",
    "TxtParser",
]
    "MarkdownElementExtractor",
]
@ -17,8 +17,10 @@

import re

import mistune
from markdown import markdown


class RAGFlowMarkdownParser:
    def __init__(self, chunk_token_num=128):
        self.chunk_token_num = int(chunk_token_num)

@ -35,40 +37,44 @@ class RAGFlowMarkdownParser:
                table_list.append(raw_table)
                if separate_tables:
                    # Skip this match (i.e., remove it)
                    new_text += working_text[last_end:match.start()] + "\n\n"
                    new_text += working_text[last_end : match.start()] + "\n\n"
                else:
                    # Replace with rendered HTML
                    html_table = markdown(raw_table, extensions=['markdown.extensions.tables']) if render else raw_table
                    new_text += working_text[last_end:match.start()] + html_table + "\n\n"
                    html_table = markdown(raw_table, extensions=["markdown.extensions.tables"]) if render else raw_table
                    new_text += working_text[last_end : match.start()] + html_table + "\n\n"
                last_end = match.end()
            new_text += working_text[last_end:]
            return new_text

        if "|" in markdown_text:  # for optimize performance
        if "|" in markdown_text:  # for optimize performance
            # Standard Markdown table
            border_table_pattern = re.compile(
                r'''
                r"""
                (?:\n|^)
                (?:\|.*?\|.*?\|.*?\n)
                (?:\|(?:\s*[:-]+[-| :]*\s*)\|.*?\n)
                (?:\|.*?\|.*?\|.*?\n)+
                ''', re.VERBOSE)
                """,
                re.VERBOSE,
            )
            working_text = replace_tables_with_rendered_html(border_table_pattern, tables)

            # Borderless Markdown table
            no_border_table_pattern = re.compile(
                r'''
                r"""
                (?:\n|^)
                (?:\S.*?\|.*?\n)
                (?:(?:\s*[:-]+[-| :]*\s*).*?\n)
                (?:\S.*?\|.*?\n)+
                ''', re.VERBOSE)
                """,
                re.VERBOSE,
            )
            working_text = replace_tables_with_rendered_html(no_border_table_pattern, tables)

        if "<table>" in working_text.lower():  # for optimize performance
            #HTML table extraction - handle possible html/body wrapper tags
        if "<table>" in working_text.lower():  # for optimize performance
            # HTML table extraction - handle possible html/body wrapper tags
            html_table_pattern = re.compile(
                r'''
                r"""
                (?:\n|^)
                \s*
                (?:

@ -83,9 +89,10 @@ class RAGFlowMarkdownParser:
                )
                \s*
                (?=\n|$)
                ''',
                re.VERBOSE | re.DOTALL | re.IGNORECASE
                """,
                re.VERBOSE | re.DOTALL | re.IGNORECASE,
            )

            def replace_html_tables():
                nonlocal working_text
                new_text = ""

@ -94,9 +101,9 @@ class RAGFlowMarkdownParser:
                    raw_table = match.group()
                    tables.append(raw_table)
                    if separate_tables:
                        new_text += working_text[last_end:match.start()] + "\n\n"
                        new_text += working_text[last_end : match.start()] + "\n\n"
                    else:
                        new_text += working_text[last_end:match.start()] + raw_table + "\n\n"
                        new_text += working_text[last_end : match.start()] + raw_table + "\n\n"
                    last_end = match.end()
                new_text += working_text[last_end:]
                working_text = new_text

@ -104,3 +111,163 @@ class RAGFlowMarkdownParser:
            replace_html_tables()

        return working_text, tables

class MarkdownElementExtractor:
    def __init__(self, markdown_content):
        self.markdown_content = markdown_content
        self.lines = markdown_content.split("\n")
        self.ast_parser = mistune.create_markdown(renderer="ast")
        self.ast_nodes = self.ast_parser(markdown_content)

    def extract_elements(self):
        """Extract individual elements (headers, code blocks, lists, etc.)"""
        sections = []

        i = 0
        while i < len(self.lines):
            line = self.lines[i]

            if re.match(r"^#{1,6}\s+.*$", line):
                # header
                element = self._extract_header(i)
                sections.append(element["content"])
                i = element["end_line"] + 1
            elif line.strip().startswith("```"):
                # code block
                element = self._extract_code_block(i)
                sections.append(element["content"])
                i = element["end_line"] + 1
            elif re.match(r"^\s*[-*+]\s+.*$", line) or re.match(r"^\s*\d+\.\s+.*$", line):
                # list block
                element = self._extract_list_block(i)
                sections.append(element["content"])
                i = element["end_line"] + 1
            elif line.strip().startswith(">"):
                # blockquote
                element = self._extract_blockquote(i)
                sections.append(element["content"])
                i = element["end_line"] + 1
            elif line.strip():
                # text block (paragraphs and inline elements until next block element)
                element = self._extract_text_block(i)
                sections.append(element["content"])
                i = element["end_line"] + 1
            else:
                i += 1

        sections = [section for section in sections if section.strip()]
        return sections

    def _extract_header(self, start_pos):
        return {
            "type": "header",
            "content": self.lines[start_pos],
            "start_line": start_pos,
            "end_line": start_pos,
        }

    def _extract_code_block(self, start_pos):
        end_pos = start_pos
        content_lines = [self.lines[start_pos]]

        # Find the end of the code block
        for i in range(start_pos + 1, len(self.lines)):
            content_lines.append(self.lines[i])
            end_pos = i
            if self.lines[i].strip().startswith("```"):
                break

        return {
            "type": "code_block",
            "content": "\n".join(content_lines),
            "start_line": start_pos,
            "end_line": end_pos,
        }

    def _extract_list_block(self, start_pos):
        end_pos = start_pos
        content_lines = []

        i = start_pos
        while i < len(self.lines):
            line = self.lines[i]
            # check if this line is a list item or continuation of a list
            if (
                re.match(r"^\s*[-*+]\s+.*$", line)
                or re.match(r"^\s*\d+\.\s+.*$", line)
                or (i > start_pos and not line.strip())
                or (i > start_pos and re.match(r"^\s{2,}[-*+]\s+.*$", line))
                or (i > start_pos and re.match(r"^\s{2,}\d+\.\s+.*$", line))
                or (i > start_pos and re.match(r"^\s+\w+.*$", line))
            ):
                content_lines.append(line)
                end_pos = i
                i += 1
            else:
                break

        return {
            "type": "list_block",
            "content": "\n".join(content_lines),
            "start_line": start_pos,
            "end_line": end_pos,
        }

    def _extract_blockquote(self, start_pos):
        end_pos = start_pos
        content_lines = []

        i = start_pos
        while i < len(self.lines):
            line = self.lines[i]
            if line.strip().startswith(">") or (i > start_pos and not line.strip()):
                content_lines.append(line)
                end_pos = i
                i += 1
            else:
                break

        return {
            "type": "blockquote",
            "content": "\n".join(content_lines),
            "start_line": start_pos,
            "end_line": end_pos,
        }

    def _extract_text_block(self, start_pos):
        """Extract a text block (paragraphs, inline elements) until next block element"""
        end_pos = start_pos
        content_lines = [self.lines[start_pos]]

        i = start_pos + 1
        while i < len(self.lines):
            line = self.lines[i]
            # stop if we encounter a block element
            if re.match(r"^#{1,6}\s+.*$", line) or line.strip().startswith("```") or re.match(r"^\s*[-*+]\s+.*$", line) or re.match(r"^\s*\d+\.\s+.*$", line) or line.strip().startswith(">"):
                break
            elif not line.strip():
                # check if the next line is a block element
                if i + 1 < len(self.lines) and (
                    re.match(r"^#{1,6}\s+.*$", self.lines[i + 1])
                    or self.lines[i + 1].strip().startswith("```")
                    or re.match(r"^\s*[-*+]\s+.*$", self.lines[i + 1])
                    or re.match(r"^\s*\d+\.\s+.*$", self.lines[i + 1])
                    or self.lines[i + 1].strip().startswith(">")
                ):
                    break
                else:
                    content_lines.append(line)
                    end_pos = i
                    i += 1
            else:
                content_lines.append(line)
                end_pos = i
                i += 1

        return {
            "type": "text_block",
            "content": "\n".join(content_lines),
            "start_line": start_pos,
            "end_line": end_pos,
        }
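For orientation, a hedged usage sketch of the new extractor, assuming the ragflow repository (and mistune) is importable; the sample markdown is invented for illustration:

from deepdoc.parser import MarkdownElementExtractor

sample = """# Title

An introductory paragraph.

- item one
- item two

> a quoted line
"""

extractor = MarkdownElementExtractor(sample)
# Each header, paragraph, list, blockquote, or code block becomes its own section string.
for section in extractor.extract_elements():
    print(repr(section))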
@ -169,7 +169,7 @@ class EntityResolution(Extractor):
            logging.info(f"Created resolution prompt {len(text)} bytes for {len(candidate_resolution_i[1])} entity pairs of type {candidate_resolution_i[0]}")
            async with chat_limiter:
                try:
                    with trio.move_on_after(240) as cancel_scope:
                    with trio.move_on_after(280) as cancel_scope:
                        response = await trio.to_thread.run_sync(self._chat, text, [{"role": "user", "content": "Output:"}], {})
                    if cancel_scope.cancelled_caught:
                        logging.warning("_resolve_candidate._chat timeout, skipping...")
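The timeout bump from 240 s to 280 s sits inside trio's cancel-scope pattern: move_on_after() cancels whatever is awaited in the block once the deadline passes, and cancelled_caught reports that it happened; the real code dispatches the blocking chat call through trio.to_thread.run_sync. A minimal self-contained sketch of the pattern with illustrative values:

import trio

async def main():
    # Allow the awaited work at most 2 seconds, then fall through instead of raising.
    with trio.move_on_after(2) as cancel_scope:
        await trio.sleep(5)  # stand-in for a slow LLM call
    if cancel_scope.cancelled_caught:
        print("chat timed out, skipping...")

trio.run(main)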
@ -47,7 +47,7 @@ class Extractor:
        self._language = language
        self._entity_types = entity_types or DEFAULT_ENTITY_TYPES

    @timeout(60*5)
    @timeout(60*20)
    def _chat(self, system, history, gen_conf={}):
        hist = deepcopy(history)
        conf = deepcopy(gen_conf)
@ -16,6 +16,9 @@

import json
import logging
import random
import time
from collections import OrderedDict
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from functools import wraps

@ -53,6 +56,13 @@ JSON_RESPONSE = True


class RAGFlowConnector:
    _MAX_DATASET_CACHE = 32
    _MAX_DOCUMENT_CACHE = 128
    _CACHE_TTL = 300

    _dataset_metadata_cache: OrderedDict[str, tuple[dict, float | int]] = OrderedDict()  # "dataset_id" -> (metadata, expiry_ts)
    _document_metadata_cache: OrderedDict[str, tuple[list[tuple[str, dict]], float | int]] = OrderedDict()  # "dataset_id" -> ([(document_id, doc_metadata)], expiry_ts)

    def __init__(self, base_url: str, version="v1"):
        self.base_url = base_url
        self.version = version

@ -72,6 +82,43 @@ class RAGFlowConnector:
        res = requests.get(url=self.api_url + path, params=params, headers=self.authorization_header, json=json)
        return res

    def _is_cache_valid(self, ts):
        return time.time() < ts

    def _get_expiry_timestamp(self):
        offset = random.randint(-30, 30)
        return time.time() + self._CACHE_TTL + offset

    def _get_cached_dataset_metadata(self, dataset_id):
        entry = self._dataset_metadata_cache.get(dataset_id)
        if entry:
            data, ts = entry
            if self._is_cache_valid(ts):
                self._dataset_metadata_cache.move_to_end(dataset_id)
                return data
        return None

    def _set_cached_dataset_metadata(self, dataset_id, metadata):
        self._dataset_metadata_cache[dataset_id] = (metadata, self._get_expiry_timestamp())
        self._dataset_metadata_cache.move_to_end(dataset_id)
        if len(self._dataset_metadata_cache) > self._MAX_DATASET_CACHE:
            self._dataset_metadata_cache.popitem(last=False)

    def _get_cached_document_metadata_by_dataset(self, dataset_id):
        entry = self._document_metadata_cache.get(dataset_id)
        if entry:
            data_list, ts = entry
            if self._is_cache_valid(ts):
                self._document_metadata_cache.move_to_end(dataset_id)
                return {doc_id: doc_meta for doc_id, doc_meta in data_list}
        return None

    def _set_cached_document_metadata_by_dataset(self, dataset_id, doc_id_meta_list):
        self._document_metadata_cache[dataset_id] = (doc_id_meta_list, self._get_expiry_timestamp())
        self._document_metadata_cache.move_to_end(dataset_id)
        if len(self._document_metadata_cache) > self._MAX_DOCUMENT_CACHE:
            self._document_metadata_cache.popitem(last=False)
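The cache helpers above implement a small TTL-plus-LRU scheme: each entry stores a jittered expiry timestamp, move_to_end() marks recent use, and popitem(last=False) evicts the oldest entry once the cap is exceeded. A standalone sketch of the same idea with generic names (not the connector's API):

import random
import time
from collections import OrderedDict

class TTLLRUCache:
    def __init__(self, max_items=32, ttl=300):
        self.max_items = max_items
        self.ttl = ttl
        self._store = OrderedDict()  # key -> (value, expiry_ts)

    def get(self, key):
        entry = self._store.get(key)
        if entry:
            value, expiry = entry
            if time.time() < expiry:
                self._store.move_to_end(key)  # mark as recently used
                return value
        return None

    def set(self, key, value):
        # Jitter the TTL so many entries do not all expire at the same moment.
        expiry = time.time() + self.ttl + random.randint(-30, 30)
        self._store[key] = (value, expiry)
        self._store.move_to_end(key)
        if len(self._store) > self.max_items:
            self._store.popitem(last=False)  # evict the least recently used entry

cache = TTLLRUCache(max_items=2)
cache.set("ds-1", {"name": "demo"})
print(cache.get("ds-1"))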
    def list_datasets(self, page: int = 1, page_size: int = 1000, orderby: str = "create_time", desc: bool = True, id: str | None = None, name: str | None = None):
        res = self._get("/datasets", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
        if not res:

@ -87,10 +134,38 @@ class RAGFlowConnector:
            return ""

    def retrieval(
        self, dataset_ids, document_ids=None, question="", page=1, page_size=30, similarity_threshold=0.2, vector_similarity_weight=0.3, top_k=1024, rerank_id: str | None = None, keyword: bool = False
        self,
        dataset_ids,
        document_ids=None,
        question="",
        page=1,
        page_size=30,
        similarity_threshold=0.2,
        vector_similarity_weight=0.3,
        top_k=1024,
        rerank_id: str | None = None,
        keyword: bool = False,
        force_refresh: bool = False,
    ):
        if document_ids is None:
            document_ids = []

        # If no dataset_ids provided or empty list, get all available dataset IDs
        if not dataset_ids:
            dataset_list_str = self.list_datasets()
            dataset_ids = []

            # Parse the dataset list to extract IDs
            if dataset_list_str:
                for line in dataset_list_str.strip().split('\n'):
                    if line.strip():
                        try:
                            dataset_info = json.loads(line.strip())
                            dataset_ids.append(dataset_info["id"])
                        except (json.JSONDecodeError, KeyError):
                            # Skip malformed lines
                            continue

        data_json = {
            "page": page,
            "page_size": page_size,

@ -110,12 +185,127 @@ class RAGFlowConnector:

        res = res.json()
        if res.get("code") == 0:
            data = res["data"]
            chunks = []
            for chunk_data in res["data"].get("chunks"):
                chunks.append(json.dumps(chunk_data, ensure_ascii=False))
            return [types.TextContent(type="text", text="\n".join(chunks))]

            # Cache document metadata and dataset information
            document_cache, dataset_cache = self._get_document_metadata_cache(dataset_ids, force_refresh=force_refresh)

            # Process chunks with enhanced field mapping including per-chunk metadata
            for chunk_data in data.get("chunks", []):
                enhanced_chunk = self._map_chunk_fields(chunk_data, dataset_cache, document_cache)
                chunks.append(enhanced_chunk)

            # Build structured response (no longer need response-level document_metadata)
            response = {
                "chunks": chunks,
                "pagination": {
                    "page": data.get("page", page),
                    "page_size": data.get("page_size", page_size),
                    "total_chunks": data.get("total", len(chunks)),
                    "total_pages": (data.get("total", len(chunks)) + page_size - 1) // page_size,
                },
                "query_info": {
                    "question": question,
                    "similarity_threshold": similarity_threshold,
                    "vector_weight": vector_similarity_weight,
                    "keyword_search": keyword,
                    "dataset_count": len(dataset_ids),
                },
            }

            return [types.TextContent(type="text", text=json.dumps(response, ensure_ascii=False))]

        raise Exception([types.TextContent(type="text", text=res.get("message"))])
    def _get_document_metadata_cache(self, dataset_ids, force_refresh=False):
        """Cache document metadata for all documents in the specified datasets"""
        document_cache = {}
        dataset_cache = {}

        try:
            for dataset_id in dataset_ids:
                dataset_meta = None if force_refresh else self._get_cached_dataset_metadata(dataset_id)
                if not dataset_meta:
                    # First get dataset info for name
                    dataset_res = self._get("/datasets", {"id": dataset_id, "page_size": 1})
                    if dataset_res and dataset_res.status_code == 200:
                        dataset_data = dataset_res.json()
                        if dataset_data.get("code") == 0 and dataset_data.get("data"):
                            dataset_info = dataset_data["data"][0]
                            dataset_meta = {"name": dataset_info.get("name", "Unknown"), "description": dataset_info.get("description", "")}
                            self._set_cached_dataset_metadata(dataset_id, dataset_meta)
                if dataset_meta:
                    dataset_cache[dataset_id] = dataset_meta

                docs = None if force_refresh else self._get_cached_document_metadata_by_dataset(dataset_id)
                if docs is None:
                    docs_res = self._get(f"/datasets/{dataset_id}/documents")
                    docs_data = docs_res.json()
                    if docs_data.get("code") == 0 and docs_data.get("data", {}).get("docs"):
                        doc_id_meta_list = []
                        docs = {}
                        for doc in docs_data["data"]["docs"]:
                            doc_id = doc.get("id")
                            if not doc_id:
                                continue
                            doc_meta = {
                                "document_id": doc_id,
                                "name": doc.get("name", ""),
                                "location": doc.get("location", ""),
                                "type": doc.get("type", ""),
                                "size": doc.get("size"),
                                "chunk_count": doc.get("chunk_count"),
                                # "chunk_method": doc.get("chunk_method", ""),
                                "create_date": doc.get("create_date", ""),
                                "update_date": doc.get("update_date", ""),
                                # "process_begin_at": doc.get("process_begin_at", ""),
                                # "process_duration": doc.get("process_duration"),
                                # "progress": doc.get("progress"),
                                # "progress_msg": doc.get("progress_msg", ""),
                                # "status": doc.get("status", ""),
                                # "run": doc.get("run", ""),
                                "token_count": doc.get("token_count"),
                                # "source_type": doc.get("source_type", ""),
                                "thumbnail": doc.get("thumbnail", ""),
                                "dataset_id": doc.get("dataset_id", dataset_id),
                                "meta_fields": doc.get("meta_fields", {}),
                                # "parser_config": doc.get("parser_config", {})
                            }
                            doc_id_meta_list.append((doc_id, doc_meta))
                            docs[doc_id] = doc_meta
                        self._set_cached_document_metadata_by_dataset(dataset_id, doc_id_meta_list)
                if docs:
                    document_cache.update(docs)

        except Exception:
            # Gracefully handle metadata cache failures
            pass

        return document_cache, dataset_cache

    def _map_chunk_fields(self, chunk_data, dataset_cache, document_cache):
        """Preserve all original API fields and add per-chunk document metadata"""
        # Start with ALL raw data from API (preserve everything like original version)
        mapped = dict(chunk_data)

        # Add dataset name enhancement
        dataset_id = chunk_data.get("dataset_id") or chunk_data.get("kb_id")
        if dataset_id and dataset_id in dataset_cache:
            mapped["dataset_name"] = dataset_cache[dataset_id]["name"]
        else:
            mapped["dataset_name"] = "Unknown"

        # Add document name convenience field
        mapped["document_name"] = chunk_data.get("document_keyword", "")

        # Add per-chunk document metadata
        document_id = chunk_data.get("document_id")
        if document_id and document_id in document_cache:
            mapped["document_metadata"] = document_cache[document_id]

        return mapped


class RAGFlowCtx:
    def __init__(self, connector: RAGFlowConnector):
@ -195,7 +385,58 @@ async def list_tools(*, connector) -> list[types.Tool]:
                        "items": {"type": "string"},
                        "description": "Optional array of document IDs to search within."
                    },
                    "question": {"type": "string", "description": "The question or query to search for."},
                    "question": {
                        "type": "string",
                        "description": "The question or query to search for."
                    },
                    "page": {
                        "type": "integer",
                        "description": "Page number for pagination",
                        "default": 1,
                        "minimum": 1,
                    },
                    "page_size": {
                        "type": "integer",
                        "description": "Number of results to return per page (default: 10, max recommended: 50 to avoid token limits)",
                        "default": 10,
                        "minimum": 1,
                        "maximum": 100,
                    },
                    "similarity_threshold": {
                        "type": "number",
                        "description": "Minimum similarity threshold for results",
                        "default": 0.2,
                        "minimum": 0.0,
                        "maximum": 1.0,
                    },
                    "vector_similarity_weight": {
                        "type": "number",
                        "description": "Weight for vector similarity vs term similarity",
                        "default": 0.3,
                        "minimum": 0.0,
                        "maximum": 1.0,
                    },
                    "keyword": {
                        "type": "boolean",
                        "description": "Enable keyword-based search",
                        "default": False,
                    },
                    "top_k": {
                        "type": "integer",
                        "description": "Maximum results to consider before ranking",
                        "default": 1024,
                        "minimum": 1,
                        "maximum": 1024,
                    },
                    "rerank_id": {
                        "type": "string",
                        "description": "Optional reranking model identifier",
                    },
                    "force_refresh": {
                        "type": "boolean",
                        "description": "Set to true only if fresh dataset and document metadata is explicitly required. Otherwise, cached metadata is used (default: false).",
                        "default": False,
                    },
                },
                "required": ["question"],
            },

@ -209,6 +450,16 @@ async def call_tool(name: str, arguments: dict, *, connector) -> list[types.Text
    if name == "ragflow_retrieval":
        document_ids = arguments.get("document_ids", [])
        dataset_ids = arguments.get("dataset_ids", [])
        question = arguments.get("question", "")
        page = arguments.get("page", 1)
        page_size = arguments.get("page_size", 10)
        similarity_threshold = arguments.get("similarity_threshold", 0.2)
        vector_similarity_weight = arguments.get("vector_similarity_weight", 0.3)
        keyword = arguments.get("keyword", False)
        top_k = arguments.get("top_k", 1024)
        rerank_id = arguments.get("rerank_id")
        force_refresh = arguments.get("force_refresh", False)

        # If no dataset_ids provided or empty list, get all available dataset IDs
        if not dataset_ids:

@ -229,7 +480,15 @@ async def call_tool(name: str, arguments: dict, *, connector) -> list[types.Text
        return connector.retrieval(
            dataset_ids=dataset_ids,
            document_ids=document_ids,
            question=arguments["question"],
            question=question,
            page=page,
            page_size=page_size,
            similarity_threshold=similarity_threshold,
            vector_similarity_weight=vector_similarity_weight,
            keyword=keyword,
            top_k=top_k,
            rerank_id=rerank_id,
            force_refresh=force_refresh,
        )
    raise ValueError(f"Tool not found: {name}")
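Taken together, an MCP client would invoke the extended ragflow_retrieval tool with arguments like the following. This is an illustrative, hedged sketch only: the dataset IDs are placeholders, call_tool is the handler defined in the same module, and it assumes a RAGFlowConnector already configured against a running RAGFlow server.

arguments = {
    "question": "How does RAGFlow chunk markdown files?",
    "dataset_ids": [],           # empty -> the handler falls back to every accessible dataset
    "page": 1,
    "page_size": 10,
    "similarity_threshold": 0.2,
    "keyword": True,
    "force_refresh": False,      # reuse cached dataset/document metadata
}

async def demo(connector):
    results = await call_tool("ragflow_retrieval", arguments, connector=connector)
    print(results[0].text)       # JSON with "chunks", "pagination", and "query_info"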
@ -45,7 +45,7 @@ dependencies = [
    "html-text==0.6.2",
    "httpx[socks]==0.27.2",
    "huggingface-hub>=0.25.0,<0.26.0",
    "infinity-sdk==0.6.0-dev4",
    "infinity-sdk==0.6.0.dev5",
    "infinity-emb>=0.0.66,<0.0.67",
    "itsdangerous==2.1.2",
    "json-repair==0.35.0",
@ -30,7 +30,7 @@ from tika import parser

from api.db import LLMType
from api.db.services.llm_service import LLMBundle
from deepdoc.parser import DocxParser, ExcelParser, HtmlParser, JsonParser, MarkdownParser, PdfParser, TxtParser
from deepdoc.parser import DocxParser, ExcelParser, HtmlParser, JsonParser, MarkdownElementExtractor, MarkdownParser, PdfParser, TxtParser
from deepdoc.parser.figure_parser import VisionFigureParser, vision_figure_parser_figure_data_wrapper
from deepdoc.parser.pdf_parser import PlainParser, VisionParser
from rag.nlp import concat_img, find_codec, naive_merge, naive_merge_with_images, naive_merge_docx, rag_tokenizer, tokenize_chunks, tokenize_chunks_with_images, tokenize_table

@ -289,7 +289,7 @@ class Pdf(PdfParser):
            return [(b["text"], self._line_tag(b, zoomin)) for b in self.boxes], tbls, figures
        else:
            tbls = self._extract_table_figure(True, zoomin, True, True)
            # self._naive_vertical_merge()
            self._naive_vertical_merge()
            self._concat_downward()
            # self._filter_forpages()
            logging.info("layouts cost: {}s".format(timer() - first_start))

@ -350,17 +350,14 @@ class Markdown(MarkdownParser):
        else:
            with open(filename, "r") as f:
                txt = f.read()

        remainder, tables = self.extract_tables_and_remainder(f'{txt}\n', separate_tables=separate_tables)
        sections = []

        extractor = MarkdownElementExtractor(txt)
        element_sections = extractor.extract_elements()
        sections = [(element, "") for element in element_sections]

        tbls = []
        for sec in remainder.split("\n"):
            if sec.strip().find("#") == 0:
                sections.append((sec, ""))
            elif sections and sections[-1][0].strip().find("#") == 0:
                sec_, _ = sections.pop(-1)
                sections.append((sec_ + "\n" + sec, ""))
            else:
                sections.append((sec, ""))
        for table in tables:
            tbls.append(((None, markdown(table, extensions=['markdown.extensions.tables'])), ""))
        return sections, tbls
@ -42,7 +42,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
        self._prompt = prompt
        self._max_token = max_token

    @timeout(60)
    @timeout(60*20)
    async def _chat(self, system, history, gen_conf):
        response = get_llm_cache(self._llm_model.llm_name, system, history, gen_conf)
        if response:

@ -56,7 +56,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
        set_llm_cache(self._llm_model.llm_name, system, response, history, gen_conf)
        return response

    @timeout(2)
    @timeout(20)
    async def _embedding_encode(self, txt):
        response = get_embed_cache(self._embd_model.llm_name, txt)
        if response is not None:

@ -86,7 +86,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
        layers = [(0, len(chunks))]
        start, end = 0, len(chunks)

        @timeout(60)
        @timeout(60*20)
        async def summarize(ck_idx: list[int]):
            nonlocal chunks
            texts = [chunks[i][0] for i in ck_idx]
uv.lock (generated, 6 changed lines)
@ -2603,7 +2603,7 @@ wheels = [

[[package]]
name = "infinity-sdk"
version = "0.6.0.dev4"
version = "0.6.0.dev5"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "numpy" },

@ -2620,7 +2620,7 @@ dependencies = [
    { name = "thrift" },
]
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/d4/cc/645ed8de15952940c7308a788036376583a5fc29fdcf3e4bc75b5ad0c881/infinity_sdk-0.6.0.dev4-py3-none-any.whl", hash = "sha256:f8f4bd8a44e3fae7b4228b5c9e9a16559b4905f50d2d7d0a3d18f39974613e7a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/fe/a4/6079bf9790f16badc01e7b79a28c90bec407cfcaa8a2ed37e4a68120f87a/infinity_sdk-0.6.0.dev5-py3-none-any.whl", hash = "sha256:510ac408d5cd9d3d4df33c7c0877f55c5ae8a6019e465190c86d58012a319179" },
]

[[package]]

@ -5471,7 +5471,7 @@ requires-dist = [
    { name = "httpx", extras = ["socks"], specifier = "==0.27.2" },
    { name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
    { name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
    { name = "infinity-sdk", specifier = "==0.6.0.dev4" },
    { name = "infinity-sdk", specifier = "==0.6.0.dev5" },
    { name = "itsdangerous", specifier = "==2.1.2" },
    { name = "json-repair", specifier = "==0.35.0" },
    { name = "langfuse", specifier = ">=2.60.0" },
@ -14,7 +14,7 @@ module.exports = {
    'error',
    {
      '**/*.{jsx,tsx}': 'KEBAB_CASE',
      '**/*.{js,ts}': 'KEBAB_CASE',
      '**/*.{js,ts}': '[a-z0-9.-]*',
    },
  ],
  'check-file/folder-naming-convention': [
@ -85,7 +85,7 @@ function Root({ children }: React.PropsWithChildren) {
      <Sonner position={'top-right'} expand richColors closeButton></Sonner>
      <Toaster />
      </ConfigProvider>
      <ReactQueryDevtools buttonPosition={'top-left'} />
      <ReactQueryDevtools buttonPosition={'top-left'} initialIsOpen={false} />
    </>
  );
}
@ -8,47 +8,93 @@ import {
} from '@/components/ui/dialog';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { IModalProps } from '@/interfaces/common';
import { Dispatch, SetStateAction, useCallback, useState } from 'react';
import { zodResolver } from '@hookform/resolvers/zod';
import { TFunction } from 'i18next';
import { useForm } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import { z } from 'zod';
import { FileUploader } from '../file-uploader';
import { RAGFlowFormItem } from '../ragflow-form';
import { Form } from '../ui/form';
import { Switch } from '../ui/switch';

type UploaderTabsProps = {
  setFiles: Dispatch<SetStateAction<File[]>>;
function buildUploadFormSchema(t: TFunction) {
  const FormSchema = z.object({
    parseOnCreation: z.boolean().optional(),
    fileList: z
      .array(z.instanceof(File))
      .min(1, { message: t('fileManager.pleaseUploadAtLeastOneFile') }),
  });

  return FormSchema;
}

export type UploadFormSchemaType = z.infer<
  ReturnType<typeof buildUploadFormSchema>
>;

const UploadFormId = 'UploadFormId';

type UploadFormProps = {
  submit: (values?: UploadFormSchemaType) => void;
  showParseOnCreation?: boolean;
};

export function UploaderTabs({ setFiles }: UploaderTabsProps) {
function UploadForm({ submit, showParseOnCreation }: UploadFormProps) {
  const { t } = useTranslation();
  const FormSchema = buildUploadFormSchema(t);

  type UploadFormSchemaType = z.infer<typeof FormSchema>;
  const form = useForm<UploadFormSchemaType>({
    resolver: zodResolver(FormSchema),
    defaultValues: {
      parseOnCreation: false,
      fileList: [],
    },
  });

  return (
    <Tabs defaultValue="account">
      <TabsList className="grid w-full grid-cols-2 mb-4">
        <TabsTrigger value="account">{t('fileManager.local')}</TabsTrigger>
        <TabsTrigger value="password">{t('fileManager.s3')}</TabsTrigger>
      </TabsList>
      <TabsContent value="account">
        <FileUploader
          maxFileCount={8}
          maxSize={8 * 1024 * 1024}
          onValueChange={setFiles}
          accept={{ '*': [] }}
        />
      </TabsContent>
      <TabsContent value="password">{t('common.comingSoon')}</TabsContent>
    </Tabs>
    <Form {...form}>
      <form
        onSubmit={form.handleSubmit(submit)}
        id={UploadFormId}
        className="space-y-4"
      >
        {showParseOnCreation && (
          <RAGFlowFormItem
            name="parseOnCreation"
            label={t('fileManager.parseOnCreation')}
          >
            {(field) => (
              <Switch
                onCheckedChange={field.onChange}
                checked={field.value}
              ></Switch>
            )}
          </RAGFlowFormItem>
        )}
        <RAGFlowFormItem name="fileList" label={t('fileManager.file')}>
          {(field) => (
            <FileUploader
              value={field.value}
              onValueChange={field.onChange}
              accept={{ '*': [] }}
            />
          )}
        </RAGFlowFormItem>
      </form>
    </Form>
  );
}

type FileUploadDialogProps = IModalProps<UploadFormSchemaType> &
  Pick<UploadFormProps, 'showParseOnCreation'>;
export function FileUploadDialog({
  hideModal,
  onOk,
  loading,
}: IModalProps<File[]>) {
  showParseOnCreation = false,
}: FileUploadDialogProps) {
  const { t } = useTranslation();
  const [files, setFiles] = useState<File[]>([]);

  const handleOk = useCallback(() => {
    onOk?.(files);
  }, [files, onOk]);

  return (
    <Dialog open onOpenChange={hideModal}>

@ -56,9 +102,21 @@ export function FileUploadDialog({
        <DialogHeader>
          <DialogTitle>{t('fileManager.uploadFile')}</DialogTitle>
        </DialogHeader>
        <UploaderTabs setFiles={setFiles}></UploaderTabs>
        <Tabs defaultValue="account">
          <TabsList className="grid w-full grid-cols-2 mb-4">
            <TabsTrigger value="account">{t('fileManager.local')}</TabsTrigger>
            <TabsTrigger value="password">{t('fileManager.s3')}</TabsTrigger>
          </TabsList>
          <TabsContent value="account">
            <UploadForm
              submit={onOk!}
              showParseOnCreation={showParseOnCreation}
            ></UploadForm>
          </TabsContent>
          <TabsContent value="password">{t('common.comingSoon')}</TabsContent>
        </Tabs>
        <DialogFooter>
          <ButtonLoading type="submit" onClick={handleOk} loading={loading}>
          <ButtonLoading type="submit" loading={loading} form={UploadFormId}>
            {t('common.save')}
          </ButtonLoading>
        </DialogFooter>
@ -15,6 +15,7 @@ import { Progress } from '@/components/ui/progress';
import { ScrollArea } from '@/components/ui/scroll-area';
import { useControllableState } from '@/hooks/use-controllable-state';
import { cn, formatBytes } from '@/lib/utils';
import { useTranslation } from 'react-i18next';

function isFileWithPreview(file: File): file is File & { preview: string } {
  return 'preview' in file && typeof file.preview === 'string';

@ -168,14 +169,14 @@ export function FileUploader(props: FileUploaderProps) {
    accept = {
      'image/*': [],
    },
    maxSize = 1024 * 1024 * 2,
    maxFileCount = 1,
    maxSize = 1024 * 1024 * 10000000,
    maxFileCount = 100000000000,
    multiple = false,
    disabled = false,
    className,
    ...dropzoneProps
  } = props;

  const { t } = useTranslation();
  const [files, setFiles] = useControllableState({
    prop: valueProp,
    onChange: onValueChange,

@ -267,7 +268,7 @@ export function FileUploader(props: FileUploaderProps) {
        <div
          {...getRootProps()}
          className={cn(
            'group relative grid h-52 w-full cursor-pointer place-items-center rounded-lg border-2 border-dashed border-muted-foreground/25 px-5 py-2.5 text-center transition hover:bg-muted/25',
            'group relative grid h-72 w-full cursor-pointer place-items-center rounded-lg border-2 border-dashed border-muted-foreground/25 px-5 py-2.5 text-center transition hover:bg-muted/25',
            'ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
            isDragActive && 'border-muted-foreground/50',
            isDisabled && 'pointer-events-none opacity-60',

@ -298,14 +299,15 @@ export function FileUploader(props: FileUploaderProps) {
          </div>
          <div className="flex flex-col gap-px">
            <p className="font-medium text-muted-foreground">
              Drag {`'n'`} drop files here, or click to select files
              {t('knowledgeDetails.uploadTitle')}
            </p>
            <p className="text-sm text-muted-foreground/70">
              You can upload
              {t('knowledgeDetails.uploadDescription')}
              {/* You can upload
              {maxFileCount > 1
                ? ` ${maxFileCount === Infinity ? 'multiple' : maxFileCount}
              files (up to ${formatBytes(maxSize)} each)`
                : ` a file with ${formatBytes(maxSize)}`}
                : ` a file with ${formatBytes(maxSize)}`} */}
            </p>
          </div>
        </div>
@ -1,6 +1,7 @@
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import { Card, CardContent } from '@/components/ui/card';
import { formatDate } from '@/utils/date';
import { ReactNode } from 'react';

interface IProps {
  data: {

@ -11,8 +12,9 @@ interface IProps {
  };
  onClick?: () => void;
  moreDropdown: React.ReactNode;
  sharedBadge?: ReactNode;
}
export function HomeCard({ data, onClick, moreDropdown }: IProps) {
export function HomeCard({ data, onClick, moreDropdown, sharedBadge }: IProps) {
  return (
    <Card
      className="bg-bg-card border-colors-outline-neutral-standard"

@ -31,7 +33,7 @@ export function HomeCard({ data, onClick, moreDropdown }: IProps) {
        </div>
        <div className="flex flex-col justify-between gap-1 flex-1 h-full w-[calc(100%-50px)]">
          <section className="flex justify-between">
            <div className="text-[20px] font-bold w-80% leading-5">
            <div className="text-[20px] font-bold w-80% leading-5 text-ellipsis overflow-hidden">
              {data.name}
            </div>
            {moreDropdown}

@ -41,10 +43,11 @@ export function HomeCard({ data, onClick, moreDropdown }: IProps) {
            <div className="whitespace-nowrap overflow-hidden text-ellipsis">
              {data.description}
            </div>
            <div>
            <div className="flex justify-between items-center">
              <p className="text-sm opacity-80">
                {formatDate(data.update_time)}
              </p>
              {sharedBadge}
            </div>
          </section>
        </div>
@ -1,5 +1,5 @@
import { cn } from '@/lib/utils';
import { ChevronDown } from 'lucide-react';
import { Funnel } from 'lucide-react';
import React, {
  ChangeEventHandler,
  PropsWithChildren,

@ -25,20 +25,20 @@ export const FilterButton = React.forwardRef<
>(({ count = 0, ...props }, ref) => {
  return (
    <Button variant="secondary" {...props} ref={ref}>
      <span
      {/* <span
        className={cn({
          'text-text-primary': count > 0,
          'text-text-sub-title-invert': count === 0,
        })}
      >
        Filter
      </span>
      </span> */}
      {count > 0 && (
        <span className="rounded-full bg-text-badge px-1 text-xs ">
          {count}
        </span>
      )}
      <ChevronDown />
      <Funnel />
    </Button>
  );
});
@ -58,7 +58,10 @@ export function MetadataFilter({ prefix = '' }: MetadataFilterProps) {
          name={methodName}
          tooltip={t('metadataTip')}
        >
          <SelectWithSearch options={MetadataOptions} />
          <SelectWithSearch
            options={MetadataOptions}
            triggerClassName="!bg-bg-input"
          />
        </RAGFlowFormItem>
      )}
      {hasKnowledge && metadata === DatasetMetadata.Manual && (
@ -5,6 +5,7 @@ import {
  FormLabel,
  FormMessage,
} from '@/components/ui/form';
import { cn } from '@/lib/utils';
import { ReactNode, cloneElement, isValidElement } from 'react';
import { ControllerRenderProps, useFormContext } from 'react-hook-form';

@ -13,6 +14,7 @@ type RAGFlowFormItemProps = {
  label: ReactNode;
  tooltip?: ReactNode;
  children: ReactNode | ((field: ControllerRenderProps) => ReactNode);
  horizontal?: boolean;
};

export function RAGFlowFormItem({

@ -20,6 +22,7 @@ export function RAGFlowFormItem({
  label,
  tooltip,
  children,
  horizontal = false,
}: RAGFlowFormItemProps) {
  const form = useFormContext();
  return (

@ -27,8 +30,14 @@ export function RAGFlowFormItem({
      control={form.control}
      name={name}
      render={({ field }) => (
        <FormItem>
          <FormLabel tooltip={tooltip}>{label}</FormLabel>
        <FormItem
          className={cn({
            'flex items-center': horizontal,
          })}
        >
          <FormLabel tooltip={tooltip} className={cn({ 'w-1/4': horizontal })}>
            {label}
          </FormLabel>
          <FormControl>
            {typeof children === 'function'
              ? children(field)
@ -8,9 +8,5 @@ export function SharedBadge({ children }: PropsWithChildren) {
    return null;
  }

  return (
    <span className="bg-text-secondary rounded-sm px-1 text-bg-base text-xs">
      {children}
    </span>
  );
  return <span className="bg-bg-card rounded-sm px-1 text-xs">{children}</span>;
}
@ -17,7 +17,7 @@ const buttonVariants = cva(
        outline:
          'border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50',
        secondary:
          'bg-secondary text-secondary-foreground shadow-xs hover:bg-secondary/80',
          'bg-bg-input text-secondary-foreground shadow-xs hover:bg-bg-input/80',
        ghost:
          'hover:bg-accent hover:text-accent-foreground dark:hover:bg-accent/50',
        link: 'text-primary underline-offset-4 hover:underline',
@ -116,7 +116,10 @@ export { ExpandedInput, Input, SearchInput };

type NumberInputProps = { onChange?(value: number): void } & InputProps;

export const NumberInput = ({ onChange, ...props }: NumberInputProps) => {
export const NumberInput = React.forwardRef<
  HTMLInputElement,
  NumberInputProps & { value: Value; onChange(value: Value): void }
>(function NumberInput({ onChange, ...props }, ref) {
  return (
    <Input
      type="number"

@ -125,6 +128,7 @@ export const NumberInput = ({ onChange, ...props }: NumberInputProps) => {
        onChange?.(value === '' ? 0 : Number(value)); // convert to number
      }}
      {...props}
      ref={ref}
    ></Input>
  );
};
});
@ -12,13 +12,13 @@ const Progress = React.forwardRef<
  <ProgressPrimitive.Root
    ref={ref}
    className={cn(
      'relative h-4 w-full overflow-hidden rounded-full bg-secondary',
      'relative h-4 w-full overflow-hidden rounded-full bg-bg-accent',
      className,
    )}
    {...props}
  >
    <ProgressPrimitive.Indicator
      className="h-full w-full flex-1 bg-primary transition-all"
      className="h-full w-full flex-1 bg-accent-primary transition-all"
      style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
    />
  </ProgressPrimitive.Root>
@ -23,6 +23,7 @@ export interface SegmentedProps
  prefixCls?: string;
  direction?: 'ltr' | 'rtl';
  motionName?: string;
  activeClassName?: string;
}

export function Segmented({

@ -30,6 +31,7 @@ export function Segmented({
  value,
  onChange,
  className,
  activeClassName,
}: SegmentedProps) {
  const [selectedValue, setSelectedValue] = React.useState<
    SegmentedValue | undefined

@ -57,9 +59,12 @@ export function Segmented({
          className={cn(
            'inline-flex items-center px-6 py-2 text-base font-normal rounded-3xl cursor-pointer',
            {
              'bg-text-primary': selectedValue === actualValue,
              'text-bg-base': selectedValue === actualValue,
              'text-bg-base bg-metallic-gradient border-b-[#00BEB4] border-b-2':
                selectedValue === actualValue,
            },
            activeClassName && selectedValue === actualValue
              ? activeClassName
              : '',
          )}
          onClick={() => handleOnChange(actualValue)}
        >
@ -54,7 +54,7 @@ const Textarea = forwardRef<HTMLTextAreaElement, TextareaProps>(
    return (
      <textarea
        className={cn(
          'flex min-h-[80px] w-full bg-bg-card rounded-md border border-input px-3 py-2 text-base ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm overflow-hidden',
          'flex min-h-[80px] w-full bg-bg-input rounded-md border border-input px-3 py-2 text-base ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm overflow-hidden',
          className,
        )}
        rows={autoSize?.minRows ?? props.rows ?? undefined}
web/src/constants/permission.ts (new file, 4 lines)
@ -0,0 +1,4 @@
export enum PermissionRole {
  Me = 'me',
  Team = 'team',
}
@ -1,4 +1,5 @@
import { useHandleFilterSubmit } from '@/components/list-filter-bar/use-handle-filter-submit';
import { ResponseType } from '@/interfaces/database/base';
import {
  IDocumentInfo,
  IDocumentInfoFilter,

@ -45,9 +46,9 @@ export const useUploadNextDocument = () => {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
  } = useMutation<ResponseType<IDocumentInfo[]>, Error, File[]>({
    mutationKey: [DocumentApiAction.UploadDocument],
    mutationFn: async (fileList: File[]) => {
    mutationFn: async (fileList) => {
      const formData = new FormData();
      formData.append('kb_id', id!);
      fileList.forEach((file: any) => {
@ -70,7 +70,7 @@ export default {
    review: 'from 500+ reviews',
  },
  header: {
    knowledgeBase: 'Knowledge Base',
    knowledgeBase: 'Dataset',
    chat: 'Chat',
    register: 'Register',
    signin: 'Sign in',

@ -86,7 +86,7 @@ export default {
  knowledgeList: {
    welcome: 'Welcome back',
    description: 'Which knowledge bases will you use today?',
    createKnowledgeBase: 'Create knowledge base',
    createKnowledgeBase: 'Create Dataset',
    name: 'Name',
    namePlaceholder: 'Please input name!',
    doc: 'Docs',

@ -845,6 +845,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
    uploadLimit:
      'Each file must not exceed 10MB, and the total number of files must not exceed 128.',
    destinationFolder: 'Destination folder',
    pleaseUploadAtLeastOneFile: 'Please upload at least one file',
  },
  flow: {
    cite: 'Cite',

@ -1441,6 +1442,7 @@ This delimiter is used to split the input text into several text pieces echo of
    showQueryMindmap: 'Show Query Mindmap',
    embedApp: 'Embed App',
    relatedSearch: 'Related Search',
    descriptionValue: 'You are an intelligent assistant.',
    okText: 'Save',
    cancelText: 'Cancel',
  },
@ -799,6 +799,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
    fileError: '文件错误',
    uploadLimit: '文件大小不能超过10M,文件总数不超过128个',
    destinationFolder: '目标文件夹',
    pleaseUploadAtLeastOneFile: '请上传至少一个文件',
  },
  flow: {
    flow: '工作流',

@ -1344,6 +1345,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
    showQueryMindmap: '显示查询思维导图',
    embedApp: '嵌入网站',
    relatedSearch: '相关搜索',
    descriptionValue: '你是一位智能助手。',
    okText: '保存',
    cancelText: '返回',
  },
@ -63,7 +63,6 @@ export function UploadAgentForm({ hideModal, onOk }: IModalProps<any>) {
              value={field.value}
              onValueChange={field.onChange}
              maxFileCount={1}
              maxSize={4 * 1024 * 1024}
              accept={{ '*.json': [FileMimeType.Json] }}
            />
          </FormControl>
@ -3,15 +3,15 @@ import { RunningStatus } from '@/constants/knowledge';
export const RunningStatusMap = {
  [RunningStatus.UNSTART]: {
    label: 'UNSTART',
    color: 'cyan',
    color: 'var(--accent-primary)',
  },
  [RunningStatus.RUNNING]: {
    label: 'Parsing',
    color: 'blue',
    color: 'var(--team-member)',
  },
  [RunningStatus.CANCEL]: { label: 'CANCEL', color: 'orange' },
  [RunningStatus.DONE]: { label: 'SUCCESS', color: 'blue' },
  [RunningStatus.FAIL]: { label: 'FAIL', color: 'red' },
  [RunningStatus.CANCEL]: { label: 'CANCEL', color: 'var(--state-warning)' },
  [RunningStatus.DONE]: { label: 'SUCCESS', color: 'var(--state-success)' },
  [RunningStatus.FAIL]: { label: 'FAIL', color: 'var(--state-error)' },
};

export * from '@/constants/knowledge';
@ -11,7 +11,7 @@ import { IDocumentInfo } from '@/interfaces/database/document';
import { formatFileSize } from '@/utils/common-util';
import { formatDate } from '@/utils/date';
import { downloadDocument } from '@/utils/file-util';
import { ArrowDownToLine, FolderPen, ScrollText, Trash2 } from 'lucide-react';
import { Download, Eye, PenLine, Trash2 } from 'lucide-react';
import { useCallback } from 'react';
import { UseRenameDocumentShowType } from './use-rename-document';
import { isParserRunning } from './utils';

@ -57,12 +57,12 @@ export function DatasetActionCell({
        disabled={isRunning}
        onClick={handleRename}
      >
        <FolderPen />
        <PenLine />
      </Button>
      <HoverCard>
        <HoverCardTrigger>
          <Button variant="ghost" disabled={isRunning} size={'sm'}>
            <ScrollText />
            <Eye />
          </Button>
        </HoverCardTrigger>
        <HoverCardContent className="w-[40vw] max-h-[40vh] overflow-auto">

@ -93,7 +93,7 @@ export function DatasetActionCell({
          disabled={isRunning}
          size={'sm'}
        >
          <ArrowDownToLine />
          <Download />
        </Button>
      )}
      <ConfirmDeleteDialog onOk={handleRemove}>
@ -164,7 +164,7 @@ export function DatasetTable({
        )}
      </TableBody>
    </Table>
    <div className="flex items-center justify-end py-4">
    <div className="flex items-center justify-end py-4 absolute bottom-3 right-3">
      <div className="space-x-2">
        <RAGFlowPagination
          {...pick(pagination, 'current', 'pageSize')}
@ -111,6 +111,7 @@ export default function Dataset() {
          hideModal={hideDocumentUploadModal}
          onOk={onDocumentUploadOk}
          loading={documentUploadLoading}
          showParseOnCreation
        ></FileUploadDialog>
      )}
      {createVisible && (
@ -17,7 +17,7 @@ function Dot({ run }: { run: RunningStatus }) {
  const runningStatus = RunningStatusMap[run];
  return (
    <span
      className={'size-2 inline-block rounded'}
      className={'size-1 inline-block rounded'}
      style={{ backgroundColor: runningStatus.color }}
    ></span>
  );

@ -89,7 +89,7 @@ export function ParsingCard({ record }: IProps) {
  return (
    <HoverCard>
      <HoverCardTrigger asChild>
        <Button variant={'ghost'} size={'sm'}>
        <Button variant={'transparent'} className="border-none" size={'sm'}>
          <Dot run={record.run}></Dot>
        </Button>
      </HoverCardTrigger>
@ -14,7 +14,7 @@ import {
import { Progress } from '@/components/ui/progress';
import { Separator } from '@/components/ui/separator';
import { IDocumentInfo } from '@/interfaces/database/document';
import { CircleX, Play, RefreshCw } from 'lucide-react';
import { CircleX, RefreshCw } from 'lucide-react';
import { useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { RunningStatus } from './constant';

@ -24,11 +24,13 @@ import { useHandleRunDocumentByIds } from './use-run-document';
import { UseSaveMetaShowType } from './use-save-meta';
import { isParserRunning } from './utils';
const IconMap = {
  [RunningStatus.UNSTART]: <Play />,
  [RunningStatus.RUNNING]: <CircleX />,
  [RunningStatus.CANCEL]: <RefreshCw />,
  [RunningStatus.DONE]: <RefreshCw />,
  [RunningStatus.FAIL]: <RefreshCw />,
  [RunningStatus.UNSTART]: (
    <div className="w-0 h-0 border-l-[10px] border-l-accent-primary border-t-8 border-r-4 border-b-8 border-transparent"></div>
  ),
  [RunningStatus.RUNNING]: <CircleX size={14} color="var(--state-error)" />,
  [RunningStatus.CANCEL]: <RefreshCw size={14} color="var(--accent-primary)" />,
  [RunningStatus.DONE]: <RefreshCw size={14} color="var(--accent-primary)" />,
  [RunningStatus.FAIL]: <RefreshCw size={14} color="var(--accent-primary)" />,
};

export function ParsingStatusCell({

@ -60,11 +62,11 @@ export function ParsingStatusCell({
  }, [record, showSetMetaModal]);

  return (
    <section className="flex gap-2 items-center">
      <div className="w-28 flex items-center justify-between">
    <section className="flex gap-8 items-center">
      <div className="w-fit flex items-center justify-between">
        <DropdownMenu>
          <DropdownMenuTrigger asChild>
            <Button variant={'ghost'} size={'sm'}>
            <Button variant={'transparent'} className="border-none" size={'sm'}>
              {parser_id === 'naive' ? 'general' : parser_id}
            </Button>
          </DropdownMenuTrigger>

@ -77,7 +79,6 @@ export function ParsingStatusCell({
          </DropdownMenuItem>
        </DropdownMenuContent>
      </DropdownMenu>
      <Separator orientation="vertical" className="h-2.5" />
    </div>
    <ConfirmDeleteDialog
      title={t(`knowledgeDetails.redo`, { chunkNum: chunk_num })}

@ -85,17 +86,17 @@ export function ParsingStatusCell({
      onOk={handleOperationIconClick(true)}
      onCancel={handleOperationIconClick(false)}
    >
      <Button
        variant={'ghost'}
        size={'sm'}
      <div
        className="cursor-pointer flex items-center gap-3"
        onClick={
          isZeroChunk || isRunning
            ? handleOperationIconClick(false)
            : () => {}
        }
      >
        <Separator orientation="vertical" className="h-2.5" />
        {operationIcon}
      </Button>
      </div>
    </ConfirmDeleteDialog>
    {isParserRunning(run) ? (
      <HoverCard>
@ -65,7 +65,8 @@ export function useDatasetTableColumns({
header: ({ column }) => {
return (
<Button
variant="ghost"
variant="transparent"
className="border-none"
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
>
{t('name')}
@ -103,7 +104,8 @@ export function useDatasetTableColumns({
header: ({ column }) => {
return (
<Button
variant="ghost"
variant="transparent"
className="border-none"
onClick={() => column.toggleSorting(column.getIsSorted() === 'asc')}
>
{t('uploadDate')}
@ -141,7 +143,7 @@ export function useDatasetTableColumns({
},
{
accessorKey: 'run',
header: t('parsingStatus'),
header: t('Parse'),
// meta: { cellClassName: 'min-w-[20vw]' },
cell: ({ row }) => {
return (

@ -1,5 +1,9 @@
import { UploadFormSchemaType } from '@/components/file-upload-dialog';
import { useSetModalState } from '@/hooks/common-hooks';
import { useUploadNextDocument } from '@/hooks/use-document-request';
import {
useRunDocument,
useUploadNextDocument,
} from '@/hooks/use-document-request';
import { getUnSupportedFilesCount } from '@/utils/document-util';
import { useCallback } from 'react';

@ -10,14 +14,24 @@ export const useHandleUploadDocument = () => {
showModal: showDocumentUploadModal,
} = useSetModalState();
const { uploadDocument, loading } = useUploadNextDocument();
const { runDocumentByIds } = useRunDocument();

const onDocumentUploadOk = useCallback(
async (fileList: File[]): Promise<number | undefined> => {
async ({ fileList, parseOnCreation }: UploadFormSchemaType) => {
if (fileList.length > 0) {
const ret: any = await uploadDocument(fileList);
const ret = await uploadDocument(fileList);
if (typeof ret?.message !== 'string') {
return;
}

if (ret.code === 0 && parseOnCreation) {
runDocumentByIds({
documentIds: ret.data.map((x) => x.id),
run: 1,
shouldDelete: false,
});
}

const count = getUnSupportedFilesCount(ret?.message);
/// 500 error code indicates that some file types are not supported
let code = ret?.code;
@ -31,7 +45,7 @@ export const useHandleUploadDocument = () => {
return code;
}
},
[uploadDocument, hideDocumentUploadModal],
[uploadDocument, runDocumentByIds, hideDocumentUploadModal],
);

return {

@ -19,7 +19,7 @@ export default function DatasetWrapper() {
const { data } = useFetchKnowledgeBaseConfiguration();

return (
<section>
<section className="flex h-full flex-col w-full">
<PageHeader>
<Breadcrumb>
<BreadcrumbList>
@ -35,7 +35,7 @@ export default function DatasetWrapper() {
</BreadcrumbList>
</Breadcrumb>
</PageHeader>
<div className="flex flex-1">
<div className="flex flex-1 min-h-0">
<SideBar></SideBar>
<div className="flex-1">
<Outlet />

@ -66,10 +66,10 @@ export function ChunkMethodForm() {
}, [finalParserId]);

return (
<>
<section className="overflow-auto max-h-[76vh]">
<section className="h-full flex flex-col">
<div className="overflow-auto flex-1 min-h-0">
<ConfigurationComponent></ConfigurationComponent>
</section>
</div>
<div className="text-right pt-4 flex justify-end gap-3">
<Button
type="reset"
@ -112,6 +112,6 @@ export function ChunkMethodForm() {
{t('knowledgeConfiguration.save')}
</Button>
</div>
</>
</section>
);
}

@ -1,4 +1,5 @@
import { Button } from '@/components/ui/button';
import { cn } from '@/lib/utils';
import { X } from 'lucide-react';
import { useState } from 'react';
import CategoryPanel from './category-panel';
@ -14,20 +15,22 @@ export default ({

return (
<div
style={{
display: tab === 'chunkMethodForm' ? 'block' : 'none',
}}
className={cn('hidden flex-1', {
'flex flex-col': tab === 'chunkMethodForm',
})}
>
<Button
variant="outline"
onClick={() => {
setVisible(!visible);
}}
>
Learn More
</Button>
<div>
<Button
variant="outline"
onClick={() => {
setVisible(!visible);
}}
>
Learn More
</Button>
</div>
<div
className="bg-[#FFF]/10 p-[20px] rounded-[12px] mt-[10px] relative"
className="bg-[#FFF]/10 p-[20px] rounded-[12px] mt-[10px] relative flex-1 overflow-auto"
style={{ display: visible ? 'block' : 'none' }}
>
<CategoryPanel chunkMethod={parserId}></CategoryPanel>

@ -1,6 +1,8 @@
import { FormContainer } from '@/components/form-container';
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar';
import { Button } from '@/components/ui/button';
import { Button, ButtonLoading } from '@/components/ui/button';
import {
FormControl,
FormField,
@ -9,9 +11,10 @@ import {
FormMessage,
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { PermissionRole } from '@/constants/permission';
import { useUpdateKnowledge } from '@/hooks/knowledge-hooks';
import { transformFile2Base64 } from '@/utils/file-util';
import { Loader2Icon, Pencil, Upload } from 'lucide-react';
import { Pencil, Upload } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useFormContext } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
@ -33,6 +36,13 @@ export function GeneralForm() {
const parser_id = defaultValues['parser_id'];
const { id: kb_id } = useParams();

const teamOptions = useMemo(() => {
return Object.values(PermissionRole).map((x) => ({
label: t('knowledgeConfiguration.' + x),
value: x,
}));
}, [t]);

// init avatar file if it exists in defaultValues
useEffect(() => {
if (!avatarFile) {
@ -171,24 +181,35 @@ export function GeneralForm() {
);
}}
/>
<RAGFlowFormItem
name="permission"
label={t('knowledgeConfiguration.permissions')}
tooltip={t('knowledgeConfiguration.permissionsTip')}
horizontal
>
<SelectWithSearch
options={teamOptions}
triggerClassName="w-3/4"
></SelectWithSearch>
</RAGFlowFormItem>
</FormContainer>
<div className="text-right pt-4 flex justify-end gap-3">
<Button
type="reset"
className="bg-transparent text-color-white hover:bg-transparent border-gray-500 border-[1px]"
variant={'outline'}
onClick={() => {
form.reset();
}}
>
{t('knowledgeConfiguration.cancel')}
</Button>
<Button
<ButtonLoading
type="button"
disabled={submitLoading}
loading={submitLoading}
onClick={() => {
(async () => {
let isValidate = await form.formControl.trigger('name');
const { name, description } = form.formState.values;
let isValidate = await form.trigger('name');
const { name, description, permission } = form.getValues();
const avatar = avatarBase64Str;

if (isValidate) {
@ -198,14 +219,14 @@ export function GeneralForm() {
name,
description,
avatar,
permission,
});
}
})();
}}
>
{submitLoading && <Loader2Icon className="animate-spin" />}
{t('knowledgeConfiguration.save')}
</Button>
</ButtonLoading>
</div>
</>
);

@ -6,6 +6,7 @@ import {
TabsTrigger,
} from '@/components/ui/tabs-underlined';
import { DocumentParserType } from '@/constants/knowledge';
import { PermissionRole } from '@/constants/permission';
import { zodResolver } from '@hookform/resolvers/zod';
import { useState } from 'react';
import { useForm, useWatch } from 'react-hook-form';
@ -43,7 +44,7 @@ export default function DatasetSettings() {
defaultValues: {
name: '',
parser_id: DocumentParserType.Naive,
permission: 'me',
permission: PermissionRole.Me,
parser_config: {
layout_recognize: DocumentType.DeepDOC,
chunk_token_num: 512,
@ -81,22 +82,23 @@ export default function DatasetSettings() {
}

return (
<section className="p-5 ">
<section className="p-5 h-full flex flex-col">
<TopTitle
title={t('knowledgeDetails.configuration')}
description={t('knowledgeConfiguration.titleDescription')}
></TopTitle>
<div className="flex gap-14">
<div className="flex gap-14 flex-1 min-h-0">
<Form {...form}>
<form
onSubmit={form.handleSubmit(onSubmit)}
className="space-y-6 basis-full min-w-[1000px] max-w-[1000px]"
className="space-y-6 flex-1"
>
<Tabs
defaultValue="generalForm"
onValueChange={(val) => {
setCurrentTab(val);
}}
className="h-full flex flex-col"
>
<TabsList className="grid bg-transparent grid-cols-2 rounded-none text-foreground">
<TabsTrigger
@ -120,10 +122,10 @@ export default function DatasetSettings() {
</div>
</TabsTrigger>
</TabsList>
<TabsContent value="generalForm">
<TabsContent value="generalForm" className="flex-1 min-h-0">
<GeneralForm></GeneralForm>
</TabsContent>
<TabsContent value="chunkMethodForm">
<TabsContent value="chunkMethodForm" className="flex-1 min-h-0">
<ChunkMethodForm></ChunkMethodForm>
</TabsContent>
</Tabs>

@ -62,8 +62,8 @@ export function SideBar({ refreshCount }: PropType) {
name={data.name}
className="size-16"
></RAGFlowAvatar>
<div className=" text-text-secondary text-xs space-y-1">
<h3 className="text-lg font-semibold line-clamp-1 text-text-primary">
<div className=" text-text-secondary text-xs space-y-1 overflow-hidden">
<h3 className="text-lg font-semibold line-clamp-1 text-text-primary text-ellipsis overflow-hidden">
{data.name}
</h3>
<div className="flex justify-between">

@ -1,5 +1,6 @@
import { HomeCard } from '@/components/home-card';
import { MoreButton } from '@/components/more-button';
import { SharedBadge } from '@/components/shared-badge';
import { Card, CardContent } from '@/components/ui/card';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { IKnowledge } from '@/interfaces/database/knowledge';
@ -32,6 +33,7 @@ export function DatasetCard({
<MoreButton></MoreButton>
</DatasetDropdown>
}
sharedBadge={<SharedBadge>{dataset.nickname}</SharedBadge>}
onClick={navigateToDataset(dataset.id)}
/>
);
@ -41,7 +43,7 @@ export function SeeAllCard() {
const { navigateToDatasetList } = useNavigatePage();

return (
<Card className="w-40" onClick={navigateToDatasetList}>
<Card className="w-40 flex-none" onClick={navigateToDatasetList}>
<CardContent className="p-2.5 pt-1 w-full h-full flex items-center justify-center gap-1.5 text-text-secondary">
See All <ChevronRight className="size-4" />
</CardContent>

@ -1,4 +1,4 @@
import { Button } from '@/components/ui/button';
import { ButtonLoading } from '@/components/ui/button';
import {
Dialog,
DialogContent,
@ -74,7 +74,11 @@ export function InputForm({ onOk }: IModalProps<any>) {
);
}

export function DatasetCreatingDialog({ hideModal, onOk }: IModalProps<any>) {
export function DatasetCreatingDialog({
hideModal,
onOk,
loading,
}: IModalProps<any>) {
const { t } = useTranslation();

return (
@ -85,9 +89,9 @@ export function DatasetCreatingDialog({ hideModal, onOk }: IModalProps<any>) {
</DialogHeader>
<InputForm onOk={onOk}></InputForm>
<DialogFooter>
<Button type="submit" form={FormId}>
<ButtonLoading type="submit" form={FormId} loading={loading}>
{t('common.save')}
</Button>
</ButtonLoading>
</DialogFooter>
</DialogContent>
</Dialog>

@ -1,3 +1,4 @@
import { UploadFormSchemaType } from '@/components/file-upload-dialog';
import { useSetModalState } from '@/hooks/common-hooks';
import { useUploadFile } from '@/hooks/use-file-request';
import { useCallback } from 'react';
@ -13,7 +14,7 @@ export const useHandleUploadFile = () => {
const id = useGetFolderId();

const onFileUploadOk = useCallback(
async (fileList: File[]): Promise<number | undefined> => {
async ({ fileList }: UploadFormSchemaType): Promise<number | undefined> => {
if (fileList.length > 0) {
const ret: number = await uploadFile({ fileList, parentId: id });
if (ret === 0) {

@ -51,7 +51,8 @@ export function Applications() {
options={options}
value={val}
onChange={handleChange}
className="bg-transparent"
className="bg-bg-card border border-border-button rounded-full"
activeClassName="bg-text-primary border-none"
></Segmented>
</div>
<div className="flex flex-wrap gap-4">

@ -30,7 +30,7 @@ export function Datasets() {
<CardSkeleton />
</div>
) : (
<div className="flex gap-4 flex-1">
<div className="grid gap-6 sm:grid-cols-1 md:grid-cols-2 lg:grid-cols-4 xl:grid-cols-6 2xl:grid-cols-8 max-h-[78vh] overflow-auto">
{kbs.slice(0, 6).map((dataset) => (
<DatasetCard
key={dataset.id}

@ -14,6 +14,7 @@ import {
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { Textarea } from '@/components/ui/textarea';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { useTranslate } from '@/hooks/common-hooks';
import { useFormContext } from 'react-hook-form';

@ -21,6 +22,17 @@ export default function ChatBasicSetting() {
const { t } = useTranslate('chat');
const form = useFormContext();

const languageOptions = [
{ value: 'English', label: 'English' },
{ value: 'Chinese', label: 'Chinese' },
{ value: 'Spanish', label: 'Spanish' },
{ value: 'French', label: 'French' },
{ value: 'German', label: 'German' },
{ value: 'Japanese', label: 'Japanese' },
{ value: 'Korean', label: 'Korean' },
{ value: 'Vietnamese', label: 'Vietnamese' },
];

return (
<div className="space-y-8">
<FormField
@ -35,7 +47,6 @@ export default function ChatBasicSetting() {
value={field.value}
onValueChange={field.onChange}
maxFileCount={1}
maxSize={4 * 1024 * 1024}
/>
</FormControl>
<FormMessage />
@ -56,6 +67,30 @@ export default function ChatBasicSetting() {
</FormItem>
)}
/>
<FormField
control={form.control}
name="language"
render={({ field }) => (
<FormItem>
<FormLabel>{t('language')}</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder={t('common.languagePlaceholder')} />
</SelectTrigger>
</FormControl>
<SelectContent>
{languageOptions.map((option) => (
<SelectItem key={option.value} value={option.value}>
{option.label}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="description"

@ -35,13 +35,18 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
shouldUnregister: true,
defaultValues: {
name: '',
icon: [],
language: 'English',
description: '',
kb_ids: [],
prompt_config: {
quote: true,
keyword: false,
tts: false,
use_kg: false,
refine_multiturn: true,
system: '',
parameters: [],
},
top_n: 8,
vector_similarity_weight: 0.2,
@ -89,25 +94,28 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
}, [data, form]);

return (
<section className="p-5 w-[440px] border-l">
<section className="p-5 w-[440px] border-l flex flex-col">
<div className="flex justify-between items-center text-base pb-2">
{t('chat.chatSetting')}
<X className="size-4 cursor-pointer" onClick={switchSettingVisible} />
</div>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit, onInvalid)}>
<section className="space-y-6 overflow-auto max-h-[82vh] pr-4">
<form
onSubmit={form.handleSubmit(onSubmit, onInvalid)}
className="flex-1 flex flex-col min-h-0"
>
<section className="space-y-6 overflow-auto flex-1 pr-4 min-h-0">
<ChatBasicSetting></ChatBasicSetting>
<Separator />
<ChatPromptEngine></ChatPromptEngine>
<Separator />
<ChatModelSettings></ChatModelSettings>
</section>
<div className="space-x-5 text-right">
<div className="space-x-5 text-right pt-4">
<Button variant={'outline'} onClick={switchSettingVisible}>
{t('chat.cancel')}
</Button>
<ButtonLoading className=" my-4" type="submit" loading={loading}>
<ButtonLoading type="submit" loading={loading}>
{t('common.save')}
</ButtonLoading>
</div>

@ -34,11 +34,11 @@ export function useChatSettingSchema() {
name: z.string().min(1, { message: t('assistantNameMessage') }),
icon: z.array(z.instanceof(File)),
language: z.string().min(1, {
message: 'Username must be at least 2 characters.',
message: t('languageMessage'),
}),
description: z.string(),
description: z.string().optional(),
kb_ids: z.array(z.string()).min(0, {
message: 'Username must be at least 1 characters.',
message: t('knowledgeBasesMessage'),
}),
prompt_config: promptConfigSchema,
...rerankFormSchema,

@ -128,7 +128,7 @@ export default function SearchPage() {
</div>
<div className="absolute right-5 top-4 ">
<Button
className="bg-text-primary text-bg-base border-b-[#00BEB4] border-b-2"
className="bg-text-primary text-bg-base border-b-accent-primary border-b-2"
onClick={() => {
handleOperate().then((res) => {
console.log(res, 'res');

@ -27,7 +27,7 @@ export default function SearchPage({
<div className="relative z-10 px-8 pt-8 flex text-transparent flex-col justify-center items-center w-[780px]">
<h1
className={cn(
'text-4xl font-bold bg-gradient-to-r from-sky-600 from-30% via-sky-500 via-60% to-emerald-500 bg-clip-text',
'text-4xl font-bold bg-gradient-to-l from-[#40EBE3] to-[#4A51FF] bg-clip-text',
)}
>
RAGFlow

@ -113,7 +113,7 @@ export function LlmSettingFieldItems({
<FormControl>
<SelectWithSearch
options={options || modelOptions}
triggerClassName="bg-bg-card"
triggerClassName="!bg-bg-input"
{...field}
></SelectWithSearch>
</FormControl>

@ -114,8 +114,8 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
const [avatarBase64Str, setAvatarBase64Str] = useState(''); // Avatar Image base64
const [datasetList, setDatasetList] = useState<MultiSelectOptionType[]>([]);
const [datasetSelectEmbdId, setDatasetSelectEmbdId] = useState('');
const descriptionDefaultValue = 'You are an intelligent assistant.';
const { t } = useTranslation();
const descriptionDefaultValue = t('search.descriptionValue');
const resetForm = useCallback(() => {
formMethods.reset({
search_id: data?.id,
@ -415,7 +415,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
<FormLabel>{t('search.description')}</FormLabel>
<FormControl>
<Textarea
placeholder="You are an intelligent assistant."
placeholder={descriptionDefaultValue}
{...field}
onFocus={() => {
if (field.value === descriptionDefaultValue) {
@ -444,7 +444,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
<span className="text-destructive mr-1"> *</span>
{t('search.datasets')}
</FormLabel>
<FormControl>
<FormControl className="bg-bg-input">
<MultiSelect
options={datasetList}
onValueChange={(value) => {
@ -452,7 +452,6 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
}}
showSelectAll={false}
placeholder={t('chat.knowledgeBasesMessage')}
variant="inverted"
maxCount={10}
defaultValue={field.value}
{...field}
@ -568,6 +567,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
<RAGFlowSelect
{...field}
options={rerankModelOptions}
triggerClassName={'bg-bg-input'}
// disabled={disabled}
placeholder={'model'}
/>

@ -83,7 +83,7 @@ export default function SearchingView({
>
<h1
className={cn(
'text-4xl font-bold bg-gradient-to-r from-sky-600 from-30% via-sky-500 via-60% to-emerald-500 bg-clip-text cursor-pointer',
'text-4xl font-bold bg-gradient-to-l from-[#40EBE3] to-[#4A51FF] bg-clip-text cursor-pointer',
)}
onClick={() => {
setIsSearching?.(false);

@ -59,8 +59,6 @@ export function ImportMcpForm({ hideModal, onOk }: IModalProps<any>) {
<FileUploader
value={field.value}
onValueChange={field.onChange}
maxFileCount={1}
maxSize={4 * 1024 * 1024}
accept={{ '*.json': [FileMimeType.Json] }}
/>
</FormControl>

@ -70,116 +70,73 @@ const routes = [
component: `@/pages${Routes.AgentShare}`,
layout: false,
},
// {
// path: '/',
// component: '@/layouts',
// layout: false,
// wrappers: ['@/wrappers/auth'],
// routes: [
// { path: '/', redirect: '/knowledge' },
// {
// path: '/knowledge',
// component: '@/pages/knowledge',
// // component: '@/pages/knowledge/datasets',
// },
// {
// path: '/knowledge',
// component: '@/pages/add-knowledge',
// routes: [
// {
// path: '/knowledge/dataset',
// component: '@/pages/add-knowledge/components/knowledge-dataset',
// routes: [
// {
// path: '/knowledge/dataset',
// component: '@/pages/add-knowledge/components/knowledge-file',
// },
// {
// path: '/knowledge/dataset/chunk',
// component: '@/pages/add-knowledge/components/knowledge-chunk',
// },
// ],
// },
// {
// path: '/knowledge/configuration',
// component: '@/pages/add-knowledge/components/knowledge-setting',
// },
// {
// path: '/knowledge/testing',
// component: '@/pages/add-knowledge/components/knowledge-testing',
// },
// {
// path: '/knowledge/knowledgeGraph',
// component: '@/pages/add-knowledge/components/knowledge-graph',
// },
// ],
// },
// {
// path: '/chat',
// component: '@/pages/chat',
// },
// {
// path: '/user-setting',
// component: '@/pages/user-setting',
// routes: [
// { path: '/user-setting', redirect: '/user-setting/profile' },
// {
// path: '/user-setting/profile',
// // component: '@/pages/user-setting/setting-profile',
// component: '@/pages/user-setting/setting-profile',
// },
// {
// path: '/user-setting/locale',
// component: '@/pages/user-setting/setting-locale',
// },
// {
// path: '/user-setting/password',
// component: '@/pages/user-setting/setting-password',
// },
// {
// path: '/user-setting/model',
// component: '@/pages/user-setting/setting-model',
// },
// {
// path: '/user-setting/team',
// component: '@/pages/user-setting/setting-team',
// },
// {
// path: '/user-setting/system',
// component: '@/pages/user-setting/setting-system',
// },
// {
// path: '/user-setting/api',
// component: '@/pages/user-setting/setting-api',
// },
// {
// path: `/user-setting${Routes.Mcp}`,
// component: `@/pages${Routes.ProfileMcp}`,
// },
// ],
// },
// {
// path: '/file',
// component: '@/pages/file-manager',
// },
// {
// path: '/flow',
// component: '@/pages/flow/list',
// },
// {
// path: Routes.AgentList,
// component: `@/pages/${Routes.Agents}`,
// },
// {
// path: '/flow/:id',
// component: '@/pages/flow',
// },
// {
// path: '/search',
// component: '@/pages/search',
// },
// ],
// },
{
path: Routes.Home,
component: '@/layouts',
layout: false,
redirect: '/knowledge',
},
{
path: '/knowledge',
component: '@/pages/knowledge',
},
{
path: '/knowledge',
component: '@/pages/add-knowledge',
routes: [
{
path: 'dataset',
component: '@/pages/add-knowledge/components/knowledge-dataset',
routes: [
{
path: '',
component: '@/pages/add-knowledge/components/knowledge-file',
},
{
path: 'chunk',
component: '@/pages/add-knowledge/components/knowledge-chunk',
},
],
},
{
path: 'configuration',
component: '@/pages/add-knowledge/components/knowledge-setting',
},
{
path: 'testing',
component: '@/pages/add-knowledge/components/knowledge-testing',
},
{
path: 'knowledgeGraph',
component: '@/pages/add-knowledge/components/knowledge-graph',
},
],
},

{
path: '/chat',
component: '@/pages/chat',
},
{
path: '/file',
component: '@/pages/file-manager',
},
{
path: '/flow',
component: '@/pages/flow/list',
},
{
path: Routes.AgentList,
component: `@/pages/${Routes.Agents}`,
},
{
path: '/flow/:id',
component: '@/pages/flow',
},
{
path: '/search',
component: '@/pages/search',
},
{
path: '/document/:id',
component: '@/pages/document-viewer',

@ -58,6 +58,8 @@ module.exports = {

'bg-base': 'var(--bg-base)',
'bg-card': 'var(--bg-card)',
'bg-component': 'var(--bg-component)',
'bg-input': 'var(--bg-input)',
'text-primary': 'var(--text-primary)',
'text-secondary': 'var(--text-secondary)',
'text-disabled': 'var(--text-disabled)',
@ -206,6 +208,10 @@ module.exports = {
ring: 'hsl(var(--sidebar-ring))',
},
},
backgroundImage: {
'metallic-gradient':
'linear-gradient(104deg, var(--text-primary) 30%, var(--metallic) 50%, var(--text-primary) 70%)',
},
borderRadius: {
lg: `var(--radius)`,
md: `calc(var(--radius) - 2px)`,

@ -90,11 +90,15 @@

--input-border: rgba(22, 22, 24, 0.2);

--metallic: #46464a;
/* design colors */

--bg-base: #f6f6f7;
--bg-base: #ffffff;
/* card color , dividing line */
--bg-card: rgba(0, 0, 0, 0.05);
--bg-component: #ffffff;
--bg-input: rgba(255, 255, 255, 0);
--bg-accent: rgba(76, 164, 231, 0.05);
/* Button ,Body text, Input completed text */
--text-primary: #161618;
--text-secondary: #75787a;
@ -107,7 +111,7 @@
--border-accent: #000000;
--border-button: rgba(0, 0, 0, 0.1);
/* Regulators, parsing, switches, variables */
--accent-primary: #4ca4e7;
--accent-primary: #00beb4;
/* Output Variables Box */
--bg-accent: rgba(76, 164, 231, 0.05);

@ -230,10 +234,13 @@

--input-border: rgba(255, 255, 255, 0.2);

--metallic: #fafafa;
/* design colors */

--bg-base: #161618;
--bg-card: rgba(255, 255, 255, 0.05);
--bg-component: #202025;
--bg-input: rgba(255, 255, 255, 0.05);
--text-primary: #f6f6f7;
--text-secondary: #b2b5b7;
--text-disabled: #75787a;