Mirror of https://github.com/infiniflow/ragflow.git, synced 2025-12-08 20:42:30 +08:00

Compare commits: 7 commits, 2ffe6f7439 ... a6681d6366

| SHA1 |
|---|
| a6681d6366 |
| 1388c4420d |
| 962bd5f5df |
| 627c11c429 |
| 4ba17361e9 |
| c946858328 |
| ba6e2af5fd |
@@ -17,7 +17,11 @@ from common.data_source.exceptions import (
    InsufficientPermissionsError,
    ConnectorValidationError,
)
from common.data_source.interfaces import LoadConnector, PollConnector, SecondsSinceUnixEpoch
from common.data_source.interfaces import (
    LoadConnector,
    PollConnector,
    SecondsSinceUnixEpoch,
)
from common.data_source.models import Document
from common.data_source.utils import batch_generator, rl_requests
@@ -42,7 +46,9 @@ class MoodleConnector(LoadConnector, PollConnector):
        delimiter = "&" if "?" in file_url else "?"
        return f"{file_url}{delimiter}token={token}"

    def _log_error(self, context: str, error: Exception, level: str = "warning") -> None:
    def _log_error(
        self, context: str, error: Exception, level: str = "warning"
    ) -> None:
        """Simplified logging wrapper"""
        msg = f"{context}: {error}"
        if level == "error":
@@ -73,7 +79,9 @@ class MoodleConnector(LoadConnector, PollConnector):
        except MoodleException as e:
            if "invalidtoken" in str(e).lower():
                raise CredentialExpiredError("Moodle token is invalid or expired")
            raise ConnectorMissingCredentialError(f"Failed to initialize Moodle client: {e}")
            raise ConnectorMissingCredentialError(
                f"Failed to initialize Moodle client: {e}"
            )

    def validate_connector_settings(self) -> None:
        if not self.moodle_client:
@@ -125,7 +133,9 @@ class MoodleConnector(LoadConnector, PollConnector):
            logger.warning("No courses found to poll")
            return

        yield from self._yield_in_batches(self._get_updated_content(courses, start, end))
        yield from self._yield_in_batches(
            self._get_updated_content(courses, start, end)
        )

    @retry(tries=3, delay=1, backoff=2)
    def _get_enrolled_courses(self) -> list:
@@ -187,9 +197,7 @@ class MoodleConnector(LoadConnector, PollConnector):
            except Exception as e:
                self._log_error(f"polling course {course.fullname}", e)

    def _process_module(
        self, course, section, module
    ) -> Optional[Document]:
    def _process_module(self, course, section, module) -> Optional[Document]:
        try:
            mtype = module.modname
            if mtype in ["label", "url"]:
@@ -224,11 +232,37 @@ class MoodleConnector(LoadConnector, PollConnector):
        )

        try:
            resp = rl_requests.get(self._add_token_to_url(file_info.fileurl), timeout=60)
            resp = rl_requests.get(
                self._add_token_to_url(file_info.fileurl), timeout=60
            )
            resp.raise_for_status()
            blob = resp.content
            ext = os.path.splitext(file_name)[1] or ".bin"
            semantic_id = f"{course.fullname} / {section.name} / {file_name}"

            # Create metadata dictionary with relevant information
            metadata = {
                "moodle_url": self.moodle_url,
                "course_id": getattr(course, "id", None),
                "course_name": getattr(course, "fullname", None),
                "course_shortname": getattr(course, "shortname", None),
                "section_id": getattr(section, "id", None),
                "section_name": getattr(section, "name", None),
                "section_number": getattr(section, "section", None),
                "module_id": getattr(module, "id", None),
                "module_name": getattr(module, "name", None),
                "module_type": getattr(module, "modname", None),
                "module_instance": getattr(module, "instance", None),
                "file_url": getattr(file_info, "fileurl", None),
                "file_name": file_name,
                "file_size": getattr(file_info, "filesize", len(blob)),
                "file_type": getattr(file_info, "mimetype", None),
                "time_created": getattr(module, "timecreated", None),
                "time_modified": getattr(module, "timemodified", None),
                "visible": getattr(module, "visible", None),
                "groupmode": getattr(module, "groupmode", None),
            }

            return Document(
                id=f"moodle_resource_{module.id}",
                source="moodle",
@@ -237,6 +271,7 @@ class MoodleConnector(LoadConnector, PollConnector):
                blob=blob,
                doc_updated_at=datetime.fromtimestamp(ts or 0, tz=timezone.utc),
                size_bytes=len(blob),
                metadata=metadata,
            )
        except Exception as e:
            self._log_error(f"downloading resource {file_name}", e, "error")
@@ -247,7 +282,9 @@ class MoodleConnector(LoadConnector, PollConnector):
            return None

        try:
            result = self.moodle_client.mod.forum.get_forum_discussions(forumid=module.instance)
            result = self.moodle_client.mod.forum.get_forum_discussions(
                forumid=module.instance
            )
            disc_list = getattr(result, "discussions", [])
            if not disc_list:
                return None
@@ -264,6 +301,38 @@ class MoodleConnector(LoadConnector, PollConnector):

            blob = "\n".join(markdown).encode("utf-8")
            semantic_id = f"{course.fullname} / {section.name} / {module.name}"

            # Create metadata dictionary with relevant information
            metadata = {
                "moodle_url": self.moodle_url,
                "course_id": getattr(course, "id", None),
                "course_name": getattr(course, "fullname", None),
                "course_shortname": getattr(course, "shortname", None),
                "section_id": getattr(section, "id", None),
                "section_name": getattr(section, "name", None),
                "section_number": getattr(section, "section", None),
                "module_id": getattr(module, "id", None),
                "module_name": getattr(module, "name", None),
                "module_type": getattr(module, "modname", None),
                "forum_id": getattr(module, "instance", None),
                "discussion_count": len(disc_list),
                "time_created": getattr(module, "timecreated", None),
                "time_modified": getattr(module, "timemodified", None),
                "visible": getattr(module, "visible", None),
                "groupmode": getattr(module, "groupmode", None),
                "discussions": [
                    {
                        "id": getattr(d, "id", None),
                        "name": getattr(d, "name", None),
                        "user_id": getattr(d, "userid", None),
                        "user_fullname": getattr(d, "userfullname", None),
                        "time_created": getattr(d, "timecreated", None),
                        "time_modified": getattr(d, "timemodified", None),
                    }
                    for d in disc_list
                ],
            }

            return Document(
                id=f"moodle_forum_{module.id}",
                source="moodle",
@@ -272,6 +341,7 @@ class MoodleConnector(LoadConnector, PollConnector):
                blob=blob,
                doc_updated_at=datetime.fromtimestamp(latest_ts or 0, tz=timezone.utc),
                size_bytes=len(blob),
                metadata=metadata,
            )
        except Exception as e:
            self._log_error(f"processing forum {module.name}", e)
@@ -293,11 +363,37 @@ class MoodleConnector(LoadConnector, PollConnector):
        )

        try:
            resp = rl_requests.get(self._add_token_to_url(file_info.fileurl), timeout=60)
            resp = rl_requests.get(
                self._add_token_to_url(file_info.fileurl), timeout=60
            )
            resp.raise_for_status()
            blob = resp.content
            ext = os.path.splitext(file_name)[1] or ".html"
            semantic_id = f"{course.fullname} / {section.name} / {module.name}"

            # Create metadata dictionary with relevant information
            metadata = {
                "moodle_url": self.moodle_url,
                "course_id": getattr(course, "id", None),
                "course_name": getattr(course, "fullname", None),
                "course_shortname": getattr(course, "shortname", None),
                "section_id": getattr(section, "id", None),
                "section_name": getattr(section, "name", None),
                "section_number": getattr(section, "section", None),
                "module_id": getattr(module, "id", None),
                "module_name": getattr(module, "name", None),
                "module_type": getattr(module, "modname", None),
                "module_instance": getattr(module, "instance", None),
                "page_url": getattr(file_info, "fileurl", None),
                "file_name": file_name,
                "file_size": getattr(file_info, "filesize", len(blob)),
                "file_type": getattr(file_info, "mimetype", None),
                "time_created": getattr(module, "timecreated", None),
                "time_modified": getattr(module, "timemodified", None),
                "visible": getattr(module, "visible", None),
                "groupmode": getattr(module, "groupmode", None),
            }

            return Document(
                id=f"moodle_page_{module.id}",
                source="moodle",
@@ -306,6 +402,7 @@ class MoodleConnector(LoadConnector, PollConnector):
                blob=blob,
                doc_updated_at=datetime.fromtimestamp(ts or 0, tz=timezone.utc),
                size_bytes=len(blob),
                metadata=metadata,
            )
        except Exception as e:
            self._log_error(f"processing page {file_name}", e, "error")
@@ -326,6 +423,29 @@ class MoodleConnector(LoadConnector, PollConnector):

        semantic_id = f"{course.fullname} / {section.name} / {mname}"
        blob = markdown.encode("utf-8")

        # Create metadata dictionary with relevant information
        metadata = {
            "moodle_url": self.moodle_url,
            "course_id": getattr(course, "id", None),
            "course_name": getattr(course, "fullname", None),
            "course_shortname": getattr(course, "shortname", None),
            "section_id": getattr(section, "id", None),
            "section_name": getattr(section, "name", None),
            "section_number": getattr(section, "section", None),
            "module_id": getattr(module, "id", None),
            "module_name": getattr(module, "name", None),
            "module_type": getattr(module, "modname", None),
            "activity_type": mtype,
            "activity_instance": getattr(module, "instance", None),
            "description": desc,
            "time_created": getattr(module, "timecreated", None),
            "time_modified": getattr(module, "timemodified", None),
            "added": getattr(module, "added", None),
            "visible": getattr(module, "visible", None),
            "groupmode": getattr(module, "groupmode", None),
        }

        return Document(
            id=f"moodle_{mtype}_{module.id}",
            source="moodle",
@@ -334,6 +454,7 @@ class MoodleConnector(LoadConnector, PollConnector):
            blob=blob,
            doc_updated_at=datetime.fromtimestamp(ts or 0, tz=timezone.utc),
            size_bytes=len(blob),
            metadata=metadata,
        )

    def _process_book(self, course, section, module) -> Optional[Document]:
@@ -342,8 +463,10 @@ class MoodleConnector(LoadConnector, PollConnector):

        contents = module.contents
        chapters = [
            c for c in contents
            if getattr(c, "fileurl", None) and os.path.basename(c.filename) == "index.html"
            c
            for c in contents
            if getattr(c, "fileurl", None)
            and os.path.basename(c.filename) == "index.html"
        ]
        if not chapters:
            return None
@@ -356,17 +479,54 @@ class MoodleConnector(LoadConnector, PollConnector):
        )

        markdown_parts = [f"# {module.name}\n"]
        chapter_info = []

        for ch in chapters:
            try:
                resp = rl_requests.get(self._add_token_to_url(ch.fileurl), timeout=60)
                resp.raise_for_status()
                html = resp.content.decode("utf-8", errors="ignore")
                markdown_parts.append(md(html) + "\n\n---\n")

                # Collect chapter information for metadata
                chapter_info.append(
                    {
                        "chapter_id": getattr(ch, "chapterid", None),
                        "title": getattr(ch, "title", None),
                        "filename": getattr(ch, "filename", None),
                        "fileurl": getattr(ch, "fileurl", None),
                        "time_created": getattr(ch, "timecreated", None),
                        "time_modified": getattr(ch, "timemodified", None),
                        "size": getattr(ch, "filesize", None),
                    }
                )
            except Exception as e:
                self._log_error(f"processing book chapter {ch.filename}", e)

        blob = "\n".join(markdown_parts).encode("utf-8")
        semantic_id = f"{course.fullname} / {section.name} / {module.name}"

        # Create metadata dictionary with relevant information
        metadata = {
            "moodle_url": self.moodle_url,
            "course_id": getattr(course, "id", None),
            "course_name": getattr(course, "fullname", None),
            "course_shortname": getattr(course, "shortname", None),
            "section_id": getattr(section, "id", None),
            "section_name": getattr(section, "name", None),
            "section_number": getattr(section, "section", None),
            "module_id": getattr(module, "id", None),
            "module_name": getattr(module, "name", None),
            "module_type": getattr(module, "modname", None),
            "book_id": getattr(module, "instance", None),
            "chapter_count": len(chapters),
            "chapters": chapter_info,
            "time_created": getattr(module, "timecreated", None),
            "time_modified": getattr(module, "timemodified", None),
            "visible": getattr(module, "visible", None),
            "groupmode": getattr(module, "groupmode", None),
        }

        return Document(
            id=f"moodle_book_{module.id}",
            source="moodle",
@@ -375,4 +535,5 @@ class MoodleConnector(LoadConnector, PollConnector):
            blob=blob,
            doc_updated_at=datetime.fromtimestamp(latest_ts or 0, tz=timezone.utc),
            size_bytes=len(blob),
            metadata=metadata,
        )

@@ -23,6 +23,8 @@ import subprocess
import sys
import os
import logging
from pathlib import Path
from typing import Dict


def get_uuid():
    return uuid.uuid1().hex
@@ -106,3 +108,152 @@ def pip_install_torch():
    logging.info("Installing pytorch")
    pkg_names = ["torch>=2.5.0,<3.0.0"]
    subprocess.check_call([sys.executable, "-m", "pip", "install", *pkg_names])

def parse_mineru_paths() -> Dict[str, Path]:
    """
    Parse MinerU-related paths based on the MINERU_EXECUTABLE environment variable.

    Expected layout (default convention):
        MINERU_EXECUTABLE = /home/user/uv_tools/.venv/bin/mineru

    From this path we derive:
        - mineru_exec : full path to the mineru executable
        - venv_dir    : the virtual environment directory (.venv)
        - tools_dir   : the parent tools directory (e.g. uv_tools)

    If MINERU_EXECUTABLE is not set, we fall back to the default layout:
        $HOME/uv_tools/.venv/bin/mineru

    Returns:
        A dict with keys:
            - "mineru_exec": Path
            - "venv_dir": Path
            - "tools_dir": Path
    """
    mineru_exec_env = os.getenv("MINERU_EXECUTABLE")

    if mineru_exec_env:
        # Use the path from the environment variable
        mineru_exec = Path(mineru_exec_env).expanduser().resolve()
        venv_dir = mineru_exec.parent.parent
        tools_dir = venv_dir.parent
    else:
        # Fall back to default convention: $HOME/uv_tools/.venv/bin/mineru
        home = Path(os.path.expanduser("~"))
        tools_dir = home / "uv_tools"
        venv_dir = tools_dir / ".venv"
        mineru_exec = venv_dir / "bin" / "mineru"

    return {
        "mineru_exec": mineru_exec,
        "venv_dir": venv_dir,
        "tools_dir": tools_dir,
    }

@once
def install_mineru() -> None:
    """
    Ensure MinerU is installed.

    Behavior:
        1. MinerU is enabled only when USE_MINERU is true/yes/1/y.
        2. Resolve mineru_exec / venv_dir / tools_dir.
        3. If mineru exists and works, log success and exit.
        4. Otherwise:
            - Create tools_dir
            - Create venv if missing
            - Install mineru[core], fallback to mineru[all]
            - Validate with `--help`
        5. Log installation success.

    NOTE:
        This function intentionally does NOT return the path.
        Logging is used to indicate status.
    """
    # Check if MinerU is enabled
    use_mineru = os.getenv("USE_MINERU", "").strip().lower()
    if use_mineru == "false":
        logging.info("USE_MINERU=%r. Skipping MinerU installation.", use_mineru)
        return

    # Resolve expected paths
    paths = parse_mineru_paths()
    mineru_exec: Path = paths["mineru_exec"]
    venv_dir: Path = paths["venv_dir"]
    tools_dir: Path = paths["tools_dir"]

    # Construct environment variables for installation/execution
    env = os.environ.copy()
    env["VIRTUAL_ENV"] = str(venv_dir)
    env["PATH"] = str(venv_dir / "bin") + os.pathsep + env.get("PATH", "")

    # Configure HuggingFace endpoint
    env.setdefault("HUGGINGFACE_HUB_ENDPOINT", os.getenv("HF_ENDPOINT") or "https://hf-mirror.com")

    # Helper: check whether mineru works
    def mineru_works() -> bool:
        try:
            subprocess.check_call(
                [str(mineru_exec), "--help"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,
                env=env,
            )
            return True
        except Exception:
            return False
    # If MinerU is already installed and functional
    if mineru_exec.is_file() and os.access(mineru_exec, os.X_OK) and mineru_works():
        logging.info("MinerU already installed.")
        os.environ["MINERU_EXECUTABLE"] = str(mineru_exec)
        return

    logging.info("MinerU not found. Installing into virtualenv: %s", venv_dir)

    # Ensure parent directory exists
    tools_dir.mkdir(parents=True, exist_ok=True)

    # Create venv if missing
    if not venv_dir.exists():
        subprocess.check_call(
            ["uv", "venv", str(venv_dir)],
            cwd=str(tools_dir),
            env=env,
            # stdout=subprocess.DEVNULL,
            # stderr=subprocess.PIPE,
        )
    else:
        logging.info("Virtual environment exists at %s. Reusing it.", venv_dir)

    # Helper for pip install
    def pip_install(pkg: str) -> None:
        subprocess.check_call(
            [
                "uv", "pip", "install", "-U", pkg,
                "-i", "https://mirrors.aliyun.com/pypi/simple",
                "--extra-index-url", "https://pypi.org/simple",
            ],
            cwd=str(tools_dir),
            # stdout=subprocess.DEVNULL,
            # stderr=subprocess.PIPE,
            env=env,
        )

    # Install core version first; fallback to all
    try:
        logging.info("Installing mineru[core] ...")
        pip_install("mineru[core]")
    except subprocess.CalledProcessError:
        logging.warning("mineru[core] installation failed. Installing mineru[all] ...")
        pip_install("mineru[all]")

    # Validate installation
    if not mineru_works():
        logging.error("MinerU installation failed: %s does not work.", mineru_exec)
        raise RuntimeError(f"MinerU installation failed: {mineru_exec} is not functional")

    os.environ["MINERU_EXECUTABLE"] = str(mineru_exec)
    logging.info("MinerU installation completed successfully. Executable: %s", mineru_exec)
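
For reference, here is a minimal usage sketch of the path resolution and installer above. It assumes the two helpers live in `common/misc_utils.py` (the `from common.misc_utils import install_mineru` import further down in this changeset suggests as much) and only illustrates the default `$HOME/uv_tools/.venv/bin/mineru` convention; adjust the paths to your own layout.

```python
# Hypothetical illustration only: resolve MinerU paths the same way the changed
# code does, then trigger the idempotent installer. Module path is assumed.
import os

from common.misc_utils import install_mineru, parse_mineru_paths

os.environ.setdefault("USE_MINERU", "true")
os.environ.setdefault(
    "MINERU_EXECUTABLE", os.path.expanduser("~/uv_tools/.venv/bin/mineru")
)

paths = parse_mineru_paths()
print(paths["mineru_exec"])  # e.g. .../uv_tools/.venv/bin/mineru
print(paths["venv_dir"])     # e.g. .../uv_tools/.venv
print(paths["tools_dir"])    # e.g. .../uv_tools

install_mineru()  # installs mineru[core] into the venv if it is not already functional
```
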
docs/faq.mdx
@@ -497,20 +497,6 @@ MinerU PDF document parsing is available starting from v0.22.0. RAGFlow supports

1. Prepare MinerU

   - **If you deploy RAGFlow from source**, install MinerU into an isolated virtual environment (recommended path: `$HOME/uv_tools`):

     ```bash
     mkdir -p "$HOME/uv_tools"
     cd "$HOME/uv_tools"
     uv venv .venv
     source .venv/bin/activate
     uv pip install -U "mineru[core]" -i https://mirrors.aliyun.com/pypi/simple
     # or
     # uv pip install -U "mineru[all]" -i https://mirrors.aliyun.com/pypi/simple
     ```

   - **If you deploy RAGFlow with Docker**, you usually only need to turn on MinerU support in `docker/.env`:

     ```bash
     # docker/.env
     ...
@@ -518,18 +504,15 @@ MinerU PDF document parsing is available starting from v0.22.0. RAGFlow supports
     ...
     ```

   Enabling `USE_MINERU=true` will internally perform the same setup as the manual configuration (including setting the MinerU executable path and related environment variables). You only need the manual installation above if you are running from source or want full control over the MinerU installation.
   Enabling `USE_MINERU=true` will internally perform the same setup as the manual configuration (including setting the MinerU executable path and related environment variables).

2. Start RAGFlow with MinerU enabled:

   - **Source deployment** – in the RAGFlow repo, export the key MinerU-related variables and start the backend service:
   - **Source deployment** – in the RAGFlow repo, continue to start the backend service:

     ```bash
     # in RAGFlow repo
     export MINERU_EXECUTABLE="$HOME/uv_tools/.venv/bin/mineru"
     export MINERU_DELETE_OUTPUT=0   # keep output directory
     export MINERU_BACKEND=pipeline  # or another backend you prefer

     ...
     source .venv/bin/activate
     export PYTHONPATH=$(pwd)
     bash docker/launch_backend_service.sh
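
A quick way to sanity-check a manual installation before starting the backend is to run the executable once; this is a sketch, and the path simply follows the recommended `$HOME/uv_tools` layout from the FAQ text above (the backend's own installer performs the same `--help` probe).

```python
# Sketch: verify the manually installed mineru executable responds to --help.
import os
import subprocess

mineru = os.path.expanduser("~/uv_tools/.venv/bin/mineru")
subprocess.check_call([mineru, "--help"], stdout=subprocess.DEVNULL)
print("MinerU looks functional:", mineru)
```
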
@@ -16,15 +16,17 @@

import copy
import re
from collections import defaultdict
from io import BytesIO

from PIL import Image

from rag.nlp import tokenize, is_english
from rag.nlp import rag_tokenizer
from deepdoc.parser import PdfParser, PptParser, PlainParser
from PyPDF2 import PdfReader as pdf2_read

from deepdoc.parser import PdfParser, PptParser, PlainParser
from rag.app.naive import by_plaintext, PARSERS
from rag.nlp import rag_tokenizer
from rag.nlp import tokenize, is_english


class Ppt(PptParser):
    def __call__(self, fnm, from_page, to_page, callback=None):
@@ -44,42 +46,106 @@ class Ppt(PptParser):
                    buffered.seek(0)
                    imgs.append(Image.open(buffered).copy())
                except RuntimeError as e:
                    raise RuntimeError(f'ppt parse error at page {i+1}, original error: {str(e)}') from e
                    raise RuntimeError(
                        f'ppt parse error at page {i + 1}, original error: {str(e)}') from e
        assert len(imgs) == len(
            txts), "Slides text and image do not match: {} vs. {}".format(len(imgs), len(txts))
            txts), "Slides text and image do not match: {} vs. {}".format(
            len(imgs), len(txts))
        callback(0.9, "Image extraction finished")
        self.is_english = is_english(txts)
        return [(txts[i], imgs[i]) for i in range(len(txts))]


class Pdf(PdfParser):
    def __init__(self):
        super().__init__()

    def __garbage(self, txt):
        txt = txt.lower().strip()
        if re.match(r"[0-9\.,%/-]+$", txt):
            return True
        if len(txt) < 3:
            return True
        return False

    def __call__(self, filename, binary=None, from_page=0,
                 to_page=100000, zoomin=3, callback=None):
        from timeit import default_timer as timer
        start = timer()
                 to_page=100000, zoomin=3, callback=None, **kwargs):
        # 1. OCR
        callback(msg="OCR started")
        self.__images__(filename if not binary else binary,
                        zoomin, from_page, to_page, callback)
        callback(msg="Page {}~{}: OCR finished ({:.2f}s)".format(from_page, min(to_page, self.total_page), timer() - start))
        assert len(self.boxes) == len(self.page_images), "{} vs. {}".format(
            len(self.boxes), len(self.page_images))
        self.__images__(filename if not binary else binary, zoomin, from_page,
                        to_page, callback)

        # 2. Layout Analysis
        callback(msg="Layout Analysis")
        self._layouts_rec(zoomin)

        # 3. Table Analysis
        callback(msg="Table Analysis")
        self._table_transformer_job(zoomin)

        # 4. Text Merge
        self._text_merge()

        # 5. Extract Tables (Force HTML)
        tbls = self._extract_table_figure(True, zoomin, True, True)

        # 6. Re-assemble Page Content
        page_items = defaultdict(list)

        # (A) Add text
        for b in self.boxes:
            if not (from_page < b["page_number"] <= to_page + from_page):
                continue
            page_items[b["page_number"]].append({
                "top": b["top"],
                "x0": b["x0"],
                "text": b["text"],
                "type": "text"
            })

        # (B) Add table and figure
        for (img, content), positions in tbls:
            if not positions:
                continue

            # Handle content type (list vs str)
            if isinstance(content, list):
                final_text = "\n".join(content)
            elif isinstance(content, str):
                final_text = content
            else:
                final_text = str(content)

            try:
                # Parse positions
                pn_index = positions[0][0]
                if isinstance(pn_index, list):
                    pn_index = pn_index[0]
                current_page_num = int(pn_index) + 1
            except Exception as e:
                print(f"Error parsing position: {e}")
                continue

            if not (from_page < current_page_num <= to_page + from_page):
                continue

            top = positions[0][3]
            left = positions[0][1]

            page_items[current_page_num].append({
                "top": top,
                "x0": left,
                "text": final_text,
                "type": "table_or_figure"
            })

        # 7. Generate result
        res = []
        for i in range(len(self.boxes)):
            lines = "\n".join([b["text"] for b in self.boxes[i]
                               if not self.__garbage(b["text"])])
            res.append((lines, self.page_images[i]))
        callback(0.9, "Page {}~{}: Parsing finished".format(
            from_page, min(to_page, self.total_page)))
        for i in range(len(self.page_images)):
            current_pn = from_page + i + 1
            items = page_items.get(current_pn, [])
            # Sort by vertical position
            items.sort(key=lambda x: (x["top"], x["x0"]))
            full_page_text = "\n\n".join([item["text"] for item in items])
            if not full_page_text.strip():
                full_page_text = f"[No text or data found in Page {current_pn}]"
            page_img = self.page_images[i]
            res.append((full_page_text, page_img))

        callback(0.9, "Parsing finished")

        return res, []

@@ -106,14 +172,16 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
    eng = lang.lower() == "english"
    doc = {
        "docnm_kwd": filename,
        "title_tks": rag_tokenizer.tokenize(re.sub(r"\.[a-zA-Z]+$", "", filename))
        "title_tks": rag_tokenizer.tokenize(
            re.sub(r"\.[a-zA-Z]+$", "", filename))
    }
    doc["title_sm_tks"] = rag_tokenizer.fine_grained_tokenize(doc["title_tks"])
    res = []
    if re.search(r"\.pptx?$", filename, re.IGNORECASE):
        ppt_parser = Ppt()
        for pn, (txt, img) in enumerate(ppt_parser(
                filename if not binary else binary, from_page, 1000000, callback)):
                filename if not binary else binary, from_page, 1000000,
                callback)):
            d = copy.deepcopy(doc)
            pn += from_page
            d["image"] = img
@@ -135,14 +203,14 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
        callback(0.1, "Start to parse.")

        sections, _, _ = parser(
            filename = filename,
            binary = binary,
            from_page = from_page,
            to_page = to_page,
            lang = lang,
            callback = callback,
            pdf_cls = Pdf,
            layout_recognizer = layout_recognizer,
            filename=filename,
            binary=binary,
            from_page=from_page,
            to_page=to_page,
            lang=lang,
            callback=callback,
            pdf_cls=Pdf,
            layout_recognizer=layout_recognizer,
            **kwargs
        )
@@ -151,7 +219,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,

        if name in ["tcadp", "docling", "mineru"]:
            parser_config["chunk_token_num"] = 0

        callback(0.8, "Finish parsing.")

        for pn, (txt, img) in enumerate(sections):
@@ -161,7 +229,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
            d["image"] = img
            d["page_num_int"] = [pn + 1]
            d["top_int"] = [0]
            d["position_int"] = [(pn + 1, 0, img.size[0] if img else 0, 0, img.size[1] if img else 0)]
            d["position_int"] = [(pn + 1, 0, img.size[0] if img else 0, 0,
                                  img.size[1] if img else 0)]
            tokenize(d, txt, eng)
            res.append(d)
        return res
@@ -175,4 +244,5 @@ if __name__ == "__main__":

    def dummy(a, b):
        pass

    chunk(sys.argv[1], callback=dummy)

@@ -68,6 +68,7 @@ from common.signal_utils import start_tracemalloc_and_snapshot, stop_tracemalloc
from common.exceptions import TaskCanceledException
from common import settings
from common.constants import PAGERANK_FLD, TAG_FLD, SVR_CONSUMER_GROUP_NAME
from common.misc_utils import install_mineru

BATCH_SIZE = 64

@@ -1100,6 +1101,7 @@ async def main():
    show_configs()
    settings.init_settings()
    settings.check_and_install_torch()
    install_mineru()
    logging.info(f'settings.EMBEDDING_CFG: {settings.EMBEDDING_CFG}')
    settings.print_rag_settings()
    if sys.platform != "win32":

web/package-lock.json (generated)
@ -76,6 +76,7 @@
|
||||
"pptx-preview": "^1.0.5",
|
||||
"rc-tween-one": "^3.0.6",
|
||||
"react": "^18.2.0",
|
||||
"react-audio-voice-recorder": "^2.2.0",
|
||||
"react-copy-to-clipboard": "^5.1.0",
|
||||
"react-day-picker": "^9.8.0",
|
||||
"react-dom": "^18.2.0",
|
||||
@ -2852,6 +2853,69 @@
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg": {
|
||||
"version": "0.11.6",
|
||||
"resolved": "https://registry.npmmirror.com/@ffmpeg/ffmpeg/-/ffmpeg-0.11.6.tgz",
|
||||
"integrity": "sha512-uN8J8KDjADEavPhNva6tYO9Fj0lWs9z82swF3YXnTxWMBoFLGq3LZ6FLlIldRKEzhOBKnkVfA8UnFJuvGvNxcA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-url": "^1.2.4",
|
||||
"node-fetch": "^2.6.1",
|
||||
"regenerator-runtime": "^0.13.7",
|
||||
"resolve-url": "^0.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.16.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg/node_modules/node-fetch": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "4.x || >=6.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"encoding": "^0.1.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"encoding": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg/node_modules/regenerator-runtime": {
|
||||
"version": "0.13.11",
|
||||
"resolved": "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
|
||||
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg/node_modules/tr46": {
|
||||
"version": "0.0.3",
|
||||
"resolved": "https://registry.npmmirror.com/tr46/-/tr46-0.0.3.tgz",
|
||||
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg/node_modules/webidl-conversions": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
|
||||
"license": "BSD-2-Clause"
|
||||
},
|
||||
"node_modules/@ffmpeg/ffmpeg/node_modules/whatwg-url": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-5.0.0.tgz",
|
||||
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tr46": "~0.0.3",
|
||||
"webidl-conversions": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@floating-ui/core": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmmirror.com/@floating-ui/core/-/core-0.6.2.tgz",
|
||||
@ -21653,6 +21717,12 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/is-url": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmmirror.com/is-url/-/is-url-1.2.4.tgz",
|
||||
"integrity": "sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-weakmap": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmmirror.com/is-weakmap/-/is-weakmap-2.0.1.tgz",
|
||||
@ -29630,6 +29700,30 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react-audio-visualize": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmmirror.com/react-audio-visualize/-/react-audio-visualize-1.2.0.tgz",
|
||||
"integrity": "sha512-rfO5nmT0fp23gjU0y2WQT6+ZOq2ZsuPTMphchwX1PCz1Di4oaIr6x7JZII8MLrbHdG7UB0OHfGONTIsWdh67kQ==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": ">=16.2.0",
|
||||
"react-dom": ">=16.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react-audio-voice-recorder": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmmirror.com/react-audio-voice-recorder/-/react-audio-voice-recorder-2.2.0.tgz",
|
||||
"integrity": "sha512-Hq+143Zs99vJojT/uFvtpxUuiIKoLbMhxhA7qgxe5v8hNXrh5/qTnvYP92hFaE5V+GyoCXlESONa0ufk7t5kHQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@ffmpeg/ffmpeg": "^0.11.6",
|
||||
"react-audio-visualize": "^1.1.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": ">=16.2.0",
|
||||
"react-dom": ">=16.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react-copy-to-clipboard": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmmirror.com/react-copy-to-clipboard/-/react-copy-to-clipboard-5.1.0.tgz",
|
||||
@ -32102,8 +32196,7 @@
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmmirror.com/resolve-url/-/resolve-url-0.2.1.tgz",
|
||||
"integrity": "sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==",
|
||||
"deprecated": "https://github.com/lydell/resolve-url#deprecated",
|
||||
"dev": true
|
||||
"deprecated": "https://github.com/lydell/resolve-url#deprecated"
|
||||
},
|
||||
"node_modules/resolve.exports": {
|
||||
"version": "2.0.2",
|
||||
|
||||
@ -89,6 +89,7 @@
|
||||
"pptx-preview": "^1.0.5",
|
||||
"rc-tween-one": "^3.0.6",
|
||||
"react": "^18.2.0",
|
||||
"react-audio-voice-recorder": "^2.2.0",
|
||||
"react-copy-to-clipboard": "^5.1.0",
|
||||
"react-day-picker": "^9.8.0",
|
||||
"react-dom": "^18.2.0",
|
||||
|
||||
@ -9,7 +9,7 @@ import {
|
||||
useFetchManualSystemTokenList,
|
||||
useFetchSystemTokenList,
|
||||
useRemoveSystemToken,
|
||||
} from '@/hooks/user-setting-hooks';
|
||||
} from '@/hooks/use-user-setting-request';
|
||||
import { IStats } from '@/interfaces/database/chat';
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
import { message } from 'antd';
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { useSelectParserList } from '@/hooks/user-setting-hooks';
|
||||
import { useSelectParserList } from '@/hooks/use-user-setting-request';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
|
||||
const ParserListMap = new Map([
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { DocumentParserType } from '@/constants/knowledge';
|
||||
import { useHandleChunkMethodSelectChange } from '@/hooks/logic-hooks';
|
||||
import { useSelectParserList } from '@/hooks/user-setting-hooks';
|
||||
import { useSelectParserList } from '@/hooks/use-user-setting-request';
|
||||
import { FormInstance } from 'antd';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
|
||||
|
||||
@ -21,7 +21,7 @@ import { useFetchParserListOnMount, useShowAutoKeywords } from './hooks';
|
||||
|
||||
import { DocumentParserType } from '@/constants/knowledge';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useFetchKnowledgeBaseConfiguration } from '@/hooks/knowledge-hooks';
|
||||
import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-request';
|
||||
import { IParserConfig } from '@/interfaces/database/document';
|
||||
import { IChangeParserConfigRequestBody } from '@/interfaces/request/document';
|
||||
import { get } from 'lodash';
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
|
||||
import { useFetchManualSystemTokenList } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchManualSystemTokenList } from '@/hooks/use-user-setting-request';
|
||||
import { useCallback } from 'react';
|
||||
import message from '../ui/message';
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import { getExtension } from '@/utils/document-util';
|
||||
import SvgIcon from '../svg-icon';
|
||||
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/document-hooks';
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/use-document-request';
|
||||
import { useEffect } from 'react';
|
||||
import styles from './index.less';
|
||||
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
import Image from '@/components/image';
|
||||
import SvgIcon from '@/components/svg-icon';
|
||||
|
||||
import {
|
||||
useFetchDocumentThumbnailsByIds,
|
||||
useGetDocumentUrl,
|
||||
} from '@/hooks/document-hooks';
|
||||
} from '@/hooks/use-document-request';
|
||||
import { IReference, IReferenceChunk } from '@/interfaces/database/chat';
|
||||
import {
|
||||
currentReg,
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import IndentedTree from './indented-tree';
|
||||
|
||||
import { useFetchKnowledgeGraph } from '@/hooks/knowledge-hooks';
|
||||
import { useFetchKnowledgeGraph } from '@/hooks/use-knowledge-request';
|
||||
import { IModalProps } from '@/interfaces/common';
|
||||
import { Modal } from 'antd';
|
||||
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { DocumentParserType } from '@/constants/knowledge';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
|
||||
import { useFetchKnowledgeList } from '@/hooks/use-knowledge-request';
|
||||
import { useBuildQueryVariableOptions } from '@/pages/agent/hooks/use-get-begin-query';
|
||||
import { UserOutlined } from '@ant-design/icons';
|
||||
import { Avatar as AntAvatar, Form, Select, Space } from 'antd';
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/use-llm-request';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { camelCase } from 'lodash';
|
||||
import { ReactNode, useMemo } from 'react';
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/use-llm-request';
|
||||
import { Form, Select } from 'antd';
|
||||
import { camelCase } from 'lodash';
|
||||
import { useMemo } from 'react';
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
|
||||
import { Popover as AntPopover, Select as AntSelect } from 'antd';
|
||||
import LlmSettingItems from '../llm-setting-items';
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
|
||||
import * as SelectPrimitive from '@radix-ui/react-select';
|
||||
import { forwardRef, memo, useMemo, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
@ -7,7 +7,7 @@ import { Flex, Form, InputNumber, Select, Slider, Switch, Tooltip } from 'antd';
|
||||
import camelCase from 'lodash/camelCase';
|
||||
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
|
||||
import { setChatVariableEnabledFieldValuePage } from '@/utils/chat';
|
||||
import { QuestionCircleOutlined } from '@ant-design/icons';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { SelectWithSearch } from '../originui/select-with-search';
|
||||
import { RAGFlowFormItem } from '../ragflow-form';
|
||||
|
||||
@ -13,11 +13,11 @@ import remarkGfm from 'remark-gfm';
|
||||
import remarkMath from 'remark-math';
|
||||
import { visitParents } from 'unist-util-visit-parents';
|
||||
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/document-hooks';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import 'katex/dist/katex.min.css'; // `rehype-katex` does not import the CSS for you
|
||||
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/use-document-request';
|
||||
import {
|
||||
currentReg,
|
||||
preprocessLaTeX,
|
||||
|
||||
@ -18,7 +18,9 @@ import { cn } from '@/lib/utils';
|
||||
import { t } from 'i18next';
|
||||
import { CircleStop, Paperclip, Send, Upload, X } from 'lucide-react';
|
||||
import * as React from 'react';
|
||||
import { useEffect } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
import { AudioButton } from '../ui/audio-button';
|
||||
|
||||
interface IProps {
|
||||
disabled: boolean;
|
||||
@ -52,6 +54,22 @@ export function NextMessageInput({
|
||||
removeFile,
|
||||
}: IProps) {
|
||||
const [files, setFiles] = React.useState<File[]>([]);
|
||||
const [audioInputValue, setAudioInputValue] = React.useState<string | null>(
|
||||
null,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (audioInputValue !== null) {
|
||||
onInputChange({
|
||||
target: { value: audioInputValue },
|
||||
} as React.ChangeEvent<HTMLTextAreaElement>);
|
||||
|
||||
setTimeout(() => {
|
||||
onPressEnter();
|
||||
setAudioInputValue(null);
|
||||
}, 0);
|
||||
}
|
||||
}, [audioInputValue, onInputChange, onPressEnter]);
|
||||
|
||||
const onFileReject = React.useCallback((file: File, message: string) => {
|
||||
toast(message, {
|
||||
@ -171,15 +189,24 @@ export function NextMessageInput({
|
||||
<CircleStop />
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
className="size-5 rounded-sm"
|
||||
disabled={
|
||||
sendDisabled || isUploading || sendLoading || !value.trim()
|
||||
}
|
||||
>
|
||||
<Send />
|
||||
<span className="sr-only">Send message</span>
|
||||
</Button>
|
||||
<div className="flex items-center gap-3">
|
||||
{/* <div className="bg-bg-input rounded-md hover:bg-bg-card p-1"> */}
|
||||
<AudioButton
|
||||
onOk={(value) => {
|
||||
setAudioInputValue(value);
|
||||
}}
|
||||
/>
|
||||
{/* </div> */}
|
||||
<Button
|
||||
className="size-5 rounded-sm"
|
||||
disabled={
|
||||
sendDisabled || isUploading || sendLoading || !value.trim()
|
||||
}
|
||||
>
|
||||
<Send />
|
||||
<span className="sr-only">Send message</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</form>
|
||||
|
||||
@ -13,7 +13,6 @@ import remarkGfm from 'remark-gfm';
|
||||
import remarkMath from 'remark-math';
|
||||
import { visitParents } from 'unist-util-visit-parents';
|
||||
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/document-hooks';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import 'katex/dist/katex.min.css'; // `rehype-katex` does not import the CSS for you
|
||||
@ -26,6 +25,7 @@ import {
|
||||
showImage,
|
||||
} from '@/utils/chat';
|
||||
|
||||
import { useFetchDocumentThumbnailsByIds } from '@/hooks/use-document-request';
|
||||
import { cn } from '@/lib/utils';
|
||||
import classNames from 'classnames';
|
||||
import { omit } from 'lodash';
|
||||
|
||||
@ -3,6 +3,8 @@ import CopyToClipboard from '@/components/copy-to-clipboard';
|
||||
import { useSetModalState } from '@/hooks/common-hooks';
|
||||
import { IRemoveMessageById } from '@/hooks/logic-hooks';
|
||||
import { AgentChatContext } from '@/pages/agent/context';
|
||||
import { downloadFile } from '@/services/file-manager-service';
|
||||
import { downloadFileFromBlob } from '@/utils/file-util';
|
||||
import {
|
||||
DeleteOutlined,
|
||||
DislikeOutlined,
|
||||
@ -12,7 +14,7 @@ import {
|
||||
SyncOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import { Radio, Tooltip } from 'antd';
|
||||
import { NotebookText } from 'lucide-react';
|
||||
import { Download, NotebookText } from 'lucide-react';
|
||||
import { useCallback, useContext } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { ToggleGroup, ToggleGroupItem } from '../ui/toggle-group';
|
||||
@ -28,6 +30,11 @@ interface IProps {
|
||||
audioBinary?: string;
|
||||
showLoudspeaker?: boolean;
|
||||
showLog?: boolean;
|
||||
attachment?: {
|
||||
file_name: string;
|
||||
doc_id: string;
|
||||
format: string;
|
||||
};
|
||||
}
|
||||
|
||||
export const AssistantGroupButton = ({
|
||||
@ -38,6 +45,7 @@ export const AssistantGroupButton = ({
|
||||
showLikeButton,
|
||||
showLoudspeaker = true,
|
||||
showLog = true,
|
||||
attachment,
|
||||
}: IProps) => {
|
||||
const { visible, hideModal, showModal, onFeedbackOk, loading } =
|
||||
useSendFeedback(messageId);
|
||||
@ -98,6 +106,27 @@ export const AssistantGroupButton = ({
|
||||
<NotebookText className="size-4" />
|
||||
</ToggleGroupItem>
|
||||
)}
|
||||
{!!attachment?.doc_id && (
|
||||
<ToggleGroupItem
|
||||
value="g"
|
||||
onClick={async () => {
|
||||
try {
|
||||
const response = await downloadFile({
|
||||
docId: attachment.doc_id,
|
||||
ext: attachment.format,
|
||||
});
|
||||
const blob = new Blob([response.data], {
|
||||
type: response.data.type,
|
||||
});
|
||||
downloadFileFromBlob(blob, attachment.file_name);
|
||||
} catch (error) {
|
||||
console.error('Download failed:', error);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Download size={16} />
|
||||
</ToggleGroupItem>
|
||||
)}
|
||||
</ToggleGroup>
|
||||
{visible && (
|
||||
<FeedbackModal
|
||||
|
||||
@ -21,10 +21,8 @@ import { INodeEvent, MessageEventType } from '@/hooks/use-send-message';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { AgentChatContext } from '@/pages/agent/context';
|
||||
import { WorkFlowTimeline } from '@/pages/agent/log-sheet/workflow-timeline';
|
||||
import { downloadFile } from '@/services/file-manager-service';
|
||||
import { downloadFileFromBlob } from '@/utils/file-util';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { Atom, ChevronDown, ChevronUp, Download } from 'lucide-react';
|
||||
import { Atom, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import MarkdownContent from '../next-markdown-content';
|
||||
import { RAGFlowAvatar } from '../ragflow-avatar';
|
||||
import { useTheme } from '../theme-provider';
|
||||
@ -176,6 +174,7 @@ function MessageItem({
|
||||
audioBinary={item.audio_binary}
|
||||
showLoudspeaker={showLoudspeaker}
|
||||
showLog={showLog}
|
||||
attachment={item.attachment}
|
||||
></AssistantGroupButton>
|
||||
)}
|
||||
{!isShare && (
|
||||
@ -187,6 +186,7 @@ function MessageItem({
|
||||
audioBinary={item.audio_binary}
|
||||
showLoudspeaker={showLoudspeaker}
|
||||
showLog={showLog}
|
||||
attachment={item.attachment}
|
||||
></AssistantGroupButton>
|
||||
)}
|
||||
</>
|
||||
@ -250,7 +250,7 @@ function MessageItem({
|
||||
{isUser && (
|
||||
<UploadedMessageFiles files={item.files}></UploadedMessageFiles>
|
||||
)}
|
||||
{isAssistant && item.attachment && item.attachment.doc_id && (
|
||||
{/* {isAssistant && item.attachment && item.attachment.doc_id && (
|
||||
<div className="w-full flex items-center justify-end">
|
||||
<Button
|
||||
variant="link"
|
||||
@ -275,7 +275,7 @@ function MessageItem({
|
||||
<Download size={16} />
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
)} */}
|
||||
</section>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
@ -1,7 +1,3 @@
|
||||
import {
|
||||
useGetChunkHighlights,
|
||||
useGetDocumentUrl,
|
||||
} from '@/hooks/document-hooks';
|
||||
import { IReferenceChunk } from '@/interfaces/database/chat';
|
||||
import { IChunk } from '@/interfaces/database/knowledge';
|
||||
import FileError from '@/pages/document-viewer/file-error';
|
||||
@ -17,6 +13,10 @@ import {
|
||||
} from 'react-pdf-highlighter';
|
||||
import { useCatchDocumentError } from './hooks';
|
||||
|
||||
import {
|
||||
useGetChunkHighlights,
|
||||
useGetDocumentUrl,
|
||||
} from '@/hooks/use-document-request';
|
||||
import styles from './index.less';
|
||||
|
||||
interface IProps {
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
|
||||
import { useSelectLlmOptionsByModelType } from '@/hooks/use-llm-request';
|
||||
import { Select as AntSelect, Form, message, Slider } from 'antd';
|
||||
import { useCallback } from 'react';
|
||||
import { useFormContext } from 'react-hook-form';
|
||||
|
||||
@ -5,7 +5,7 @@ import SelectFiles from './select-files';
|
||||
import {
|
||||
useAllTestingResult,
|
||||
useSelectTestingResult,
|
||||
} from '@/hooks/knowledge-hooks';
|
||||
} from '@/hooks/use-knowledge-request';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import styles from './index.less';
|
||||
|
||||
|
||||
@ -3,7 +3,7 @@ import { useTranslate } from '@/hooks/common-hooks';
|
||||
import {
|
||||
useAllTestingResult,
|
||||
useSelectTestingResult,
|
||||
} from '@/hooks/knowledge-hooks';
|
||||
} from '@/hooks/use-knowledge-request';
|
||||
import { ITestingDocument } from '@/interfaces/database/knowledge';
|
||||
import { EyeOutlined } from '@ant-design/icons';
|
||||
import { Button, Table, TableProps, Tooltip } from 'antd';
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
|
||||
import { PropsWithChildren } from 'react';
|
||||
|
||||
export function SharedBadge({ children }: PropsWithChildren) {
|
||||
|
||||
web/src/components/ui/audio-button.tsx (new file)
@ -0,0 +1,422 @@
|
||||
import { AudioRecorder, useAudioRecorder } from 'react-audio-voice-recorder';
|
||||
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { Authorization } from '@/constants/authorization';
|
||||
import { cn } from '@/lib/utils';
|
||||
import api from '@/utils/api';
|
||||
import { getAuthorization } from '@/utils/authorization-util';
|
||||
import { Loader2, Mic, Square } from 'lucide-react';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { useIsDarkTheme } from '../theme-provider';
|
||||
import { Input } from './input';
|
||||
import { Popover, PopoverContent, PopoverTrigger } from './popover';
|
||||
const VoiceVisualizer = ({ isRecording }: { isRecording: boolean }) => {
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const audioContextRef = useRef<AudioContext | null>(null);
|
||||
const analyserRef = useRef<AnalyserNode | null>(null);
|
||||
const animationFrameRef = useRef<number>(0);
|
||||
const streamRef = useRef<MediaStream | null>(null);
|
||||
const isDark = useIsDarkTheme();
|
||||
|
||||
const startVisualization = async () => {
|
||||
try {
|
||||
// Check if the browser supports getUserMedia
|
||||
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
|
||||
console.error('Browser does not support getUserMedia API');
|
||||
return;
|
||||
}
|
||||
// Request microphone permission
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
      streamRef.current = stream;

      // Create audio context and analyser
      const audioContext = new (window.AudioContext ||
        (window as any).webkitAudioContext)();
      audioContextRef.current = audioContext;

      const analyser = audioContext.createAnalyser();
      analyserRef.current = analyser;
      analyser.fftSize = 32;

      // Connect audio nodes
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);

      // Start drawing
      draw();
    } catch (error) {
      console.error(
        'Unable to access microphone for voice visualization:',
        error,
      );
    }
  };

  const stopVisualization = () => {
    // Stop animation frame
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
    }

    // Stop audio stream
    if (streamRef.current) {
      streamRef.current.getTracks().forEach((track) => track.stop());
    }

    // Close audio context
    if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
      audioContextRef.current.close();
    }

    // Clear canvas
    const canvas = canvasRef.current;
    if (canvas) {
      const ctx = canvas.getContext('2d');
      if (ctx) {
        ctx.clearRect(0, 0, canvas.width, canvas.height);
      }
    }
  };

  useEffect(() => {
    if (isRecording) {
      startVisualization();
    } else {
      stopVisualization();
    }

    return () => {
      stopVisualization();
    };
  }, [isRecording]);

  const draw = () => {
    const canvas = canvasRef.current;
    if (!canvas) return;

    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const analyser = analyserRef.current;
    if (!analyser) return;

    // Set canvas dimensions
    const width = canvas.clientWidth;
    const height = canvas.clientHeight;
    const centerY = height / 2;

    if (canvas.width !== width || canvas.height !== height) {
      canvas.width = width;
      canvas.height = height;
    }

    // Clear canvas
    ctx.clearRect(0, 0, width, height);

    // Get frequency data
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    analyser.getByteFrequencyData(dataArray);

    // Draw waveform
    const barWidth = (width / bufferLength) * 1.5;
    let x = 0;

    for (let i = 0; i < bufferLength; i = i + 2) {
      const barHeight = (dataArray[i] / 255) * centerY;

      // Create gradient
      const gradient = ctx.createLinearGradient(
        0,
        centerY - barHeight,
        0,
        centerY + barHeight,
      );
      gradient.addColorStop(0, '#3ba05c'); // Green
      gradient.addColorStop(1, '#3ba05c'); // Green (same color at both stops, so the bar is solid)
      // gradient.addColorStop(0, isDark ? '#fff' : '#000');
      // gradient.addColorStop(1, isDark ? '#eee' : '#eee');

      ctx.fillStyle = gradient;
      ctx.fillRect(x, centerY - barHeight, barWidth, barHeight * 2);

      x += barWidth + 2;
    }

    animationFrameRef.current = requestAnimationFrame(draw);
  };

  return (
    <div className="w-full h-6 bg-transparent flex items-center justify-center overflow-hidden">
      <canvas ref={canvasRef} className="w-full h-full" />
    </div>
  );
};

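Note on the analyser configuration above: with `analyser.fftSize = 32` the Web Audio API exposes `frequencyBinCount = fftSize / 2 = 16` frequency bins, and because the draw loop advances `i` by 2, at most 8 bars are rendered per frame.
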
const VoiceInputBox = ({
  isRecording,
  onStop,
  recordingTime,
  value,
}: {
  value: string;
  isRecording: boolean;
  onStop: () => void;
  recordingTime: number;
}) => {
  // Format recording time
  const formatTime = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
  };

  return (
    <div className="w-full">
      <div className="absolute w-full h-6 translate-y-full">
        <VoiceVisualizer isRecording={isRecording} />
      </div>
      <Input
        rootClassName="w-full"
        className="flex-1"
        readOnly
        value={value}
        suffix={
          <div className="flex justify-end px-1 items-center gap-1 w-20">
            <Button
              variant={'ghost'}
              size="sm"
              className="text-text-primary p-1 border-none hover:bg-transparent"
              onClick={onStop}
            >
              <Square className="text-text-primary" size={12} />
            </Button>
            <span className="text-xs text-text-secondary">
              {formatTime(recordingTime)}
            </span>
          </div>
        }
      />
    </div>
  );
};

export const AudioButton = ({
  onOk,
}: {
  onOk?: (transcript: string) => void;
}) => {
  // const [showInputBox, setShowInputBox] = useState(false);
  const [isRecording, setIsRecording] = useState(false);
  const [isProcessing, setIsProcessing] = useState(false);
  const [recordingTime, setRecordingTime] = useState(0);
  const [transcript, setTranscript] = useState('');
  const [popoverOpen, setPopoverOpen] = useState(false);
  const recorderControls = useAudioRecorder();
  const intervalRef = useRef<NodeJS.Timeout | null>(null);

  // Handle logic after recording is complete
  const handleRecordingComplete = async (blob: Blob) => {
    setIsRecording(false);

    // const url = URL.createObjectURL(blob);
    // const a = document.createElement('a');
    // a.href = url;
    // a.download = 'recording.webm';
    // document.body.appendChild(a);
    // a.click();

    setIsProcessing(true);
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
      intervalRef.current = null;
    }
    try {
      const audioFile = new File([blob], 'recording.webm', {
        type: blob.type || 'audio/webm',
        // type: 'audio/mpeg',
      });

      const formData = new FormData();
      formData.append('file', audioFile);
      formData.append('stream', 'false');

      const response = await fetch(api.sequence2txt, {
        method: 'POST',
        headers: {
          [Authorization]: getAuthorization(),
          // 'Content-Type': blob.type || 'audio/webm',
        },
        body: formData,
      });

      // if (!response.ok) {
      //   throw new Error(`HTTP error! status: ${response.status}`);
      // }

      // if (!response.body) {
      //   throw new Error('ReadableStream not supported in this browser');
      // }

      console.log('Response:', response);
      const { data, code } = await response.json();
      if (code === 0 && data && data.text) {
        setTranscript(data.text);
        console.log('Transcript:', data.text);
        onOk?.(data.text);
      }
      setPopoverOpen(false);
    } catch (error) {
      console.error('Failed to process audio:', error);
      // setTranscript(t('voiceRecorder.processingError'));
    } finally {
      setIsProcessing(false);
    }
  };

  // Start recording
  const startRecording = () => {
    recorderControls.startRecording();
    setIsRecording(true);
    // setShowInputBox(true);
    setPopoverOpen(true);
    setRecordingTime(0);

    // Start timing
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
    }
    intervalRef.current = setInterval(() => {
      setRecordingTime((prev) => prev + 1);
    }, 1000);
  };

  // Stop recording
  const stopRecording = () => {
    recorderControls.stopRecording();
    setIsRecording(false);
    // setShowInputBox(false);
    setPopoverOpen(false);
    setRecordingTime(0);

    // Clear timer
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
      intervalRef.current = null;
    }
  };

  // Clear transcription content
  // const clearTranscript = () => {
  //   setTranscript('');
  // };

  useEffect(() => {
    return () => {
      if (intervalRef.current) {
        clearInterval(intervalRef.current);
      }
    };
  }, []);

  return (
    <div>
      {false && (
        <div className="flex flex-col items-center space-y-4">
          <div className="relative">
            <Popover
              open={popoverOpen}
              onOpenChange={(open) => {
                setPopoverOpen(true);
              }}
            >
              <PopoverTrigger asChild>
                <Button
                  variant="outline"
                  size="sm"
                  onClick={() => {
                    if (isRecording) {
                      stopRecording();
                    } else {
                      startRecording();
                    }
                  }}
                  className={`w-6 h-6 p-2 rounded-full border-none bg-transparent hover:bg-transparent ${
                    isRecording ? 'animate-pulse' : ''
                  }`}
                  disabled={isProcessing}
                >
                  <Mic size={16} className="text-text-primary" />
                </Button>
              </PopoverTrigger>
              <PopoverContent
                align="end"
                sideOffset={-20}
                className="p-0 border-none"
              >
                <VoiceInputBox
                  isRecording={isRecording}
                  value={transcript}
                  onStop={stopRecording}
                  recordingTime={recordingTime}
                />
              </PopoverContent>
            </Popover>
          </div>
        </div>
      )}

      <div className="relative w-6 h-6 flex items-center justify-center">
        {isRecording && (
          <div
            className={cn(
              'absolute inset-0 w-full h-6 rounded-full overflow-hidden flex items-center justify-center p-1',
              { 'bg-state-success-5': isRecording },
            )}
          >
            <VoiceVisualizer isRecording={isRecording} />
          </div>
        )}
        {isRecording && (
          <div className="absolute inset-0 rounded-full border-2 border-state-success animate-ping opacity-75"></div>
        )}
        <Button
          variant="outline"
          size="sm"
          // onMouseDown={() => {
          //   startRecording();
          // }}
          // onMouseUp={() => {
          //   stopRecording();
          // }}
          onClick={() => {
            if (isRecording) {
              stopRecording();
            } else {
              startRecording();
            }
          }}
          className={`w-6 h-6 p-2 rounded-md border-none bg-transparent hover:bg-state-success-5 ${
            isRecording
              ? 'animate-pulse bg-state-success-5 text-state-success'
              : ''
          }`}
          disabled={isProcessing}
        >
          {isProcessing ? (
            <Loader2 size={16} className="animate-spin" />
          ) : isRecording ? (
            <></>
          ) : (
            // <Mic size={16} className="text-text-primary" />
            // <Square size={12} className="text-text-primary" />
            <Mic size={16} />
          )}
        </Button>
      </div>

      {/* Hide original component */}
      <div className="hidden">
        <AudioRecorder
          onRecordingComplete={handleRecordingComplete}
          recorderControls={recorderControls}
        />
      </div>
    </div>
  );
};
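A minimal sketch of how `AudioButton` could be wired into a message input. The host component, its import path, and the `setValue` setter are assumptions for illustration, not part of this change:

// Hypothetical host component (names and import path assumed).
import { useState } from 'react';
import { AudioButton } from '@/components/message-input/audio-button';

export function MessageInputWithVoice() {
  const [value, setValue] = useState('');

  return (
    <div className="flex items-center gap-2">
      <input value={value} onChange={(e) => setValue(e.target.value)} />
      {/* onOk receives the transcript returned by the sequence2txt endpoint */}
      <AudioButton onOk={(transcript) => setValue(transcript)} />
    </div>
  );
}
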
@ -9,10 +9,23 @@ export interface InputProps
  value?: string | number | readonly string[] | undefined;
  prefix?: React.ReactNode;
  suffix?: React.ReactNode;
  rootClassName?: string;
}

const Input = React.forwardRef<HTMLInputElement, InputProps>(
  ({ className, type, value, onChange, prefix, suffix, ...props }, ref) => {
  (
    {
      className,
      rootClassName,
      type,
      value,
      onChange,
      prefix,
      suffix,
      ...props
    },
    ref,
  ) => {
    const isControlled = value !== undefined;
    const { defaultValue, ...restProps } = props;
    const inputValue = isControlled ? value : defaultValue;
@ -89,7 +102,7 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(

    if (prefix || suffix || isPasswordInput) {
      return (
        <div className="relative">
        <div className={cn('relative', rootClassName)}>
          {prefix && (
            <span
              ref={prefixRef}
@ -1,210 +0,0 @@
import { ResponseGetType, ResponseType } from '@/interfaces/database/base';
import { IChunk, IKnowledgeFile } from '@/interfaces/database/knowledge';
import kbService from '@/services/knowledge-service';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { PaginationProps, message } from 'antd';
import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import {
  useGetPaginationWithRouter,
  useHandleSearchChange,
} from './logic-hooks';
import {
  useGetKnowledgeSearchParams,
  useSetPaginationParams,
} from './route-hook';

export interface IChunkListResult {
  searchString?: string;
  handleInputChange?: React.ChangeEventHandler<HTMLInputElement>;
  pagination: PaginationProps;
  setPagination?: (pagination: { page: number; pageSize: number }) => void;
  available: number | undefined;
  handleSetAvailable: (available: number | undefined) => void;
}

export const useFetchNextChunkList = (): ResponseGetType<{
  data: IChunk[];
  total: number;
  documentInfo: IKnowledgeFile;
}> &
  IChunkListResult => {
  const { pagination, setPagination } = useGetPaginationWithRouter();
  const { documentId } = useGetKnowledgeSearchParams();
  const { searchString, handleInputChange } = useHandleSearchChange();
  const [available, setAvailable] = useState<number | undefined>();
  const debouncedSearchString = useDebounce(searchString, { wait: 500 });

  const { data, isFetching: loading } = useQuery({
    queryKey: [
      'fetchChunkList',
      documentId,
      pagination.current,
      pagination.pageSize,
      debouncedSearchString,
      available,
    ],
    placeholderData: (previousData) =>
      previousData ?? { data: [], total: 0, documentInfo: {} }, // https://github.com/TanStack/query/issues/8183
    gcTime: 0,
    queryFn: async () => {
      const { data } = await kbService.chunk_list({
        doc_id: documentId,
        page: pagination.current,
        size: pagination.pageSize,
        available_int: available,
        keywords: searchString,
      });
      if (data.code === 0) {
        const res = data.data;
        return {
          data: res.chunks,
          total: res.total,
          documentInfo: res.doc,
        };
      }

      return (
        data?.data ?? {
          data: [],
          total: 0,
          documentInfo: {},
        }
      );
    },
  });

  const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
    (e) => {
      setPagination({ page: 1 });
      handleInputChange(e);
    },
    [handleInputChange, setPagination],
  );

  const handleSetAvailable = useCallback(
    (a: number | undefined) => {
      setPagination({ page: 1 });
      setAvailable(a);
    },
    [setAvailable, setPagination],
  );

  return {
    data,
    loading,
    pagination,
    setPagination,
    searchString,
    handleInputChange: onInputChange,
    available,
    handleSetAvailable,
  };
};

export const useSelectChunkList = () => {
  const queryClient = useQueryClient();
  const data = queryClient.getQueriesData<{
    data: IChunk[];
    total: number;
    documentInfo: IKnowledgeFile;
  }>({ queryKey: ['fetchChunkList'] });

  return data?.at(-1)?.[1];
};

export const useDeleteChunk = () => {
  const queryClient = useQueryClient();
  const { setPaginationParams } = useSetPaginationParams();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['deleteChunk'],
    mutationFn: async (params: { chunkIds: string[]; doc_id: string }) => {
      const { data } = await kbService.rm_chunk(params);
      if (data.code === 0) {
        setPaginationParams(1);
        queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
      }
      return data?.code;
    },
  });

  return { data, loading, deleteChunk: mutateAsync };
};

export const useSwitchChunk = () => {
  const { t } = useTranslation();
  const queryClient = useQueryClient();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['switchChunk'],
    mutationFn: async (params: {
      chunk_ids?: string[];
      available_int?: number;
      doc_id: string;
    }) => {
      const { data } = await kbService.switch_chunk(params);
      if (data.code === 0) {
        message.success(t('message.modified'));
        queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
      }
      return data?.code;
    },
  });

  return { data, loading, switchChunk: mutateAsync };
};

export const useCreateChunk = () => {
  const { t } = useTranslation();
  const queryClient = useQueryClient();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['createChunk'],
    mutationFn: async (payload: any) => {
      let service = kbService.create_chunk;
      if (payload.chunk_id) {
        service = kbService.set_chunk;
      }
      const { data } = await service(payload);
      if (data.code === 0) {
        message.success(t('message.created'));
        setTimeout(() => {
          queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
        }, 1000); // Delay to ensure the list is updated
      }
      return data?.code;
    },
  });

  return { data, loading, createChunk: mutateAsync };
};

export const useFetchChunk = (chunkId?: string): ResponseType<any> => {
  const { data } = useQuery({
    queryKey: ['fetchChunk'],
    enabled: !!chunkId,
    initialData: {},
    gcTime: 0,
    queryFn: async () => {
      const data = await kbService.get_chunk({
        chunk_id: chunkId,
      });

      return data;
    },
  });

  return data;
};
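For context, a minimal sketch of how the removed hooks were typically consumed. The consuming component, its markup, the import path, and the chunk field names (`chunk_id`, `content_with_weight`, `documentInfo.id`) are assumptions for illustration only:

// Hypothetical consumer of the removed hooks (import path and field names assumed).
import React from 'react';
import { useDeleteChunk, useFetchNextChunkList } from '@/hooks/chunk-hooks';

function ChunkListPanel() {
  const { data, loading, handleInputChange } = useFetchNextChunkList();
  const { deleteChunk } = useDeleteChunk();

  if (loading) return <div>Loading...</div>;

  return (
    <div>
      <input placeholder="Search chunks" onChange={handleInputChange} />
      <ul>
        {data?.data?.map((chunk: any) => (
          <li key={chunk.chunk_id}>
            {chunk.content_with_weight}
            <button
              onClick={() =>
                deleteChunk({
                  chunkIds: [chunk.chunk_id],
                  doc_id: data?.documentInfo?.id ?? '',
                })
              }
            >
              Delete
            </button>
          </li>
        ))}
      </ul>
    </div>
  );
}
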
@ -1,482 +0,0 @@
|
||||
import { IReferenceChunk } from '@/interfaces/database/chat';
|
||||
import { IDocumentInfo } from '@/interfaces/database/document';
|
||||
import { IChunk } from '@/interfaces/database/knowledge';
|
||||
import {
|
||||
IChangeParserConfigRequestBody,
|
||||
IDocumentMetaRequestBody,
|
||||
} from '@/interfaces/request/document';
|
||||
import i18n from '@/locales/config';
|
||||
import kbService, { listDocument } from '@/services/knowledge-service';
|
||||
import api, { api_host } from '@/utils/api';
|
||||
import { buildChunkHighlights } from '@/utils/document-util';
|
||||
import { post } from '@/utils/request';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { UploadFile, message } from 'antd';
|
||||
import { get } from 'lodash';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { IHighlight } from 'react-pdf-highlighter';
|
||||
import { useParams } from 'umi';
|
||||
import {
|
||||
useGetPaginationWithRouter,
|
||||
useHandleSearchChange,
|
||||
} from './logic-hooks';
|
||||
import {
|
||||
useGetKnowledgeSearchParams,
|
||||
useSetPaginationParams,
|
||||
} from './route-hook';
|
||||
|
||||
export const useGetDocumentUrl = (documentId?: string) => {
|
||||
const getDocumentUrl = useCallback(
|
||||
(id?: string) => {
|
||||
return `${api_host}/document/get/${documentId || id}`;
|
||||
},
|
||||
[documentId],
|
||||
);
|
||||
|
||||
return getDocumentUrl;
|
||||
};
|
||||
|
||||
export const useGetChunkHighlights = (
|
||||
selectedChunk: IChunk | IReferenceChunk,
|
||||
) => {
|
||||
const [size, setSize] = useState({ width: 849, height: 1200 });
|
||||
|
||||
const highlights: IHighlight[] = useMemo(() => {
|
||||
return buildChunkHighlights(selectedChunk, size);
|
||||
}, [selectedChunk, size]);
|
||||
|
||||
const setWidthAndHeight = (width: number, height: number) => {
|
||||
setSize((pre) => {
|
||||
if (pre.height !== height || pre.width !== width) {
|
||||
return { height, width };
|
||||
}
|
||||
return pre;
|
||||
});
|
||||
};
|
||||
|
||||
return { highlights, setWidthAndHeight };
|
||||
};
|
||||
|
||||
export const useFetchNextDocumentList = () => {
|
||||
const { knowledgeId } = useGetKnowledgeSearchParams();
|
||||
const { searchString, handleInputChange } = useHandleSearchChange();
|
||||
const { pagination, setPagination } = useGetPaginationWithRouter();
|
||||
const { id } = useParams();
|
||||
|
||||
const { data, isFetching: loading } = useQuery<{
|
||||
docs: IDocumentInfo[];
|
||||
total: number;
|
||||
}>({
|
||||
queryKey: ['fetchDocumentList', searchString, pagination],
|
||||
initialData: { docs: [], total: 0 },
|
||||
refetchInterval: 15000,
|
||||
enabled: !!knowledgeId || !!id,
|
||||
queryFn: async () => {
|
||||
const ret = await listDocument({
|
||||
kb_id: knowledgeId || id,
|
||||
keywords: searchString,
|
||||
page_size: pagination.pageSize,
|
||||
page: pagination.current,
|
||||
});
|
||||
if (ret.data.code === 0) {
|
||||
return ret.data.data;
|
||||
}
|
||||
|
||||
return {
|
||||
docs: [],
|
||||
total: 0,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
|
||||
(e) => {
|
||||
setPagination({ page: 1 });
|
||||
handleInputChange(e);
|
||||
},
|
||||
[handleInputChange, setPagination],
|
||||
);
|
||||
|
||||
return {
|
||||
loading,
|
||||
searchString,
|
||||
documents: data.docs,
|
||||
pagination: { ...pagination, total: data?.total },
|
||||
handleInputChange: onInputChange,
|
||||
setPagination,
|
||||
};
|
||||
};
|
||||
|
||||
export const useSetNextDocumentStatus = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['updateDocumentStatus'],
|
||||
mutationFn: async ({
|
||||
status,
|
||||
documentId,
|
||||
}: {
|
||||
status: boolean;
|
||||
documentId: string | string[];
|
||||
}) => {
|
||||
const ids = Array.isArray(documentId) ? documentId : [documentId];
|
||||
const { data } = await kbService.document_change_status({
|
||||
doc_ids: ids,
|
||||
status: Number(status),
|
||||
});
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t('message.modified'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
}
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { setDocumentStatus: mutateAsync, data, loading };
|
||||
};
|
||||
|
||||
export const useSaveNextDocumentName = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['saveDocumentName'],
|
||||
mutationFn: async ({
|
||||
name,
|
||||
documentId,
|
||||
}: {
|
||||
name: string;
|
||||
documentId: string;
|
||||
}) => {
|
||||
const { data } = await kbService.document_rename({
|
||||
doc_id: documentId,
|
||||
name: name,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t('message.renamed'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { loading, saveName: mutateAsync, data };
|
||||
};
|
||||
|
||||
export const useCreateNextDocument = () => {
|
||||
const { knowledgeId } = useGetKnowledgeSearchParams();
|
||||
const { setPaginationParams, page } = useSetPaginationParams();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['createDocument'],
|
||||
mutationFn: async (name: string) => {
|
||||
const { data } = await kbService.document_create({
|
||||
name,
|
||||
kb_id: knowledgeId,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
if (page === 1) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
} else {
|
||||
setPaginationParams(); // fetch document list
|
||||
}
|
||||
|
||||
message.success(i18n.t('message.created'));
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { createDocument: mutateAsync, loading, data };
|
||||
};
|
||||
|
||||
export const useSetNextDocumentParser = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['setDocumentParser'],
|
||||
mutationFn: async ({
|
||||
parserId,
|
||||
documentId,
|
||||
parserConfig,
|
||||
}: {
|
||||
parserId: string;
|
||||
documentId: string;
|
||||
parserConfig: IChangeParserConfigRequestBody;
|
||||
}) => {
|
||||
const { data } = await kbService.document_change_parser({
|
||||
parser_id: parserId,
|
||||
doc_id: documentId,
|
||||
parser_config: parserConfig,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
|
||||
message.success(i18n.t('message.modified'));
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { setDocumentParser: mutateAsync, data, loading };
|
||||
};
|
||||
|
||||
export const useUploadNextDocument = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { knowledgeId } = useGetKnowledgeSearchParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['uploadDocument'],
|
||||
mutationFn: async (fileList: UploadFile[]) => {
|
||||
const formData = new FormData();
|
||||
formData.append('kb_id', knowledgeId);
|
||||
fileList.forEach((file: any) => {
|
||||
formData.append('file', file);
|
||||
});
|
||||
|
||||
try {
|
||||
const ret = await kbService.document_upload(formData);
|
||||
const code = get(ret, 'data.code');
|
||||
|
||||
if (code === 0 || code === 500) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
}
|
||||
return ret?.data;
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
return {
|
||||
code: 500,
|
||||
message: error + '',
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return { uploadDocument: mutateAsync, loading, data };
|
||||
};
|
||||
|
||||
export const useNextWebCrawl = () => {
|
||||
const { knowledgeId } = useGetKnowledgeSearchParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['webCrawl'],
|
||||
mutationFn: async ({ name, url }: { name: string; url: string }) => {
|
||||
const formData = new FormData();
|
||||
formData.append('name', name);
|
||||
formData.append('url', url);
|
||||
formData.append('kb_id', knowledgeId);
|
||||
|
||||
const ret = await kbService.web_crawl(formData);
|
||||
const code = get(ret, 'data.code');
|
||||
if (code === 0) {
|
||||
message.success(i18n.t('message.uploaded'));
|
||||
}
|
||||
|
||||
return code;
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
data,
|
||||
loading,
|
||||
webCrawl: mutateAsync,
|
||||
};
|
||||
};
|
||||
|
||||
export const useRunNextDocument = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['runDocumentByIds'],
|
||||
mutationFn: async ({
|
||||
documentIds,
|
||||
run,
|
||||
shouldDelete,
|
||||
}: {
|
||||
documentIds: string[];
|
||||
run: number;
|
||||
shouldDelete: boolean;
|
||||
}) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['fetchDocumentList'],
|
||||
});
|
||||
|
||||
const ret = await kbService.document_run({
|
||||
doc_ids: documentIds,
|
||||
run,
|
||||
delete: shouldDelete,
|
||||
});
|
||||
const code = get(ret, 'data.code');
|
||||
if (code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
message.success(i18n.t('message.operated'));
|
||||
}
|
||||
|
||||
return code;
|
||||
},
|
||||
});
|
||||
|
||||
return { runDocumentByIds: mutateAsync, loading, data };
|
||||
};
|
||||
|
||||
export const useFetchDocumentInfosByIds = () => {
|
||||
const [ids, setDocumentIds] = useState<string[]>([]);
|
||||
|
||||
const idList = useMemo(() => {
|
||||
return ids.filter((x) => typeof x === 'string' && x !== '');
|
||||
}, [ids]);
|
||||
|
||||
const { data } = useQuery<IDocumentInfo[]>({
|
||||
queryKey: ['fetchDocumentInfos', idList],
|
||||
enabled: idList.length > 0,
|
||||
initialData: [],
|
||||
queryFn: async () => {
|
||||
const { data } = await kbService.document_infos({ doc_ids: idList });
|
||||
if (data.code === 0) {
|
||||
return data.data;
|
||||
}
|
||||
|
||||
return [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, setDocumentIds };
|
||||
};
|
||||
|
||||
export const useFetchDocumentThumbnailsByIds = () => {
|
||||
const [ids, setDocumentIds] = useState<string[]>([]);
|
||||
const { data } = useQuery<Record<string, string>>({
|
||||
queryKey: ['fetchDocumentThumbnails', ids],
|
||||
enabled: ids.length > 0,
|
||||
initialData: {},
|
||||
queryFn: async () => {
|
||||
const { data } = await kbService.document_thumbnails({ doc_ids: ids });
|
||||
if (data.code === 0) {
|
||||
return data.data;
|
||||
}
|
||||
return {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, setDocumentIds };
|
||||
};
|
||||
|
||||
export const useRemoveNextDocument = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['removeDocument'],
|
||||
mutationFn: async (documentIds: string | string[]) => {
|
||||
const { data } = await kbService.document_rm({ doc_id: documentIds });
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t('message.deleted'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, removeDocument: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDeleteDocument = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteDocument'],
|
||||
mutationFn: async (documentIds: string[]) => {
|
||||
const data = await kbService.document_delete({ doc_ids: documentIds });
|
||||
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteDocument: mutateAsync };
|
||||
};
|
||||
|
||||
export const useParseDocument = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['parseDocument'],
|
||||
mutationFn: async (url: string) => {
|
||||
try {
|
||||
const data = await post(api.parse, { url });
|
||||
if (data?.code === 0) {
|
||||
message.success(i18n.t('message.uploaded'));
|
||||
}
|
||||
return data;
|
||||
} catch (error) {
|
||||
message.error('error');
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return { parseDocument: mutateAsync, data, loading };
|
||||
};
|
||||
|
||||
export const useSetDocumentMeta = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['setDocumentMeta'],
|
||||
mutationFn: async (params: IDocumentMetaRequestBody) => {
|
||||
try {
|
||||
const { data } = await kbService.setMeta({
|
||||
meta: params.meta,
|
||||
doc_id: params.documentId,
|
||||
});
|
||||
|
||||
if (data?.code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDocumentList'] });
|
||||
|
||||
message.success(i18n.t('message.modified'));
|
||||
}
|
||||
return data?.code;
|
||||
} catch (error) {
|
||||
message.error('error');
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return { setDocumentMeta: mutateAsync, data, loading };
|
||||
};
|
||||
@ -1,293 +0,0 @@
|
||||
import message from '@/components/ui/message';
|
||||
import { ResponseType } from '@/interfaces/database/base';
|
||||
import { IFolder } from '@/interfaces/database/file-manager';
|
||||
import { IConnectRequestBody } from '@/interfaces/request/file-manager';
|
||||
import fileManagerService from '@/services/file-manager-service';
|
||||
import { downloadFileFromBlob } from '@/utils/file-util';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { PaginationProps, UploadFile } from 'antd';
|
||||
import React, { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useSearchParams } from 'umi';
|
||||
import {
|
||||
useGetPaginationWithRouter,
|
||||
useHandleSearchChange,
|
||||
} from './logic-hooks';
|
||||
import { useSetPaginationParams } from './route-hook';
|
||||
|
||||
export const useGetFolderId = () => {
|
||||
const [searchParams] = useSearchParams();
|
||||
const id = searchParams.get('folderId') as string;
|
||||
|
||||
return id ?? '';
|
||||
};
|
||||
|
||||
export interface IListResult {
|
||||
searchString: string;
|
||||
handleInputChange: React.ChangeEventHandler<HTMLInputElement>;
|
||||
pagination: PaginationProps;
|
||||
setPagination: (pagination: { page: number; pageSize: number }) => void;
|
||||
loading: boolean;
|
||||
}
|
||||
|
||||
export const useFetchPureFileList = () => {
|
||||
const { mutateAsync, isPending: loading } = useMutation({
|
||||
mutationKey: ['fetchPureFileList'],
|
||||
gcTime: 0,
|
||||
|
||||
mutationFn: async (parentId: string) => {
|
||||
const { data } = await fileManagerService.listFile({
|
||||
parent_id: parentId,
|
||||
});
|
||||
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { loading, fetchList: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchFileList = (): ResponseType<any> & IListResult => {
|
||||
const { searchString, handleInputChange } = useHandleSearchChange();
|
||||
const { pagination, setPagination } = useGetPaginationWithRouter();
|
||||
const id = useGetFolderId();
|
||||
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: [
|
||||
'fetchFileList',
|
||||
{
|
||||
id,
|
||||
searchString,
|
||||
...pagination,
|
||||
},
|
||||
],
|
||||
initialData: {},
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await fileManagerService.listFile({
|
||||
parent_id: id,
|
||||
keywords: searchString,
|
||||
page_size: pagination.pageSize,
|
||||
page: pagination.current,
|
||||
});
|
||||
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
|
||||
(e) => {
|
||||
setPagination({ page: 1 });
|
||||
handleInputChange(e);
|
||||
},
|
||||
[handleInputChange, setPagination],
|
||||
);
|
||||
|
||||
return {
|
||||
...data,
|
||||
searchString,
|
||||
handleInputChange: onInputChange,
|
||||
pagination: { ...pagination, total: data?.data?.total },
|
||||
setPagination,
|
||||
loading,
|
||||
};
|
||||
};
|
||||
|
||||
export const useDeleteFile = () => {
|
||||
const { setPaginationParams } = useSetPaginationParams();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteFile'],
|
||||
mutationFn: async (params: { fileIds: string[]; parentId: string }) => {
|
||||
const { data } = await fileManagerService.removeFile(params);
|
||||
if (data.code === 0) {
|
||||
setPaginationParams(1); // TODO: There should be a better way to paginate the request list
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteFile: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDownloadFile = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['downloadFile'],
|
||||
mutationFn: async (params: { id: string; filename?: string }) => {
|
||||
const response = await fileManagerService.getFile({}, params.id);
|
||||
const blob = new Blob([response.data], { type: response.data.type });
|
||||
downloadFileFromBlob(blob, params.filename);
|
||||
},
|
||||
});
|
||||
return { data, loading, downloadFile: mutateAsync };
|
||||
};
|
||||
|
||||
export const useRenameFile = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['renameFile'],
|
||||
mutationFn: async (params: { fileId: string; name: string }) => {
|
||||
const { data } = await fileManagerService.renameFile(params);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.renamed'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, renameFile: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchParentFolderList = (): IFolder[] => {
|
||||
const id = useGetFolderId();
|
||||
const { data } = useQuery({
|
||||
queryKey: ['fetchParentFolderList', id],
|
||||
initialData: [],
|
||||
enabled: !!id,
|
||||
queryFn: async () => {
|
||||
const { data } = await fileManagerService.getAllParentFolder({
|
||||
fileId: id,
|
||||
});
|
||||
|
||||
return data?.data?.parent_folders?.toReversed() ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return data;
|
||||
};
|
||||
|
||||
export const useCreateFolder = () => {
|
||||
const { setPaginationParams } = useSetPaginationParams();
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['createFolder'],
|
||||
mutationFn: async (params: { parentId: string; name: string }) => {
|
||||
const { data } = await fileManagerService.createFolder({
|
||||
...params,
|
||||
type: 'folder',
|
||||
});
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.created'));
|
||||
setPaginationParams(1);
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, createFolder: mutateAsync };
|
||||
};
|
||||
|
||||
export const useUploadFile = () => {
|
||||
const { setPaginationParams } = useSetPaginationParams();
|
||||
const { t } = useTranslation();
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['uploadFile'],
|
||||
mutationFn: async (params: {
|
||||
fileList: UploadFile[];
|
||||
parentId: string;
|
||||
}) => {
|
||||
const fileList = params.fileList;
|
||||
const pathList = params.fileList.map(
|
||||
(file) => (file as any).webkitRelativePath,
|
||||
);
|
||||
const formData = new FormData();
|
||||
formData.append('parent_id', params.parentId);
|
||||
fileList.forEach((file: any, index: number) => {
|
||||
formData.append('file', file);
|
||||
formData.append('path', pathList[index]);
|
||||
});
|
||||
try {
|
||||
const ret = await fileManagerService.uploadFile(formData);
|
||||
if (ret?.data.code === 0) {
|
||||
message.success(t('message.uploaded'));
|
||||
setPaginationParams(1);
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return ret?.data?.code;
|
||||
} catch (error) {
|
||||
console.log('🚀 ~ useUploadFile ~ error:', error);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, uploadFile: mutateAsync };
|
||||
};
|
||||
|
||||
export const useConnectToKnowledge = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['connectFileToKnowledge'],
|
||||
mutationFn: async (params: IConnectRequestBody) => {
|
||||
const { data } = await fileManagerService.connectFileToKnowledge(params);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.operated'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, connectFileToKnowledge: mutateAsync };
|
||||
};
|
||||
|
||||
export interface IMoveFileBody {
|
||||
src_file_ids: string[];
|
||||
dest_file_id: string; // target folder id
|
||||
}
|
||||
|
||||
export const useMoveFile = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['moveFile'],
|
||||
mutationFn: async (params: IMoveFileBody) => {
|
||||
const { data } = await fileManagerService.moveFile(params);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.operated'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFileList'] });
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, moveFile: mutateAsync };
|
||||
};
|
||||
@ -1,316 +0,0 @@
|
||||
import { DSL, IFlow } from '@/interfaces/database/flow';
|
||||
import { IDebugSingleRequestBody } from '@/interfaces/request/flow';
|
||||
import i18n from '@/locales/config';
|
||||
import { useGetSharedChatSearchParams } from '@/pages/next-chats/hooks/use-send-shared-message';
|
||||
import flowService from '@/services/flow-service';
|
||||
import { buildMessageListWithUuid } from '@/utils/chat';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { message } from 'antd';
|
||||
import { set } from 'lodash';
|
||||
import get from 'lodash/get';
|
||||
import { useParams } from 'umi';
|
||||
|
||||
export const useFetchFlowList = (): { data: IFlow[]; loading: boolean } => {
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['fetchFlowList'],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await flowService.listCanvas();
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useFetchListVersion = (
|
||||
canvas_id: string,
|
||||
): {
|
||||
data: {
|
||||
created_at: string;
|
||||
title: string;
|
||||
id: string;
|
||||
}[];
|
||||
loading: boolean;
|
||||
} => {
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['fetchListVersion'],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await flowService.getListVersion({}, canvas_id);
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useFetchVersion = (
|
||||
version_id?: string,
|
||||
): {
|
||||
data?: IFlow;
|
||||
loading: boolean;
|
||||
} => {
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['fetchVersion', version_id],
|
||||
initialData: undefined,
|
||||
gcTime: 0,
|
||||
enabled: !!version_id, // Only call API when both values are provided
|
||||
queryFn: async () => {
|
||||
if (!version_id) return undefined;
|
||||
|
||||
const { data } = await flowService.getVersion({}, version_id);
|
||||
|
||||
return data?.data ?? undefined;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useFetchFlow = (): {
|
||||
data: IFlow;
|
||||
loading: boolean;
|
||||
refetch: () => void;
|
||||
} => {
|
||||
const { id } = useParams();
|
||||
const { sharedId } = useGetSharedChatSearchParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery({
|
||||
queryKey: ['flowDetail'],
|
||||
initialData: {} as IFlow,
|
||||
refetchOnReconnect: false,
|
||||
refetchOnMount: false,
|
||||
refetchOnWindowFocus: false,
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await flowService.getCanvas({}, sharedId || id);
|
||||
|
||||
const messageList = buildMessageListWithUuid(
|
||||
get(data, 'data.dsl.messages', []),
|
||||
);
|
||||
set(data, 'data.dsl.messages', messageList);
|
||||
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
};
|
||||
|
||||
export const useSettingFlow = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['SettingFlow'],
|
||||
mutationFn: async (params: any) => {
|
||||
const ret = await flowService.settingCanvas(params);
|
||||
if (ret?.data?.code === 0) {
|
||||
message.success('success');
|
||||
} else {
|
||||
message.error(ret?.data?.data);
|
||||
}
|
||||
return ret;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, settingFlow: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchFlowSSE = (): {
|
||||
data: IFlow;
|
||||
loading: boolean;
|
||||
refetch: () => void;
|
||||
} => {
|
||||
const { sharedId } = useGetSharedChatSearchParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery({
|
||||
queryKey: ['flowDetailSSE'],
|
||||
initialData: {} as IFlow,
|
||||
refetchOnReconnect: false,
|
||||
refetchOnMount: false,
|
||||
refetchOnWindowFocus: false,
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
if (!sharedId) return {};
|
||||
const { data } = await flowService.getCanvasSSE({}, sharedId);
|
||||
|
||||
const messageList = buildMessageListWithUuid(
|
||||
get(data, 'data.dsl.messages', []),
|
||||
);
|
||||
set(data, 'data.dsl.messages', messageList);
|
||||
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
};
|
||||
|
||||
export const useSetFlow = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['setFlow'],
|
||||
mutationFn: async (params: {
|
||||
id?: string;
|
||||
title?: string;
|
||||
dsl?: DSL;
|
||||
avatar?: string;
|
||||
}) => {
|
||||
const { data = {} } = await flowService.setCanvas(params);
|
||||
if (data.code === 0) {
|
||||
message.success(
|
||||
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
||||
);
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchFlowList'] });
|
||||
}
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, setFlow: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDeleteFlow = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteFlow'],
|
||||
mutationFn: async (canvasIds: string[]) => {
|
||||
const { data } = await flowService.removeCanvas({ canvasIds });
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['infiniteFetchFlowListTeam'],
|
||||
});
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteFlow: mutateAsync };
|
||||
};
|
||||
|
||||
export const useRunFlow = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['runFlow'],
|
||||
mutationFn: async (params: { id: string; dsl: DSL }) => {
|
||||
const { data } = await flowService.runCanvas(params);
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t(`message.modified`));
|
||||
}
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, runFlow: mutateAsync };
|
||||
};
|
||||
|
||||
export const useResetFlow = () => {
|
||||
const { id } = useParams();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['resetFlow'],
|
||||
mutationFn: async () => {
|
||||
const { data } = await flowService.resetCanvas({ id });
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, resetFlow: mutateAsync };
|
||||
};
|
||||
|
||||
export const useTestDbConnect = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['testDbConnect'],
|
||||
mutationFn: async (params: any) => {
|
||||
const ret = await flowService.testDbConnect(params);
|
||||
if (ret?.data?.code === 0) {
|
||||
message.success(ret?.data?.data);
|
||||
} else {
|
||||
message.error(ret?.data?.data);
|
||||
}
|
||||
return ret;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, testDbConnect: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchInputElements = (componentId?: string) => {
|
||||
const { id } = useParams();
|
||||
|
||||
const { data, isPending: loading } = useQuery({
|
||||
queryKey: ['fetchInputElements', id, componentId],
|
||||
initialData: [],
|
||||
enabled: !!id && !!componentId,
|
||||
retryOnMount: false,
|
||||
refetchOnWindowFocus: false,
|
||||
refetchOnReconnect: false,
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
try {
|
||||
const { data } = await flowService.getInputElements({
|
||||
id,
|
||||
component_id: componentId,
|
||||
});
|
||||
return data?.data ?? [];
|
||||
} catch (error) {
|
||||
console.log('🚀 ~ queryFn: ~ error:', error);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useDebugSingle = () => {
|
||||
const { id } = useParams();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['debugSingle'],
|
||||
mutationFn: async (params: IDebugSingleRequestBody) => {
|
||||
const ret = await flowService.debugSingle({ id, ...params });
|
||||
if (ret?.data?.code !== 0) {
|
||||
message.error(ret?.data?.message);
|
||||
}
|
||||
return ret?.data?.data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, debugSingle: mutateAsync };
|
||||
};
|
||||
@ -1,498 +0,0 @@
|
||||
import { ResponsePostType } from '@/interfaces/database/base';
|
||||
import {
|
||||
IKnowledge,
|
||||
IKnowledgeGraph,
|
||||
IRenameTag,
|
||||
ITestingResult,
|
||||
} from '@/interfaces/database/knowledge';
|
||||
import i18n from '@/locales/config';
|
||||
import kbService, {
|
||||
deleteKnowledgeGraph,
|
||||
getKnowledgeGraph,
|
||||
listDataset,
|
||||
listTag,
|
||||
removeTag,
|
||||
renameTag,
|
||||
} from '@/services/knowledge-service';
|
||||
import {
|
||||
useInfiniteQuery,
|
||||
useIsMutating,
|
||||
useMutation,
|
||||
useMutationState,
|
||||
useQuery,
|
||||
useQueryClient,
|
||||
} from '@tanstack/react-query';
|
||||
import { useDebounce } from 'ahooks';
|
||||
import { message } from 'antd';
|
||||
import { useState } from 'react';
|
||||
import { useParams, useSearchParams } from 'umi';
|
||||
import { useHandleSearchChange } from './logic-hooks';
|
||||
import { useSetPaginationParams } from './route-hook';
|
||||
|
||||
export const useKnowledgeBaseId = (): string => {
|
||||
const [searchParams] = useSearchParams();
|
||||
const { id } = useParams();
|
||||
const knowledgeBaseId = searchParams.get('id') || id;
|
||||
|
||||
return knowledgeBaseId || '';
|
||||
};
|
||||
|
||||
export const useFetchKnowledgeBaseConfiguration = () => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
|
||||
const { data, isFetching: loading } = useQuery<IKnowledge>({
|
||||
queryKey: ['fetchKnowledgeDetail'],
|
||||
initialData: {} as IKnowledge,
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await kbService.get_kb_detail({
|
||||
kb_id: knowledgeBaseId,
|
||||
});
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useFetchKnowledgeList = (
|
||||
shouldFilterListWithoutDocument: boolean = false,
|
||||
): {
|
||||
list: IKnowledge[];
|
||||
loading: boolean;
|
||||
} => {
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['fetchKnowledgeList'],
|
||||
initialData: [],
|
||||
gcTime: 0, // https://tanstack.com/query/latest/docs/framework/react/guides/caching?from=reactQueryV3
|
||||
queryFn: async () => {
|
||||
const { data } = await listDataset();
|
||||
const list = data?.data?.kbs ?? [];
|
||||
return shouldFilterListWithoutDocument
|
||||
? list.filter((x: IKnowledge) => x.chunk_num > 0)
|
||||
: list;
|
||||
},
|
||||
});
|
||||
|
||||
return { list: data, loading };
|
||||
};
|
||||
|
||||
export const useSelectKnowledgeOptions = () => {
|
||||
const { list } = useFetchKnowledgeList();
|
||||
|
||||
const options = list?.map((item) => ({
|
||||
label: item.name,
|
||||
value: item.id,
|
||||
}));
|
||||
|
||||
return options;
|
||||
};
|
||||
|
||||
export const useInfiniteFetchKnowledgeList = () => {
|
||||
const { searchString, handleInputChange } = useHandleSearchChange();
|
||||
const debouncedSearchString = useDebounce(searchString, { wait: 500 });
|
||||
|
||||
const PageSize = 30;
|
||||
|
||||
const {
|
||||
data,
|
||||
error,
|
||||
fetchNextPage,
|
||||
hasNextPage,
|
||||
isFetching,
|
||||
isFetchingNextPage,
|
||||
status,
|
||||
} = useInfiniteQuery({
|
||||
queryKey: ['infiniteFetchKnowledgeList', debouncedSearchString],
|
||||
queryFn: async ({ pageParam }) => {
|
||||
const { data } = await listDataset({
|
||||
page: pageParam,
|
||||
page_size: PageSize,
|
||||
keywords: debouncedSearchString,
|
||||
});
|
||||
const list = data?.data ?? [];
|
||||
return list;
|
||||
},
|
||||
initialPageParam: 1,
|
||||
getNextPageParam: (lastPage, pages, lastPageParam) => {
|
||||
if (lastPageParam * PageSize <= lastPage.total) {
|
||||
return lastPageParam + 1;
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
});
|
||||
return {
|
||||
data,
|
||||
loading: isFetching,
|
||||
error,
|
||||
fetchNextPage,
|
||||
hasNextPage,
|
||||
isFetching,
|
||||
isFetchingNextPage,
|
||||
status,
|
||||
handleInputChange,
|
||||
searchString,
|
||||
};
|
||||
};
|
||||
|
||||
export const useCreateKnowledge = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['infiniteFetchKnowledgeList'],
|
||||
mutationFn: async (params: { id?: string; name: string }) => {
|
||||
const { data = {} } = await kbService.createKb(params);
|
||||
if (data.code === 0) {
|
||||
message.success(
|
||||
i18n.t(`message.${params?.id ? 'modified' : 'created'}`),
|
||||
);
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeList'] });
|
||||
}
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, createKnowledge: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDeleteKnowledge = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteKnowledge'],
|
||||
mutationFn: async (id: string) => {
|
||||
const { data } = await kbService.rmKb({ kb_id: id });
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t(`message.deleted`));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['infiniteFetchKnowledgeList'],
|
||||
});
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteKnowledge: mutateAsync };
|
||||
};
|
||||
|
||||
//#region knowledge configuration
|
||||
|
||||
export const useUpdateKnowledge = (shouldFetchList = false) => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['saveKnowledge'],
|
||||
mutationFn: async (params: Record<string, any>) => {
|
||||
const { data = {} } = await kbService.updateKb({
|
||||
kb_id: params?.kb_id ? params?.kb_id : knowledgeBaseId,
|
||||
...params,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t(`message.updated`));
|
||||
if (shouldFetchList) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['fetchKnowledgeListByPage'],
|
||||
});
|
||||
} else {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchKnowledgeDetail'] });
|
||||
}
|
||||
}
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, saveKnowledgeConfiguration: mutateAsync };
|
||||
};
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Retrieval testing
|
||||
|
||||
export const useTestChunkRetrieval = (): ResponsePostType<ITestingResult> & {
|
||||
testChunk: (...params: any[]) => void;
|
||||
} => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
const { page, size: pageSize } = useSetPaginationParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['testChunk'], // This method is invalid
|
||||
gcTime: 0,
|
||||
mutationFn: async (values: any) => {
|
||||
const { data } = await kbService.retrieval_test({
|
||||
...values,
|
||||
kb_id: values.kb_id ?? knowledgeBaseId,
|
||||
page,
|
||||
size: pageSize,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
const res = data.data;
|
||||
return {
|
||||
...res,
|
||||
documents: res.doc_aggs,
|
||||
};
|
||||
}
|
||||
return (
|
||||
data?.data ?? {
|
||||
chunks: [],
|
||||
documents: [],
|
||||
total: 0,
|
||||
}
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
data: data ?? { chunks: [], documents: [], total: 0 },
|
||||
loading,
|
||||
testChunk: mutateAsync,
|
||||
};
|
||||
};

export const useTestChunkAllRetrieval = (): ResponsePostType<ITestingResult> & {
  testChunkAll: (...params: any[]) => void;
} => {
  const knowledgeBaseId = useKnowledgeBaseId();
  const { page, size: pageSize } = useSetPaginationParams();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['testChunkAll'], // This method is invalid
    gcTime: 0,
    mutationFn: async (values: any) => {
      const { data } = await kbService.retrieval_test({
        ...values,
        kb_id: values.kb_id ?? knowledgeBaseId,
        doc_ids: [],
        page,
        size: pageSize,
      });
      if (data.code === 0) {
        const res = data.data;
        return {
          ...res,
          documents: res.doc_aggs,
        };
      }
      return (
        data?.data ?? {
          chunks: [],
          documents: [],
          total: 0,
        }
      );
    },
  });

  return {
    data: data ?? { chunks: [], documents: [], total: 0 },
    loading,
    testChunkAll: mutateAsync,
  };
};

export const useChunkIsTesting = () => {
  return useIsMutating({ mutationKey: ['testChunk'] }) > 0;
};

export const useSelectTestingResult = (): ITestingResult => {
  const data = useMutationState({
    filters: { mutationKey: ['testChunk'] },
    select: (mutation) => {
      return mutation.state.data;
    },
  });
  return (data.at(-1) ?? {
    chunks: [],
    documents: [],
    total: 0,
  }) as ITestingResult;
};

export const useSelectIsTestingSuccess = () => {
  const status = useMutationState({
    filters: { mutationKey: ['testChunk'] },
    select: (mutation) => {
      return mutation.state.status;
    },
  });
  return status.at(-1) === 'success';
};

export const useAllTestingSuccess = () => {
  const status = useMutationState({
    filters: { mutationKey: ['testChunkAll'] },
    select: (mutation) => {
      return mutation.state.status;
    },
  });
  return status.at(-1) === 'success';
};

export const useAllTestingResult = (): ITestingResult => {
  const data = useMutationState({
    filters: { mutationKey: ['testChunkAll'] },
    select: (mutation) => {
      return mutation.state.data;
    },
  });
  return (data.at(-1) ?? {
    chunks: [],
    documents: [],
    total: 0,
  }) as ITestingResult;
};
//#endregion

//#region tags

export const useFetchTagList = () => {
  const knowledgeBaseId = useKnowledgeBaseId();

  const { data, isFetching: loading } = useQuery<Array<[string, number]>>({
    queryKey: ['fetchTagList'],
    initialData: [],
    gcTime: 0, // https://tanstack.com/query/latest/docs/framework/react/guides/caching?from=reactQueryV3
    queryFn: async () => {
      const { data } = await listTag(knowledgeBaseId);
      const list = data?.data || [];
      return list;
    },
  });

  return { list: data, loading };
};

export const useDeleteTag = () => {
  const knowledgeBaseId = useKnowledgeBaseId();

  const queryClient = useQueryClient();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['deleteTag'],
    mutationFn: async (tags: string[]) => {
      const { data } = await removeTag(knowledgeBaseId, tags);
      if (data.code === 0) {
        message.success(i18n.t(`message.deleted`));
        queryClient.invalidateQueries({
          queryKey: ['fetchTagList'],
        });
      }
      return data?.data ?? [];
    },
  });

  return { data, loading, deleteTag: mutateAsync };
};

export const useRenameTag = () => {
  const knowledgeBaseId = useKnowledgeBaseId();

  const queryClient = useQueryClient();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['renameTag'],
    mutationFn: async (params: IRenameTag) => {
      const { data } = await renameTag(knowledgeBaseId, params);
      if (data.code === 0) {
        message.success(i18n.t(`message.modified`));
        queryClient.invalidateQueries({
          queryKey: ['fetchTagList'],
        });
      }
      return data?.data ?? [];
    },
  });

  return { data, loading, renameTag: mutateAsync };
};
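A hedged sketch of how the three tag hooks might be wired together in a tag manager. The tag list comes back as `[tag, frequency]` tuples per the query type above; the `IRenameTag` field names and the `Spinner` component are assumptions used only for illustration.

// Hypothetical tag manager; only the hook signatures above are relied on.
function TagManager() {
  const { list, loading } = useFetchTagList();
  const { deleteTag } = useDeleteTag();
  const { renameTag } = useRenameTag();

  if (loading) return <Spinner />; // Spinner is a placeholder component

  return (
    <ul>
      {list.map(([tag, count]) => (
        <li key={tag}>
          {tag} ({count})
          {/* IRenameTag field names are assumptions */}
          <button onClick={() => renameTag({ fromTag: tag, toTag: `${tag}-new` })}>
            Rename
          </button>
          <button onClick={() => deleteTag([tag])}>Delete</button>
        </li>
      ))}
    </ul>
  );
}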

export const useTagIsRenaming = () => {
  return useIsMutating({ mutationKey: ['renameTag'] }) > 0;
};

export const useFetchTagListByKnowledgeIds = () => {
  const [knowledgeIds, setKnowledgeIds] = useState<string[]>([]);

  const { data, isFetching: loading } = useQuery<Array<[string, number]>>({
    queryKey: ['fetchTagListByKnowledgeIds'],
    enabled: knowledgeIds.length > 0,
    initialData: [],
    gcTime: 0, // https://tanstack.com/query/latest/docs/framework/react/guides/caching?from=reactQueryV3
    queryFn: async () => {
      const { data } = await kbService.listTagByKnowledgeIds({
        kb_ids: knowledgeIds.join(','),
      });
      const list = data?.data || [];
      return list;
    },
  });

  return { list: data, loading, setKnowledgeIds };
};

//#endregion

export function useFetchKnowledgeGraph() {
  const knowledgeBaseId = useKnowledgeBaseId();

  const { data, isFetching: loading } = useQuery<IKnowledgeGraph>({
    queryKey: ['fetchKnowledgeGraph', knowledgeBaseId],
    initialData: { graph: {}, mind_map: {} } as IKnowledgeGraph,
    enabled: !!knowledgeBaseId,
    gcTime: 0,
    queryFn: async () => {
      const { data } = await getKnowledgeGraph(knowledgeBaseId);
      return data?.data;
    },
  });

  return { data, loading };
}

export const useRemoveKnowledgeGraph = () => {
  const knowledgeBaseId = useKnowledgeBaseId();

  const queryClient = useQueryClient();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['removeKnowledgeGraph'],
    mutationFn: async () => {
      const { data } = await deleteKnowledgeGraph(knowledgeBaseId);
      if (data.code === 0) {
        message.success(i18n.t(`message.deleted`));
        queryClient.invalidateQueries({
          queryKey: ['fetchKnowledgeGraph'],
        });
      }
      return data?.code;
    },
  });

  return { data, loading, removeKnowledgeGraph: mutateAsync };
};
@@ -29,7 +29,7 @@ import { useTranslation } from 'react-i18next';
import { v4 as uuid } from 'uuid';
import { useTranslate } from './common-hooks';
import { useSetPaginationParams } from './route-hook';
import { useFetchTenantInfo, useSaveSetting } from './user-setting-hooks';
import { useFetchTenantInfo, useSaveSetting } from './use-user-setting-request';

export function usePrevious<T>(value: T) {
  const ref = useRef<T>();

@@ -752,3 +752,37 @@ export function useCancelConversation() {

  return { data, loading, cancelConversation: mutateAsync };
}

export const useFetchFlowSSE = (): {
  data: IFlow;
  loading: boolean;
  refetch: () => void;
} => {
  const { sharedId } = useGetSharedChatSearchParams();

  const {
    data,
    isFetching: loading,
    refetch,
  } = useQuery({
    queryKey: ['flowDetailSSE'],
    initialData: {} as IFlow,
    refetchOnReconnect: false,
    refetchOnMount: false,
    refetchOnWindowFocus: false,
    gcTime: 0,
    queryFn: async () => {
      if (!sharedId) return {};
      const { data } = await agentService.getCanvasSSE(sharedId);

      const messageList = buildMessageListWithUuid(
        get(data, 'data.dsl.messages', []),
      );
      set(data, 'data.dsl.messages', messageList);

      return data?.data ?? {};
    },
  });

  return { data, loading, refetch };
};

@@ -459,7 +459,7 @@ export function useUploadAndParseFile() {

      const { data } = await chatService.uploadAndParse(
        {
          url: api.upload_and_parse(conversationId || id),
          url: api.upload_and_parse,
          signal: controller.current.signal,
          data: formData,
          onUploadProgress: ({ progress }) => {

@@ -1,17 +1,108 @@
import message from '@/components/ui/message';
import { ResponseGetType } from '@/interfaces/database/base';
import { ResponseGetType, ResponseType } from '@/interfaces/database/base';
import { IChunk, IKnowledgeFile } from '@/interfaces/database/knowledge';
import kbService from '@/services/knowledge-service';
import { useMutation, useQuery } from '@tanstack/react-query';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { PaginationProps, message } from 'antd';
import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { IChunkListResult } from './chunk-hooks';
import {
  useGetPaginationWithRouter,
  useHandleSearchChange,
} from './logic-hooks';
import { useGetKnowledgeSearchParams } from './route-hook';
import {
  useGetKnowledgeSearchParams,
  useSetPaginationParams,
} from './route-hook';

export interface IChunkListResult {
  searchString?: string;
  handleInputChange?: React.ChangeEventHandler<HTMLInputElement>;
  pagination: PaginationProps;
  setPagination?: (pagination: { page: number; pageSize: number }) => void;
  available: number | undefined;
  handleSetAvailable: (available: number | undefined) => void;
}

export const useSelectChunkList = () => {
  const queryClient = useQueryClient();
  const data = queryClient.getQueriesData<{
    data: IChunk[];
    total: number;
    documentInfo: IKnowledgeFile;
  }>({ queryKey: ['fetchChunkList'] });

  return data?.at(-1)?.[1];
};

export const useDeleteChunk = () => {
  const queryClient = useQueryClient();
  const { setPaginationParams } = useSetPaginationParams();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['deleteChunk'],
    mutationFn: async (params: { chunkIds: string[]; doc_id: string }) => {
      const { data } = await kbService.rm_chunk(params);
      if (data.code === 0) {
        setPaginationParams(1);
        queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
      }
      return data?.code;
    },
  });

  return { data, loading, deleteChunk: mutateAsync };
};

export const useCreateChunk = () => {
  const { t } = useTranslation();
  const queryClient = useQueryClient();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: ['createChunk'],
    mutationFn: async (payload: any) => {
      let service = kbService.create_chunk;
      if (payload.chunk_id) {
        service = kbService.set_chunk;
      }
      const { data } = await service(payload);
      if (data.code === 0) {
        message.success(t('message.created'));
        setTimeout(() => {
          queryClient.invalidateQueries({ queryKey: ['fetchChunkList'] });
        }, 1000); // Delay to ensure the list is updated
      }
      return data?.code;
    },
  });

  return { data, loading, createChunk: mutateAsync };
};
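Worth noting: useCreateChunk doubles as an update hook, routing to kbService.set_chunk whenever a chunk_id is present. A hedged sketch of a save handler built on that behavior; the payload keys other than doc_id and chunk_id are assumptions.

// Hypothetical save handler; createChunk comes from useCreateChunk above.
async function saveChunk(
  createChunk: (payload: any) => Promise<number>,
  values: { doc_id: string; content_with_weight: string; chunk_id?: string },
) {
  // With chunk_id the hook calls kbService.set_chunk (update);
  // without it, kbService.create_chunk is used.
  const code = await createChunk(values);
  return code === 0;
}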

export const useFetchChunk = (chunkId?: string): ResponseType<any> => {
  const { data } = useQuery({
    queryKey: ['fetchChunk'],
    enabled: !!chunkId,
    initialData: {},
    gcTime: 0,
    queryFn: async () => {
      const data = await kbService.get_chunk({
        chunk_id: chunkId,
      });

      return data;
    },
  });

  return data;
};

export const useFetchNextChunkList = (
  enabled = true,

@@ -1,20 +1,27 @@
import { useHandleFilterSubmit } from '@/components/list-filter-bar/use-handle-filter-submit';
import { post } from '@/utils/next-request';

import message from '@/components/ui/message';
import { ResponseType } from '@/interfaces/database/base';
import { IReferenceChunk } from '@/interfaces/database/chat';
import {
  IDocumentInfo,
  IDocumentInfoFilter,
} from '@/interfaces/database/document';
import { IChunk } from '@/interfaces/database/knowledge';
import {
  IChangeParserConfigRequestBody,
  IDocumentMetaRequestBody,
} from '@/interfaces/request/document';
import i18n from '@/locales/config';
import kbService, { listDocument } from '@/services/knowledge-service';
import api, { api_host } from '@/utils/api';
import { buildChunkHighlights } from '@/utils/document-util';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { get } from 'lodash';
import { useCallback, useMemo, useState } from 'react';
import { IHighlight } from 'react-pdf-highlighter';
import { useParams } from 'umi';
import {
  useGetPaginationWithRouter,
@@ -36,6 +43,9 @@ export const enum DocumentApiAction {
  SetDocumentMeta = 'setDocumentMeta',
  FetchDocumentFilter = 'fetchDocumentFilter',
  CreateDocument = 'createDocument',
  WebCrawl = 'webCrawl',
  FetchDocumentThumbnails = 'fetchDocumentThumbnails',
  ParseDocument = 'parseDocument',
}

export const useUploadNextDocument = () => {
@@ -430,3 +440,108 @@ export const useCreateDocument = () => {

  return { createDocument: mutateAsync, loading, data };
};

export const useGetDocumentUrl = (documentId?: string) => {
  const getDocumentUrl = useCallback(
    (id?: string) => {
      return `${api_host}/document/get/${documentId || id}`;
    },
    [documentId],
  );

  return getDocumentUrl;
};

export const useGetChunkHighlights = (
  selectedChunk: IChunk | IReferenceChunk,
) => {
  const [size, setSize] = useState({ width: 849, height: 1200 });

  const highlights: IHighlight[] = useMemo(() => {
    return buildChunkHighlights(selectedChunk, size);
  }, [selectedChunk, size]);

  const setWidthAndHeight = (width: number, height: number) => {
    setSize((pre) => {
      if (pre.height !== height || pre.width !== width) {
        return { height, width };
      }
      return pre;
    });
  };

  return { highlights, setWidthAndHeight };
};

export const useNextWebCrawl = () => {
  const { knowledgeId } = useGetKnowledgeSearchParams();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [DocumentApiAction.WebCrawl],
    mutationFn: async ({ name, url }: { name: string; url: string }) => {
      const formData = new FormData();
      formData.append('name', name);
      formData.append('url', url);
      formData.append('kb_id', knowledgeId);

      const ret = await kbService.web_crawl(formData);
      const code = get(ret, 'data.code');
      if (code === 0) {
        message.success(i18n.t('message.uploaded'));
      }

      return code;
    },
  });

  return {
    data,
    loading,
    webCrawl: mutateAsync,
  };
};
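A minimal sketch of calling the crawl mutation, assuming only the `{ name, url }` signature shown above; the example name and URL are placeholders.

// Hypothetical helper; webCrawl is the mutateAsync returned by useNextWebCrawl.
async function crawlPage(
  webCrawl: (params: { name: string; url: string }) => Promise<number>,
) {
  const code = await webCrawl({
    name: 'RAGFlow docs',
    url: 'https://ragflow.io/docs',
  });
  return code === 0; // 0 means the page was accepted for parsing
}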

export const useFetchDocumentThumbnailsByIds = () => {
  const [ids, setDocumentIds] = useState<string[]>([]);
  const { data } = useQuery<Record<string, string>>({
    queryKey: [DocumentApiAction.FetchDocumentThumbnails, ids],
    enabled: ids.length > 0,
    initialData: {},
    queryFn: async () => {
      const { data } = await kbService.document_thumbnails({ doc_ids: ids });
      if (data.code === 0) {
        return data.data;
      }
      return {};
    },
  });

  return { data, setDocumentIds };
};

export const useParseDocument = () => {
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [DocumentApiAction.ParseDocument],
    mutationFn: async (url: string) => {
      try {
        const { data } = await post(api.parse, { url });
        if (data?.code === 0) {
          message.success(i18n.t('message.uploaded'));
        }
        return data;
      } catch (error) {
        message.error('error');
      }
    },
  });

  return { parseDocument: mutateAsync, data, loading };
};

@@ -3,7 +3,9 @@ import {
  IFetchFileListResult,
  IFolder,
} from '@/interfaces/database/file-manager';
import { IConnectRequestBody } from '@/interfaces/request/file-manager';
import fileManagerService from '@/services/file-manager-service';
import { downloadFileFromBlob } from '@/utils/file-util';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { PaginationProps } from 'antd';
@@ -23,6 +25,10 @@ export const enum FileApiAction {
  CreateFolder = 'createFolder',
  FetchParentFolderList = 'fetchParentFolderList',
  DeleteFile = 'deleteFile',
  DownloadFile = 'downloadFile',
  RenameFile = 'renameFile',
  ConnectFileToKnowledge = 'connectFileToKnowledge',
  FetchPureFileList = 'fetchPureFileList',
}

export const useGetFolderId = () => {
@@ -229,3 +235,85 @@ export const useDeleteFile = () => {

  return { data, loading, deleteFile: mutateAsync };
};

export const useDownloadFile = () => {
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [FileApiAction.DownloadFile],
    mutationFn: async (params: { id: string; filename?: string }) => {
      const response = await fileManagerService.getFile({}, params.id);
      const blob = new Blob([response.data], { type: response.data.type });
      downloadFileFromBlob(blob, params.filename);
    },
  });
  return { data, loading, downloadFile: mutateAsync };
};
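A hedged sketch of a table action wired to this hook; it relies only on the `{ id, filename }` parameter shape shown above, and the surrounding component is illustrative.

// Hypothetical row action built on useDownloadFile.
function DownloadButton({ id, name }: { id: string; name: string }) {
  const { downloadFile, loading } = useDownloadFile();

  return (
    <button
      disabled={loading}
      onClick={() => downloadFile({ id, filename: name })}
    >
      Download
    </button>
  );
}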

export const useRenameFile = () => {
  const queryClient = useQueryClient();
  const { t } = useTranslation();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [FileApiAction.RenameFile],
    mutationFn: async (params: { fileId: string; name: string }) => {
      const { data } = await fileManagerService.renameFile(params);
      if (data.code === 0) {
        message.success(t('message.renamed'));
        queryClient.invalidateQueries({
          queryKey: [FileApiAction.FetchFileList],
        });
      }
      return data.code;
    },
  });

  return { data, loading, renameFile: mutateAsync };
};

export const useConnectToKnowledge = () => {
  const queryClient = useQueryClient();
  const { t } = useTranslation();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [FileApiAction.ConnectFileToKnowledge],
    mutationFn: async (params: IConnectRequestBody) => {
      const { data } = await fileManagerService.connectFileToKnowledge(params);
      if (data.code === 0) {
        message.success(t('message.operated'));
        queryClient.invalidateQueries({
          queryKey: [FileApiAction.FetchFileList],
        });
      }
      return data.code;
    },
  });

  return { data, loading, connectFileToKnowledge: mutateAsync };
};

export const useFetchPureFileList = () => {
  const { mutateAsync, isPending: loading } = useMutation({
    mutationKey: [FileApiAction.FetchPureFileList],
    gcTime: 0,

    mutationFn: async (parentId: string) => {
      const { data } = await fileManagerService.listFile({
        parent_id: parentId,
      });

      return data;
    },
  });

  return { loading, fetchList: mutateAsync };
};
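This hook deliberately uses a mutation rather than a query so callers can fetch a folder's contents on demand (for example when a move dialog opens). A hedged sketch under that assumption; the response shape accessed here is a guess, not confirmed by the diff.

// Hypothetical lazy loader; fetchList is the mutateAsync from useFetchPureFileList.
async function loadFolderChildren(
  fetchList: (parentId: string) => Promise<any>,
  parentId: string,
) {
  const res = await fetchList(parentId);
  return res?.data?.files ?? []; // response shape is an assumption
}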

@@ -1,10 +1,13 @@
import { useHandleFilterSubmit } from '@/components/list-filter-bar/use-handle-filter-submit';
import message from '@/components/ui/message';
import { ResponsePostType } from '@/interfaces/database/base';
import {
  IKnowledge,
  IKnowledgeGraph,
  IKnowledgeResult,
  INextTestingResult,
  IRenameTag,
  ITestingResult,
} from '@/interfaces/database/knowledge';
import { ITestRetrievalRequestBody } from '@/interfaces/request/knowledge';
import i18n from '@/locales/config';
@@ -12,8 +15,17 @@ import kbService, {
  deleteKnowledgeGraph,
  getKnowledgeGraph,
  listDataset,
  listTag,
  removeTag,
  renameTag,
} from '@/services/knowledge-service';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import {
  useIsMutating,
  useMutation,
  useMutationState,
  useQuery,
  useQueryClient,
} from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useParams, useSearchParams } from 'umi';
@@ -21,6 +33,7 @@ import {
  useGetPaginationWithRouter,
  useHandleSearchChange,
} from './logic-hooks';
import { useSetPaginationParams } from './route-hook';

export const enum KnowledgeApiAction {
  TestRetrieval = 'testRetrieval',
@ -354,3 +367,252 @@ export const useFetchKnowledgeList = (
|
||||
|
||||
return { list: data, loading };
|
||||
};
|
||||
|
||||
export const useSelectKnowledgeOptions = () => {
|
||||
const { list } = useFetchKnowledgeList();
|
||||
|
||||
const options = list?.map((item) => ({
|
||||
label: item.name,
|
||||
value: item.id,
|
||||
}));
|
||||
|
||||
return options;
|
||||
};
|
||||
|
||||
//#region tags
|
||||
export const useRenameTag = () => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['renameTag'],
|
||||
mutationFn: async (params: IRenameTag) => {
|
||||
const { data } = await renameTag(knowledgeBaseId, params);
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t(`message.modified`));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['fetchTagList'],
|
||||
});
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, renameTag: mutateAsync };
|
||||
};
|
||||
|
||||
export const useTagIsRenaming = () => {
|
||||
return useIsMutating({ mutationKey: ['renameTag'] }) > 0;
|
||||
};
|
||||
|
||||
export const useFetchTagListByKnowledgeIds = () => {
|
||||
const [knowledgeIds, setKnowledgeIds] = useState<string[]>([]);
|
||||
|
||||
const { data, isFetching: loading } = useQuery<Array<[string, number]>>({
|
||||
queryKey: ['fetchTagListByKnowledgeIds'],
|
||||
enabled: knowledgeIds.length > 0,
|
||||
initialData: [],
|
||||
gcTime: 0, // https://tanstack.com/query/latest/docs/framework/react/guides/caching?from=reactQueryV3
|
||||
queryFn: async () => {
|
||||
const { data } = await kbService.listTagByKnowledgeIds({
|
||||
kb_ids: knowledgeIds.join(','),
|
||||
});
|
||||
const list = data?.data || [];
|
||||
return list;
|
||||
},
|
||||
});
|
||||
|
||||
return { list: data, loading, setKnowledgeIds };
|
||||
};
|
||||
|
||||
export const useFetchTagList = () => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
|
||||
const { data, isFetching: loading } = useQuery<Array<[string, number]>>({
|
||||
queryKey: ['fetchTagList'],
|
||||
initialData: [],
|
||||
gcTime: 0, // https://tanstack.com/query/latest/docs/framework/react/guides/caching?from=reactQueryV3
|
||||
queryFn: async () => {
|
||||
const { data } = await listTag(knowledgeBaseId);
|
||||
const list = data?.data || [];
|
||||
return list;
|
||||
},
|
||||
});
|
||||
|
||||
return { list: data, loading };
|
||||
};
|
||||
|
||||
export const useDeleteTag = () => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteTag'],
|
||||
mutationFn: async (tags: string[]) => {
|
||||
const { data } = await removeTag(knowledgeBaseId, tags);
|
||||
if (data.code === 0) {
|
||||
message.success(i18n.t(`message.deleted`));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['fetchTagList'],
|
||||
});
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteTag: mutateAsync };
|
||||
};
|
||||
|
||||
// #endregion
|
||||
|
||||
//#region Retrieval testing
|
||||
|
||||
export const useTestChunkRetrieval = (): ResponsePostType<ITestingResult> & {
|
||||
testChunk: (...params: any[]) => void;
|
||||
} => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
const { page, size: pageSize } = useSetPaginationParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['testChunk'], // This method is invalid
|
||||
gcTime: 0,
|
||||
mutationFn: async (values: any) => {
|
||||
const { data } = await kbService.retrieval_test({
|
||||
...values,
|
||||
kb_id: values.kb_id ?? knowledgeBaseId,
|
||||
page,
|
||||
size: pageSize,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
const res = data.data;
|
||||
return {
|
||||
...res,
|
||||
documents: res.doc_aggs,
|
||||
};
|
||||
}
|
||||
return (
|
||||
data?.data ?? {
|
||||
chunks: [],
|
||||
documents: [],
|
||||
total: 0,
|
||||
}
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
data: data ?? { chunks: [], documents: [], total: 0 },
|
||||
loading,
|
||||
testChunk: mutateAsync,
|
||||
};
|
||||
};
|
||||
|
||||
export const useTestChunkAllRetrieval = (): ResponsePostType<ITestingResult> & {
|
||||
testChunkAll: (...params: any[]) => void;
|
||||
} => {
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
const { page, size: pageSize } = useSetPaginationParams();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['testChunkAll'], // This method is invalid
|
||||
gcTime: 0,
|
||||
mutationFn: async (values: any) => {
|
||||
const { data } = await kbService.retrieval_test({
|
||||
...values,
|
||||
kb_id: values.kb_id ?? knowledgeBaseId,
|
||||
doc_ids: [],
|
||||
page,
|
||||
size: pageSize,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
const res = data.data;
|
||||
return {
|
||||
...res,
|
||||
documents: res.doc_aggs,
|
||||
};
|
||||
}
|
||||
return (
|
||||
data?.data ?? {
|
||||
chunks: [],
|
||||
documents: [],
|
||||
total: 0,
|
||||
}
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
data: data ?? { chunks: [], documents: [], total: 0 },
|
||||
loading,
|
||||
testChunkAll: mutateAsync,
|
||||
};
|
||||
};
|
||||
|
||||
export const useChunkIsTesting = () => {
|
||||
return useIsMutating({ mutationKey: ['testChunk'] }) > 0;
|
||||
};
|
||||
|
||||
export const useSelectTestingResult = (): ITestingResult => {
|
||||
const data = useMutationState({
|
||||
filters: { mutationKey: ['testChunk'] },
|
||||
select: (mutation) => {
|
||||
return mutation.state.data;
|
||||
},
|
||||
});
|
||||
return (data.at(-1) ?? {
|
||||
chunks: [],
|
||||
documents: [],
|
||||
total: 0,
|
||||
}) as ITestingResult;
|
||||
};
|
||||
|
||||
export const useSelectIsTestingSuccess = () => {
|
||||
const status = useMutationState({
|
||||
filters: { mutationKey: ['testChunk'] },
|
||||
select: (mutation) => {
|
||||
return mutation.state.status;
|
||||
},
|
||||
});
|
||||
return status.at(-1) === 'success';
|
||||
};
|
||||
|
||||
export const useAllTestingSuccess = () => {
|
||||
const status = useMutationState({
|
||||
filters: { mutationKey: ['testChunkAll'] },
|
||||
select: (mutation) => {
|
||||
return mutation.state.status;
|
||||
},
|
||||
});
|
||||
return status.at(-1) === 'success';
|
||||
};
|
||||
|
||||
export const useAllTestingResult = (): ITestingResult => {
|
||||
const data = useMutationState({
|
||||
filters: { mutationKey: ['testChunkAll'] },
|
||||
select: (mutation) => {
|
||||
return mutation.state.data;
|
||||
},
|
||||
});
|
||||
return (data.at(-1) ?? {
|
||||
chunks: [],
|
||||
documents: [],
|
||||
total: 0,
|
||||
}) as ITestingResult;
|
||||
};
|
||||
//#endregion
|
||||
|
||||
@ -1,47 +0,0 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import userService from '@/services/user-service';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
import {
|
||||
IThirdOAIModelCollection as IThirdAiModelCollection,
|
||||
IThirdOAIModel,
|
||||
} from '@/interfaces/database/llm';
|
||||
import { buildLlmUuid } from '@/utils/llm-util';
|
||||
|
||||
export const enum LLMApiAction {
|
||||
LlmList = 'llmList',
|
||||
}
|
||||
|
||||
export const useFetchLlmList = (modelType?: LlmModelType) => {
|
||||
const { data } = useQuery<IThirdAiModelCollection>({
|
||||
queryKey: [LLMApiAction.LlmList],
|
||||
initialData: {},
|
||||
queryFn: async () => {
|
||||
const { data } = await userService.llm_list({ model_type: modelType });
|
||||
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return data;
|
||||
};
|
||||
|
||||
type IThirdOAIModelWithUuid = IThirdOAIModel & { uuid: string };
|
||||
|
||||
export function useSelectFlatLlmList(modelType?: LlmModelType) {
|
||||
const llmList = useFetchLlmList(modelType);
|
||||
|
||||
return Object.values(llmList).reduce<IThirdOAIModelWithUuid[]>((pre, cur) => {
|
||||
pre.push(...cur.map((x) => ({ ...x, uuid: buildLlmUuid(x) })));
|
||||
|
||||
return pre;
|
||||
}, []);
|
||||
}
|
||||
|
||||
export function useFindLlmByUuid(modelType?: LlmModelType) {
|
||||
const flatList = useSelectFlatLlmList(modelType);
|
||||
|
||||
return (uuid: string) => {
|
||||
return flatList.find((x) => x.uuid === uuid);
|
||||
};
|
||||
}
|
||||
@ -20,11 +20,15 @@ import { DefaultOptionType } from 'antd/es/select';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
export const useFetchLlmList = (
|
||||
modelType?: LlmModelType,
|
||||
): IThirdAiModelCollection => {
|
||||
const { data } = useQuery({
|
||||
queryKey: ['llmList'],
|
||||
import { buildLlmUuid } from '@/utils/llm-util';
|
||||
|
||||
export const enum LLMApiAction {
|
||||
LlmList = 'llmList',
|
||||
}
|
||||
|
||||
export const useFetchLlmList = (modelType?: LlmModelType) => {
|
||||
const { data } = useQuery<IThirdAiModelCollection>({
|
||||
queryKey: [LLMApiAction.LlmList],
|
||||
initialData: {},
|
||||
queryFn: async () => {
|
||||
const { data } = await userService.llm_list({ model_type: modelType });
|
||||
@ -36,24 +40,25 @@ export const useFetchLlmList = (
|
||||
return data;
|
||||
};
|
||||
|
||||
export const useSelectLlmOptions = () => {
|
||||
const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
|
||||
type IThirdOAIModelWithUuid = IThirdOAIModel & { uuid: string };
|
||||
|
||||
const embeddingModelOptions = useMemo(() => {
|
||||
return Object.entries(llmInfo).map(([key, value]) => {
|
||||
return {
|
||||
label: key,
|
||||
options: value.map((x) => ({
|
||||
label: getRealModelName(x.llm_name),
|
||||
value: `${x.llm_name}@${x.fid}`,
|
||||
disabled: !x.available,
|
||||
})),
|
||||
};
|
||||
});
|
||||
}, [llmInfo]);
|
||||
export function useSelectFlatLlmList(modelType?: LlmModelType) {
|
||||
const llmList = useFetchLlmList(modelType);
|
||||
|
||||
return embeddingModelOptions;
|
||||
};
|
||||
return Object.values(llmList).reduce<IThirdOAIModelWithUuid[]>((pre, cur) => {
|
||||
pre.push(...cur.map((x) => ({ ...x, uuid: buildLlmUuid(x) })));
|
||||
|
||||
return pre;
|
||||
}, []);
|
||||
}
|
||||
|
||||
export function useFindLlmByUuid(modelType?: LlmModelType) {
|
||||
const flatList = useSelectFlatLlmList(modelType);
|
||||
|
||||
return (uuid: string) => {
|
||||
return flatList.find((x) => x.uuid === uuid);
|
||||
};
|
||||
}
|
||||
|
||||
function buildLlmOptionsWithIcon(x: IThirdOAIModel) {
|
||||
return {
|
||||
@ -6,9 +6,6 @@ import userService, {
|
||||
} from '@/services/user-service';
|
||||
import authorizationUtil, { redirectToLogin } from '@/utils/authorization-util';
|
||||
import { useMutation, useQuery } from '@tanstack/react-query';
|
||||
import { Form } from 'antd';
|
||||
import { FormInstance } from 'antd/lib';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
export interface ILoginRequestBody {
|
||||
@ -134,19 +131,3 @@ export const useLogout = () => {
|
||||
|
||||
return { data, loading, logout: mutateAsync };
|
||||
};
|
||||
|
||||
export const useHandleSubmittable = (form: FormInstance) => {
|
||||
const [submittable, setSubmittable] = useState<boolean>(false);
|
||||
|
||||
// Watch all values
|
||||
const values = Form.useWatch([], form);
|
||||
|
||||
useEffect(() => {
|
||||
form
|
||||
.validateFields({ validateOnly: true })
|
||||
.then(() => setSubmittable(true))
|
||||
.catch(() => setSubmittable(false));
|
||||
}, [form, values]);
|
||||
|
||||
return { submittable };
|
||||
};
|
||||
@ -51,6 +51,7 @@ export interface IAttachment {
|
||||
}
|
||||
export interface IMessageData {
|
||||
content: string;
|
||||
audio_binary: string;
|
||||
outputs: any;
|
||||
start_to_think?: boolean;
|
||||
end_to_think?: boolean;
|
||||
|
||||
@ -72,7 +72,7 @@ export const useFetchTenantInfo = (
|
||||
): ResponseGetType<ITenantInfo> => {
|
||||
const { t } = useTranslation();
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: [UserSettingApiAction.TenantInfo],
|
||||
queryKey: [UserSettingApiAction.TenantInfo, showEmptyModelWarn],
|
||||
initialData: {},
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
|
||||
@ -1,433 +0,0 @@
|
||||
import message from '@/components/ui/message';
|
||||
import { LanguageTranslationMap } from '@/constants/common';
|
||||
import { ResponseGetType } from '@/interfaces/database/base';
|
||||
import { IToken } from '@/interfaces/database/chat';
|
||||
import { ITenantInfo } from '@/interfaces/database/knowledge';
|
||||
import { ILangfuseConfig } from '@/interfaces/database/system';
|
||||
import {
|
||||
ISystemStatus,
|
||||
ITenant,
|
||||
ITenantUser,
|
||||
IUserInfo,
|
||||
} from '@/interfaces/database/user-setting';
|
||||
import { ISetLangfuseConfigRequestBody } from '@/interfaces/request/system';
|
||||
import userService, {
|
||||
addTenantUser,
|
||||
agreeTenant,
|
||||
deleteTenantUser,
|
||||
listTenant,
|
||||
listTenantUser,
|
||||
} from '@/services/user-service';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { Modal } from 'antd';
|
||||
import DOMPurify from 'dompurify';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { history } from 'umi';
|
||||
|
||||
export const useFetchUserInfo = (): ResponseGetType<IUserInfo> => {
|
||||
const { i18n } = useTranslation();
|
||||
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['userInfo'],
|
||||
initialData: {},
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await userService.user_info();
|
||||
if (data.code === 0) {
|
||||
i18n.changeLanguage(
|
||||
LanguageTranslationMap[
|
||||
data.data.language as keyof typeof LanguageTranslationMap
|
||||
],
|
||||
);
|
||||
}
|
||||
return data?.data ?? {};
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useFetchTenantInfo = (
|
||||
showEmptyModelWarn = false,
|
||||
): ResponseGetType<ITenantInfo> => {
|
||||
const { t } = useTranslation();
|
||||
const { data, isFetching: loading } = useQuery({
|
||||
queryKey: ['tenantInfo', showEmptyModelWarn],
|
||||
initialData: {},
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data: res } = await userService.get_tenant_info();
|
||||
if (res.code === 0) {
|
||||
// llm_id is chat_id
|
||||
// asr_id is speech2txt
|
||||
const { data } = res;
|
||||
if (
|
||||
showEmptyModelWarn &&
|
||||
(isEmpty(data.embd_id) || isEmpty(data.llm_id))
|
||||
) {
|
||||
Modal.warning({
|
||||
title: t('common.warn'),
|
||||
content: (
|
||||
<div
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: DOMPurify.sanitize(t('setting.modelProvidersWarn')),
|
||||
}}
|
||||
></div>
|
||||
),
|
||||
onOk() {
|
||||
history.push('/user-setting/model');
|
||||
},
|
||||
});
|
||||
}
|
||||
data.chat_id = data.llm_id;
|
||||
data.speech2text_id = data.asr_id;
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
return res;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
|
||||
export const useSelectParserList = (): Array<{
|
||||
value: string;
|
||||
label: string;
|
||||
}> => {
|
||||
const { data: tenantInfo } = useFetchTenantInfo(true);
|
||||
const parserList = useMemo(() => {
|
||||
const parserArray: Array<string> = tenantInfo?.parser_ids?.split(',') ?? [];
|
||||
return parserArray.map((x) => {
|
||||
const arr = x.split(':');
|
||||
return { value: arr[0], label: arr[1] };
|
||||
});
|
||||
}, [tenantInfo]);
|
||||
|
||||
return parserList;
|
||||
};
|
||||
|
||||
export const useSaveSetting = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['saveSetting'],
|
||||
mutationFn: async (
|
||||
userInfo: { new_password: string } | Partial<IUserInfo>,
|
||||
) => {
|
||||
const { data } = await userService.setting(userInfo);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.modified'));
|
||||
queryClient.invalidateQueries({ queryKey: ['userInfo'] });
|
||||
}
|
||||
return data?.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, saveSetting: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchSystemVersion = () => {
|
||||
const [version, setVersion] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const fetchSystemVersion = useCallback(async () => {
|
||||
try {
|
||||
setLoading(true);
|
||||
const { data } = await userService.getSystemVersion();
|
||||
if (data.code === 0) {
|
||||
setVersion(data.data);
|
||||
setLoading(false);
|
||||
}
|
||||
} catch (error) {
|
||||
setLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
return { fetchSystemVersion, version, loading };
|
||||
};
|
||||
|
||||
export const useFetchSystemStatus = () => {
|
||||
const [systemStatus, setSystemStatus] = useState<ISystemStatus>(
|
||||
{} as ISystemStatus,
|
||||
);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const fetchSystemStatus = useCallback(async () => {
|
||||
setLoading(true);
|
||||
const { data } = await userService.getSystemStatus();
|
||||
if (data.code === 0) {
|
||||
setSystemStatus(data.data);
|
||||
setLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
return {
|
||||
systemStatus,
|
||||
fetchSystemStatus,
|
||||
loading,
|
||||
};
|
||||
};
|
||||
|
||||
export const useFetchManualSystemTokenList = () => {
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['fetchManualSystemTokenList'],
|
||||
mutationFn: async () => {
|
||||
const { data } = await userService.listToken();
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, fetchSystemTokenList: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchSystemTokenList = () => {
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery<IToken[]>({
|
||||
queryKey: ['fetchSystemTokenList'],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await userService.listToken();
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
};
|
||||
|
||||
export const useRemoveSystemToken = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['removeSystemToken'],
|
||||
mutationFn: async (token: string) => {
|
||||
const { data } = await userService.removeToken({}, token);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.deleted'));
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, removeToken: mutateAsync };
|
||||
};
|
||||
|
||||
export const useCreateSystemToken = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['createSystemToken'],
|
||||
mutationFn: async (params: Record<string, any>) => {
|
||||
const { data } = await userService.createToken(params);
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchSystemTokenList'] });
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, createToken: mutateAsync };
|
||||
};
|
||||
|
||||
export const useListTenantUser = () => {
|
||||
const { data: tenantInfo } = useFetchTenantInfo();
|
||||
const tenantId = tenantInfo.tenant_id;
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery<ITenantUser[]>({
|
||||
queryKey: ['listTenantUser', tenantId],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
enabled: !!tenantId,
|
||||
queryFn: async () => {
|
||||
const { data } = await listTenantUser(tenantId);
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
};
|
||||
|
||||
export const useAddTenantUser = () => {
|
||||
const { data: tenantInfo } = useFetchTenantInfo();
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['addTenantUser'],
|
||||
mutationFn: async (email: string) => {
|
||||
const { data } = await addTenantUser(tenantInfo.tenant_id, email);
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
||||
}
|
||||
return data?.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, addTenantUser: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDeleteTenantUser = () => {
|
||||
const { data: tenantInfo } = useFetchTenantInfo();
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteTenantUser'],
|
||||
mutationFn: async ({
|
||||
userId,
|
||||
tenantId,
|
||||
}: {
|
||||
userId: string;
|
||||
tenantId?: string;
|
||||
}) => {
|
||||
const { data } = await deleteTenantUser({
|
||||
tenantId: tenantId ?? tenantInfo.tenant_id,
|
||||
userId,
|
||||
});
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.deleted'));
|
||||
queryClient.invalidateQueries({ queryKey: ['listTenantUser'] });
|
||||
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteTenantUser: mutateAsync };
|
||||
};
|
||||
|
||||
export const useListTenant = () => {
|
||||
const { data: tenantInfo } = useFetchTenantInfo();
|
||||
const tenantId = tenantInfo.tenant_id;
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery<ITenant[]>({
|
||||
queryKey: ['listTenant', tenantId],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
enabled: !!tenantId,
|
||||
queryFn: async () => {
|
||||
const { data } = await listTenant();
|
||||
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
};
|
||||
|
||||
export const useAgreeTenant = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['agreeTenant'],
|
||||
mutationFn: async (tenantId: string) => {
|
||||
const { data } = await agreeTenant(tenantId);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.operated'));
|
||||
queryClient.invalidateQueries({ queryKey: ['listTenant'] });
|
||||
}
|
||||
return data?.data ?? [];
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, agreeTenant: mutateAsync };
|
||||
};
|
||||
|
||||
export const useSetLangfuseConfig = () => {
|
||||
const { t } = useTranslation();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['setLangfuseConfig'],
|
||||
mutationFn: async (params: ISetLangfuseConfigRequestBody) => {
|
||||
const { data } = await userService.setLangfuseConfig(params);
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.operated'));
|
||||
}
|
||||
return data?.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, setLangfuseConfig: mutateAsync };
|
||||
};
|
||||
|
||||
export const useDeleteLangfuseConfig = () => {
|
||||
const { t } = useTranslation();
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: ['deleteLangfuseConfig'],
|
||||
mutationFn: async () => {
|
||||
const { data } = await userService.deleteLangfuseConfig();
|
||||
if (data.code === 0) {
|
||||
message.success(t('message.deleted'));
|
||||
}
|
||||
return data?.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, deleteLangfuseConfig: mutateAsync };
|
||||
};
|
||||
|
||||
export const useFetchLangfuseConfig = () => {
|
||||
const { data, isFetching: loading } = useQuery<ILangfuseConfig>({
|
||||
queryKey: ['fetchLangfuseConfig'],
|
||||
gcTime: 0,
|
||||
queryFn: async () => {
|
||||
const { data } = await userService.getLangfuseConfig();
|
||||
|
||||
return data?.data;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading };
|
||||
};
|
||||
@ -8,7 +8,10 @@ import User from '../user';
|
||||
import { useTheme } from '@/components/theme-provider';
|
||||
import { LanguageList, LanguageMap, ThemeEnum } from '@/constants/common';
|
||||
import { useChangeLanguage } from '@/hooks/logic-hooks';
|
||||
import { useFetchUserInfo, useListTenant } from '@/hooks/user-setting-hooks';
|
||||
import {
|
||||
useFetchUserInfo,
|
||||
useListTenant,
|
||||
} from '@/hooks/use-user-setting-request';
|
||||
import { TenantRole } from '@/pages/user-setting/constants';
|
||||
import { BellRing, CircleHelp, MoonIcon, SunIcon } from 'lucide-react';
|
||||
import { useNavigate } from 'umi';
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
|
||||
import { Avatar } from 'antd';
|
||||
import React from 'react';
|
||||
import { history } from 'umi';
|
||||
|
||||
@ -13,7 +13,7 @@ import { LanguageList, LanguageMap, ThemeEnum } from '@/constants/common';
|
||||
import { useChangeLanguage } from '@/hooks/logic-hooks';
|
||||
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
|
||||
import { useNavigateWithFromState } from '@/hooks/route-hook';
|
||||
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
|
||||
import { Routes } from '@/routes';
|
||||
import { camelCase } from 'lodash';
|
||||
import {
|
||||
|
||||
@ -1072,6 +1072,7 @@ Example: Virtual Hosted Style`,
|
||||
pleaseUploadAtLeastOneFile: 'Please upload at least one file',
|
||||
},
|
||||
flow: {
|
||||
autoPlay: 'Auto play audio',
|
||||
downloadFileTypeTip: 'The file type to download',
|
||||
downloadFileType: 'Download file type',
|
||||
formatTypeError: 'Format or type error',
|
||||
|
||||
@ -992,6 +992,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
|
||||
pleaseUploadAtLeastOneFile: '请上传至少一个文件',
|
||||
},
|
||||
flow: {
|
||||
autoPlay: '自动播放',
|
||||
downloadFileTypeTip: '文件下载的类型',
|
||||
downloadFileType: '文件类型',
|
||||
formatTypeError: '格式或类型错误',
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { NodeCollapsible } from '@/components/collapse';
|
||||
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
|
||||
import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
|
||||
import { useFetchKnowledgeList } from '@/hooks/use-knowledge-request';
|
||||
import { IRetrievalNode } from '@/interfaces/database/flow';
|
||||
import { NodeProps, Position } from '@xyflow/react';
|
||||
import classNames from 'classnames';
|
||||
|
||||
@ -11,7 +11,7 @@ import {
|
||||
useFetchAgent,
|
||||
useUploadCanvasFileWithProgress,
|
||||
} from '@/hooks/use-agent-request';
|
||||
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
|
||||
import { buildMessageUuidWithRole } from '@/utils/chat';
|
||||
import { memo, useCallback } from 'react';
|
||||
import { useParams } from 'umi';
|
||||
|
||||
@ -50,10 +50,13 @@ export function findMessageFromList(eventList: IEventList) {
|
||||
|
||||
let startIndex = -1;
|
||||
let endIndex = -1;
|
||||
|
||||
let audioBinary = undefined;
|
||||
messageEventList.forEach((x, idx) => {
|
||||
const { data } = x;
|
||||
const { content, start_to_think, end_to_think } = data;
|
||||
const { content, start_to_think, end_to_think, audio_binary } = data;
|
||||
if (audio_binary) {
|
||||
audioBinary = audio_binary;
|
||||
}
|
||||
if (start_to_think === true) {
|
||||
nextContent += '<think>' + content;
|
||||
startIndex = idx;
|
||||
@ -82,6 +85,7 @@ export function findMessageFromList(eventList: IEventList) {
|
||||
return {
|
||||
id: eventList[0]?.message_id,
|
||||
content: nextContent,
|
||||
audio_binary: audioBinary,
|
||||
attachment: workflowFinished?.data?.outputs?.attachment || {},
|
||||
};
|
||||
}
|
||||
@ -393,12 +397,15 @@ export const useSendAgentMessage = ({
|
||||
}, [sendMessageInTaskMode]);
|
||||
|
||||
useEffect(() => {
|
||||
const { content, id, attachment } = findMessageFromList(answerList);
|
||||
const { content, id, attachment, audio_binary } =
|
||||
findMessageFromList(answerList);
|
||||
const inputAnswer = findInputFromList(answerList);
|
||||
const answer = content || getLatestError(answerList);
|
||||
|
||||
if (answerList.length > 0) {
|
||||
addNewestOneAnswer({
|
||||
answer: answer ?? '',
|
||||
audio_binary: audio_binary,
|
||||
attachment: attachment as IAttachment,
|
||||
id: id,
|
||||
...inputAnswer,
|
||||
|
||||
@ -7,7 +7,7 @@ import {
|
||||
} from '@/components/ui/form';
|
||||
import { Input } from '@/components/ui/input';
|
||||
import { Popover, PopoverContent } from '@/components/ui/popover';
|
||||
import { useParseDocument } from '@/hooks/document-hooks';
|
||||
import { useParseDocument } from '@/hooks/use-document-request';
|
||||
import { IModalProps } from '@/interfaces/common';
|
||||
import { zodResolver } from '@hookform/resolvers/zod';
|
||||
import { PropsWithChildren } from 'react';
|
||||
@ -37,7 +37,7 @@ export const PopoverForm = ({
|
||||
defaultValues: values,
|
||||
resolver: zodResolver(FormSchema),
|
||||
});
|
||||
const { parseDocument, loading } = useParseDocument();
|
||||
const { parseDocument } = useParseDocument();
|
||||
const { t } = useTranslation();
|
||||
|
||||
// useResetFormOnCloseModal({
|
||||
|
||||
@ -9,6 +9,7 @@ import {
|
||||
FormMessage,
|
||||
} from '@/components/ui/form';
|
||||
import { RAGFlowSelect } from '@/components/ui/select';
|
||||
import { Switch } from '@/components/ui/switch';
|
||||
import { zodResolver } from '@hookform/resolvers/zod';
|
||||
import { X } from 'lucide-react';
|
||||
import { memo } from 'react';
|
||||
@ -36,12 +37,14 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
)
|
||||
.optional(),
|
||||
output_format: z.string().optional(),
|
||||
auto_play: z.boolean().optional(),
|
||||
});
|
||||
|
||||
const form = useForm({
|
||||
defaultValues: {
|
||||
...values,
|
||||
output_format: values.output_format,
|
||||
auto_play: values.auto_play,
|
||||
},
|
||||
resolver: zodResolver(FormSchema),
|
||||
});
|
||||
@ -56,40 +59,6 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
return (
|
||||
<Form {...form}>
|
||||
<FormWrapper>
|
||||
<FormContainer>
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.downloadFileTypeTip')}>
|
||||
{t('flow.downloadFileType')}
|
||||
</FormLabel>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name={`output_format`}
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex-1">
|
||||
<FormControl>
|
||||
<RAGFlowSelect
|
||||
options={Object.keys(ExportFileType).map(
|
||||
(key: string) => {
|
||||
return {
|
||||
value:
|
||||
ExportFileType[
|
||||
key as keyof typeof ExportFileType
|
||||
],
|
||||
label: key,
|
||||
};
|
||||
},
|
||||
)}
|
||||
{...field}
|
||||
onValueChange={field.onChange}
|
||||
placeholder={t('common.selectPlaceholder')}
|
||||
allowClear
|
||||
></RAGFlowSelect>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</FormItem>
|
||||
</FormContainer>
|
||||
<FormContainer>
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.msgTip')}>{t('flow.msg')}</FormLabel>
|
||||
@ -132,6 +101,57 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
</FormContainer>
|
||||
<FormContainer>
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.downloadFileTypeTip')}>
|
||||
{t('flow.downloadFileType')}
|
||||
</FormLabel>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name={`output_format`}
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex-1">
|
||||
<FormControl>
|
||||
<RAGFlowSelect
|
||||
options={Object.keys(ExportFileType).map(
|
||||
(key: string) => {
|
||||
return {
|
||||
value:
|
||||
ExportFileType[
|
||||
key as keyof typeof ExportFileType
|
||||
],
|
||||
label: key,
|
||||
};
|
||||
},
|
||||
)}
|
||||
{...field}
|
||||
onValueChange={field.onChange}
|
||||
placeholder={t('common.selectPlaceholder')}
|
||||
allowClear
|
||||
></RAGFlowSelect>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</FormItem>
|
||||
<FormItem>
|
||||
<FormLabel>{t('flow.autoPlay')}</FormLabel>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name={`auto_play`}
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex-1">
|
||||
<FormControl>
|
||||
<Switch
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
/>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</FormItem>
|
||||
</FormContainer>
|
||||
</FormWrapper>
|
||||
</Form>
|
||||
);
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { LlmModelType } from '@/constants/knowledge';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
|
||||
import { useComposeLlmOptionsByModelTypes } from '@/hooks/use-llm-request';
|
||||
import {
|
||||
LargeModelFormField,
|
||||
OutputFormatFormFieldProps,
|
||||
|
||||
@ -5,7 +5,7 @@ import {
|
||||
useCreateSystemToken,
|
||||
useFetchSystemTokenList,
|
||||
useRemoveSystemToken,
|
||||
} from '@/hooks/user-setting-hooks';
|
||||
} from '@/hooks/use-user-setting-request';
|
||||
import { IStats } from '@/interfaces/database/chat';
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import MessageItem from '@/components/next-message-item';
|
||||
import { Modal } from '@/components/ui/modal/modal';
|
||||
import { useFetchAgent } from '@/hooks/use-agent-request';
|
||||
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
|
||||
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
|
||||
import { IAgentLogMessage } from '@/interfaces/database/agent';
|
||||
import {
|
||||
IMessage,
|
||||
|
||||
@ -17,7 +17,7 @@ import { Modal } from '@/components/ui/modal/modal';
|
||||
import Space from '@/components/ui/space';
|
||||
import { Switch } from '@/components/ui/switch';
|
||||
import { Textarea } from '@/components/ui/textarea';
|
||||
import { useFetchChunk } from '@/hooks/chunk-hooks';
|
||||
import { useFetchChunk } from '@/hooks/use-chunk-request';
|
||||
import { IModalProps } from '@/interfaces/common';
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import { FieldValues, FormProvider, useForm } from 'react-hook-form';
|
||||
|
||||
@ -8,8 +8,10 @@ import {
|
||||
FormMessage,
|
||||
} from '@/components/ui/form';
|
||||
import { NumberInput } from '@/components/ui/input';
|
||||
import { useFetchTagListByKnowledgeIds } from '@/hooks/knowledge-hooks';
|
||||
import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-request';
|
||||
import {
|
||||
useFetchKnowledgeBaseConfiguration,
|
||||
useFetchTagListByKnowledgeIds,
|
||||
} from '@/hooks/use-knowledge-request';
|
||||
import { CircleMinus, Plus } from 'lucide-react';
|
||||
import { useCallback, useEffect, useMemo } from 'react';
|
||||
import { useFieldArray, useFormContext } from 'react-hook-form';
|
||||
|
||||
@ -1,8 +1,11 @@
import { ReactComponent as FilterIcon } from '@/assets/filter.svg';
import { KnowledgeRouteKey } from '@/constants/knowledge';
import { IChunkListResult, useSelectChunkList } from '@/hooks/chunk-hooks';
import { useTranslate } from '@/hooks/common-hooks';
import { useKnowledgeBaseId } from '@/hooks/knowledge-hooks';
import {
  IChunkListResult,
  useSelectChunkList,
} from '@/hooks/use-chunk-request';
import { useKnowledgeBaseId } from '@/hooks/use-knowledge-request';
import {
  ArrowLeftOutlined,
  CheckCircleOutlined,

@ -1,10 +1,10 @@
import { useSetModalState, useShowDeleteConfirm } from '@/hooks/common-hooks';
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
import {
  useCreateChunk,
  useDeleteChunk,
  useSelectChunkList,
} from '@/hooks/chunk-hooks';
import { useSetModalState, useShowDeleteConfirm } from '@/hooks/common-hooks';
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
} from '@/hooks/use-chunk-request';
import { IChunk } from '@/interfaces/database/knowledge';
import { buildChunkHighlights } from '@/utils/document-util';
import { useCallback, useMemo, useState } from 'react';
@ -1,9 +1,9 @@
import { useKnowledgeBaseId } from '@/hooks/knowledge-hooks';
import {
  useNavigateWithFromState,
  useSecondPathName,
  useThirdPathName,
} from '@/hooks/route-hook';
import { useKnowledgeBaseId } from '@/hooks/use-knowledge-request';
import { Breadcrumb } from 'antd';
import { ItemType } from 'antd/es/breadcrumb/Breadcrumb';
import { useMemo } from 'react';

@ -17,7 +17,7 @@ import { Modal } from '@/components/ui/modal/modal';
import Space from '@/components/ui/space';
import { Switch } from '@/components/ui/switch';
import { Textarea } from '@/components/ui/textarea';
import { useFetchChunk } from '@/hooks/chunk-hooks';
import { useFetchChunk } from '@/hooks/use-chunk-request';
import { IModalProps } from '@/interfaces/common';
import { Trash2 } from 'lucide-react';
import React, { useCallback, useEffect, useState } from 'react';

@ -8,8 +8,10 @@ import {
  FormMessage,
} from '@/components/ui/form';
import { NumberInput } from '@/components/ui/input';
import { useFetchTagListByKnowledgeIds } from '@/hooks/knowledge-hooks';
import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-request';
import {
  useFetchKnowledgeBaseConfiguration,
  useFetchTagListByKnowledgeIds,
} from '@/hooks/use-knowledge-request';
import { CircleMinus, Plus } from 'lucide-react';
import { useCallback, useEffect, useMemo } from 'react';
import { useFieldArray, useFormContext } from 'react-hook-form';
@ -1,8 +1,11 @@
import { ReactComponent as FilterIcon } from '@/assets/filter.svg';
import { KnowledgeRouteKey } from '@/constants/knowledge';
import { IChunkListResult, useSelectChunkList } from '@/hooks/chunk-hooks';
import { useTranslate } from '@/hooks/common-hooks';
import { useKnowledgeBaseId } from '@/hooks/knowledge-hooks';
import {
  IChunkListResult,
  useSelectChunkList,
} from '@/hooks/use-chunk-request';
import { useKnowledgeBaseId } from '@/hooks/use-knowledge-request';
import {
  ArrowLeftOutlined,
  CheckCircleOutlined,

@ -1,9 +1,9 @@
import { TimelineNode } from '@/components/originui/timeline';
import message from '@/components/ui/message';
import { useCreateChunk, useDeleteChunk } from '@/hooks/chunk-hooks';
import { useSetModalState, useShowDeleteConfirm } from '@/hooks/common-hooks';
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
import { useFetchMessageTrace } from '@/hooks/use-agent-request';
import { useCreateChunk, useDeleteChunk } from '@/hooks/use-chunk-request';
import kbService from '@/services/knowledge-service';
import { formatSecondsToHumanReadable } from '@/utils/date';
import { buildChunkHighlights } from '@/utils/document-util';

@ -1,6 +1,6 @@
import SvgIcon from '@/components/svg-icon';
import { useTranslate } from '@/hooks/common-hooks';
import { useSelectParserList } from '@/hooks/user-setting-hooks';
import { useSelectParserList } from '@/hooks/use-user-setting-request';
import { Col, Divider, Empty, Row, Typography } from 'antd';
import DOMPurify from 'dompurify';
import camelCase from 'lodash/camelCase';
@ -9,7 +9,7 @@ import {
} from '@/components/ui/form';
import { MultiSelect } from '@/components/ui/multi-select';
import { FormLayout } from '@/constants/form';
import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
import { useFetchKnowledgeList } from '@/hooks/use-knowledge-request';
import { Form, Select, Space } from 'antd';
import DOMPurify from 'dompurify';
import { useFormContext, useWatch } from 'react-hook-form';

@ -1,9 +1,9 @@
import { LlmModelType } from '@/constants/knowledge';
import { useSetModalState } from '@/hooks/common-hooks';

import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-request';
import { useSelectParserList } from '@/hooks/user-setting-hooks';
import { useSelectLlmOptionsByModelType } from '@/hooks/use-llm-request';
import { useSelectParserList } from '@/hooks/use-user-setting-request';
import kbService from '@/services/knowledge-service';
import { useIsFetching } from '@tanstack/react-query';
import { pick } from 'lodash';

@ -33,7 +33,7 @@ import {
  TooltipProvider,
  TooltipTrigger,
} from '@/components/ui/tooltip';
import { useDeleteTag, useFetchTagList } from '@/hooks/knowledge-hooks';
import { useDeleteTag, useFetchTagList } from '@/hooks/use-knowledge-request';
import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useRenameKnowledgeTag } from '../hooks';
@ -7,7 +7,7 @@ import {
} from '@/components/ui/dialog';
import { LoadingButton } from '@/components/ui/loading-button';
import { TagRenameId } from '@/constants/knowledge';
import { useTagIsRenaming } from '@/hooks/knowledge-hooks';
import { useTagIsRenaming } from '@/hooks/use-knowledge-request';
import { IModalProps } from '@/interfaces/common';
import { useTranslation } from 'react-i18next';
import { RenameForm } from './rename-form';

@ -14,7 +14,7 @@ import {
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { TagRenameId } from '@/constants/knowledge';
import { useRenameTag } from '@/hooks/knowledge-hooks';
import { useRenameTag } from '@/hooks/use-knowledge-request';
import { IModalProps } from '@/interfaces/common';
import { useEffect } from 'react';
import { useTranslation } from 'react-i18next';

@ -1,4 +1,4 @@
import { useFetchTagList } from '@/hooks/knowledge-hooks';
import { useFetchTagList } from '@/hooks/use-knowledge-request';
import { Chart } from '@antv/g2';
import { sumBy } from 'lodash';
import { useCallback, useEffect, useMemo, useRef } from 'react';
@ -1,69 +1,11 @@
import { useSetModalState } from '@/hooks/common-hooks';
import { useNextWebCrawl } from '@/hooks/document-hooks';
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
import { IDocumentInfo } from '@/interfaces/database/document';
import { formatDate, formatSecondsToHumanReadable } from '@/utils/date';
import { formatBytes } from '@/utils/file-util';
import { useCallback, useMemo, useState } from 'react';
import { useNavigate } from 'umi';
import { ILogInfo } from '../process-log-modal';
import { RunningStatus } from './constant';

export const useNavigateToOtherPage = () => {
  const navigate = useNavigate();
  const { knowledgeId } = useGetKnowledgeSearchParams();

  const linkToUploadPage = useCallback(() => {
    navigate(`/knowledge/dataset/upload?id=${knowledgeId}`);
  }, [navigate, knowledgeId]);

  const toChunk = useCallback((id: string) => {}, []);

  return { linkToUploadPage, toChunk };
};

export const useGetRowSelection = () => {
  const [selectedRowKeys, setSelectedRowKeys] = useState<React.Key[]>([]);

  const rowSelection = {
    selectedRowKeys,
    onChange: (newSelectedRowKeys: React.Key[]) => {
      setSelectedRowKeys(newSelectedRowKeys);
    },
  };

  return rowSelection;
};

export const useHandleWebCrawl = () => {
  const {
    visible: webCrawlUploadVisible,
    hideModal: hideWebCrawlUploadModal,
    showModal: showWebCrawlUploadModal,
  } = useSetModalState();
  const { webCrawl, loading } = useNextWebCrawl();

  const onWebCrawlUploadOk = useCallback(
    async (name: string, url: string) => {
      const ret = await webCrawl({ name, url });
      if (ret === 0) {
        hideWebCrawlUploadModal();
        return 0;
      }
      return -1;
    },
    [webCrawl, hideWebCrawlUploadModal],
  );

  return {
    webCrawlUploadLoading: loading,
    onWebCrawlUploadOk,
    webCrawlUploadVisible,
    hideWebCrawlUploadModal,
    showWebCrawlUploadModal,
  };
};

export const useShowLog = (documents: IDocumentInfo[]) => {
  const { showModal, hideModal, visible } = useSetModalState();
  const [record, setRecord] = useState<IDocumentInfo>();
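The `useHandleWebCrawl` hook shown above, which this hunk appears to remove (the file shrinks from 69 to 11 lines), combines modal state from `useSetModalState` with the `webCrawl` mutation and signals success through a `0` / `-1` return value. A minimal sketch of how a component could consume such a hook (the `WebCrawlModal` component and its props are hypothetical, shown only to illustrate the wiring):

// Hypothetical consumer; WebCrawlModal is an assumed component, not part of this diff.
function DatasetToolbar() {
  const {
    webCrawlUploadVisible,
    webCrawlUploadLoading,
    showWebCrawlUploadModal,
    hideWebCrawlUploadModal,
    onWebCrawlUploadOk,
  } = useHandleWebCrawl();

  return (
    <>
      <button onClick={showWebCrawlUploadModal}>Web crawl</button>
      {webCrawlUploadVisible && (
        <WebCrawlModal
          loading={webCrawlUploadLoading}
          onCancel={hideWebCrawlUploadModal}
          onOk={onWebCrawlUploadOk} // resolves to 0 on success, -1 otherwise
        />
      )}
    </>
  );
}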
@ -40,7 +40,7 @@ export default function RetrievalTesting() {
            <Plus /> Add New Test
          </Button> */}
        </div>
        <div className="h-[calc(100vh-241px)] overflow-auto scrollbar-thin">
        <div className="h-[calc(100vh-241px)] overflow-auto scrollbar-thin px-1">
          <TestingForm
            loading={loading}
            setValues={setValues}

@ -100,9 +100,9 @@ export function TestingResult({
        </>
      )}
      {!data.chunks?.length && !loading && (
        <div className="flex justify-center items-center w-full h-[calc(100vh-241px)]">
        <div className="flex justify-center items-center w-full h-[calc(100vh-280px)]">
          <div>
            <Empty type={EmptyType.SearchData}>
            <Empty type={EmptyType.SearchData} iconWidth={80}>
              {data.isRuned && (
                <div className="text-text-secondary">
                  {t('knowledgeDetails.noTestResultsForRuned')}
@ -1,4 +1,4 @@
import { useFetchTenantInfo } from '@/hooks/user-setting-hooks';
import { useFetchTenantInfo } from '@/hooks/use-user-setting-request';
import { useCallback } from 'react';

export function useDisplayOwnerName() {

@ -5,7 +5,7 @@ import {
import { FileIcon } from '@/components/icon-font';
import NewDocumentLink from '@/components/new-document-link';
import { Button } from '@/components/ui/button';
import { useDownloadFile } from '@/hooks/file-manager-hooks';
import { useDownloadFile } from '@/hooks/use-file-request';
import { IFile } from '@/interfaces/database/file-manager';
import { cn } from '@/lib/utils';
import {

@ -1,8 +1,5 @@
import { useSetModalState } from '@/hooks/common-hooks';
import {
  useConnectToKnowledge,
  useRenameFile,
} from '@/hooks/file-manager-hooks';
import { useConnectToKnowledge, useRenameFile } from '@/hooks/use-file-request';
import { IFile } from '@/interfaces/database/file-manager';
import { TableRowSelection } from 'antd/es/table/interface';
import { useCallback, useMemo, useState } from 'react';
@ -15,7 +15,7 @@ import {
  FormMessage,
} from '@/components/ui/form';
import { MultiSelect } from '@/components/ui/multi-select';
import { useSelectKnowledgeOptions } from '@/hooks/knowledge-hooks';
import { useSelectKnowledgeOptions } from '@/hooks/use-knowledge-request';
import { IModalProps } from '@/interfaces/common';
import { zodResolver } from '@hookform/resolvers/zod';
import { Link2 } from 'lucide-react';

@ -10,7 +10,7 @@ import {
  DialogHeader,
  DialogTitle,
} from '@/components/ui/dialog';
import { useFetchPureFileList } from '@/hooks/file-manager-hooks';
import { useFetchPureFileList } from '@/hooks/use-file-request';
import { IModalProps } from '@/interfaces/common';
import { IFile } from '@/interfaces/database/file-manager';
import { isEmpty } from 'lodash';

@ -5,8 +5,8 @@ import {
  useLoginChannels,
  useLoginWithChannel,
  useRegister,
} from '@/hooks/login-hooks';
import { useSystemConfig } from '@/hooks/system-hooks';
} from '@/hooks/use-login-request';
import { useSystemConfig } from '@/hooks/use-system-request';
import { rsaPsw } from '@/utils';
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
@ -20,7 +20,7 @@ import {
  useGetChatSearchParams,
  useSetDialog,
} from '@/hooks/use-chat-request';
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
import { buildMessageUuidWithRole } from '@/utils/chat';
import { zodResolver } from '@hookform/resolvers/zod';
import { t } from 'i18next';

@ -8,7 +8,7 @@ import {
  useFetchDialog,
  useGetChatSearchParams,
} from '@/hooks/use-chat-request';
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
import { useFetchUserInfo } from '@/hooks/use-user-setting-request';
import { buildMessageUuidWithRole } from '@/utils/chat';
import {
  useGetSendButtonDisabled,

@ -5,7 +5,7 @@ import PdfSheet from '@/components/pdf-drawer';
import { useClickDrawer } from '@/components/pdf-drawer/hooks';
import { useSyncThemeFromParams } from '@/components/theme-provider';
import { MessageType, SharedFrom } from '@/constants/chat';
import { useFetchFlowSSE } from '@/hooks/flow-hooks';
import { useFetchFlowSSE } from '@/hooks/use-agent-request';
import {
  useFetchExternalChatInfo,
  useFetchNextConversationSSE,
@ -4,7 +4,7 @@ import { Modal } from '@/components/ui/modal/modal';
import {
  useGetChunkHighlights,
  useGetDocumentUrl,
} from '@/hooks/document-hooks';
} from '@/hooks/use-document-request';
import { IModalProps } from '@/interfaces/common';
import { IReferenceChunk } from '@/interfaces/database/chat';
import { IChunk } from '@/interfaces/database/knowledge';

@ -1,13 +1,15 @@
import message from '@/components/ui/message';
import { SharedFrom } from '@/constants/chat';
import { useSetModalState } from '@/hooks/common-hooks';
import { useSelectTestingResult } from '@/hooks/knowledge-hooks';
import {
  useGetPaginationWithRouter,
  useSendMessageWithSse,
} from '@/hooks/logic-hooks';
import { useSetPaginationParams } from '@/hooks/route-hook';
import { useKnowledgeBaseId } from '@/hooks/use-knowledge-request';
import {
  useKnowledgeBaseId,
  useSelectTestingResult,
} from '@/hooks/use-knowledge-request';
import { ResponsePostType } from '@/interfaces/database/base';
import { IAnswer } from '@/interfaces/database/chat';
import { ITestingResult } from '@/interfaces/database/knowledge';
@ -14,7 +14,7 @@ import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import {
  useFetchTenantInfo,
  useFetchUserInfo,
} from '@/hooks/user-setting-hooks';
} from '@/hooks/use-user-setting-request';
import { Send, Settings } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';

@ -14,7 +14,6 @@ import remarkGfm from 'remark-gfm';
import remarkMath from 'remark-math';
import { visitParents } from 'unist-util-visit-parents';

import { useFetchDocumentThumbnailsByIds } from '@/hooks/document-hooks';
import { useTranslation } from 'react-i18next';

import 'katex/dist/katex.min.css'; // `rehype-katex` does not import the CSS for you

@ -33,6 +32,7 @@ import {
  PopoverContent,
  PopoverTrigger,
} from '@/components/ui/popover';
import { useFetchDocumentThumbnailsByIds } from '@/hooks/use-document-request';
import classNames from 'classnames';
import { omit } from 'lodash';
import { pipe } from 'lodash/fp';

@ -19,7 +19,7 @@ import {
  useAllTestingResult,
  useChunkIsTesting,
  useSelectTestingResult,
} from '@/hooks/knowledge-hooks';
} from '@/hooks/use-knowledge-request';
import { cn } from '@/lib/utils';
import { CheckIcon, ChevronDown, Files, XIcon } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
@ -29,12 +29,12 @@ import { RAGFlowSelect } from '@/components/ui/select';
import { Spin } from '@/components/ui/spin';
import { Switch } from '@/components/ui/switch';
import { Textarea } from '@/components/ui/textarea';
import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
import { useFetchKnowledgeList } from '@/hooks/use-knowledge-request';
import {
  useComposeLlmOptionsByModelTypes,
  useSelectLlmOptionsByModelType,
} from '@/hooks/llm-hooks';
import { useFetchTenantInfo } from '@/hooks/user-setting-hooks';
} from '@/hooks/use-llm-request';
import { useFetchTenantInfo } from '@/hooks/use-user-setting-request';
import { IKnowledge } from '@/interfaces/database/knowledge';
import { cn } from '@/lib/utils';
import { zodResolver } from '@hookform/resolvers/zod';
Some files were not shown because too many files have changed in this diff.