Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-04 03:25:30 +08:00

Compare commits: v0.21.0...43ea312144 (25 commits)
| SHA1 |
|---|
| 43ea312144 |
| ce05696d95 |
| 0f62bfda21 |
| 70ffe2b4e8 |
| e76db6e222 |
| 7b664b5a84 |
| 8a41057236 |
| 447041d265 |
| f0375c4acd |
| 8af769de41 |
| f808bc32ba |
| e8cb1d8fc4 |
| 4e86ee4ff9 |
| c99034f717 |
| 86b254d214 |
| 1c38f4cefb |
| 74c195cd36 |
| e48bec1cbf |
| 205a5eb9f5 |
| 8844826208 |
| 8fe4281d81 |
| fb1bedbd3c |
| 6e55b9146c |
| 071ea9c493 |
| 5037a28e4d |
@@ -135,7 +135,7 @@ releases! 🌟
 ## 🔎 System Architecture
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 Get Started
@@ -129,7 +129,7 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
 ## 🔎 Arsitektur Sistem
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 Mulai
@@ -109,7 +109,7 @@
 ## 🔎 システム構成
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 初期設定
@@ -109,7 +109,7 @@
 ## 🔎 시스템 아키텍처
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 시작하기
@@ -129,7 +129,7 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
 ## 🔎 Arquitetura do Sistema
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 Primeiros Passos
@@ -132,7 +132,7 @@
 ## 🔎 系統架構
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 快速開始
@@ -132,7 +132,7 @@
 ## 🔎 系统架构
 
 <div align="center" style="margin-top:20px;margin-bottom:20px;">
-<img src="https://github.com/infiniflow/ragflow/assets/12318111/d6ac5664-c237-4200-a7c2-a4a00691b485" width="1000"/>
+<img src="https://github.com/user-attachments/assets/31b0dd6f-ca4f-445a-9457-70cb44a381b2" width="1000"/>
 </div>
 
 ## 🎬 快速开始
@@ -48,13 +48,20 @@ It consists of a server-side Service and a command-line client (CLI), both imple
 1. Ensure the Admin Service is running.
 2. Install ragflow-cli.
 
 ```bash
-pip install ragflow-cli
+pip install ragflow-cli==0.21.0
 ```
 
 3. Launch the CLI client:
 
 ```bash
-ragflow-cli -h 0.0.0.0 -p 9381
+ragflow-cli -h 127.0.0.1 -p 9381
 ```
 
-Enter superuser's password to login. Default password is `admin`.
+You will be prompted to enter the superuser's password to log in.
+The default password is admin.
+
+**Parameters:**
+
+- -h: RAGFlow admin server host address
+- -p: RAGFlow admin server port
@@ -213,12 +213,8 @@ class AdminCLI(Cmd):
 
     def onecmd(self, command: str) -> bool:
         try:
-            # print(f"command: {command}")
             result = self.parse_command(command)
 
-            # if 'type' in result and result.get('type') == 'empty':
-            #     return False
-
             if isinstance(result, dict):
                 if 'type' in result and result.get('type') == 'empty':
                     return False
@@ -341,9 +337,9 @@ class AdminCLI(Cmd):
             row = "|"
             for col in columns:
                 value = str(item.get(col, ''))
-                if len(value) > col_widths[col]:
+                if get_string_width(value) > col_widths[col]:
                     value = value[:col_widths[col] - 3] + "..."
-                row += f" {value:<{col_widths[col]}} |"
+                row += f" {value:<{col_widths[col] - (get_string_width(value) - len(value))}} |"
             print(row)
 
         print(separator)
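The `get_string_width` helper referenced in this hunk is not shown here. A plausible minimal implementation, assuming it measures display columns so that fullwidth CJK characters count as two (an illustration, not the committed code):

```python
import unicodedata

def get_string_width(s: str) -> int:
    # Fullwidth ("F") and Wide ("W") characters occupy two terminal columns;
    # everything else counts as one.
    return sum(2 if unicodedata.east_asian_width(ch) in ("F", "W") else 1 for ch in s)
```

Under that assumption, the padding expression above shrinks the format width by the extra columns wide characters consume, keeping the table borders aligned.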
@@ -452,7 +448,7 @@ class AdminCLI(Cmd):
         if response.status_code == 200:
             self._print_table_simple(res_json['data'])
         else:
-            print(f"Fail to get all users, code: {res_json['code']}, message: {res_json['message']}")
+            print(f"Fail to get all services, code: {res_json['code']}, message: {res_json['message']}")
 
     def _handle_show_service(self, command):
         service_id: int = command['number']
@@ -463,14 +459,14 @@ class AdminCLI(Cmd):
         res_json = response.json()
         if response.status_code == 200:
             res_data = res_json['data']
-            if res_data['alive']:
-                print(f"Service {res_data['service_name']} is alive. Detail:")
+            if 'status' in res_data and res_data['status'] == 'alive':
+                print(f"Service {res_data['service_name']} is alive, ")
                 if isinstance(res_data['message'], str):
                     print(res_data['message'])
                 else:
                     self._print_table_simple(res_data['message'])
             else:
-                print(f"Service {res_data['service_name']} is down. Detail: {res_data['message']}")
+                print(f"Service {res_data['service_name']} is down, {res_data['message']}")
         else:
             print(f"Fail to show service, code: {res_json['code']}, message: {res_json['message']}")
@@ -1,6 +1,6 @@
 [project]
 name = "ragflow-cli"
-version = "0.21.0.dev5"
+version = "0.21.0"
 description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring. "
 authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
 license = { text = "Apache License, Version 2.0" }
@@ -26,7 +26,7 @@ from routes import admin_bp
 from api.utils.log_utils import init_root_logger
 from api.constants import SERVICE_CONF
 from api import settings
-from admin.server.config import load_configurations, SERVICE_CONFIGS
+from config import load_configurations, SERVICE_CONFIGS
 
 stop_event = threading.Event()
@@ -26,6 +26,8 @@ from urllib.parse import urlparse
 
 
 class ServiceConfigs:
+    configs = dict
+
     def __init__(self):
         self.configs = []
         self.lock = threading.Lock()
@@ -229,7 +231,8 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 host: str = v['host']
                 http_port: int = v['http_port']
                 config = RAGFlowServerConfig(id=id_count, name=name, host=host, port=http_port,
-                                             service_type="ragflow_server", detail_func_name="check_ragflow_server_alive")
+                                             service_type="ragflow_server",
+                                             detail_func_name="check_ragflow_server_alive")
                 configurations.append(config)
                 id_count += 1
             case "es":
@@ -254,7 +257,8 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 host = parts[0]
                 port = int(parts[1])
                 database: str = v.get('db_name', 'default_db')
-                config = InfinityConfig(id=id_count, name=name, host=host, port=port, service_type="retrieval", retrieval_type="infinity",
+                config = InfinityConfig(id=id_count, name=name, host=host, port=port, service_type="retrieval",
+                                        retrieval_type="infinity",
                                         db_name=database, detail_func_name="get_infinity_status")
                 configurations.append(config)
                 id_count += 1
@@ -266,7 +270,8 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 port = int(parts[1])
                 user = v.get('user')
                 password = v.get('password')
-                config = MinioConfig(id=id_count, name=name, host=host, port=port, user=user, password=password, service_type="file_store",
+                config = MinioConfig(id=id_count, name=name, host=host, port=port, user=user, password=password,
+                                     service_type="file_store",
                                      store_type="minio", detail_func_name="check_minio_alive")
                 configurations.append(config)
                 id_count += 1
@@ -17,7 +17,7 @@
 
 from flask import Blueprint, request
 
-from admin.server.auth import login_verify
+from auth import login_verify
 from responses import success_response, error_response
 from services import UserMgr, ServiceMgr, UserServiceMgr
 from api.common.exceptions import AdminException
@@ -27,7 +27,7 @@ from api.utils.crypt import decrypt
 from api.utils import health_utils
 
 from api.common.exceptions import AdminException, UserAlreadyExistsError, UserNotFoundError
-from admin.server.config import SERVICE_CONFIGS
+from config import SERVICE_CONFIGS
 
 
 class UserMgr:
@@ -181,12 +181,12 @@ class ServiceMgr:
             config_dict = config.to_dict()
             try:
                 service_detail = ServiceMgr.get_service_details(service_id)
-                if service_detail['alive']:
-                    config_dict['status'] = 'Alive'
+                if "status" in service_detail:
+                    config_dict['status'] = service_detail['status']
                 else:
-                    config_dict['status'] = 'Timeout'
+                    config_dict['status'] = 'timeout'
             except Exception:
-                config_dict['status'] = 'Timeout'
+                config_dict['status'] = 'timeout'
             result.append(config_dict)
         return result
@@ -206,7 +206,7 @@ class ServiceMgr:
         }
         service_info = service_config_mapping.get(service_id, {})
         if not service_info:
-            raise AdminException(f"Invalid service_id: {service_id}")
+            raise AdminException(f"invalid service_id: {service_id}")
 
         detail_func = getattr(health_utils, service_info.get('detail_func_name'))
         res = detail_func()
@@ -60,7 +60,7 @@ def list_chunk():
         }
         if "available_int" in req:
             query["available_int"] = int(req["available_int"])
-        sres = settings.retriever.search(query, search.index_name(tenant_id), kb_ids, highlight=True)
+        sres = settings.retriever.search(query, search.index_name(tenant_id), kb_ids, highlight=["content_ltks"])
         res = {"total": sres.total, "chunks": [], "doc": doc.to_dict()}
         for id in sres.ids:
             d = {
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 #
+import logging
 import os
 import pathlib
 import re
@@ -234,54 +235,63 @@ def get_all_parent_folders():
         return server_error_response(e)
 
 
-@manager.route('/rm', methods=['POST'])  # noqa: F821
+@manager.route("/rm", methods=["POST"])  # noqa: F821
 @login_required
 @validate_request("file_ids")
 def rm():
     req = request.json
     file_ids = req["file_ids"]
 
+    def _delete_single_file(file):
+        try:
+            if file.location:
+                STORAGE_IMPL.rm(file.parent_id, file.location)
+        except Exception:
+            logging.exception(f"Fail to remove object: {file.parent_id}/{file.location}")
+
+        informs = File2DocumentService.get_by_file_id(file.id)
+        for inform in informs:
+            doc_id = inform.document_id
+            e, doc = DocumentService.get_by_id(doc_id)
+            if e and doc:
+                tenant_id = DocumentService.get_tenant_id(doc_id)
+                if tenant_id:
+                    DocumentService.remove_document(doc, tenant_id)
+        File2DocumentService.delete_by_file_id(file.id)
+
+        FileService.delete(file)
+
+    def _delete_folder_recursive(folder, tenant_id):
+        sub_files = FileService.list_all_files_by_parent_id(folder.id)
+        for sub_file in sub_files:
+            if sub_file.type == FileType.FOLDER.value:
+                _delete_folder_recursive(sub_file, tenant_id)
+            else:
+                _delete_single_file(sub_file)
+
+        FileService.delete(folder)
+
     try:
         for file_id in file_ids:
             e, file = FileService.get_by_id(file_id)
-            if not e:
+            if not e or not file:
                 return get_data_error_result(message="File or Folder not found!")
             if not file.tenant_id:
                 return get_data_error_result(message="Tenant not found!")
             if not check_file_team_permission(file, current_user.id):
-                return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)
+                return get_json_result(data=False, message="No authorization.", code=settings.RetCode.AUTHENTICATION_ERROR)
 
             if file.source_type == FileSource.KNOWLEDGEBASE:
                 continue
 
             if file.type == FileType.FOLDER.value:
-                file_id_list = FileService.get_all_innermost_file_ids(file_id, [])
-                for inner_file_id in file_id_list:
-                    e, file = FileService.get_by_id(inner_file_id)
-                    if not e:
-                        return get_data_error_result(message="File not found!")
-                    STORAGE_IMPL.rm(file.parent_id, file.location)
-                FileService.delete_folder_by_pf_id(current_user.id, file_id)
-            else:
-                STORAGE_IMPL.rm(file.parent_id, file.location)
-                if not FileService.delete(file):
-                    return get_data_error_result(
-                        message="Database error (File removal)!")
+                _delete_folder_recursive(file, current_user.id)
+                continue
 
-            # delete file2document
-            informs = File2DocumentService.get_by_file_id(file_id)
-            for inform in informs:
-                doc_id = inform.document_id
-                e, doc = DocumentService.get_by_id(doc_id)
-                if not e:
-                    return get_data_error_result(message="Document not found!")
-                tenant_id = DocumentService.get_tenant_id(doc_id)
-                if not tenant_id:
-                    return get_data_error_result(message="Tenant not found!")
-                if not DocumentService.remove_document(doc, tenant_id):
-                    return get_data_error_result(
-                        message="Database error (Document removal)!")
-            File2DocumentService.delete_by_file_id(file_id)
+            _delete_single_file(file)
 
         return get_json_result(data=True)
 
     except Exception as e:
        return server_error_response(e)
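For reference, a client call against the rewritten endpoint might look as follows. This is a sketch: the `/v1/file` blueprint prefix, port, and auth header are assumptions about a typical deployment, not part of this diff.

```python
import requests

BASE = "http://127.0.0.1:9380/v1/file"          # assumed prefix and port
HEADERS = {"Authorization": "<session-token>"}  # placeholder credential

# Folders are now deleted recursively server-side; KB-sourced files are skipped.
resp = requests.post(f"{BASE}/rm",
                     json={"file_ids": ["<file-or-folder-id>"]},
                     headers=HEADERS)
print(resp.json())
```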
@@ -355,31 +365,89 @@ def get(file_id):
         return server_error_response(e)
 
 
-@manager.route('/mv', methods=['POST'])  # noqa: F821
+@manager.route("/mv", methods=["POST"])  # noqa: F821
 @login_required
 @validate_request("src_file_ids", "dest_file_id")
 def move():
     req = request.json
     try:
         file_ids = req["src_file_ids"]
-        parent_id = req["dest_file_id"]
+        dest_parent_id = req["dest_file_id"]
+
+        ok, dest_folder = FileService.get_by_id(dest_parent_id)
+        if not ok or not dest_folder:
+            return get_data_error_result(message="Parent Folder not found!")
+
         files = FileService.get_by_ids(file_ids)
-        files_dict = {}
-        for file in files:
-            files_dict[file.id] = file
+        if not files:
+            return get_data_error_result(message="Source files not found!")
+
+        files_dict = {f.id: f for f in files}
 
         for file_id in file_ids:
-            file = files_dict[file_id]
+            file = files_dict.get(file_id)
+            if not file:
+                return get_data_error_result(message="File or Folder not found!")
             if not file.tenant_id:
                 return get_data_error_result(message="Tenant not found!")
             if not check_file_team_permission(file, current_user.id):
-                return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)
-        fe, _ = FileService.get_by_id(parent_id)
-        if not fe:
-            return get_data_error_result(message="Parent Folder not found!")
-        FileService.move_file(file_ids, parent_id)
+                return get_json_result(
+                    data=False,
+                    message="No authorization.",
+                    code=settings.RetCode.AUTHENTICATION_ERROR,
+                )
+
+        def _move_entry_recursive(source_file_entry, dest_folder):
+            if source_file_entry.type == FileType.FOLDER.value:
+                existing_folder = FileService.query(name=source_file_entry.name, parent_id=dest_folder.id)
+                if existing_folder:
+                    new_folder = existing_folder[0]
+                else:
+                    new_folder = FileService.insert(
+                        {
+                            "id": get_uuid(),
+                            "parent_id": dest_folder.id,
+                            "tenant_id": source_file_entry.tenant_id,
+                            "created_by": current_user.id,
+                            "name": source_file_entry.name,
+                            "location": "",
+                            "size": 0,
+                            "type": FileType.FOLDER.value,
+                        }
+                    )
+
+                sub_files = FileService.list_all_files_by_parent_id(source_file_entry.id)
+                for sub_file in sub_files:
+                    _move_entry_recursive(sub_file, new_folder)
+
+                FileService.delete_by_id(source_file_entry.id)
+                return
+
+            old_parent_id = source_file_entry.parent_id
+            old_location = source_file_entry.location
+            filename = source_file_entry.name
+
+            new_location = filename
+            while STORAGE_IMPL.obj_exist(dest_folder.id, new_location):
+                new_location += "_"
+
+            try:
+                STORAGE_IMPL.move(old_parent_id, old_location, dest_folder.id, new_location)
+            except Exception as storage_err:
+                raise RuntimeError(f"Move file failed at storage layer: {str(storage_err)}")
+
+            FileService.update_by_id(
+                source_file_entry.id,
+                {
+                    "parent_id": dest_folder.id,
+                    "location": new_location,
+                },
+            )
+
+        for file in files:
+            _move_entry_recursive(file, dest_folder)
+
         return get_json_result(data=True)
 
     except Exception as e:
         return server_error_response(e)
@@ -194,6 +194,9 @@ def add_llm():
     elif factory == "Azure-OpenAI":
         api_key = apikey_json(["api_key", "api_version"])
 
+    elif factory == "OpenRouter":
+        api_key = apikey_json(["api_key", "provider_order"])
+
     llm = {
         "tenant_id": current_user.id,
         "llm_factory": factory,
@@ -15,11 +15,14 @@
 #
 import json
 import logging
+import string
 import os
 import re
+import secrets
 import time
+from datetime import datetime
 
-from flask import redirect, request, session
+from flask import redirect, request, session, Response
 from flask_login import current_user, login_required, login_user, logout_user
 from werkzeug.security import check_password_hash, generate_password_hash
@@ -46,6 +49,19 @@ from api.utils.api_utils import (
     validate_request,
 )
 from api.utils.crypt import decrypt
 from rag.utils.redis_conn import REDIS_CONN
 from api.apps import smtp_mail_server
+from api.utils.web_utils import (
+    send_email_html,
+    OTP_LENGTH,
+    OTP_TTL_SECONDS,
+    ATTEMPT_LIMIT,
+    ATTEMPT_LOCK_SECONDS,
+    RESEND_COOLDOWN_SECONDS,
+    otp_keys,
+    hash_code,
+    captcha_key,
+)
 
 
 @manager.route("/login", methods=["POST", "GET"])  # noqa: F821
@@ -825,3 +841,170 @@ def set_tenant_info():
         return get_json_result(data=True)
     except Exception as e:
         return server_error_response(e)
+
+
+@manager.route("/forget/captcha", methods=["GET"])  # noqa: F821
+def forget_get_captcha():
+    """
+    GET /forget/captcha?email=<email>
+    - Generate an image captcha and cache it in Redis under key captcha:{email} with TTL = OTP_TTL_SECONDS.
+    - Returns the captcha as a PNG image.
+    """
+    email = (request.args.get("email") or "")
+    if not email:
+        return get_json_result(data=False, code=settings.RetCode.ARGUMENT_ERROR, message="email is required")
+
+    users = UserService.query(email=email)
+    if not users:
+        return get_json_result(data=False, code=settings.RetCode.DATA_ERROR, message="invalid email")
+
+    # Generate captcha text
+    allowed = string.ascii_uppercase + string.digits
+    captcha_text = "".join(secrets.choice(allowed) for _ in range(OTP_LENGTH))
+    REDIS_CONN.set(captcha_key(email), captcha_text, 60)  # Valid for 60 seconds
+
+    from captcha.image import ImageCaptcha
+    image = ImageCaptcha(width=300, height=120, font_sizes=[50, 60, 70])
+    img_bytes = image.generate(captcha_text).read()
+    return Response(img_bytes, mimetype="image/png")
+
+
+@manager.route("/forget/otp", methods=["POST"])  # noqa: F821
+def forget_send_otp():
+    """
+    POST /forget/otp
+    - Verify the image captcha stored at captcha:{email} (case-insensitive).
+    - On success, generate an email OTP (A–Z with length = OTP_LENGTH), store hash + salt (and timestamp) in Redis with TTL, reset attempts and cooldown, and send the OTP via email.
+    """
+    req = request.get_json()
+    email = req.get("email") or ""
+    captcha = (req.get("captcha") or "").strip()
+
+    if not email or not captcha:
+        return get_json_result(data=False, code=settings.RetCode.ARGUMENT_ERROR, message="email and captcha required")
+
+    users = UserService.query(email=email)
+    if not users:
+        return get_json_result(data=False, code=settings.RetCode.DATA_ERROR, message="invalid email")
+
+    stored_captcha = REDIS_CONN.get(captcha_key(email))
+    if not stored_captcha:
+        return get_json_result(data=False, code=settings.RetCode.NOT_EFFECTIVE, message="invalid or expired captcha")
+    if (stored_captcha or "").strip().lower() != captcha.lower():
+        return get_json_result(data=False, code=settings.RetCode.AUTHENTICATION_ERROR, message="invalid or expired captcha")
+
+    # Delete captcha to prevent reuse
+    REDIS_CONN.delete(captcha_key(email))
+
+    k_code, k_attempts, k_last, k_lock = otp_keys(email)
+    now = int(time.time())
+    last_ts = REDIS_CONN.get(k_last)
+    if last_ts:
+        try:
+            elapsed = now - int(last_ts)
+        except Exception:
+            elapsed = RESEND_COOLDOWN_SECONDS
+        remaining = RESEND_COOLDOWN_SECONDS - elapsed
+        if remaining > 0:
+            return get_json_result(data=False, code=settings.RetCode.NOT_EFFECTIVE, message=f"you still have to wait {remaining} seconds")
+
+    # Generate OTP (uppercase letters only) and store hashed
+    otp = "".join(secrets.choice(string.ascii_uppercase) for _ in range(OTP_LENGTH))
+    salt = os.urandom(16)
+    code_hash = hash_code(otp, salt)
+    REDIS_CONN.set(k_code, f"{code_hash}:{salt.hex()}", OTP_TTL_SECONDS)
+    REDIS_CONN.set(k_attempts, 0, OTP_TTL_SECONDS)
+    REDIS_CONN.set(k_last, now, OTP_TTL_SECONDS)
+    REDIS_CONN.delete(k_lock)
+
+    ttl_min = OTP_TTL_SECONDS // 60
+
+    if not smtp_mail_server:
+        logging.warning("SMTP mail server not initialized; skip sending email.")
+    else:
+        try:
+            send_email_html(
+                subject="Your Password Reset Code",
+                to_email=email,
+                template_key="reset_code",
+                code=otp,
+                ttl_min=ttl_min,
+            )
+        except Exception:
+            return get_json_result(data=False, code=settings.RetCode.SERVER_ERROR, message="failed to send email")
+
+    return get_json_result(data=True, code=settings.RetCode.SUCCESS, message="verification passed, email sent")
+
+
+@manager.route("/forget", methods=["POST"])  # noqa: F821
+def forget():
+    """
+    POST: Verify email + OTP and reset password, then log the user in.
+    Request JSON: { email, otp, new_password, confirm_new_password }
+    """
+    req = request.get_json()
+    email = req.get("email") or ""
+    otp = (req.get("otp") or "").strip()
+    new_pwd = req.get("new_password")
+    new_pwd2 = req.get("confirm_new_password")
+
+    if not all([email, otp, new_pwd, new_pwd2]):
+        return get_json_result(data=False, code=settings.RetCode.ARGUMENT_ERROR, message="email, otp and passwords are required")
+
+    # For reset, passwords are provided as-is (no decrypt needed)
+    if new_pwd != new_pwd2:
+        return get_json_result(data=False, code=settings.RetCode.ARGUMENT_ERROR, message="passwords do not match")
+
+    users = UserService.query(email=email)
+    if not users:
+        return get_json_result(data=False, code=settings.RetCode.DATA_ERROR, message="invalid email")
+
+    user = users[0]
+    # Verify OTP from Redis
+    k_code, k_attempts, k_last, k_lock = otp_keys(email)
+    if REDIS_CONN.get(k_lock):
+        return get_json_result(data=False, code=settings.RetCode.NOT_EFFECTIVE, message="too many attempts, try later")
+
+    stored = REDIS_CONN.get(k_code)
+    if not stored:
+        return get_json_result(data=False, code=settings.RetCode.NOT_EFFECTIVE, message="expired otp")
+
+    try:
+        stored_hash, salt_hex = str(stored).split(":", 1)
+        salt = bytes.fromhex(salt_hex)
+    except Exception:
+        return get_json_result(data=False, code=settings.RetCode.EXCEPTION_ERROR, message="otp storage corrupted")
+
+    # Case-insensitive verification: OTP generated uppercase
+    calc = hash_code(otp.upper(), salt)
+    if calc != stored_hash:
+        # bump attempts
+        try:
+            attempts = int(REDIS_CONN.get(k_attempts) or 0) + 1
+        except Exception:
+            attempts = 1
+        REDIS_CONN.set(k_attempts, attempts, OTP_TTL_SECONDS)
+        if attempts >= ATTEMPT_LIMIT:
+            REDIS_CONN.set(k_lock, int(time.time()), ATTEMPT_LOCK_SECONDS)
+        return get_json_result(data=False, code=settings.RetCode.AUTHENTICATION_ERROR, message="expired otp")
+
+    # Success: consume OTP and reset password
+    REDIS_CONN.delete(k_code)
+    REDIS_CONN.delete(k_attempts)
+    REDIS_CONN.delete(k_last)
+    REDIS_CONN.delete(k_lock)
+
+    try:
+        UserService.update_user_password(user.id, new_pwd)
+    except Exception as e:
+        logging.exception(e)
+        return get_json_result(data=False, code=settings.RetCode.EXCEPTION_ERROR, message="failed to reset password")
+
+    # Auto login (reuse login flow)
+    user.access_token = get_uuid()
+    login_user(user)
+    user.update_time = (current_timestamp(),)
+    user.update_date = (datetime_format(datetime.now()),)
+    user.save()
+    msg = "Password reset successful. Logged in."
+    return construct_response(data=user.to_json(), auth=user.get_id(), message=msg)
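End to end, the three new endpoints chain captcha → OTP → reset. A client-side sketch of that flow (the `/v1/user` prefix and port are assumptions about a typical deployment; field names follow the handlers above):

```python
import requests

BASE = "http://127.0.0.1:9380/v1/user"  # assumed blueprint prefix
email = "alice@example.com"             # hypothetical account

# 1. Fetch the PNG captcha bound to this email (cached in Redis for 60 s).
png = requests.get(f"{BASE}/forget/captcha", params={"email": email}).content

# 2. Post the text the user read from the image; the server emails an OTP.
requests.post(f"{BASE}/forget/otp", json={"email": email, "captcha": "<captcha text>"})

# 3. Exchange the emailed OTP for a password reset plus auto-login.
resp = requests.post(f"{BASE}/forget", json={
    "email": email,
    "otp": "<code from email>",
    "new_password": "N3w-Passw0rd",
    "confirm_new_password": "N3w-Passw0rd",
})
print(resp.json())
```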
@@ -313,9 +313,75 @@ class RetryingPooledMySQLDatabase(PooledMySQLDatabase):
             raise
 
 
+class RetryingPooledPostgresqlDatabase(PooledPostgresqlDatabase):
+    def __init__(self, *args, **kwargs):
+        self.max_retries = kwargs.pop("max_retries", 5)
+        self.retry_delay = kwargs.pop("retry_delay", 1)
+        super().__init__(*args, **kwargs)
+
+    def execute_sql(self, sql, params=None, commit=True):
+        for attempt in range(self.max_retries + 1):
+            try:
+                return super().execute_sql(sql, params, commit)
+            except (OperationalError, InterfaceError) as e:
+                # PostgreSQL specific error codes
+                # 57P01: admin_shutdown
+                # 57P02: crash_shutdown
+                # 57P03: cannot_connect_now
+                # 08006: connection_failure
+                # 08003: connection_does_not_exist
+                # 08000: connection_exception
+                error_messages = ['connection', 'server closed', 'connection refused',
+                                  'no connection to the server', 'terminating connection']
+
+                should_retry = any(msg in str(e).lower() for msg in error_messages)
+
+                if should_retry and attempt < self.max_retries:
+                    logging.warning(
+                        f"PostgreSQL connection issue (attempt {attempt+1}/{self.max_retries}): {e}"
+                    )
+                    self._handle_connection_loss()
+                    time.sleep(self.retry_delay * (2 ** attempt))
+                else:
+                    logging.error(f"PostgreSQL execution failure: {e}")
+                    raise
+        return None
+
+    def _handle_connection_loss(self):
+        try:
+            self.close()
+        except Exception:
+            pass
+        try:
+            self.connect()
+        except Exception as e:
+            logging.error(f"Failed to reconnect to PostgreSQL: {e}")
+            time.sleep(0.1)
+            self.connect()
+
+    def begin(self):
+        for attempt in range(self.max_retries + 1):
+            try:
+                return super().begin()
+            except (OperationalError, InterfaceError) as e:
+                error_messages = ['connection', 'server closed', 'connection refused',
+                                  'no connection to the server', 'terminating connection']
+
+                should_retry = any(msg in str(e).lower() for msg in error_messages)
+
+                if should_retry and attempt < self.max_retries:
+                    logging.warning(
+                        f"PostgreSQL connection lost during transaction (attempt {attempt+1}/{self.max_retries})"
+                    )
+                    self._handle_connection_loss()
+                    time.sleep(self.retry_delay * (2 ** attempt))
+                else:
+                    raise
+
+
 class PooledDatabase(Enum):
     MYSQL = RetryingPooledMySQLDatabase
-    POSTGRES = PooledPostgresqlDatabase
+    POSTGRES = RetryingPooledPostgresqlDatabase
 
 
 class DatabaseMigrator(Enum):
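With the defaults above (`max_retries=5`, `retry_delay=1`), a dropped connection is retried on an exponential backoff schedule; a quick sketch of the resulting waits:

```python
max_retries, retry_delay = 5, 1  # defaults popped in __init__
delays = [retry_delay * (2 ** attempt) for attempt in range(max_retries)]
print(delays)  # [1, 2, 4, 8, 16] seconds between successive retries
```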
@@ -476,6 +476,16 @@ class FileService(CommonService):
 
         return err, files
 
+    @classmethod
+    @DB.connection_context()
+    def list_all_files_by_parent_id(cls, parent_id):
+        try:
+            files = cls.model.select().where((cls.model.parent_id == parent_id) & (cls.model.id != parent_id))
+            return list(files)
+        except Exception:
+            logging.exception("list_by_parent_id failed")
+            raise RuntimeError("Database error (list_by_parent_id)!")
+
     @staticmethod
     def parse_docs(file_objs, user_id):
         exe = ThreadPoolExecutor(max_workers=12)
api/utils/email_templates.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+"""
+Reusable HTML email templates and registry.
+"""
+
+# Invitation email template
+INVITE_EMAIL_TMPL = """
+<p>Hi {{email}},</p>
+<p>{{inviter}} has invited you to join their team (ID: {{tenant_id}}).</p>
+<p>Click the link below to complete your registration:<br>
+<a href="{{invite_url}}">{{invite_url}}</a></p>
+<p>If you did not request this, please ignore this email.</p>
+"""
+
+# Password reset code template
+RESET_CODE_EMAIL_TMPL = """
+<p>Hello,</p>
+<p>Your password reset code is: <b>{{ code }}</b></p>
+<p>This code will expire in {{ ttl_min }} minutes.</p>
+"""
+
+# Template registry
+EMAIL_TEMPLATES = {
+    "invite": INVITE_EMAIL_TMPL,
+    "reset_code": RESET_CODE_EMAIL_TMPL,
+}
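The templates are Jinja snippets rendered through Flask's `render_template_string`; a standalone preview of the reset-code template (a sketch, run outside the real app):

```python
from flask import Flask, render_template_string

RESET_CODE_EMAIL_TMPL = """
<p>Hello,</p>
<p>Your password reset code is: <b>{{ code }}</b></p>
<p>This code will expire in {{ ttl_min }} minutes.</p>
"""

app = Flask(__name__)  # throwaway app just to get a render context
with app.app_context():
    print(render_template_string(RESET_CODE_EMAIL_TMPL, code="ABCDEFGH", ttl_min=5))
```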
@@ -74,12 +74,12 @@ def get_es_cluster_stats() -> dict:
         raise Exception("Elasticsearch is not in use.")
     try:
         return {
-            "alive": True,
+            "status": "alive",
             "message": ESConnection().get_cluster_stats()
         }
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -90,12 +90,12 @@ def get_infinity_status():
         raise Exception("Infinity is not in use.")
     try:
         return {
-            "alive": True,
+            "status": "alive",
             "message": InfinityConnection().health()
         }
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -107,12 +107,12 @@ def get_mysql_status():
         headers = ['id', 'user', 'host', 'db', 'command', 'time', 'state', 'info']
         cursor.close()
         return {
-            "alive": True,
+            "status": "alive",
             "message": [dict(zip(headers, r)) for r in res_rows]
         }
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -122,12 +122,12 @@ def check_minio_alive():
     try:
         response = requests.get(f'http://{rag_settings.MINIO["host"]}/minio/health/live')
         if response.status_code == 200:
-            return {'alive': True, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+            return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
         else:
-            return {'alive': False, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+            return {"status": "timeout", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -135,12 +135,12 @@ def check_minio_alive():
 def get_redis_info():
     try:
         return {
-            "alive": True,
+            "status": "alive",
             "message": REDIS_CONN.info()
         }
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -150,12 +150,12 @@ def check_ragflow_server_alive():
     try:
         response = requests.get(f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping')
         if response.status_code == 200:
-            return {'alive': True, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+            return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
         else:
-            return {'alive': False, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+            return {"status": "timeout", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
     except Exception as e:
         return {
-            "alive": False,
+            "status": "timeout",
             "message": f"error: {str(e)}",
         }

@@ -192,9 +192,7 @@ def run_health_checks() -> tuple[dict, bool]:
     except Exception:
         result["storage"] = "nok"
 
-
-    all_ok = (result.get("db") == "ok") and (result.get("redis") == "ok") and (result.get("doc_engine") == "ok") and (result.get("storage") == "ok")
+    all_ok = (result.get("db") == "ok") and (result.get("redis") == "ok") and (result.get("doc_engine") == "ok") and (
+            result.get("storage") == "ok")
     result["status"] = "ok" if all_ok else "nok"
     return result, all_ok
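After this refactor every probe returns the same shape, `{"status": "alive" | "timeout", "message": ...}`, so callers such as `ServiceMgr` can branch on it uniformly (a sketch using one of the probes above):

```python
detail = get_redis_info()  # any probe above returns the same contract

if detail.get("status") == "alive":
    print("service up:", detail["message"])
else:
    print("service down or timed out:", detail["message"])
```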
@@ -24,6 +24,7 @@ from urllib.parse import urlparse
 from api.apps import smtp_mail_server
 from flask_mail import Message
 from flask import render_template_string
+from api.utils.email_templates import EMAIL_TEMPLATES
 from selenium import webdriver
 from selenium.common.exceptions import TimeoutException
 from selenium.webdriver.chrome.options import Options

@@ -34,6 +35,12 @@ from selenium.webdriver.support.ui import WebDriverWait
 from webdriver_manager.chrome import ChromeDriverManager
 
 
+OTP_LENGTH = 8
+OTP_TTL_SECONDS = 5 * 60
+ATTEMPT_LIMIT = 5
+ATTEMPT_LOCK_SECONDS = 30 * 60
+RESEND_COOLDOWN_SECONDS = 60
+
 
 CONTENT_TYPE_MAP = {
     # Office

@@ -178,24 +185,49 @@ def get_float(req: dict, key: str, default: float | int = 10.0) -> float:
     return default
 
 
-INVITE_EMAIL_TMPL = """
-<p>Hi {{email}},</p>
-<p>{{inviter}} has invited you to join their team (ID: {{tenant_id}}).</p>
-<p>Click the link below to complete your registration:<br>
-<a href="{{invite_url}}">{{invite_url}}</a></p>
-<p>If you did not request this, please ignore this email.</p>
-"""
+def send_email_html(subject: str, to_email: str, template_key: str, **context):
+    """Generic HTML email sender using shared templates.
+    template_key must exist in EMAIL_TEMPLATES.
+    """
+    from api.apps import app
+    tmpl = EMAIL_TEMPLATES.get(template_key)
+    if not tmpl:
+        raise ValueError(f"Unknown email template: {template_key}")
+    with app.app_context():
+        msg = Message(subject=subject, recipients=[to_email])
+        msg.html = render_template_string(tmpl, **context)
+        smtp_mail_server.send(msg)
 
 
 def send_invite_email(to_email, invite_url, tenant_id, inviter):
-    from api.apps import app
-    with app.app_context():
-        msg = Message(subject="RAGFlow Invitation",
-                      recipients=[to_email])
-        msg.html = render_template_string(
-            INVITE_EMAIL_TMPL,
-            email=to_email,
-            invite_url=invite_url,
-            tenant_id=tenant_id,
-            inviter=inviter,
-        )
-        smtp_mail_server.send(msg)
+    # Reuse the generic HTML sender with 'invite' template
+    send_email_html(
+        subject="RAGFlow Invitation",
+        to_email=to_email,
+        template_key="invite",
+        email=to_email,
+        invite_url=invite_url,
+        tenant_id=tenant_id,
+        inviter=inviter,
+    )
+
+
+def otp_keys(email: str):
+    email = (email or "").strip().lower()
+    return (
+        f"otp:{email}",
+        f"otp_attempts:{email}",
+        f"otp_last_sent:{email}",
+        f"otp_lock:{email}",
+    )
+
+
+def hash_code(code: str, salt: bytes) -> str:
+    import hashlib
+    import hmac
+    return hmac.new(salt, (code or "").encode("utf-8"), hashlib.sha256).hexdigest()
+
+
+def captcha_key(email: str) -> str:
+    return f"captcha:{email}"
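Note that the OTP itself never reaches Redis in clear text: only an HMAC-SHA256 of the code under a fresh random salt is stored as `hash:salt_hex`. A round-trip check of the helpers above (a sketch; `hash_code` is the function defined in this hunk):

```python
import os

otp = "ABCDEFGH"  # example code, OTP_LENGTH characters
salt = os.urandom(16)
stored = f"{hash_code(otp, salt)}:{salt.hex()}"  # what the endpoint writes to Redis

# Verification recomputes the HMAC over the (uppercased) user input.
stored_hash, salt_hex = stored.split(":", 1)
assert hash_code("abcdefgh".upper(), bytes.fromhex(salt_hex)) == stored_hash
```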
@@ -31,7 +31,6 @@
     "entities_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
     "pagerank_fea": {"type": "integer", "default": 0},
     "tag_feas": {"type": "varchar", "default": "", "analyzer": "rankfeatures"},
-
     "from_entity_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
     "to_entity_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
     "entity_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},

@@ -39,6 +38,6 @@
     "source_id": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
     "n_hop_with_weight": {"type": "varchar", "default": ""},
     "removed_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
-
-    "doc_type_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"}
+    "doc_type_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"},
+    "toc_kwd": {"type": "varchar", "default": "", "analyzer": "whitespace-#"}
 }
@@ -1,5 +1,5 @@
 ---
-sidebar_position: -1
+sidebar_position: -10
 slug: /configure_knowledge_base
 ---
@@ -37,7 +37,7 @@ This section covers the following topics:
 
 ### Select chunking method
 
-RAGFlow offers multiple chunking template to facilitate chunking files of different layouts and ensure semantic integrity. In **Chunking method**, you can choose the default template that suits the layouts and formats of your files. The following table shows the descriptions and the compatible file formats of each supported chunk template:
+RAGFlow offers multiple built-in chunking templates to facilitate chunking files of different layouts and ensure semantic integrity. From the **Built-in** chunking method dropdown under **Parse type**, you can choose the default template that suits the layouts and formats of your files. The following table shows the descriptions and the compatible file formats of each supported chunk template:
 
 | **Template** | Description | File format |
 |--------------|-----------------------------------------------------------------------|-----------------------------------------------------------------------------------------------|
@@ -54,9 +54,23 @@ RAGFlow offers multiple chunking template to facilitate chunking files of differ
 | One | Each document is chunked in its entirety (as one). | DOCX, XLSX, XLS (Excel 97-2003), PDF, TXT |
 | Tag | The dataset functions as a tag set for the others. | XLSX, CSV/TXT |
 
-You can also change a file's chunking method on the **Datasets** page.
+You can also change a file's chunking method on the **Files** page.
+
+<details>
+<summary>From v0.21.0 onward, RAGFlow supports an ingestion pipeline for customized data ingestion and cleansing workflows.</summary>
+
+To use a customized data pipeline:
+
+1. On the **Agent** page, click **+ Create agent** > **Create from blank**.
+2. Select **Ingestion pipeline** and name your data pipeline in the popup, then click **Save** to show the data pipeline canvas.
+3. After updating your data pipeline, click **Save** on the top right of the canvas.
+4. Navigate to the **Configuration** page of your dataset, select **Choose pipeline** in **Ingestion pipeline**.
+
+   *Your saved data pipeline will appear in the dropdown menu below.*
+
+</details>
 
 ### Select embedding model
@@ -53,25 +53,31 @@ Whether to enable entity resolution. You can think of this as an entity deduplic
 - (Default) Disable entity resolution.
 - Enable entity resolution. This option consumes more tokens.
 
-### Community report generation
+### Community reports
 
 In a knowledge graph, a community is a cluster of entities linked by relationships. You can have the LLM generate an abstract for each community, known as a community report. See [here](https://www.microsoft.com/en-us/research/blog/graphrag-improving-global-search-via-dynamic-community-selection/) for more information. This indicates whether to generate community reports:
 
 - Generate community reports. This option consumes more tokens.
 - (Default) Do not generate community reports.
 
-## Procedure
+## Quickstart
 
-1. On the **Configuration** page of your dataset, switch on **Extract knowledge graph** or adjust its settings as needed, and click **Save** to confirm your changes.
-
-   *The default knowledge graph configurations for your dataset are now set.*
-
-2. The knowledge graph of your dataset does *not* automatically update *until* a newly uploaded file is parsed.
-
-   _A **Knowledge graph** entry appears under **Configuration** once a knowledge graph is created._
-
-3. Click **Knowledge graph** to view the details of the generated graph.
+1. Navigate to the **Configuration** page of your dataset and update:
+
+   - Entity types: *Required* - Specifies the entity types in the knowledge graph to generate. You don't have to stick with the default, but you need to customize them for your documents.
+   - Method: *Optional*
+   - Entity resolution: *Optional*
+   - Community reports: *Optional*
+
+   - *The default knowledge graph configurations for your dataset are now set and files uploaded from this point onward will automatically use these settings during parsing.*
+   - *Files parsed before this update will retain their original knowledge graph settings.*
+
+2. Navigate to the **Files** page of your dataset, click the **Generate** button on the top right corner of the page, then select **Knowledge graph** from the dropdown to initiate the knowledge graph generation process.
+
+   *You can click the pause button in the dropdown to halt the build process when necessary.*
+
+3. Go back to the **Configuration** page:
+
+   *Once a knowledge graph is generated, the **Knowledge graph** field changes from `Not generated` to `Generated at a specific timestamp`. You can delete it by clicking the recycle bin button to the right of the field.*
+
 4. To use the created knowledge graph, do either of the following:
 
    - In the **Chat setting** panel of your chat app, switch on the **Use knowledge graph** toggle.
@@ -79,17 +85,13 @@ In a knowledge graph, a community is a cluster of entities linked by relationshi
 
 ## Frequently asked questions
 
-### Can I have different knowledge graph settings for different files in my dataset?
-
-Yes, you can. Just one graph is generated per dataset. The smaller graphs of your files will be *combined* into one big, unified graph at the end of the graph extraction process.
-
 ### Does the knowledge graph automatically update when I remove a related file?
 
-Nope. The knowledge graph does *not* automatically update *until* a newly uploaded document is parsed.
+Nope. The knowledge graph does *not* update *until* you regenerate a knowledge graph for your dataset.
 
 ### How to remove a generated knowledge graph?
 
-To remove the generated knowledge graph, delete all related files in your dataset. Although the **Knowledge graph** entry will still be visible, the graph has actually been deleted.
+On the **Configuration** page of your dataset, find the **Knowledge graph** field and click the recycle bin button to the right of the field.
 
 ### Where is the created knowledge graph stored?
@@ -72,3 +72,22 @@ The maximum number of clusters to create. Defaults to 64, with a maximum limit o
 ### Random seed
 
 A random seed. Click **+** to change the seed value.
+
+## Quickstart
+
+1. Navigate to the **Configuration** page of your dataset and update:
+
+   - Prompt: *Optional* - We recommend that you keep it as-is until you understand the mechanism behind.
+   - Max token: *Optional*
+   - Threshold: *Optional*
+   - Max cluster: *Optional*
+
+2. Navigate to the **Files** page of your dataset, click the **Generate** button on the top right corner of the page, then select **RAPTOR** from the dropdown to initiate the RAPTOR build process.
+
+   *You can click the pause button in the dropdown to halt the build process when necessary.*
+
+3. Go back to the **Configuration** page:
+
+   *The **RAPTOR** field changes from `Not generated` to `Generated at a specific timestamp` when a RAPTOR hierarchical tree structure is generated. You can delete it by clicking the recycle bin button to the right of the field.*
+
+4. Once a RAPTOR hierarchical tree structure is generated, your chat assistant and **Retrieval** agent component will use it for retrieval as a default.
docs/guides/dataset/extract_table_of_contents.md (new file, 39 lines)
@@ -0,0 +1,39 @@
+---
+sidebar_position: 4
+slug: /enable_table_of_contents
+---
+
+# Extract table of contents
+
+Extract table of contents (TOC) from documents to provide long-context RAG and improve retrieval.
+
+---
+
+During indexing, this technique uses an LLM to extract and generate chapter information, which is added to each chunk to provide sufficient global context. At the retrieval stage, it first uses the chunks matched by search, then supplements missing chunks based on the table of contents structure. This addresses issues caused by chunk fragmentation and insufficient context, improving answer quality.
+
+:::danger WARNING
+Enabling TOC extraction requires significant memory, computational resources, and tokens.
+:::
+
+## Prerequisites
+
+The system's default chat model is used to summarize clustered content. Before proceeding, ensure that you have a chat model properly configured.
+
+## Quickstart
+
+1. Navigate to the **Configuration** page.
+
+2. Enable **TOC Enhance**.
+
+3. To use this technique during retrieval, do either of the following:
+
+   - In the **Chat setting** panel of your chat app, switch on the **TOC Enhance** toggle.
+   - If you are using an agent, click the **Retrieval** agent component to specify the dataset(s) and switch on the **TOC Enhance** toggle.
+
+## Frequently asked questions
+
+### Will previously parsed files be searched using the TOC enhancement feature once I enable `TOC Enhance`?
+
+No. Only files parsed after you enable **TOC Enhance** will be searched using the TOC enhancement feature. To apply this feature to files parsed before enabling **TOC Enhance**, you must reparse them.
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 1
+sidebar_position: -4
 slug: /select_pdf_parser
 ---
@@ -25,7 +25,7 @@ RAGFlow isn't one-size-fits-all. It is built for flexibility and supports deeper
 - **One**
 - To use a third-party visual model for parsing PDFs, ensure you have set a default img2txt model under **Set default models** on the **Model providers** page.
 
-## Procedure
+## Quickstart
 
 1. On your dataset's **Configuration** page, select a chunking method, say **General**.
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 0
+sidebar_position: -7
 slug: /set_metada
 ---
@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: -2
 slug: /set_page_rank
 ---
@@ -42,8 +42,8 @@ A tag set is *not* involved in document indexing or retrieval. Do not specify a
 :::
 
 1. Click **+ Create dataset** to create a dataset.
-2. Navigate to the **Configuration** page of the created dataset and choose **Tag** as the default chunking method.
-3. Navigate to the **Dataset** page and upload and parse your table file in XLSX, CSV, or TXT formats.
+2. Navigate to the **Configuration** page of the created dataset, select **Built-in** in **Ingestion pipeline**, then choose **Tag** as the default chunking method from the **Built-in** drop-down menu.
+3. Go back to the **Files** page and upload and parse your table file in XLSX, CSV, or TXT formats.
 
    _A tag cloud appears under the **Tag view** section, indicating the tag set is created:_
 
 4. Click the **Table** tab to view the tag frequency table:
@@ -46,16 +46,23 @@ The Admin CLI and Admin Service form a client-server architectural suite for RAG
 2. Install ragflow-cli.
 
 ```bash
-pip install ragflow-cli
+pip install ragflow-cli==0.21.0
 ```
 
 3. Launch the CLI client:
 
 ```bash
-ragflow-cli -h 0.0.0.0 -p 9381
+ragflow-cli -h 127.0.0.1 -p 9381
 ```
 
-Enter superuser's password to login. Default password is `admin`.
+You will be prompted to enter the superuser's password to log in.
+The default password is admin.
 
 **Parameters:**
 
 - -h: RAGFlow admin server host address
 - -p: RAGFlow admin server port
@@ -343,19 +343,20 @@ You can add keywords or questions to a file chunk to improve its ranking for que
 
 Conversations in RAGFlow are based on a particular dataset or multiple datasets. Once you have created your dataset and finished file parsing, you can go ahead and start an AI conversation.
 
-1. Click the **Chat** tab in the middle top of the mage **>** **Create an assistant** to show the **Chat Configuration** dialogue *of your next dialogue*.
+1. Click the **Chat** tab in the middle top of the page **>** **Create chat** to create a chat assistant.
+2. Click the created chat app to enter its configuration page.
 
    > RAGFlow offers the flexibility of choosing a different chat model for each dialogue, while allowing you to set the default models in **System Model Settings**.
 
-2. Update **Assistant settings**:
+2. Update **Chat setting** on the right of the configuration page:
 
    - Name your assistant and specify your datasets.
   - **Empty response**:
     - If you wish to *confine* RAGFlow's answers to your datasets, leave a response here. Then when it doesn't retrieve an answer, it *uniformly* responds with what you set here.
     - If you wish RAGFlow to *improvise* when it doesn't retrieve an answer from your datasets, leave it blank, which may give rise to hallucinations.
 
-3. Update **Prompt engine** or leave it as is for the beginning.
+3. Update **System prompt** or leave it as is for the beginning.
 
-4. Update **Model settings**.
+4. Select a chat model in the **Model** dropdown list.
 
 5. Now, let's start the show:
@@ -45,7 +45,7 @@ Released on October 15, 2025.
 - Claude Sonnet 4.5
 - Meituan LongCat-Flash-Thinking
 
-## New agent templates
+### New agent templates
 
 - Company Research Report Deep Dive Agent: Designed for financial institutions to help analysts quickly organize information, generate research reports, and make investment decisions.
 - Orchestratable Ingestion Pipeline Template: Allows users to apply this template on the canvas to rapidly establish standardized data ingestion and cleansing processes.
@@ -227,7 +227,7 @@ class Extractor:
     async def _handle_entity_relation_summary(self, entity_or_relation_name: str, description: str) -> str:
         summary_max_tokens = 512
         use_description = truncate(description, summary_max_tokens)
-        description_list = (use_description.split(GRAPH_FIELD_SEP),)
+        description_list = use_description.split(GRAPH_FIELD_SEP)
         if len(description_list) <= 12:
             return use_description
         prompt_template = SUMMARIZE_DESCRIPTIONS_PROMPT
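This hunk removes a classic trailing-comma bug: the parenthesized expression with a trailing comma built a one-element tuple, so the length check compared the tuple (always length 1), never the split segments. A minimal illustration:

```python
text = "a|b|c|d"

buggy = (text.split("|"),)   # 1-tuple wrapping the list
fixed = text.split("|")      # the list itself

print(len(buggy))  # 1  -> `len(...) <= 12` was always True
print(len(fixed))  # 4  -> actually counts the split segments
```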
@@ -44,7 +44,7 @@ dependencies = [
     "groq==0.9.0",
     "hanziconv==0.3.2",
     "html-text==0.6.2",
-    "httpx[socks]==0.27.2",
+    "httpx[socks]>=0.28.1,<0.29.0",
     "huggingface-hub>=0.25.0,<0.26.0",
     "infinity-sdk==0.6.0",
     "infinity-emb>=0.0.66,<0.0.67",

@@ -56,7 +56,7 @@ dependencies = [
     "mistralai==0.4.2",
     "nltk==3.9.1",
     "numpy>=1.26.0,<2.0.0",
-    "ollama==0.2.1",
+    "ollama>=0.5.0",
     "onnxruntime==1.19.2; sys_platform == 'darwin' or platform_machine != 'x86_64'",
     "onnxruntime-gpu==1.19.2; sys_platform != 'darwin' and platform_machine == 'x86_64'",
     "openai>=1.45.0",

@@ -102,7 +102,8 @@ dependencies = [
     "tika==2.6.0",
     "tiktoken==0.7.0",
     "umap_learn==0.5.6",
-    "vertexai==1.64.0",
+    "vertexai==1.70.0",
+    "google-genai>=1.41.0,<2.0.0",
     "volcengine==1.0.194",
     "voyageai==0.2.3",
     "webdriver-manager==4.0.1",

@@ -113,7 +114,7 @@ dependencies = [
     "xpinyin==0.7.6",
     "yfinance==0.2.65",
     "zhipuai==2.0.1",
-    "google-generativeai>=0.8.1,<0.9.0",
+    "google-generativeai>=0.8.1,<0.9.0",  # Needed for cv_model and embedding_model
     "python-docx>=1.1.2,<2.0.0",
     "pypdf2>=3.0.1,<4.0.0",
     "graspologic>=3.4.1,<4.0.0",

@@ -135,6 +136,7 @@ dependencies = [
     "lark>=1.2.2",
     "mammoth>=1.11.0",
     "markdownify>=1.2.0",
+    "captcha>=0.7.1",
 ]
 
 [project.optional-dependencies]
@@ -1165,15 +1165,13 @@ class GoogleChat(Base):
             else:
                 self.client = AnthropicVertex(region=region, project_id=project_id)
         else:
-            import vertexai.generative_models as glm
-            from google.cloud import aiplatform
+            from google import genai
 
             if access_token:
-                credits = service_account.Credentials.from_service_account_info(access_token)
-                aiplatform.init(credentials=credits, project=project_id, location=region)
+                credits = service_account.Credentials.from_service_account_info(access_token, scopes=scopes)
+                self.client = genai.Client(vertexai=True, project=project_id, location=region, credentials=credits)
             else:
-                aiplatform.init(project=project_id, location=region)
-            self.client = glm.GenerativeModel(model_name=self.model_name)
+                self.client = genai.Client(vertexai=True, project=project_id, location=region)
 
     def _clean_conf(self, gen_conf):
         if "claude" in self.model_name:
@ -1188,38 +1186,11 @@ class GoogleChat(Base):
|
||||
del gen_conf[k]
|
||||
return gen_conf
|
||||
|
||||
def _get_thinking_config(self, gen_conf):
|
||||
"""Extract and create ThinkingConfig from gen_conf.
|
||||
|
||||
Default behavior for Vertex AI Generative Models: thinking_budget=0 (disabled)
|
||||
unless explicitly specified by the user. This does not apply to Claude models.
|
||||
|
||||
Users can override by setting thinking_budget in gen_conf/llm_setting:
|
||||
- 0: Disabled (default)
|
||||
- 1-24576: Manual budget
|
||||
- -1: Auto (model decides)
|
||||
"""
|
||||
# Claude models don't support ThinkingConfig
|
||||
if "claude" in self.model_name:
|
||||
gen_conf.pop("thinking_budget", None)
|
||||
return None
|
||||
|
||||
# For Vertex AI Generative Models, default to thinking disabled
|
||||
thinking_budget = gen_conf.pop("thinking_budget", 0)
|
||||
|
||||
if thinking_budget is not None:
|
||||
try:
|
||||
import vertexai.generative_models as glm # type: ignore
|
||||
return glm.ThinkingConfig(thinking_budget=thinking_budget)
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _chat(self, history, gen_conf={}, **kwargs):
|
||||
system = history[0]["content"] if history and history[0]["role"] == "system" else ""
|
||||
thinking_config = self._get_thinking_config(gen_conf)
|
||||
gen_conf = self._clean_conf(gen_conf)
|
||||
|
||||
if "claude" in self.model_name:
|
||||
gen_conf = self._clean_conf(gen_conf)
|
||||
response = self.client.messages.create(
|
||||
model=self.model_name,
|
||||
messages=[h for h in history if h["role"] != "system"],
|
||||
@ -1235,28 +1206,63 @@ class GoogleChat(Base):
|
||||
response["usage"]["input_tokens"] + response["usage"]["output_tokens"],
|
||||
)
|
||||
|
||||
self.client._system_instruction = system
|
||||
hist = []
|
||||
# Gemini models with google-genai SDK
|
||||
# Set default thinking_budget=0 if not specified
|
||||
if "thinking_budget" not in gen_conf:
|
||||
gen_conf["thinking_budget"] = 0
|
||||
|
||||
thinking_budget = gen_conf.pop("thinking_budget", 0)
|
||||
gen_conf = self._clean_conf(gen_conf)
|
||||
|
||||
# Build GenerateContentConfig
|
||||
try:
|
||||
from google.genai.types import GenerateContentConfig, ThinkingConfig, Content, Part
|
||||
except ImportError as e:
|
||||
logging.error(f"[GoogleChat] Failed to import google-genai: {e}. Please install: pip install google-genai>=1.41.0")
|
||||
raise
|
||||
|
||||
config_dict = {}
|
||||
if system:
|
||||
config_dict["system_instruction"] = system
|
||||
if "temperature" in gen_conf:
|
||||
config_dict["temperature"] = gen_conf["temperature"]
|
||||
if "top_p" in gen_conf:
|
||||
config_dict["top_p"] = gen_conf["top_p"]
|
||||
if "max_output_tokens" in gen_conf:
|
||||
config_dict["max_output_tokens"] = gen_conf["max_output_tokens"]
|
||||
|
||||
# Add ThinkingConfig
|
||||
config_dict["thinking_config"] = ThinkingConfig(thinking_budget=thinking_budget)
|
||||
|
||||
config = GenerateContentConfig(**config_dict)
|
||||
|
||||
# Convert history to google-genai Content format
|
||||
contents = []
|
||||
for item in history:
|
||||
if item["role"] == "system":
|
||||
continue
|
||||
hist.append(deepcopy(item))
|
||||
item = hist[-1]
|
||||
if "role" in item and item["role"] == "assistant":
|
||||
item["role"] = "model"
|
||||
if "content" in item:
|
||||
item["parts"] = [
|
||||
{
|
||||
"text": item.pop("content"),
|
||||
}
|
||||
]
|
||||
# google-genai uses 'model' instead of 'assistant'
|
||||
role = "model" if item["role"] == "assistant" else item["role"]
|
||||
content = Content(
|
||||
role=role,
|
||||
parts=[Part(text=item["content"])]
|
||||
)
|
||||
contents.append(content)
|
||||
|
||||
response = self.client.models.generate_content(
|
||||
model=self.model_name,
|
||||
contents=contents,
|
||||
config=config
|
||||
)
|
||||
|
||||
if thinking_config:
|
||||
response = self.client.generate_content(hist, generation_config=gen_conf, thinking_config=thinking_config)
|
||||
else:
|
||||
response = self.client.generate_content(hist, generation_config=gen_conf)
|
||||
ans = response.text
|
||||
return ans, response.usage_metadata.total_token_count
|
||||
# Get token count from response
|
||||
try:
|
||||
total_tokens = response.usage_metadata.total_token_count
|
||||
except Exception:
|
||||
total_tokens = 0
|
||||
|
||||
return ans, total_tokens
|
||||
|
||||
def chat_streamly(self, system, history, gen_conf={}, **kwargs):
|
||||
if "claude" in self.model_name:
|
||||
@@ -1283,34 +1289,59 @@ class GoogleChat(Base):

             yield total_tokens
         else:
-            response = None
-            total_tokens = 0
-            self.client._system_instruction = system
-            thinking_config = self._get_thinking_config(gen_conf)
-            if "max_tokens" in gen_conf:
-                gen_conf["max_output_tokens"] = gen_conf["max_tokens"]
-                del gen_conf["max_tokens"]
-            for k in list(gen_conf.keys()):
-                if k not in ["temperature", "top_p", "max_output_tokens"]:
-                    del gen_conf[k]
-            for item in history:
-                if "role" in item and item["role"] == "assistant":
-                    item["role"] = "model"
-                if "content" in item:
-                    item["parts"] = [
-                        {
-                            "text": item.pop("content"),
-                        }
-                    ]
+            # Gemini models with google-genai SDK
+            ans = ""
+            total_tokens = 0
+
+            # Set default thinking_budget=0 if not specified
+            if "thinking_budget" not in gen_conf:
+                gen_conf["thinking_budget"] = 0
+
+            thinking_budget = gen_conf.pop("thinking_budget", 0)
+            gen_conf = self._clean_conf(gen_conf)
+
+            # Build GenerateContentConfig
             try:
-                if thinking_config:
-                    response = self.client.generate_content(history, generation_config=gen_conf, thinking_config=thinking_config, stream=True)
-                else:
-                    response = self.client.generate_content(history, generation_config=gen_conf, stream=True)
-                for resp in response:
-                    ans = resp.text
-                    total_tokens += num_tokens_from_string(ans)
+                from google.genai.types import GenerateContentConfig, ThinkingConfig, Content, Part
+            except ImportError as e:
+                logging.error(f"[GoogleChat] Failed to import google-genai: {e}. Please install: pip install google-genai>=1.41.0")
+                raise
+
+            config_dict = {}
+            if system:
+                config_dict["system_instruction"] = system
+            if "temperature" in gen_conf:
+                config_dict["temperature"] = gen_conf["temperature"]
+            if "top_p" in gen_conf:
+                config_dict["top_p"] = gen_conf["top_p"]
+            if "max_output_tokens" in gen_conf:
+                config_dict["max_output_tokens"] = gen_conf["max_output_tokens"]
+
+            # Add ThinkingConfig
+            config_dict["thinking_config"] = ThinkingConfig(thinking_budget=thinking_budget)
+
+            config = GenerateContentConfig(**config_dict)
+
+            # Convert history to google-genai Content format
+            contents = []
+            for item in history:
+                # google-genai uses 'model' instead of 'assistant'
+                role = "model" if item["role"] == "assistant" else item["role"]
+                content = Content(
+                    role=role,
+                    parts=[Part(text=item["content"])]
+                )
+                contents.append(content)
+
+            try:
+                for chunk in self.client.models.generate_content_stream(
+                    model=self.model_name,
+                    contents=contents,
+                    config=config
+                ):
+                    text = chunk.text
+                    ans = text
+                    total_tokens += num_tokens_from_string(text)
                     yield ans

             except Exception as e:
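Note on the refactor above: both `_chat` and `chat_streamly` now assemble the same google-genai request config. A condensed sketch of that shared mapping, assuming only that the `google-genai` package from the new dependency list is installed (the helper name is ours, not RAGFlow's):

```python
from google.genai import types

def build_gen_config(system: str, gen_conf: dict) -> types.GenerateContentConfig:
    # Thinking stays off (budget 0) unless the caller opts in; -1 lets the model decide.
    budget = gen_conf.pop("thinking_budget", 0)
    config = {}
    if system:
        config["system_instruction"] = system
    # Only the keys that survive _clean_conf are forwarded.
    for key in ("temperature", "top_p", "max_output_tokens"):
        if key in gen_conf:
            config[key] = gen_conf[key]
    config["thinking_config"] = types.ThinkingConfig(thinking_budget=budget)
    return types.GenerateContentConfig(**config)
```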
@@ -1394,6 +1425,9 @@ class LiteLLMBase(ABC):
             self.bedrock_ak = json.loads(key).get("bedrock_ak", "")
             self.bedrock_sk = json.loads(key).get("bedrock_sk", "")
             self.bedrock_region = json.loads(key).get("bedrock_region", "")
+        elif self.provider == SupportedLiteLLMProvider.OpenRouter:
+            self.api_key = json.loads(key).get("api_key", "")
+            self.provider_order = json.loads(key).get("provider_order", "")

     def _get_delay(self):
         """Calculate retry delay time"""
@@ -1438,7 +1472,6 @@ class LiteLLMBase(ABC):
                 timeout=self.timeout,
             )
-            # response = self.client.chat.completions.create(model=self.model_name, messages=history, **gen_conf, **kwargs)

             if any([not response.choices, not response.choices[0].message, not response.choices[0].message.content]):
                 return "", 0
             ans = response.choices[0].message.content.strip()
@@ -1589,6 +1622,24 @@ class LiteLLMBase(ABC):
                     "aws_region_name": self.bedrock_region,
                 }
             )
+
+        if self.provider == SupportedLiteLLMProvider.OpenRouter:
+            if self.provider_order:
+                def _to_order_list(x):
+                    if x is None:
+                        return []
+                    if isinstance(x, str):
+                        return [s.strip() for s in x.split(",") if s.strip()]
+                    if isinstance(x, (list, tuple)):
+                        return [str(s).strip() for s in x if str(s).strip()]
+                    return []
+                extra_body = {}
+                provider_cfg = {}
+                provider_order = _to_order_list(self.provider_order)
+                provider_cfg["order"] = provider_order
+                provider_cfg["allow_fallbacks"] = False
+                extra_body["provider"] = provider_cfg
+                completion_args.update({"extra_body": extra_body})
         return completion_args

     def chat_with_tools(self, system: str, history: list, gen_conf: dict = {}):
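For reference, `provider_order` is the comma-separated string an admin saves alongside the API key; the block above normalizes it into OpenRouter's provider-routing payload. A sketch of the resulting `extra_body` (the provider names are illustrative):

```python
provider_order = "fireworks, together ,deepinfra"  # as stored in the key's JSON

order = [s.strip() for s in provider_order.split(",") if s.strip()]
extra_body = {"provider": {"order": order, "allow_fallbacks": False}}

assert extra_body == {
    "provider": {
        "order": ["fireworks", "together", "deepinfra"],
        "allow_fallbacks": False,  # fail rather than silently routing elsewhere
    }
}
```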
@@ -38,6 +38,7 @@ class Base(ABC):
         self.is_tools = False
         self.tools = []
         self.toolcall_sessions = {}
+        self.extra_body = None

     def describe(self, image):
         raise NotImplementedError("Please implement encode method!")
@@ -77,7 +78,8 @@ class Base(ABC):
         try:
             response = self.client.chat.completions.create(
                 model=self.model_name,
-                messages=self._form_history(system, history, images)
+                messages=self._form_history(system, history, images),
+                extra_body=self.extra_body,
             )
             return response.choices[0].message.content.strip(), response.usage.total_tokens
         except Exception as e:
@@ -90,7 +92,8 @@ class Base(ABC):
             response = self.client.chat.completions.create(
                 model=self.model_name,
                 messages=self._form_history(system, history, images),
-                stream=True
+                stream=True,
+                extra_body=self.extra_body,
             )
             for resp in response:
                 if not resp.choices[0].delta.content:
@@ -177,6 +180,7 @@ class GptV4(Base):
         res = self.client.chat.completions.create(
             model=self.model_name,
             messages=self.prompt(b64),
+            extra_body=self.extra_body,
         )
         return res.choices[0].message.content.strip(), total_token_count_from_response(res)

@@ -185,6 +189,7 @@ class GptV4(Base):
         res = self.client.chat.completions.create(
             model=self.model_name,
             messages=self.vision_llm_prompt(b64, prompt),
+            extra_body=self.extra_body,
         )
         return res.choices[0].message.content.strip(), total_token_count_from_response(res)

@@ -327,10 +332,27 @@ class OpenRouterCV(GptV4):
     ):
         if not base_url:
             base_url = "https://openrouter.ai/api/v1"
-        self.client = OpenAI(api_key=key, base_url=base_url)
+        api_key = json.loads(key).get("api_key", "")
+        self.client = OpenAI(api_key=api_key, base_url=base_url)
         self.model_name = model_name
         self.lang = lang
+        Base.__init__(self, **kwargs)
+        provider_order = json.loads(key).get("provider_order", "")
+        self.extra_body = {}
+        if provider_order:
+            def _to_order_list(x):
+                if x is None:
+                    return []
+                if isinstance(x, str):
+                    return [s.strip() for s in x.split(",") if s.strip()]
+                if isinstance(x, (list, tuple)):
+                    return [str(s).strip() for s in x if str(s).strip()]
+                return []
+            provider_cfg = {}
+            provider_order = _to_order_list(provider_order)
+            provider_cfg["order"] = provider_order
+            provider_cfg["allow_fallbacks"] = False
+            self.extra_body["provider"] = provider_cfg


 class LocalAICV(GptV4):
@@ -17,6 +17,7 @@ import json
 import logging
 import re
 import math
+import os
 from collections import OrderedDict
 from dataclasses import dataclass

@@ -71,7 +72,7 @@ class Dealer:
     def search(self, req, idx_names: str | list[str],
                kb_ids: list[str],
                emb_mdl=None,
-               highlight=False,
+               highlight: bool | list = False,
                rank_feature: dict | None = None
                ):
         filters = self.get_filters(req)
@@ -100,7 +101,11 @@ class Dealer:
             total = self.dataStore.getTotal(res)
             logging.debug("Dealer.search TOTAL: {}".format(total))
         else:
-            highlightFields = ["content_ltks", "title_tks"] if highlight else []
+            highlightFields = ["content_ltks", "title_tks"]
+            if not highlight:
+                highlightFields = []
+            elif isinstance(highlight, list):
+                highlightFields = highlight
             matchText, keywords = self.qryr.question(qst, min_match=0.3)
             if emb_mdl is None:
                 matchExprs = [matchText]
@@ -154,7 +159,7 @@ class Dealer:
                 query_vector=q_vec,
                 aggregation=aggs,
                 highlight=highlight,
-                field=self.dataStore.getFields(res, src),
+                field=self.dataStore.getFields(res, src + ["_score"]),
                 keywords=keywords
             )
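A small mirror of the new `highlight` dispatch, useful for seeing the three accepted argument shapes at a glance (the helper name is illustrative, not part of the codebase):

```python
def resolve_highlight_fields(highlight) -> list:
    """Mirror of the dispatch above: a bool keeps the old behavior, a list wins."""
    if not highlight:
        return []
    if isinstance(highlight, list):
        return highlight
    return ["content_ltks", "title_tks"]

assert resolve_highlight_fields(False) == []
assert resolve_highlight_fields(True) == ["content_ltks", "title_tks"]
assert resolve_highlight_fields(["title_tks"]) == ["title_tks"]
```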
@@ -354,10 +359,8 @@ class Dealer:
         if not question:
             return ranks

-        RERANK_LIMIT = 64
-        RERANK_LIMIT = int(RERANK_LIMIT//page_size + ((RERANK_LIMIT%page_size)/(page_size*1.) + 0.5)) * page_size if page_size>1 else 1
-        if RERANK_LIMIT < 1: ## when page_size is very large the RERANK_LIMIT will be 0.
-            RERANK_LIMIT = 1
+        # Ensure RERANK_LIMIT is a multiple of page_size
+        RERANK_LIMIT = math.ceil(64/page_size) * page_size if page_size>1 else 1
         req = {"kb_ids": kb_ids, "doc_ids": doc_ids, "page": math.ceil(page_size*page/RERANK_LIMIT), "size": RERANK_LIMIT,
                "question": question, "vector": True, "topk": top,
                "similarity": similarity_threshold,
@@ -376,15 +379,25 @@ class Dealer:
                 vector_similarity_weight,
                 rank_feature=rank_feature)
         else:
-            sim, tsim, vsim = self.rerank(
-                sres, question, 1 - vector_similarity_weight, vector_similarity_weight,
-                rank_feature=rank_feature)
+            lower_case_doc_engine = os.getenv('DOC_ENGINE', 'elasticsearch')
+            if lower_case_doc_engine == "elasticsearch":
+                # ElasticSearch doesn't normalize each way's score before fusion.
+                sim, tsim, vsim = self.rerank(
+                    sres, question, 1 - vector_similarity_weight, vector_similarity_weight,
+                    rank_feature=rank_feature)
+            else:
+                # No rerank needed here since Infinity normalizes each way's score before fusion.
+                sim = [sres.field[id].get("_score", 0.0) for id in sres.ids]
+                tsim = sim
+                vsim = sim
-        idx = np.argsort(sim * -1)[(page - 1) * page_size:page * page_size]
+        # Already paginated in the search function
+        begin = ((page % (RERANK_LIMIT//page_size)) - 1) * page_size
+        sim = sim[begin : begin + page_size]
+        sim_np = np.array(sim)
+        idx = np.argsort(sim_np * -1)
         dim = len(sres.query_vector)
         vector_column = f"q_{dim}_vec"
         zero_vector = [0.0] * dim
-        sim_np = np.array(sim)
         filtered_count = (sim_np >= similarity_threshold).sum()
         ranks["total"] = int(filtered_count)  # Convert from np.int64 to Python int, otherwise JSON serialization fails
         for i in idx:
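A quick worked check of the new window arithmetic, with illustrative numbers grounded in the formulas above:

```python
import math

page_size, page = 10, 3
RERANK_LIMIT = math.ceil(64 / page_size) * page_size if page_size > 1 else 1
# RERANK_LIMIT == 70, a multiple of page_size

# The search call fetches the window that contains the requested page...
search_page = math.ceil(page_size * page / RERANK_LIMIT)  # == 1 (first 70-item window)

# ...and the slice picks the requested page out of that window.
begin = ((page % (RERANK_LIMIT // page_size)) - 1) * page_size  # == 20
# Window 1 covers global results 0..69, so items 20..29 are exactly page 3.
```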
@@ -447,7 +447,7 @@ def build_TOC(task, docs, progress_callback):
     d["content_with_weight"] = json.dumps(toc, ensure_ascii=False)
     d["toc_kwd"] = "toc"
     d["available_int"] = 0
-    d["page_num_int"] = 100000000
+    d["page_num_int"] = [100000000]
     d["id"] = xxhash.xxh64((d["content_with_weight"] + str(d["doc_id"])).encode("utf-8", "surrogatepass")).hexdigest()
     return d
@@ -445,8 +445,8 @@ class InfinityConnection(DocStoreConnection):
         self.connPool.release_conn(inf_conn)
         res = concat_dataframes(df_list, output)
         if matchExprs:
-            res["Sum"] = res[score_column] + res[PAGERANK_FLD]
-            res = res.sort_values(by="Sum", ascending=False).reset_index(drop=True).drop(columns=["Sum"])
+            res["_score"] = res[score_column] + res[PAGERANK_FLD]
+            res = res.sort_values(by="_score", ascending=False).reset_index(drop=True)
         res = res.head(limit)
         logger.debug(f"INFINITY search final result: {str(res)}")
         return res, total_hits_count
@@ -17,6 +17,7 @@
 import logging
 import time
 from minio import Minio
+from minio.commonconfig import CopySource
 from minio.error import S3Error
 from io import BytesIO
 from rag import settings
@@ -141,3 +142,36 @@ class RAGFlowMinio:
         except Exception:
             logging.exception(f"Fail to remove bucket {bucket}")

+    def copy(self, src_bucket, src_path, dest_bucket, dest_path):
+        try:
+            if not self.conn.bucket_exists(dest_bucket):
+                self.conn.make_bucket(dest_bucket)
+
+            try:
+                self.conn.stat_object(src_bucket, src_path)
+            except Exception as e:
+                logging.exception(f"Source object not found: {src_bucket}/{src_path}, {e}")
+                return False
+
+            self.conn.copy_object(
+                dest_bucket,
+                dest_path,
+                CopySource(src_bucket, src_path),
+            )
+            return True
+
+        except Exception:
+            logging.exception(f"Fail to copy {src_bucket}/{src_path} -> {dest_bucket}/{dest_path}")
+            return False
+
+    def move(self, src_bucket, src_path, dest_bucket, dest_path):
+        try:
+            if self.copy(src_bucket, src_path, dest_bucket, dest_path):
+                self.rm(src_bucket, src_path)
+                return True
+            else:
+                logging.error(f"Copy failed, move aborted: {src_bucket}/{src_path}")
+                return False
+        except Exception:
+            logging.exception(f"Fail to move {src_bucket}/{src_path} -> {dest_bucket}/{dest_path}")
+            return False
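A hypothetical use of the new helpers (bucket and object names are invented, and the import path is assumed from the repo layout): `move` is copy-then-delete, so a failed copy leaves the source object untouched.

```python
from rag.utils.minio_conn import RAGFlowMinio  # assumed module path

storage = RAGFlowMinio()  # connection settings come from rag.settings

# Duplicate a stored document into an archive bucket.
ok = storage.copy("ragflow-docs", "kb1/report.pdf", "ragflow-archive", "2025/report.pdf")

# Or relocate it: the source is removed only after the copy succeeds.
if ok:
    storage.move("ragflow-docs", "kb1/report.pdf", "ragflow-archive", "2025/report.pdf")
```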
uv.lock (generated, 67 changed lines)
@@ -658,6 +658,18 @@ wheels = [
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/fb/2b/a64c2d25a37aeb921fddb929111413049fc5f8b9a4c1aefaffaafe768d54/cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", size = 9325, upload-time = "2024-02-26T20:33:20.308Z" },
 ]

+[[package]]
+name = "captcha"
+version = "0.7.1"
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+dependencies = [
+    { name = "pillow" },
+]
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b4/65/8e186bb798f33ba390eab897c995b0fcee92bc030e0f40cb8ea01f34dd07/captcha-0.7.1.tar.gz", hash = "sha256:a1b462bcc633a64d8db5efa7754548a877c698d98f87716c620a707364cabd6b", size = 226561, upload-time = "2025-03-01T05:00:13.395Z" }
+wheels = [
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/da/ff/3f0982ecd37c2d6a7266c22e7ea2e47d0773fe449984184c5316459d2776/captcha-0.7.1-py3-none-any.whl", hash = "sha256:8b73b5aba841ad1e5bdb856205bf5f09560b728ee890eb9dae42901219c8c599", size = 147606, upload-time = "2025-03-01T05:00:10.433Z" },
+]
+
 [[package]]
 name = "cbor"
 version = "1.0.0"
@@ -2017,7 +2029,7 @@ wheels = [

 [[package]]
 name = "google-cloud-aiplatform"
-version = "1.64.0"
+version = "1.70.0"
 source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
 dependencies = [
     { name = "docstring-parser" },
@@ -2032,9 +2044,9 @@ dependencies = [
     { name = "pydantic" },
     { name = "shapely" },
 ]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/5e/e3/f86b429d000a9c25f25bcd122e4b6286aeef70a89acfd6ea088324af016c/google-cloud-aiplatform-1.64.0.tar.gz", hash = "sha256:475a612829b283eb8f783e773d37115c30db42e2e50065c8653db0c9bd18b0da", size = 6258492, upload-time = "2024-08-28T01:03:24.573Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/88/06/bc8028c03d4bedb85114c780a9f749b67ff06ce29d25dc7f1a99622f2692/google-cloud-aiplatform-1.70.0.tar.gz", hash = "sha256:e8edef6dbc7911380d0ea55c47544e799f62b891cb1a83b504ca1c09fff9884b", size = 6311624, upload-time = "2024-10-09T04:28:12.606Z" }
 wheels = [
-    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/7a/c5/cdf0eaeded413d5f6221f9c4f466a7714c79a1938c2f7221467d4a9b9859/google_cloud_aiplatform-1.64.0-py2.py3-none-any.whl", hash = "sha256:3a79ce2ec047868c348336624a60993464ca977fd258bcf609cc79309a8101c4", size = 5228409, upload-time = "2024-08-28T01:03:21.275Z" },
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/46/d9/280e5a9b5caf69322f64fa55f62bf447d76c5fe30e8df6e93373f22c4bd7/google_cloud_aiplatform-1.70.0-py2.py3-none-any.whl", hash = "sha256:690e6041f03d3aa85102ac3f316c958d6f43a99aefb7fb3f8938dee56d08abd9", size = 5267225, upload-time = "2024-10-09T04:28:09.271Z" },
 ]

 [[package]]
@@ -2129,6 +2141,25 @@ wheels = [
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" },
 ]

+[[package]]
+name = "google-genai"
+version = "1.43.0"
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+dependencies = [
+    { name = "anyio" },
+    { name = "google-auth" },
+    { name = "httpx" },
+    { name = "pydantic" },
+    { name = "requests" },
+    { name = "tenacity" },
+    { name = "typing-extensions" },
+    { name = "websockets" },
+]
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c1/75/992ca4462682949750709678b8efbc865222c9a16cf34504b69c5459606c/google_genai-1.43.0.tar.gz", hash = "sha256:84eb219d320759c5882bc2cdb4e2ac84544d00f5d12c7892c79fb03d71bfc9a4", size = 236132, upload-time = "2025-10-10T23:16:40.131Z" }
+wheels = [
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/61/85/e90dda488d5044e6e4cd1b49e7e7f0cc7f4a2a1c8004e88a5122d42ea024/google_genai-1.43.0-py3-none-any.whl", hash = "sha256:be1d4b1acab268125d536fd81b73c38694a70cb08266759089154718924434fd", size = 236733, upload-time = "2025-10-10T23:16:38.809Z" },
+]
+
 [[package]]
 name = "google-generativeai"
 version = "0.8.5"
@@ -2472,18 +2503,17 @@ wheels = [

 [[package]]
 name = "httpx"
-version = "0.27.2"
+version = "0.28.1"
 source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
 dependencies = [
     { name = "anyio" },
     { name = "certifi" },
     { name = "httpcore" },
     { name = "idna" },
-    { name = "sniffio" },
 ]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189, upload-time = "2024-08-27T12:54:01.334Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
 wheels = [
-    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395, upload-time = "2024-08-27T12:53:59.653Z" },
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
 ]

 [package.optional-dependencies]
@@ -3883,14 +3913,15 @@ wheels = [

 [[package]]
 name = "ollama"
-version = "0.2.1"
+version = "0.6.0"
 source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
 dependencies = [
     { name = "httpx" },
+    { name = "pydantic" },
 ]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/aa/2b/bda3e59080b136e90367bebb67d5072922a912f0e0b6f49be1b4eb79c109/ollama-0.2.1.tar.gz", hash = "sha256:fa316baa9a81eac3beb4affb0a17deb3008fdd6ed05b123c26306cfbe4c349b6", size = 9918, upload-time = "2024-06-05T19:00:52.447Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/d6/47/f9ee32467fe92744474a8c72e138113f3b529fc266eea76abfdec9a33f3b/ollama-0.6.0.tar.gz", hash = "sha256:da2b2d846b5944cfbcee1ca1e6ee0585f6c9d45a2fe9467cbcd096a37383da2f", size = 50811, upload-time = "2025-09-24T22:46:02.417Z" }
 wheels = [
-    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/7d/b7/8cc05807bfbc5b92da7fb94c525e1e56572a08eea7cdf3656e6c5dc6f9b1/ollama-0.2.1-py3-none-any.whl", hash = "sha256:b6e2414921c94f573a903d1069d682ba2fb2607070ea9e19ca4a7872f2a460ec", size = 9738, upload-time = "2024-06-05T19:00:47.437Z" },
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl", hash = "sha256:534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", size = 14130, upload-time = "2025-09-24T22:46:01.19Z" },
 ]

 [[package]]
@@ -5452,6 +5483,7 @@ dependencies = [
     { name = "boto3" },
     { name = "botocore" },
     { name = "cachetools" },
+    { name = "captcha" },
     { name = "chardet" },
     { name = "click" },
     { name = "cn2an" },
@@ -5476,6 +5508,7 @@ dependencies = [
     { name = "flask-login" },
     { name = "flask-mail" },
     { name = "flask-session" },
+    { name = "google-genai" },
     { name = "google-generativeai" },
     { name = "google-search-results" },
     { name = "graspologic" },
@@ -5608,6 +5641,7 @@ requires-dist = [
     { name = "boto3", specifier = "==1.34.140" },
     { name = "botocore", specifier = "==1.34.140" },
     { name = "cachetools", specifier = "==5.3.3" },
+    { name = "captcha", specifier = ">=0.7.1" },
     { name = "chardet", specifier = "==5.2.0" },
     { name = "click", specifier = ">=8.1.8" },
     { name = "cn2an", specifier = "==0.5.22" },
@@ -5635,13 +5669,14 @@ requires-dist = [
     { name = "flask-login", specifier = "==0.6.3" },
     { name = "flask-mail", specifier = ">=0.10.0" },
     { name = "flask-session", specifier = "==0.8.0" },
+    { name = "google-genai", specifier = ">=1.41.0,<2.0.0" },
     { name = "google-generativeai", specifier = ">=0.8.1,<0.9.0" },
     { name = "google-search-results", specifier = "==2.4.2" },
     { name = "graspologic", specifier = ">=3.4.1,<4.0.0" },
     { name = "groq", specifier = "==0.9.0" },
     { name = "hanziconv", specifier = "==0.3.2" },
     { name = "html-text", specifier = "==0.6.2" },
-    { name = "httpx", extras = ["socks"], specifier = "==0.27.2" },
+    { name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<0.29.0" },
     { name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
     { name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
     { name = "infinity-sdk", specifier = "==0.6.0" },
@@ -5660,7 +5695,7 @@ requires-dist = [
     { name = "mistralai", specifier = "==0.4.2" },
     { name = "nltk", specifier = "==3.9.1" },
     { name = "numpy", specifier = ">=1.26.0,<2.0.0" },
-    { name = "ollama", specifier = "==0.2.1" },
+    { name = "ollama", specifier = ">=0.5.0" },
     { name = "onnxruntime", marker = "platform_machine != 'x86_64' or sys_platform == 'darwin'", specifier = "==1.19.2" },
     { name = "onnxruntime-gpu", marker = "platform_machine == 'x86_64' and sys_platform != 'darwin'", specifier = "==1.19.2" },
     { name = "openai", specifier = ">=1.45.0" },
@@ -5716,7 +5751,7 @@ requires-dist = [
     { name = "trio", specifier = ">=0.29.0" },
     { name = "umap-learn", specifier = "==0.5.6" },
     { name = "valkey", specifier = "==6.0.2" },
-    { name = "vertexai", specifier = "==1.64.0" },
+    { name = "vertexai", specifier = "==1.70.0" },
     { name = "volcengine", specifier = "==1.0.194" },
     { name = "voyageai", specifier = "==0.2.3" },
     { name = "webdriver-manager", specifier = "==4.0.1" },
@@ -7217,14 +7252,14 @@ wheels = [

 [[package]]
 name = "vertexai"
-version = "1.64.0"
+version = "1.70.0"
 source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
 dependencies = [
     { name = "google-cloud-aiplatform" },
 ]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/0a/36/2dcb9e212bc1ccaff83c897702e74d01cac65c2a664818e9cb5577a8418e/vertexai-1.64.0.tar.gz", hash = "sha256:d8bb42b64fe294180104e9210819dce694b50b27daf64b8b7725878eac65986c", size = 9289, upload-time = "2024-08-28T01:03:34.903Z" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/01/17/04958e273962f420cb89573c6423f231e34a684769ef49c6fed2b12cd7b1/vertexai-1.70.0.tar.gz", hash = "sha256:3af16f63c462dfc77600773fba366a99575b9fe4303fc080bd1cf823066c66fa", size = 9294, upload-time = "2024-10-09T04:28:23.814Z" }
 wheels = [
-    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f7/98/ce77d9111ffd3cd49154c44a9863b8507a0eb141058fb3fb6c04a65104c7/vertexai-1.64.0-py3-none-any.whl", hash = "sha256:967c17c09e28bc7d34ff6b2ef51a1953ded4750809bf174dd8b6c9c15017180e", size = 7274, upload-time = "2024-08-28T01:03:33.324Z" },
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/4a/e4/ec11c62ba6e17457b68e089b740075c23b894e801545979c0f9d01208a81/vertexai-1.70.0-py3-none-any.whl", hash = "sha256:9e0c85013efa5cad41e37e23e9fcca7e959b409288ca22832a1b7b9ae6abc393", size = 7268, upload-time = "2024-10-09T04:28:21.864Z" },
 ]

 [[package]]
web/package-lock.json (generated, 143 changed lines)
@@ -49,7 +49,7 @@
         "@xyflow/react": "^12.3.6",
         "ahooks": "^3.7.10",
         "antd": "^5.12.7",
-        "axios": "^1.6.3",
+        "axios": "^1.12.0",
         "class-variance-authority": "^0.7.0",
         "classnames": "^2.5.1",
         "clsx": "^2.1.1",
@@ -13551,12 +13551,13 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.7.2",
-      "resolved": "https://registry.npmmirror.com/axios/-/axios-1.7.2.tgz",
-      "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
+      "version": "1.12.0",
+      "resolved": "https://registry.npmmirror.com/axios/-/axios-1.12.0.tgz",
+      "integrity": "sha512-oXTDccv8PcfjZmPGlWsPSwtOJCZ/b6W5jAMCNcfwJbCzDckwG0jrYJFaWH1yvivfCXjVzV/SPDEhMB3Q+DSurg==",
+      "license": "MIT",
       "dependencies": {
         "follow-redirects": "^1.15.6",
-        "form-data": "^4.0.0",
+        "form-data": "^4.0.4",
         "proxy-from-env": "^1.1.0"
       }
     },
@@ -14181,6 +14182,19 @@
         "node": ">= 0.4"
       }
     },
+    "node_modules/call-bind-apply-helpers": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
+      "license": "MIT",
+      "dependencies": {
+        "es-errors": "^1.3.0",
+        "function-bind": "^1.1.2"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/call-me-maybe": {
       "version": "1.0.2",
       "resolved": "https://registry.npmmirror.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz",
@@ -16795,6 +16809,20 @@
         "underscore": "^1.13.1"
       }
     },
+    "node_modules/dunder-proto": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz",
+      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
+      "license": "MIT",
+      "dependencies": {
+        "call-bind-apply-helpers": "^1.0.1",
+        "es-errors": "^1.3.0",
+        "gopd": "^1.2.0"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/duplexer": {
       "version": "0.1.2",
       "resolved": "https://registry.npmmirror.com/duplexer/-/duplexer-0.1.2.tgz",
@@ -17364,12 +17392,10 @@
       "dev": true
     },
     "node_modules/es-define-property": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.0.tgz",
-      "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
-      "dependencies": {
-        "get-intrinsic": "^1.2.4"
-      },
+      "version": "1.0.1",
+      "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz",
+      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
+      "license": "MIT",
       "engines": {
         "node": ">= 0.4"
       }
@@ -17426,9 +17452,10 @@
       "license": "MIT"
     },
     "node_modules/es-object-atoms": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.0.0.tgz",
-      "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==",
+      "version": "1.1.1",
+      "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
+      "license": "MIT",
       "dependencies": {
         "es-errors": "^1.3.0"
       },
@@ -17437,13 +17464,15 @@
       }
     },
     "node_modules/es-set-tostringtag": {
-      "version": "2.0.3",
-      "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz",
-      "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
+      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
+      "license": "MIT",
       "dependencies": {
-        "get-intrinsic": "^1.2.4",
+        "es-errors": "^1.3.0",
+        "get-intrinsic": "^1.2.6",
         "has-tostringtag": "^1.0.2",
-        "hasown": "^2.0.1"
+        "hasown": "^2.0.2"
       },
       "engines": {
         "node": ">= 0.4"
@@ -19233,12 +19262,15 @@
       }
     },
     "node_modules/form-data": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.0.tgz",
-      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+      "version": "4.0.4",
+      "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.4.tgz",
+      "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
+      "license": "MIT",
       "dependencies": {
         "asynckit": "^0.4.0",
         "combined-stream": "^1.0.8",
+        "es-set-tostringtag": "^2.1.0",
+        "hasown": "^2.0.2",
         "mime-types": "^2.1.12"
       },
       "engines": {
@@ -19398,18 +19430,27 @@
       }
     },
     "node_modules/get-intrinsic": {
-      "version": "1.2.4",
-      "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
-      "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
+      "version": "1.3.0",
+      "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
+      "license": "MIT",
       "dependencies": {
+        "call-bind-apply-helpers": "^1.0.2",
+        "es-define-property": "^1.0.1",
         "es-errors": "^1.3.0",
+        "es-object-atoms": "^1.1.1",
         "function-bind": "^1.1.2",
-        "has-proto": "^1.0.1",
-        "has-symbols": "^1.0.3",
-        "hasown": "^2.0.0"
+        "get-proto": "^1.0.1",
+        "gopd": "^1.2.0",
+        "has-symbols": "^1.1.0",
+        "hasown": "^2.0.2",
+        "math-intrinsics": "^1.1.0"
       },
       "engines": {
         "node": ">= 0.4"
       },
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
       }
     },
     "node_modules/get-nonce": {
@@ -19428,6 +19469,19 @@
         "node": ">=8.0.0"
       }
     },
+    "node_modules/get-proto": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz",
+      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
+      "license": "MIT",
+      "dependencies": {
+        "dunder-proto": "^1.0.1",
+        "es-object-atoms": "^1.0.0"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/get-stdin": {
       "version": "9.0.0",
       "resolved": "https://registry.npmmirror.com/get-stdin/-/get-stdin-9.0.0.tgz",
@@ -19609,11 +19663,15 @@
       "peer": true
     },
     "node_modules/gopd": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.0.1.tgz",
-      "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
-      "dependencies": {
-        "get-intrinsic": "^1.1.3"
+      "version": "1.2.0",
+      "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz",
+      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.4"
       },
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
       }
     },
     "node_modules/graceful-fs": {
@@ -19746,11 +19804,15 @@
       }
     },
     "node_modules/has-symbols": {
-      "version": "1.0.3",
-      "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.0.3.tgz",
-      "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
+      "version": "1.1.0",
+      "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz",
+      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
+      "license": "MIT",
       "engines": {
         "node": ">= 0.4"
       },
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
       }
     },
     "node_modules/has-tostringtag": {
@@ -25234,6 +25296,15 @@
       "resolved": "https://registry.npmmirror.com/markdown-table/-/markdown-table-3.0.3.tgz",
       "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw=="
     },
+    "node_modules/math-intrinsics": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/mathml-tag-names": {
       "version": "2.1.3",
       "resolved": "https://registry.npmmirror.com/mathml-tag-names/-/mathml-tag-names-2.1.3.tgz",
@@ -62,7 +62,7 @@
     "@xyflow/react": "^12.3.6",
     "ahooks": "^3.7.10",
     "antd": "^5.12.7",
-    "axios": "^1.6.3",
+    "axios": "^1.12.0",
     "class-variance-authority": "^0.7.0",
     "classnames": "^2.5.1",
     "clsx": "^2.1.1",
@@ -340,7 +340,9 @@ export function ChunkMethodDialog({
           show={showAutoKeywords(selectedTag) || showExcelToHtml}
           className="space-y-3"
         >
-          <EnableTocToggle />
+          {selectedTag === DocumentParserType.Naive && (
+            <EnableTocToggle />
+          )}
           {showAutoKeywords(selectedTag) && (
             <>
               <AutoKeywordsFormField></AutoKeywordsFormField>
@@ -5,6 +5,8 @@ import {
 } from '@/components/ui/collapsible';
 import { cn } from '@/lib/utils';
 import { CollapsibleProps } from '@radix-ui/react-collapsible';
+import { ChevronDown, ChevronUp } from 'lucide-react';
+import * as React from 'react';
 import {
   PropsWithChildren,
   ReactNode,
@@ -67,3 +69,53 @@ export function Collapse({
     </Collapsible>
   );
 }
+
+export type NodeCollapsibleProps<T extends any[]> = {
+  items?: T;
+  children: (item: T[0], idx: number) => ReactNode;
+  className?: string;
+};
+export function NodeCollapsible<T extends any[]>({
+  items = [] as unknown as T,
+  children,
+  className,
+}: NodeCollapsibleProps<T>) {
+  const [isOpen, setIsOpen] = React.useState(false);
+
+  const nextClassName = cn('space-y-2', className);
+
+  const nextItems = items.every((x) => Array.isArray(x)) ? items.flat() : items;
+
+  return (
+    <Collapsible
+      open={isOpen}
+      onOpenChange={setIsOpen}
+      className={cn('relative', nextClassName)}
+    >
+      {nextItems.slice(0, 3).map(children)}
+      <CollapsibleContent className={nextClassName}>
+        {nextItems.slice(3).map(children)}
+      </CollapsibleContent>
+      {nextItems.length > 3 && (
+        <CollapsibleTrigger
+          asChild
+          onClick={(e) => e.stopPropagation()}
+          className="absolute left-1/2 -translate-x-1/2 bottom-0 translate-y-1/2 cursor-pointer"
+        >
+          <div
+            className={cn(
+              'size-3 bg-text-secondary rounded-full flex items-center justify-center',
+              { 'bg-text-primary': isOpen },
+            )}
+          >
+            {isOpen ? (
+              <ChevronUp className="stroke-bg-component" />
+            ) : (
+              <ChevronDown className="stroke-bg-component" />
+            )}
+          </div>
+        </CollapsibleTrigger>
+      )}
+    </Collapsible>
+  );
+}
@@ -17,7 +17,7 @@ const buttonVariants = cva(
         outline:
           'border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50',
         secondary:
-          'bg-bg-input text-secondary-foreground shadow-xs hover:bg-bg-input/80',
+          'bg-bg-input text-text-primary shadow-xs hover:bg-bg-input/80 border border-border-button',
         ghost:
           'hover:bg-accent hover:text-accent-foreground dark:hover:bg-accent/50',
         link: 'text-primary underline-offset-4 hover:underline',
@@ -27,6 +27,7 @@ export interface ModalProps {
   okText?: ReactNode | string;
   onOk?: () => void;
   onCancel?: () => void;
+  disabled?: boolean;
 }
 export interface ModalType extends FC<ModalProps> {
   show: typeof modalIns.show;
@@ -55,6 +56,7 @@ const Modal: ModalType = ({
   confirmLoading,
   cancelText,
   okText,
+  disabled = false,
 }) => {
   const sizeClasses = {
     small: 'max-w-md',
@@ -86,7 +88,7 @@ const Modal: ModalType = ({
   const handleChange = (open: boolean) => {
     onOpenChange?.(open);
     console.log('open', open, onOpenChange);
-    if (open) {
+    if (open && !disabled) {
       onOk?.();
     }
     if (!open) {
@@ -112,7 +114,7 @@ const Modal: ModalType = ({
         </button>
         <button
           type="button"
-          disabled={confirmLoading}
+          disabled={confirmLoading || disabled}
           onClick={() => handleOk()}
           className="px-2 py-1 bg-primary text-primary-foreground rounded-md hover:bg-primary/90"
         >
@@ -291,7 +291,7 @@ export const RAGFlowSelect = forwardRef<
       onReset={handleReset}
       allowClear={allowClear}
       ref={ref}
-      className={cn(triggerClassName, 'bg-bg-base')}
+      className={cn('bg-bg-base', triggerClassName)}
     >
       <SelectValue placeholder={placeholder}>{label}</SelectValue>
     </SelectTrigger>
@@ -161,7 +161,7 @@ export type IIterationNode = BaseNode;
 export type IIterationStartNode = BaseNode;
 export type IKeywordNode = BaseNode;
 export type ICodeNode = BaseNode<ICodeForm>;
-export type IAgentNode = BaseNode;
+export type IAgentNode<T = any> = BaseNode<T>;

 export type RAGFlowNodeType =
   | IBeginNode
@@ -115,7 +115,7 @@ export default {
   generateKnowledgeGraph:
     'This will extract entities and relationships from all your documents in this dataset. The process may take a while to complete.',
   generateRaptor:
-    'This will extract entities and relationships from all your documents in this dataset. The process may take a while to complete.',
+    'Performs recursive clustering and summarization of document chunks to build a hierarchical tree structure, enabling more context-aware retrieval across lengthy documents.',
   generate: 'Generate',
   raptor: 'RAPTOR',
   processingType: 'Processing Type',
@@ -105,7 +105,7 @@ export default {
   generatedOn: '生成于',
   subbarFiles: '文件列表',
   generate: '生成',
-  raptor: 'Raptor',
+  raptor: 'RAPTOR',
   processingType: '处理类型',
   dataPipeline: '数据管道',
   operations: '操作',
@@ -258,7 +258,6 @@ export default {
     theDocumentBeingParsedCannotBeDeleted: '正在解析的文档不能被删除',
   },
   knowledgeConfiguration: {
     tocExtraction: '目录增强',
     tocExtractionTip:
       '对于已有的chunk生成层级结构的目录信息(每个文件一个目录)。在查询时,激活`目录增强`后,系统会用大模型去判断用户问题和哪些目录项相关,从而找到相关的chunk。',
     deleteGenerateModalContent: `
@@ -1,12 +1,14 @@
 import LLMLabel from '@/components/llm-select/llm-label';
 import { IAgentNode } from '@/interfaces/database/flow';
+import { cn } from '@/lib/utils';
 import { Handle, NodeProps, Position } from '@xyflow/react';
 import { get } from 'lodash';
 import { memo, useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
 import { AgentExceptionMethod, NodeHandleId } from '../../constant';
+import { AgentFormSchemaType } from '../../form/agent-form';
 import useGraphStore from '../../store';
-import { isBottomSubAgent } from '../../utils';
+import { hasSubAgent, isBottomSubAgent } from '../../utils';
 import { CommonHandle, LeftEndHandle } from './handle';
 import { RightHandleStyle } from './handle-icon';
 import NodeHeader from './node-header';
@@ -18,7 +20,7 @@ function InnerAgentNode({
   data,
   isConnectable = true,
   selected,
-}: NodeProps<IAgentNode>) {
+}: NodeProps<IAgentNode<AgentFormSchemaType>>) {
   const edges = useGraphStore((state) => state.edges);
   const { t } = useTranslation();

@@ -30,6 +32,12 @@ function InnerAgentNode({
     return get(data, 'form.exception_method');
   }, [data]);

+  const hasTools = useMemo(() => {
+    const tools = get(data, 'form.tools', []);
+    const mcp = get(data, 'form.mcp', []);
+    return tools.length > 0 || mcp.length > 0;
+  }, [data]);
+
   const isGotoMethod = useMemo(() => {
     return exceptionMethod === AgentExceptionMethod.Goto;
   }, [exceptionMethod]);
@@ -51,7 +59,6 @@ function InnerAgentNode({
           ></CommonHandle>
         </>
       )}
-
       {isHeadAgent || (
         <Handle
           type="target"
@@ -67,7 +74,9 @@ function InnerAgentNode({
         isConnectable={false}
         id={NodeHandleId.AgentBottom}
         style={{ left: 180 }}
-        className="!bg-accent-primary !size-2"
+        className={cn('!bg-accent-primary !size-2 invisible', {
+          visible: hasSubAgent(edges, id),
+        })}
       ></Handle>
       <Handle
         type="source"
@@ -75,7 +84,9 @@ function InnerAgentNode({
         isConnectable={false}
         id={NodeHandleId.Tool}
         style={{ left: 20 }}
-        className="!bg-accent-primary !size-2"
+        className={cn('!bg-accent-primary !size-2 invisible', {
+          visible: hasTools,
+        })}
       ></Handle>
       <NodeHeader id={id} name={data.name} label={data.label}></NodeHeader>
       <section className="flex flex-col gap-2">
@@ -1,3 +1,4 @@
+import { NodeCollapsible } from '@/components/collapse';
 import { RAGFlowAvatar } from '@/components/ragflow-avatar';
 import { useFetchKnowledgeList } from '@/hooks/knowledge-hooks';
 import { IRetrievalNode } from '@/interfaces/database/flow';
@@ -44,8 +45,8 @@ function InnerRetrievalNode({
           [styles.nodeHeader]: knowledgeBaseIds.length > 0,
         })}
       ></NodeHeader>
-      <section className="flex flex-col gap-2">
-        {knowledgeBaseIds.map((id) => {
+      <NodeCollapsible items={knowledgeBaseIds}>
+        {(id) => {
           const item = knowledgeList.find((y) => id === y.id);
           const label = getLabel(id);

@@ -63,8 +64,8 @@ function InnerRetrievalNode({
             </div>
           </div>
         );
-        })}
-      </section>
+        }}
+      </NodeCollapsible>
     </NodeWrapper>
   </ToolBar>
 );
@@ -1,3 +1,4 @@
+import { NodeCollapsible } from '@/components/collapse';
 import { IAgentForm, IToolNode } from '@/interfaces/database/agent';
 import { Handle, NodeProps, Position } from '@xyflow/react';
 import { get } from 'lodash';
@@ -51,32 +52,38 @@ function InnerToolNode({
         isConnectable={isConnectable}
         className="!bg-accent-primary !size-2"
       ></Handle>
-      <ul className="space-y-2">
-        {tools.map((x) => (
-          <ToolCard
-            key={x.component_name}
-            onClick={handleClick(x.component_name)}
-            className="cursor-pointer"
-            data-tool={x.component_name}
-          >
-            <div className="flex gap-1 items-center pointer-events-none">
-              <OperatorIcon name={x.component_name as Operator}></OperatorIcon>
-              {x.component_name}
-            </div>
-          </ToolCard>
-        ))}
-        {mcpList.map((x) => (
-          <ToolCard
-            key={x.mcp_id}
-            onClick={handleClick(x.mcp_id)}
-            className="cursor-pointer"
-            data-tool={x.mcp_id}
-          >
-            {findMcpById(x.mcp_id)?.name}
-          </ToolCard>
-        ))}
-      </ul>
+      <NodeCollapsible items={[tools, mcpList]}>
+        {(x) => {
+          if ('mcp_id' in x) {
+            const mcp = x as unknown as IAgentForm['mcp'][number];
+            return (
+              <ToolCard
+                onClick={handleClick(mcp.mcp_id)}
+                className="cursor-pointer"
+                data-tool={x.mcp_id}
+              >
+                {findMcpById(mcp.mcp_id)?.name}
+              </ToolCard>
+            );
+          }
+
+          const tool = x as unknown as IAgentForm['tools'][number];
+          return (
+            <ToolCard
+              onClick={handleClick(tool.component_name)}
+              className="cursor-pointer"
+              data-tool={tool.component_name}
+            >
+              <div className="flex gap-1 items-center pointer-events-none">
+                <OperatorIcon
+                  name={tool.component_name as Operator}
+                ></OperatorIcon>
+                {tool.component_name}
+              </div>
+            </ToolCard>
+          );
+        }}
+      </NodeCollapsible>
     </NodeWrapper>
   );
 }
@@ -69,6 +69,8 @@ const FormSchema = z.object({
   cite: z.boolean().optional(),
 });

+export type AgentFormSchemaType = z.infer<typeof FormSchema>;
+
 const outputList = buildOutputList(initialAgentValues.outputs);

 function AgentForm({ node }: INextOperatorForm) {
@@ -92,7 +94,7 @@ function AgentForm({ node }: INextOperatorForm) {
     return isBottomSubAgent(edges, node?.id);
   }, [edges, node?.id]);

-  const form = useForm<z.infer<typeof FormSchema>>({
+  const form = useForm<AgentFormSchemaType>({
     defaultValues: defaultValues,
     resolver: zodResolver(FormSchema),
   });
@@ -26,6 +26,7 @@ export function useBuildPromptExtraPromptOptions(
     .map(([key, value]) => ({
       label: key,
       value: wrapPromptWithTag(value, key),
+      icon: null,
     }))
     .filter((x) => {
       if (!has) {
@@ -162,6 +162,13 @@ export function hasSubAgentOrTool(edges: Edge[], nodeId?: string) {
   return !!edge;
 }

+export function hasSubAgent(edges: Edge[], nodeId?: string) {
+  const edge = edges.find(
+    (x) => x.source === nodeId && x.sourceHandle === NodeHandleId.AgentBottom,
+  );
+  return !!edge;
+}
+
 // construct a dsl based on the node information of the graph
 export const buildDslComponentsByGraph = (
   nodes: RAGFlowNodeType[],
@@ -19,7 +19,6 @@ import { Switch } from '@/components/ui/switch';
 import { Textarea } from '@/components/ui/textarea';
 import { useFetchChunk } from '@/hooks/chunk-hooks';
 import { IModalProps } from '@/interfaces/common';
-import { Trash2 } from 'lucide-react';
 import React, { useCallback, useEffect, useState } from 'react';
 import { FieldValues, FormProvider, useForm } from 'react-hook-form';
 import { useTranslation } from 'react-i18next';
@@ -194,9 +193,9 @@ const ChunkCreatingModal: React.FC<IModalProps<any> & kFProps> = ({
               {t('chunk.enabled')}
               <Switch checked={checked} onCheckedChange={handleCheck} />
             </div>
-            <div className="flex items-center gap-1" onClick={handleRemove}>
+            {/* <div className="flex items-center gap-1" onClick={handleRemove}>
               <Trash2 size={16} /> {t('common.delete')}
-            </div>
+            </div> */}
           </Space>
         </section>
       )}
@@ -40,7 +40,7 @@ import {
   useNavigatePage,
 } from '@/hooks/logic-hooks/navigate-hooks';
 import { useFetchKnowledgeBaseConfiguration } from '@/hooks/use-knowledge-request';
-import { useGetDocumentUrl } from '../../../knowledge-chunk/components/document-preview/hooks';
+import { useGetDocumentUrl } from './components/document-preview/hooks';
 import styles from './index.less';

 const Chunk = () => {
@@ -1,34 +0,0 @@
-.image {
-  width: 100px !important;
-  object-fit: contain;
-}
-
-.imagePreview {
-  max-width: 50vw;
-  max-height: 50vh;
-  object-fit: contain;
-}
-
-.content {
-  flex: 1;
-  .chunkText;
-}
-
-.contentEllipsis {
-  .multipleLineEllipsis(3);
-}
-
-.contentText {
-  word-break: break-all !important;
-}
-
-.chunkCard {
-  width: 100%;
-}
-
-.cardSelected {
-  background-color: @selectedBackgroundColor;
-}
-.cardSelectedDark {
-  background-color: #ffffff2f;
-}
@@ -1,101 +0,0 @@
-import Image from '@/components/image';
-import { IChunk } from '@/interfaces/database/knowledge';
-import { Card, Checkbox, CheckboxProps, Flex, Popover, Switch } from 'antd';
-import classNames from 'classnames';
-import DOMPurify from 'dompurify';
-import { useEffect, useState } from 'react';
-
-import { useTheme } from '@/components/theme-provider';
-import { ChunkTextMode } from '../../constant';
-import styles from './index.less';
-
-interface IProps {
-  item: IChunk;
-  checked: boolean;
-  switchChunk: (available?: number, chunkIds?: string[]) => void;
-  editChunk: (chunkId: string) => void;
-  handleCheckboxClick: (chunkId: string, checked: boolean) => void;
-  selected: boolean;
-  clickChunkCard: (chunkId: string) => void;
-  textMode: ChunkTextMode;
-}
-
-const ChunkCard = ({
-  item,
-  checked,
-  handleCheckboxClick,
-  editChunk,
-  switchChunk,
-  selected,
-  clickChunkCard,
-  textMode,
-}: IProps) => {
-  const available = Number(item.available_int);
-  const [enabled, setEnabled] = useState(false);
-  const { theme } = useTheme();
-
-  const onChange = (checked: boolean) => {
-    setEnabled(checked);
-    switchChunk(available === 0 ? 1 : 0, [item.chunk_id]);
-  };
-
-  const handleCheck: CheckboxProps['onChange'] = (e) => {
-    handleCheckboxClick(item.chunk_id, e.target.checked);
-  };
-
-  const handleContentDoubleClick = () => {
-    editChunk(item.chunk_id);
-  };
-
-  const handleContentClick = () => {
-    clickChunkCard(item.chunk_id);
-  };
-
-  useEffect(() => {
-    setEnabled(available === 1);
-  }, [available]);
-
-  return (
-    <Card
-      className={classNames(styles.chunkCard, {
-        [`${theme === 'dark' ? styles.cardSelectedDark : styles.cardSelected}`]:
-          selected,
-      })}
-    >
-      <Flex gap={'middle'} justify={'space-between'}>
-        <Checkbox onChange={handleCheck} checked={checked}></Checkbox>
-        {item.image_id && (
-          <Popover
-            placement="right"
-            content={
-              <Image id={item.image_id} className={styles.imagePreview}></Image>
-            }
-          >
-            <Image id={item.image_id} className={styles.image}></Image>
-          </Popover>
-        )}
-
-        <section
-          onDoubleClick={handleContentDoubleClick}
-          onClick={handleContentClick}
-          className={styles.content}
-        >
-          <div
-            dangerouslySetInnerHTML={{
-              __html: DOMPurify.sanitize(item.content_with_weight),
-            }}
-            className={classNames(styles.contentText, {
-              [styles.contentEllipsis]: textMode === ChunkTextMode.Ellipse,
-            })}
-          ></div>
-        </section>
-
-        <div>
-          <Switch checked={enabled} onChange={onChange} />
-        </div>
-      </Flex>
-    </Card>
-  );
-};
-
-export default ChunkCard;
@ -1,140 +0,0 @@
|
||||
import EditTag from '@/components/edit-tag';
|
||||
import { useFetchChunk } from '@/hooks/chunk-hooks';
|
||||
import { IModalProps } from '@/interfaces/common';
|
||||
import { IChunk } from '@/interfaces/database/knowledge';
|
||||
import { DeleteOutlined } from '@ant-design/icons';
|
||||
import { Divider, Form, Input, Modal, Space, Switch } from 'antd';
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useDeleteChunkByIds } from '../../hooks';
|
||||
import {
|
||||
transformTagFeaturesArrayToObject,
|
||||
transformTagFeaturesObjectToArray,
|
||||
} from '../../utils';
|
||||
import { TagFeatureItem } from './tag-feature-item';
|
||||
|
||||
type FieldType = Pick<
|
||||
IChunk,
|
||||
'content_with_weight' | 'tag_kwd' | 'question_kwd' | 'important_kwd'
|
||||
>;
|
||||
|
||||
interface kFProps {
|
||||
doc_id: string;
|
||||
chunkId: string | undefined;
|
||||
parserId: string;
|
||||
}
|
||||
|
||||
const ChunkCreatingModal: React.FC<IModalProps<any> & kFProps> = ({
|
||||
doc_id,
|
||||
chunkId,
|
||||
hideModal,
|
||||
onOk,
|
||||
loading,
|
||||
parserId,
|
||||
}) => {
|
||||
const [form] = Form.useForm();
|
||||
const [checked, setChecked] = useState(false);
|
||||
const { removeChunk } = useDeleteChunkByIds();
|
||||
const { data } = useFetchChunk(chunkId);
|
||||
const { t } = useTranslation();
|
||||
|
||||
const isTagParser = parserId === 'tag';
|
||||
|
||||
const handleOk = useCallback(async () => {
|
||||
try {
|
||||
const values = await form.validateFields();
|
||||
console.log('🚀 ~ handleOk ~ values:', values);
|
||||
|
||||
onOk?.({
|
||||
...values,
|
||||
tag_feas: transformTagFeaturesArrayToObject(values.tag_feas),
|
||||
available_int: checked ? 1 : 0, // available_int
|
||||
});
|
||||
} catch (errorInfo) {
|
||||
console.log('Failed:', errorInfo);
|
||||
}
|
||||
}, [checked, form, onOk]);
|
||||
|
||||
const handleRemove = useCallback(() => {
|
||||
if (chunkId) {
|
||||
return removeChunk([chunkId], doc_id);
|
||||
}
|
||||
}, [chunkId, doc_id, removeChunk]);
|
||||
|
||||
const handleCheck = useCallback(() => {
|
||||
setChecked(!checked);
|
||||
}, [checked]);
|
||||
|
||||
useEffect(() => {
|
||||
if (data?.code === 0) {
|
||||
const { available_int, tag_feas } = data.data;
|
||||
form.setFieldsValue({
|
||||
...(data.data || {}),
|
||||
tag_feas: transformTagFeaturesObjectToArray(tag_feas),
|
||||
});
|
||||
|
||||
setChecked(available_int !== 0);
|
||||
}
|
||||
}, [data, form, chunkId]);
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title={`${chunkId ? t('common.edit') : t('common.create')} ${t('chunk.chunk')}`}
|
||||
open={true}
|
||||
onOk={handleOk}
|
||||
onCancel={hideModal}
|
||||
okButtonProps={{ loading }}
|
||||
destroyOnClose
|
||||
>
|
||||
<Form form={form} autoComplete="off" layout={'vertical'}>
|
||||
<Form.Item<FieldType>
|
||||
label={t('chunk.chunk')}
|
||||
name="content_with_weight"
|
||||
rules={[{ required: true, message: t('chunk.chunkMessage') }]}
|
||||
>
|
||||
<Input.TextArea autoSize={{ minRows: 4, maxRows: 10 }} />
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item<FieldType> label={t('chunk.keyword')} name="important_kwd">
|
||||
<EditTag></EditTag>
|
||||
</Form.Item>
|
||||
<Form.Item<FieldType>
|
||||
label={t('chunk.question')}
|
||||
name="question_kwd"
|
||||
tooltip={t('chunk.questionTip')}
|
||||
>
|
||||
<EditTag></EditTag>
|
||||
</Form.Item>
|
||||
{isTagParser && (
|
||||
<Form.Item<FieldType>
|
||||
label={t('knowledgeConfiguration.tagName')}
|
||||
name="tag_kwd"
|
||||
>
|
||||
<EditTag></EditTag>
|
||||
</Form.Item>
|
||||
)}
|
||||
|
||||
{!isTagParser && <TagFeatureItem></TagFeatureItem>}
|
||||
</Form>
|
||||
|
||||
{chunkId && (
|
||||
<section>
|
||||
<Divider></Divider>
|
||||
<Space size={'large'}>
|
||||
<Switch
|
||||
checkedChildren={t('chunk.enabled')}
|
||||
unCheckedChildren={t('chunk.disabled')}
|
||||
onChange={handleCheck}
|
||||
checked={checked}
|
||||
/>
|
||||
|
||||
<span onClick={handleRemove}>
|
||||
<DeleteOutlined /> {t('common.delete')}
|
||||
</span>
|
||||
</Space>
|
||||
</section>
|
||||
)}
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
export default ChunkCreatingModal;
|
||||
@ -1,107 +0,0 @@
|
||||
import {
|
||||
useFetchKnowledgeBaseConfiguration,
|
||||
useFetchTagListByKnowledgeIds,
|
||||
} from '@/hooks/knowledge-hooks';
|
||||
import { MinusCircleOutlined, PlusOutlined } from '@ant-design/icons';
|
||||
import { Button, Form, InputNumber, Select } from 'antd';
|
||||
import { useCallback, useEffect, useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { FormListItem } from '../../utils';
|
||||
|
||||
const FieldKey = 'tag_feas';
|
||||
|
||||
export const TagFeatureItem = () => {
|
||||
const form = Form.useFormInstance();
|
||||
const { t } = useTranslation();
|
||||
const { data: knowledgeConfiguration } = useFetchKnowledgeBaseConfiguration();
|
||||
|
||||
const { setKnowledgeIds, list } = useFetchTagListByKnowledgeIds();
|
||||
|
||||
const tagKnowledgeIds = useMemo(() => {
|
||||
return knowledgeConfiguration?.parser_config?.tag_kb_ids ?? [];
|
||||
}, [knowledgeConfiguration?.parser_config?.tag_kb_ids]);
|
||||
|
||||
const options = useMemo(() => {
|
||||
return list.map((x) => ({
|
||||
value: x[0],
|
||||
label: x[0],
|
||||
}));
|
||||
}, [list]);
|
||||
|
||||
const filterOptions = useCallback(
|
||||
(index: number) => {
|
||||
const tags: FormListItem[] = form.getFieldValue(FieldKey) ?? [];
|
||||
|
||||
// Exclude it's own current data
|
||||
const list = tags
|
||||
.filter((x, idx) => x && index !== idx)
|
||||
.map((x) => x.tag);
|
||||
|
||||
// Exclude the selected data from other options from one's own options.
|
||||
return options.filter((x) => !list.some((y) => x.value === y));
|
||||
},
|
||||
[form, options],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
setKnowledgeIds(tagKnowledgeIds);
|
||||
}, [setKnowledgeIds, tagKnowledgeIds]);
|
||||
|
||||
return (
|
||||
<Form.Item label={t('knowledgeConfiguration.tags')}>
|
||||
<Form.List name={FieldKey} initialValue={[]}>
|
||||
{(fields, { add, remove }) => (
|
||||
<>
|
||||
{fields.map(({ key, name, ...restField }) => (
|
||||
<div key={key} className="flex gap-3 items-center">
|
||||
<div className="flex flex-1 gap-8">
|
||||
<Form.Item
|
||||
{...restField}
|
||||
name={[name, 'tag']}
|
||||
rules={[
|
||||
{ required: true, message: t('common.pleaseSelect') },
|
||||
]}
|
||||
className="w-2/3"
|
||||
>
|
||||
<Select
|
||||
showSearch
|
||||
placeholder={t('knowledgeConfiguration.tagName')}
|
||||
options={filterOptions(name)}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item
|
||||
{...restField}
|
||||
name={[name, 'frequency']}
|
||||
rules={[
|
||||
{ required: true, message: t('common.pleaseInput') },
|
||||
]}
|
||||
>
|
||||
<InputNumber
|
||||
placeholder={t('knowledgeConfiguration.frequency')}
|
||||
max={10}
|
||||
min={0}
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
<MinusCircleOutlined
|
||||
onClick={() => remove(name)}
|
||||
className="mb-6"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
<Form.Item>
|
||||
<Button
|
||||
type="dashed"
|
||||
onClick={() => add()}
|
||||
block
|
||||
icon={<PlusOutlined />}
|
||||
>
|
||||
{t('knowledgeConfiguration.addTag')}
|
||||
</Button>
|
||||
</Form.Item>
|
||||
</>
|
||||
)}
|
||||
</Form.List>
|
||||
</Form.Item>
|
||||
);
|
||||
};
|
||||
@ -1,221 +0,0 @@
|
||||
import { ReactComponent as FilterIcon } from '@/assets/filter.svg';
|
||||
import { KnowledgeRouteKey } from '@/constants/knowledge';
|
||||
import { IChunkListResult, useSelectChunkList } from '@/hooks/chunk-hooks';
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { useKnowledgeBaseId } from '@/hooks/knowledge-hooks';
|
||||
import {
|
||||
ArrowLeftOutlined,
|
||||
CheckCircleOutlined,
|
||||
CloseCircleOutlined,
|
||||
DeleteOutlined,
|
||||
DownOutlined,
|
||||
FilePdfOutlined,
|
||||
PlusOutlined,
|
||||
SearchOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import {
|
||||
Button,
|
||||
Checkbox,
|
||||
Flex,
|
||||
Input,
|
||||
Menu,
|
||||
MenuProps,
|
||||
Popover,
|
||||
Radio,
|
||||
RadioChangeEvent,
|
||||
Segmented,
|
||||
SegmentedProps,
|
||||
Space,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { Link } from 'umi';
|
||||
import { ChunkTextMode } from '../../constant';
|
||||
|
||||
const { Text } = Typography;
|
||||
|
||||
interface IProps
|
||||
extends Pick<
|
||||
IChunkListResult,
|
||||
'searchString' | 'handleInputChange' | 'available' | 'handleSetAvailable'
|
||||
> {
|
||||
checked: boolean;
|
||||
selectAllChunk: (checked: boolean) => void;
|
||||
createChunk: () => void;
|
||||
removeChunk: () => void;
|
||||
switchChunk: (available: number) => void;
|
||||
changeChunkTextMode(mode: ChunkTextMode): void;
|
||||
}
|
||||
|
||||
const ChunkToolBar = ({
|
||||
selectAllChunk,
|
||||
checked,
|
||||
createChunk,
|
||||
removeChunk,
|
||||
switchChunk,
|
||||
changeChunkTextMode,
|
||||
available,
|
||||
handleSetAvailable,
|
||||
searchString,
|
||||
handleInputChange,
|
||||
}: IProps) => {
|
||||
const data = useSelectChunkList();
|
||||
const documentInfo = data?.documentInfo;
|
||||
const knowledgeBaseId = useKnowledgeBaseId();
|
||||
const [isShowSearchBox, setIsShowSearchBox] = useState(false);
|
||||
const { t } = useTranslate('chunk');
|
||||
|
||||
const handleSelectAllCheck = useCallback(
|
||||
(e: any) => {
|
||||
selectAllChunk(e.target.checked);
|
||||
},
|
||||
[selectAllChunk],
|
||||
);
|
||||
|
||||
const handleSearchIconClick = () => {
|
||||
setIsShowSearchBox(true);
|
||||
};
|
||||
|
||||
const handleSearchBlur = () => {
|
||||
if (!searchString?.trim()) {
|
||||
setIsShowSearchBox(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDelete = useCallback(() => {
|
||||
removeChunk();
|
||||
}, [removeChunk]);
|
||||
|
||||
const handleEnabledClick = useCallback(() => {
|
||||
switchChunk(1);
|
||||
}, [switchChunk]);
|
||||
|
||||
const handleDisabledClick = useCallback(() => {
|
||||
switchChunk(0);
|
||||
}, [switchChunk]);
|
||||
|
||||
const items: MenuProps['items'] = useMemo(() => {
|
||||
return [
|
||||
{
|
||||
key: '1',
|
||||
label: (
|
||||
<>
|
||||
<Checkbox onChange={handleSelectAllCheck} checked={checked}>
|
||||
<b>{t('selectAll')}</b>
|
||||
</Checkbox>
|
||||
</>
|
||||
),
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: '2',
|
||||
label: (
|
||||
<Space onClick={handleEnabledClick}>
|
||||
<CheckCircleOutlined />
|
||||
<b>{t('enabledSelected')}</b>
|
||||
</Space>
|
||||
),
|
||||
},
|
||||
{
|
||||
key: '3',
|
||||
label: (
|
||||
<Space onClick={handleDisabledClick}>
|
||||
<CloseCircleOutlined />
|
||||
<b>{t('disabledSelected')}</b>
|
||||
</Space>
|
||||
),
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: '4',
|
||||
label: (
|
||||
<Space onClick={handleDelete}>
|
||||
<DeleteOutlined />
|
||||
<b>{t('deleteSelected')}</b>
|
||||
</Space>
|
||||
),
|
||||
},
|
||||
];
|
||||
}, [
|
||||
checked,
|
||||
handleSelectAllCheck,
|
||||
handleDelete,
|
||||
handleEnabledClick,
|
||||
handleDisabledClick,
|
||||
t,
|
||||
]);
|
||||
|
||||
const content = (
|
||||
<Menu style={{ width: 200 }} items={items} selectable={false} />
|
||||
);
|
||||
|
||||
const handleFilterChange = (e: RadioChangeEvent) => {
|
||||
selectAllChunk(false);
|
||||
handleSetAvailable(e.target.value);
|
||||
};
|
||||
|
||||
const filterContent = (
|
||||
<Radio.Group onChange={handleFilterChange} value={available}>
|
||||
<Space direction="vertical">
|
||||
<Radio value={undefined}>{t('all')}</Radio>
|
||||
<Radio value={1}>{t('enabled')}</Radio>
|
||||
<Radio value={0}>{t('disabled')}</Radio>
|
||||
</Space>
|
||||
</Radio.Group>
|
||||
);
|
||||
|
||||
return (
|
||||
<Flex justify="space-between" align="center">
|
||||
<Space size={'middle'}>
|
||||
<Link
|
||||
to={`/knowledge/${KnowledgeRouteKey.Dataset}?id=${knowledgeBaseId}`}
|
||||
>
|
||||
<ArrowLeftOutlined />
|
||||
</Link>
|
||||
<FilePdfOutlined />
|
||||
<Text ellipsis={{ tooltip: documentInfo?.name }} style={{ width: 150 }}>
|
||||
{documentInfo?.name}
|
||||
</Text>
|
||||
</Space>
|
||||
<Space>
|
||||
<Segmented
|
||||
options={[
|
||||
{ label: t(ChunkTextMode.Full), value: ChunkTextMode.Full },
|
||||
{ label: t(ChunkTextMode.Ellipse), value: ChunkTextMode.Ellipse },
|
||||
]}
|
||||
onChange={changeChunkTextMode as SegmentedProps['onChange']}
|
||||
/>
|
||||
<Popover content={content} placement="bottom" arrow={false}>
|
||||
<Button>
|
||||
{t('bulk')}
|
||||
<DownOutlined />
|
||||
</Button>
|
||||
</Popover>
|
||||
{isShowSearchBox ? (
|
||||
<Input
|
||||
size="middle"
|
||||
placeholder={t('search')}
|
||||
prefix={<SearchOutlined />}
|
||||
allowClear
|
||||
onChange={handleInputChange}
|
||||
onBlur={handleSearchBlur}
|
||||
value={searchString}
|
||||
/>
|
||||
) : (
|
||||
<Button icon={<SearchOutlined />} onClick={handleSearchIconClick} />
|
||||
)}
|
||||
|
||||
<Popover content={filterContent} placement="bottom" arrow={false}>
|
||||
<Button icon={<FilterIcon />} />
|
||||
</Popover>
|
||||
<Button
|
||||
icon={<PlusOutlined />}
|
||||
type="primary"
|
||||
onClick={() => createChunk()}
|
||||
/>
|
||||
</Space>
|
||||
</Flex>
|
||||
);
|
||||
};
|
||||
|
||||
export default ChunkToolBar;
|
||||
@ -1,55 +0,0 @@
|
||||
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
|
||||
import { api_host } from '@/utils/api';
|
||||
import { useSize } from 'ahooks';
|
||||
import { CustomTextRenderer } from 'node_modules/react-pdf/dist/esm/shared/types';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
|
||||
export const useDocumentResizeObserver = () => {
|
||||
const [containerWidth, setContainerWidth] = useState<number>();
|
||||
const [containerRef, setContainerRef] = useState<HTMLElement | null>(null);
|
||||
const size = useSize(containerRef);
|
||||
|
||||
const onResize = useCallback((width?: number) => {
|
||||
if (width) {
|
||||
setContainerWidth(width);
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
onResize(size?.width);
|
||||
}, [size?.width, onResize]);
|
||||
|
||||
return { containerWidth, setContainerRef };
|
||||
};
|
||||
|
||||
function highlightPattern(text: string, pattern: string, pageNumber: number) {
|
||||
if (pageNumber === 2) {
|
||||
return `<mark>${text}</mark>`;
|
||||
}
|
||||
if (text.trim() !== '' && pattern.match(text)) {
|
||||
// return pattern.replace(text, (value) => `<mark>${value}</mark>`);
|
||||
return `<mark>${text}</mark>`;
|
||||
}
|
||||
return text.replace(pattern, (value) => `<mark>${value}</mark>`);
|
||||
}
|
||||
|
||||
export const useHighlightText = (searchText: string = '') => {
|
||||
const textRenderer: CustomTextRenderer = useCallback(
|
||||
(textItem) => {
|
||||
return highlightPattern(textItem.str, searchText, textItem.pageNumber);
|
||||
},
|
||||
[searchText],
|
||||
);
|
||||
|
||||
return textRenderer;
|
||||
};
|
||||
|
||||
export const useGetDocumentUrl = () => {
|
||||
const { documentId } = useGetKnowledgeSearchParams();
|
||||
|
||||
const url = useMemo(() => {
|
||||
return `${api_host}/document/get/${documentId}`;
|
||||
}, [documentId]);
|
||||
|
||||
return url;
|
||||
};
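`useGetDocumentUrl` derives the fetch URL from `api_host` and the `documentId` query parameter. A minimal consumer sketch, assuming the hook above is in scope; the component itself is hypothetical and not part of the diff:

```tsx
import { useGetDocumentUrl } from './hooks';

// Hypothetical consumer: hand the computed URL straight to a viewer element.
export function DocumentFrame() {
  const url = useGetDocumentUrl(); // resolves to `${api_host}/document/get/${documentId}`
  return <iframe src={url} title="document preview" className="w-full h-full" />;
}
```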
@@ -1,12 +0,0 @@
.documentContainer {
  width: 100%;
  height: calc(100vh - 284px);
  position: relative;
  :global(.PdfHighlighter) {
    overflow-x: hidden;
  }
  :global(.Highlight--scrolledTo .Highlight__part) {
    overflow-x: hidden;
    background-color: rgba(255, 226, 143, 1);
  }
}

@@ -1,121 +0,0 @@
import { Skeleton } from 'antd';
import { memo, useEffect, useRef } from 'react';
import {
  AreaHighlight,
  Highlight,
  IHighlight,
  PdfHighlighter,
  PdfLoader,
  Popup,
} from 'react-pdf-highlighter';
import { useGetDocumentUrl } from './hooks';

import { useCatchDocumentError } from '@/components/pdf-previewer/hooks';
import FileError from '@/pages/document-viewer/file-error';
import styles from './index.less';

interface IProps {
  highlights: IHighlight[];
  setWidthAndHeight: (width: number, height: number) => void;
}
const HighlightPopup = ({
  comment,
}: {
  comment: { text: string; emoji: string };
}) =>
  comment.text ? (
    <div className="Highlight__popup">
      {comment.emoji} {comment.text}
    </div>
  ) : null;

// TODO: merge with DocumentPreviewer
const Preview = ({ highlights: state, setWidthAndHeight }: IProps) => {
  const url = useGetDocumentUrl();

  const ref = useRef<(highlight: IHighlight) => void>(() => {});
  const error = useCatchDocumentError(url);

  const resetHash = () => {};

  useEffect(() => {
    if (state.length > 0) {
      ref?.current(state[0]);
    }
  }, [state]);

  return (
    <div className={styles.documentContainer}>
      <PdfLoader
        url={url}
        beforeLoad={<Skeleton active />}
        workerSrc="/pdfjs-dist/pdf.worker.min.js"
        errorMessage={<FileError>{error}</FileError>}
      >
        {(pdfDocument) => {
          pdfDocument.getPage(1).then((page) => {
            const viewport = page.getViewport({ scale: 1 });
            const width = viewport.width;
            const height = viewport.height;
            setWidthAndHeight(width, height);
          });

          return (
            <PdfHighlighter
              pdfDocument={pdfDocument}
              enableAreaSelection={(event) => event.altKey}
              onScrollChange={resetHash}
              scrollRef={(scrollTo) => {
                ref.current = scrollTo;
              }}
              onSelectionFinished={() => null}
              highlightTransform={(
                highlight,
                index,
                setTip,
                hideTip,
                viewportToScaled,
                screenshot,
                isScrolledTo,
              ) => {
                const isTextHighlight = !Boolean(
                  highlight.content && highlight.content.image,
                );

                const component = isTextHighlight ? (
                  <Highlight
                    isScrolledTo={isScrolledTo}
                    position={highlight.position}
                    comment={highlight.comment}
                  />
                ) : (
                  <AreaHighlight
                    isScrolledTo={isScrolledTo}
                    highlight={highlight}
                    onChange={() => {}}
                  />
                );

                return (
                  <Popup
                    popupContent={<HighlightPopup {...highlight} />}
                    onMouseOver={(popupContent) =>
                      setTip(highlight, () => popupContent)
                    }
                    onMouseOut={hideTip}
                    key={index}
                  >
                    {component}
                  </Popup>
                );
              }}
              highlights={state}
            />
          );
        }}
      </PdfLoader>
    </div>
  );
};

export default memo(Preview);

@@ -1,4 +0,0 @@
export enum ChunkTextMode {
  Full = 'full',
  Ellipse = 'ellipse',
}

@@ -1,129 +0,0 @@
import {
  useCreateChunk,
  useDeleteChunk,
  useSelectChunkList,
} from '@/hooks/chunk-hooks';
import { useSetModalState, useShowDeleteConfirm } from '@/hooks/common-hooks';
import { useGetKnowledgeSearchParams } from '@/hooks/route-hook';
import { IChunk } from '@/interfaces/database/knowledge';
import { buildChunkHighlights } from '@/utils/document-util';
import { useCallback, useMemo, useState } from 'react';
import { IHighlight } from 'react-pdf-highlighter';
import { ChunkTextMode } from './constant';

export const useHandleChunkCardClick = () => {
  const [selectedChunkId, setSelectedChunkId] = useState<string>('');

  const handleChunkCardClick = useCallback((chunkId: string) => {
    setSelectedChunkId(chunkId);
  }, []);

  return { handleChunkCardClick, selectedChunkId };
};

export const useGetSelectedChunk = (selectedChunkId: string) => {
  const data = useSelectChunkList();
  return (
    data?.data?.find((x) => x.chunk_id === selectedChunkId) ?? ({} as IChunk)
  );
};

export const useGetChunkHighlights = (selectedChunkId: string) => {
  const [size, setSize] = useState({ width: 849, height: 1200 });
  const selectedChunk: IChunk = useGetSelectedChunk(selectedChunkId);

  const highlights: IHighlight[] = useMemo(() => {
    return buildChunkHighlights(selectedChunk, size);
  }, [selectedChunk, size]);

  const setWidthAndHeight = useCallback((width: number, height: number) => {
    setSize((pre) => {
      if (pre.height !== height || pre.width !== width) {
        return { height, width };
      }
      return pre;
    });
  }, []);

  return { highlights, setWidthAndHeight };
};

// Switch chunk text to be fully displayed or ellipse
export const useChangeChunkTextMode = () => {
  const [textMode, setTextMode] = useState<ChunkTextMode>(ChunkTextMode.Full);

  const changeChunkTextMode = useCallback((mode: ChunkTextMode) => {
    setTextMode(mode);
  }, []);

  return { textMode, changeChunkTextMode };
};

export const useDeleteChunkByIds = (): {
  removeChunk: (chunkIds: string[], documentId: string) => Promise<number>;
} => {
  const { deleteChunk } = useDeleteChunk();
  const showDeleteConfirm = useShowDeleteConfirm();

  const removeChunk = useCallback(
    (chunkIds: string[], documentId: string) => () => {
      return deleteChunk({ chunkIds, doc_id: documentId });
    },
    [deleteChunk],
  );

  const onRemoveChunk = useCallback(
    (chunkIds: string[], documentId: string): Promise<number> => {
      return showDeleteConfirm({ onOk: removeChunk(chunkIds, documentId) });
    },
    [removeChunk, showDeleteConfirm],
  );

  return {
    removeChunk: onRemoveChunk,
  };
};
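Note the currying in `removeChunk`: it returns a thunk, so `showDeleteConfirm` only performs the deletion when the user presses OK. A sketch of a call site, assuming the hook above is imported; the ids are made up:

```tsx
// Hypothetical call site: must run inside a React component or custom hook.
const { removeChunk } = useDeleteChunkByIds();

const handleDelete = async () => {
  // Resolves only after the confirm dialog is accepted; by the convention
  // used throughout this page, a result code of 0 means success.
  const code = await removeChunk(['chunk-1', 'chunk-2'], 'doc-42');
  if (code === 0) {
    // e.g. clear the local selection, as the page component below does
  }
};
```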
export const useUpdateChunk = () => {
  const [chunkId, setChunkId] = useState<string | undefined>('');
  const {
    visible: chunkUpdatingVisible,
    hideModal: hideChunkUpdatingModal,
    showModal,
  } = useSetModalState();
  const { createChunk, loading } = useCreateChunk();
  const { documentId } = useGetKnowledgeSearchParams();

  const onChunkUpdatingOk = useCallback(
    async (params: IChunk) => {
      const code = await createChunk({
        ...params,
        doc_id: documentId,
        chunk_id: chunkId,
      });

      if (code === 0) {
        hideChunkUpdatingModal();
      }
    },
    [createChunk, hideChunkUpdatingModal, chunkId, documentId],
  );

  const handleShowChunkUpdatingModal = useCallback(
    async (id?: string) => {
      setChunkId(id);
      showModal();
    },
    [showModal],
  );

  return {
    chunkUpdatingLoading: loading,
    onChunkUpdatingOk,
    chunkUpdatingVisible,
    hideChunkUpdatingModal,
    showChunkUpdatingModal: handleShowChunkUpdatingModal,
    chunkId,
    documentId,
  };
};

@@ -1,92 +0,0 @@
.chunkPage {
  padding: 24px;

  display: flex;
  // height: calc(100vh - 112px);
  flex-direction: column;

  .filter {
    margin: 10px 0;
    display: flex;
    height: 32px;
    justify-content: space-between;
  }

  .pagePdfWrapper {
    width: 60%;
  }

  .pageWrapper {
    width: 100%;
  }

  .pageContent {
    flex: 1;
    width: 100%;
    padding-right: 12px;
    overflow-y: auto;

    .spin {
      min-height: 400px;
    }
  }

  .documentPreview {
    width: 40%;
    height: 100%;
  }

  .chunkContainer {
    display: flex;
    height: calc(100vh - 332px);
  }

  .chunkOtherContainer {
    width: 100%;
  }

  .pageFooter {
    padding-top: 10px;
    height: 32px;
  }
}

.container {
  height: 100px;
  display: flex;
  flex-direction: column;
  justify-content: space-between;

  .content {
    display: flex;
    justify-content: space-between;

    .context {
      flex: 1;
      // width: 207px;
      height: 88px;
      overflow: hidden;
    }
  }

  .footer {
    height: 20px;

    .text {
      margin-left: 10px;
    }
  }
}

.card {
  :global {
    .ant-card-body {
      padding: 10px;
      margin: 0;
    }

    margin-bottom: 10px;
  }

  cursor: pointer;
}

@@ -1,202 +0,0 @@
import { useFetchNextChunkList, useSwitchChunk } from '@/hooks/chunk-hooks';
import type { PaginationProps } from 'antd';
import { Divider, Flex, Pagination, Space, Spin, message } from 'antd';
import classNames from 'classnames';
import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import ChunkCard from './components/chunk-card';
import CreatingModal from './components/chunk-creating-modal';
import ChunkToolBar from './components/chunk-toolbar';
import DocumentPreview from './components/document-preview/preview';
import {
  useChangeChunkTextMode,
  useDeleteChunkByIds,
  useGetChunkHighlights,
  useHandleChunkCardClick,
  useUpdateChunk,
} from './hooks';

import styles from './index.less';

const Chunk = () => {
  const [selectedChunkIds, setSelectedChunkIds] = useState<string[]>([]);
  const { removeChunk } = useDeleteChunkByIds();
  const {
    data: { documentInfo, data = [], total },
    pagination,
    loading,
    searchString,
    handleInputChange,
    available,
    handleSetAvailable,
  } = useFetchNextChunkList();
  const { handleChunkCardClick, selectedChunkId } = useHandleChunkCardClick();
  const isPdf = documentInfo?.type === 'pdf';

  const { t } = useTranslation();
  const { changeChunkTextMode, textMode } = useChangeChunkTextMode();
  const { switchChunk } = useSwitchChunk();
  const {
    chunkUpdatingLoading,
    onChunkUpdatingOk,
    showChunkUpdatingModal,
    hideChunkUpdatingModal,
    chunkId,
    chunkUpdatingVisible,
    documentId,
  } = useUpdateChunk();

  const onPaginationChange: PaginationProps['onShowSizeChange'] = (
    page,
    size,
  ) => {
    setSelectedChunkIds([]);
    pagination.onChange?.(page, size);
  };

  const selectAllChunk = useCallback(
    (checked: boolean) => {
      setSelectedChunkIds(checked ? data.map((x) => x.chunk_id) : []);
    },
    [data],
  );

  const handleSingleCheckboxClick = useCallback(
    (chunkId: string, checked: boolean) => {
      setSelectedChunkIds((previousIds) => {
        const idx = previousIds.findIndex((x) => x === chunkId);
        const nextIds = [...previousIds];
        if (checked && idx === -1) {
          nextIds.push(chunkId);
        } else if (!checked && idx !== -1) {
          nextIds.splice(idx, 1);
        }
        return nextIds;
      });
    },
    [],
  );

  const showSelectedChunkWarning = useCallback(() => {
    message.warning(t('message.pleaseSelectChunk'));
  }, [t]);

  const handleRemoveChunk = useCallback(async () => {
    if (selectedChunkIds.length > 0) {
      const resCode: number = await removeChunk(selectedChunkIds, documentId);
      if (resCode === 0) {
        setSelectedChunkIds([]);
      }
    } else {
      showSelectedChunkWarning();
    }
  }, [selectedChunkIds, documentId, removeChunk, showSelectedChunkWarning]);

  const handleSwitchChunk = useCallback(
    async (available?: number, chunkIds?: string[]) => {
      let ids = chunkIds;
      if (!chunkIds) {
        ids = selectedChunkIds;
        if (selectedChunkIds.length === 0) {
          showSelectedChunkWarning();
          return;
        }
      }

      const resCode: number = await switchChunk({
        chunk_ids: ids,
        available_int: available,
        doc_id: documentId,
      });
      if (!chunkIds && resCode === 0) {
      }
    },
    [switchChunk, documentId, selectedChunkIds, showSelectedChunkWarning],
  );

  const { highlights, setWidthAndHeight } =
    useGetChunkHighlights(selectedChunkId);

  return (
    <>
      <div className={styles.chunkPage}>
        <ChunkToolBar
          selectAllChunk={selectAllChunk}
          createChunk={showChunkUpdatingModal}
          removeChunk={handleRemoveChunk}
          checked={selectedChunkIds.length === data.length}
          switchChunk={handleSwitchChunk}
          changeChunkTextMode={changeChunkTextMode}
          searchString={searchString}
          handleInputChange={handleInputChange}
          available={available}
          handleSetAvailable={handleSetAvailable}
        ></ChunkToolBar>
        <Divider></Divider>
        <Flex flex={1} gap={'middle'}>
          <Flex
            vertical
            className={isPdf ? styles.pagePdfWrapper : styles.pageWrapper}
          >
            <Spin spinning={loading} className={styles.spin} size="large">
              <div className={styles.pageContent}>
                <Space
                  direction="vertical"
                  size={'middle'}
                  className={classNames(styles.chunkContainer, {
                    [styles.chunkOtherContainer]: !isPdf,
                  })}
                >
                  {data.map((item) => (
                    <ChunkCard
                      item={item}
                      key={item.chunk_id}
                      editChunk={showChunkUpdatingModal}
                      checked={selectedChunkIds.some(
                        (x) => x === item.chunk_id,
                      )}
                      handleCheckboxClick={handleSingleCheckboxClick}
                      switchChunk={handleSwitchChunk}
                      clickChunkCard={handleChunkCardClick}
                      selected={item.chunk_id === selectedChunkId}
                      textMode={textMode}
                    ></ChunkCard>
                  ))}
                </Space>
              </div>
            </Spin>
            <div className={styles.pageFooter}>
              <Pagination
                {...pagination}
                total={total}
                size={'small'}
                onChange={onPaginationChange}
              />
            </div>
          </Flex>
          {isPdf && (
            <section className={styles.documentPreview}>
              <DocumentPreview
                highlights={highlights}
                setWidthAndHeight={setWidthAndHeight}
              ></DocumentPreview>
            </section>
          )}
        </Flex>
      </div>
      {chunkUpdatingVisible && (
        <CreatingModal
          doc_id={documentId}
          chunkId={chunkId}
          hideModal={hideChunkUpdatingModal}
          visible={chunkUpdatingVisible}
          loading={chunkUpdatingLoading}
          onOk={onChunkUpdatingOk}
          parserId={documentInfo.parser_id}
        />
      )}
    </>
  );
};

export default Chunk;

@@ -1,24 +0,0 @@
export type FormListItem = {
  frequency: number;
  tag: string;
};

export function transformTagFeaturesArrayToObject(
  list: Array<FormListItem> = [],
) {
  return list.reduce<Record<string, number>>((pre, cur) => {
    pre[cur.tag] = cur.frequency;

    return pre;
  }, {});
}

export function transformTagFeaturesObjectToArray(
  object: Record<string, number> = {},
) {
  return Object.keys(object).reduce<Array<FormListItem>>((pre, key) => {
    pre.push({ frequency: object[key], tag: key });

    return pre;
  }, []);
}
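These two helpers are inverses of each other, converting between the form's list rows and the `tag_feas` object sent to the API. An illustrative round trip (the tag names and frequencies are made up):

```ts
const rows = [
  { tag: 'finance', frequency: 3 },
  { tag: 'legal', frequency: 7 },
];

transformTagFeaturesArrayToObject(rows);
// => { finance: 3, legal: 7 }

transformTagFeaturesObjectToArray({ finance: 3, legal: 7 });
// => [{ frequency: 3, tag: 'finance' }, { frequency: 7, tag: 'legal' }]
```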
@@ -1,3 +1,4 @@
import { NodeCollapsible } from '@/components/collapse';
import { BaseNode } from '@/interfaces/database/agent';
import { NodeProps, Position } from '@xyflow/react';
import { memo } from 'react';

@@ -37,17 +38,18 @@ function ParserNode({
        isConnectableEnd={false}
      ></CommonHandle>
      <NodeHeader id={id} name={data.name} label={data.label}></NodeHeader>
      <section className="space-y-2">
        {data.form?.setups.map((x, idx) => (

      <NodeCollapsible items={data.form?.setups}>
        {(x, idx) => (
          <LabelCard
            key={idx}
            className="flex justify- flex-col text-text-primary gap-1"
            className="flex flex-col text-text-primary gap-1"
          >
            <span className="text-text-secondary">Parser {idx + 1}</span>
            {t(`dataflow.fileFormatOptions.${x.fileFormat}`)}
          </LabelCard>
        ))}
      </section>
        )}
      </NodeCollapsible>
    </NodeWrapper>
  );
}

@@ -30,6 +30,7 @@ import { useWatchFormChange } from '../../hooks/use-watch-form-change';
import { INextOperatorForm } from '../../interface';
import { buildOutputList } from '../../utils/build-output-list';
import { Output } from '../components/output';
import { OutputFormatFormField } from './common-form-fields';
import { EmailFormFields } from './email-form-fields';
import { ImageFormFields } from './image-form-fields';
import { PdfFormFields } from './pdf-form-fields';

@@ -146,10 +147,12 @@ function ParserItem({
      )}
    </RAGFlowFormItem>
    <Widget prefix={prefix} fileType={fileFormat as FileType}></Widget>
    {/* <OutputFormatFormField
      prefix={prefix}
      fileType={fileFormat as FileType}
    /> */}
    <div className="hidden">
      <OutputFormatFormField
        prefix={prefix}
        fileType={fileFormat as FileType}
      />
    </div>
    {index < fieldLength - 1 && <Separator />}
  </section>
);

@@ -4,8 +4,6 @@ import {
} from '@/components/auto-keywords-form-field';
import { ConfigurationFormContainer } from '../configuration-form-container';

import { TagItems } from '../components/tag-item';

export function AudioConfiguration() {
  return (
    <ConfigurationFormContainer>

@@ -14,7 +12,7 @@ export function AudioConfiguration() {
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </>

      <TagItems></TagItems>
      {/* <TagItems></TagItems> */}
    </ConfigurationFormContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import {
  ConfigurationFormContainer,
  MainContainer,

@@ -20,9 +19,9 @@ export function BookConfiguration() {
        <AutoKeywordsFormField></AutoKeywordsFormField>
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </ConfigurationFormContainer>
      <ConfigurationFormContainer>
      {/* <ConfigurationFormContainer>
        <TagItems></TagItems>
      </ConfigurationFormContainer>
      </ConfigurationFormContainer> */}
    </MainContainer>
  );
}

@@ -7,7 +7,6 @@ import {
  FormMessage,
} from '@/components/ui/form';
import { Radio } from '@/components/ui/radio';
import { RAGFlowSelect } from '@/components/ui/select';
import { Switch } from '@/components/ui/switch';
import { useTranslate } from '@/hooks/common-hooks';
import { cn } from '@/lib/utils';

@@ -46,7 +45,7 @@ export function ChunkMethodItem(props: IProps) {
        </FormLabel>
        <div className={line === 1 ? 'w-3/4 ' : 'w-full'}>
          <FormControl>
            <RAGFlowSelect
            <SelectWithSearch
              {...field}
              options={parserList}
              placeholder={t('chunkMethodPlaceholder')}

@@ -2,7 +2,6 @@ import {
  AutoKeywordsFormField,
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { TagItems } from '../components/tag-item';
import { ConfigurationFormContainer } from '../configuration-form-container';

export function EmailConfiguration() {

@@ -12,7 +11,7 @@ export function EmailConfiguration() {
        <AutoKeywordsFormField></AutoKeywordsFormField>
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </>
      <TagItems></TagItems>
      {/* <TagItems></TagItems> */}
    </ConfigurationFormContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import {
  ConfigurationFormContainer,
  MainContainer,

@@ -21,9 +20,9 @@ export function LawsConfiguration() {
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </ConfigurationFormContainer>

      <ConfigurationFormContainer>
      {/* <ConfigurationFormContainer>
        <TagItems></TagItems>
      </ConfigurationFormContainer>
      </ConfigurationFormContainer> */}
    </MainContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import {
  ConfigurationFormContainer,
  MainContainer,

@@ -21,7 +20,7 @@ export function ManualConfiguration() {
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </ConfigurationFormContainer>

      <TagItems></TagItems>
      {/* <TagItems></TagItems> */}
    </MainContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import { ConfigurationFormContainer } from '../configuration-form-container';

export function OneConfiguration() {

@@ -15,7 +14,7 @@ export function OneConfiguration() {
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </>

      <TagItems></TagItems>
      {/* <TagItems></TagItems> */}
    </ConfigurationFormContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import {
  ConfigurationFormContainer,
  MainContainer,

@@ -20,9 +19,9 @@ export function PaperConfiguration() {
        <AutoKeywordsFormField></AutoKeywordsFormField>
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </ConfigurationFormContainer>
      <ConfigurationFormContainer>
      {/* <ConfigurationFormContainer>
        <TagItems></TagItems>
      </ConfigurationFormContainer>
      </ConfigurationFormContainer> */}
    </MainContainer>
  );
}

@@ -2,7 +2,6 @@ import {
  AutoKeywordsFormField,
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { TagItems } from '../components/tag-item';
import { ConfigurationFormContainer } from '../configuration-form-container';

export function PictureConfiguration() {

@@ -12,7 +11,7 @@ export function PictureConfiguration() {
        <AutoKeywordsFormField></AutoKeywordsFormField>
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </>
      <TagItems></TagItems>
      {/* <TagItems></TagItems> */}
    </ConfigurationFormContainer>
  );
}

@@ -3,7 +3,6 @@ import {
  AutoQuestionsFormField,
} from '@/components/auto-keywords-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { TagItems } from '../components/tag-item';
import {
  ConfigurationFormContainer,
  MainContainer,

@@ -21,9 +20,9 @@ export function PresentationConfiguration() {
        <AutoQuestionsFormField></AutoQuestionsFormField>
      </ConfigurationFormContainer>

      <ConfigurationFormContainer>
      {/* <ConfigurationFormContainer>
        <TagItems></TagItems>
      </ConfigurationFormContainer>
      </ConfigurationFormContainer> */}
    </MainContainer>
  );
}

@@ -1,10 +1,8 @@
import { TagItems } from '../components/tag-item';
import { ConfigurationFormContainer } from '../configuration-form-container';

export function QAConfiguration() {
  return (
    <ConfigurationFormContainer>
      <TagItems></TagItems>
    </ConfigurationFormContainer>
    <></>
    // <ConfigurationFormContainer>
    //   <TagItems></TagItems>
    // </ConfigurationFormContainer>
  );
}

@@ -1,10 +1,8 @@
import { TagItems } from '../components/tag-item';
import { ConfigurationFormContainer } from '../configuration-form-container';

export function ResumeConfiguration() {
  return (
    <ConfigurationFormContainer>
      <TagItems></TagItems>
    </ConfigurationFormContainer>
    <></>
    // <ConfigurationFormContainer>
    //   <TagItems></TagItems>
    // </ConfigurationFormContainer>
  );
}

@@ -15,7 +15,10 @@ import {
import omit from 'lodash/omit';
import { useEffect } from 'react';

type FieldType = IAddLlmRequestBody & { vision: boolean };
type FieldType = IAddLlmRequestBody & {
  vision: boolean;
  provider_order?: string;
};

const { Option } = Select;

@@ -128,6 +131,10 @@ const OllamaModal = ({
    { value: 'speech2text', label: 'sequence2text' },
    { value: 'tts', label: 'tts' },
  ],
  [LLMFactory.OpenRouter]: [
    { value: 'chat', label: 'chat' },
    { value: 'image2text', label: 'image2text' },
  ],
  Default: [
    { value: 'chat', label: 'chat' },
    { value: 'embedding', label: 'embedding' },

@@ -233,6 +240,16 @@ const OllamaModal = ({
          onKeyDown={handleKeyDown}
        />
      </Form.Item>
      {llmFactory === LLMFactory.OpenRouter && (
        <Form.Item<FieldType>
          label="Provider Order"
          name="provider_order"
          tooltip="Comma-separated provider list, e.g. Groq,Fireworks"
          rules={[]}
        >
          <Input placeholder="Groq,Fireworks" onKeyDown={handleKeyDown} />
        </Form.Item>
      )}
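The new `provider_order` field is plain text, with only the tooltip documenting the expected shape. A sketch of how a consumer might normalize it before use; this helper is hypothetical, and the diff does not show the server-side handling:

```ts
// Hypothetical helper: split the comma-separated provider list into an
// ordered array, tolerating stray whitespace and empty segments.
function parseProviderOrder(raw?: string): string[] {
  return (raw ?? '')
    .split(',')
    .map((part) => part.trim())
    .filter(Boolean);
}

parseProviderOrder('Groq, Fireworks'); // => ['Groq', 'Fireworks']
```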

      <Form.Item noStyle dependencies={['model_type']}>
        {({ getFieldValue }) =>

web/src/stories/node-collapsible.stories.tsx (new file, 165 lines)

@@ -0,0 +1,165 @@
import { Form } from '@/components/ui/form';
import type { Meta, StoryObj } from '@storybook/react-webpack5';
import { useForm } from 'react-hook-form';

import { NodeCollapsible } from '@/components/collapse';
import { zodResolver } from '@hookform/resolvers/zod';
import { z } from 'zod';

// More on how to set up stories at: https://storybook.js.org/docs/writing-stories#default-export
const meta = {
  title: 'Example/NodeCollapsible',
  component: NodeCollapsible,
  parameters: {
    layout: 'centered',
    docs: {
      description: {
        component: `
## Component Description

NodeCollapsible is a specialized component for displaying collapsible content within nodes.
It automatically shows only the first 3 items and provides a toggle button to expand/collapse the rest.
The component is designed to work within the application's node-based UI, such as in agent or data flow canvases.

The toggle button is displayed as a small circle at the bottom center of the component when there are more than 3 items.
        `,
      },
    },
  },
  tags: ['autodocs'],
  argTypes: {
    items: {
      control: 'object',
      description: 'Array of items to display in the collapsible component',
    },
    children: {
      control: false,
      description: 'Function to render each item',
    },
    className: {
      control: 'text',
      description: 'Additional CSS classes to apply to the component',
    },
  },
} satisfies Meta<typeof NodeCollapsible>;

// Form wrapper decorator
const WithFormProvider = ({ children }: { children: React.ReactNode }) => {
  const form = useForm({
    defaultValues: {},
    resolver: zodResolver(z.object({})),
  });
  return <Form {...form}>{children}</Form>;
};

const withFormProvider = (Story: any) => (
  <WithFormProvider>
    <Story />
  </WithFormProvider>
);

export default meta;
type Story = StoryObj<typeof meta>;

// More on writing stories with args: https://storybook.js.org/docs/writing-stories/args
export const Default: Story = {
  decorators: [withFormProvider],
  args: {
    items: [
      'Document Analysis Parser',
      'Web Search Parser',
      'Database Query Parser',
      'Image Recognition Parser',
      'Audio Transcription Parser',
      'Video Processing Parser',
      'Code Analysis Parser',
      'Spreadsheet Parser',
    ],
    children: (item: string) => (
      <div className="px-4 py-2 border rounded-md bg-bg-component">{item}</div>
    ),
  },
  parameters: {
    docs: {
      description: {
        story: `
### Basic Usage

By default, the NodeCollapsible component shows the first 3 items and collapses the rest.
A toggle button appears at the bottom when there are more than 3 items.

\`\`\`tsx
import { NodeCollapsible } from '@/components/collapse';

<NodeCollapsible
  items={[
    'Document Analysis Parser',
    'Web Search Parser',
    'Database Query Parser',
    'Image Recognition Parser',
    'Audio Transcription Parser',
    'Video Processing Parser',
    'Code Analysis Parser',
    'Spreadsheet Parser'
  ]}
>
  {(item) => (
    <div className="px-4 py-2 border rounded-md bg-bg-component">
      {item}
    </div>
  )}
</NodeCollapsible>
\`\`\`
        `,
      },
    },
  },
};

export const WithFewItems: Story = {
  decorators: [withFormProvider],
  args: {
    items: ['Single Item'],
    children: (item: string) => (
      <div className="px-4 py-2 border rounded-md bg-bg-component">{item}</div>
    ),
  },
  parameters: {
    docs: {
      description: {
        story: `
When there are 3 or fewer items, no toggle button is shown.
        `,
      },
    },
  },
};

export const WithManyItems: Story = {
  decorators: [withFormProvider],
  args: {
    items: [
      'Item 1',
      'Item 2',
      'Item 3',
      'Item 4',
      'Item 5',
      'Item 6',
      'Item 7',
      'Item 8',
    ],
    children: (item: string) => (
      <div className="px-4 py-2 border rounded-md bg-bg-component">{item}</div>
    ),
  },
  parameters: {
    docs: {
      description: {
        story: `
When there are more than 3 items, a toggle button is shown at the bottom center.
Clicking it will expand to show all items.
        `,
      },
    },
  },
};