Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-04 03:25:30 +08:00

Compare commits: 13 commits (adbb8319e0 ... 42edecc98f)
| SHA1 |
|---|
| 42edecc98f |
| af98763e27 |
| 5a8fbc5a81 |
| 0cd8024c34 |
| 3bd1fefe1f |
| e18c408759 |
| 23b81eae77 |
| 66c01c7274 |
| 4b8ce08050 |
| ca30ef83bf |
| d469ae6d50 |
| f581a1c4e5 |
| 15c75bbf15 |
@@ -23,6 +23,7 @@ from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
from typing import Dict, List, Any
from lark import Lark, Transformer, Tree
import requests
+ import getpass

GRAMMAR = r"""
start: command

@@ -51,6 +52,7 @@ sql_command: list_services
| revoke_permission
| alter_user_role
| show_user_permission
+ | show_version

// meta command definition
meta_command: "\\" meta_command_name [meta_args]

@@ -92,6 +94,7 @@ FOR: "FOR"i
RESOURCES: "RESOURCES"i
ON: "ON"i
SET: "SET"i
+ VERSION: "VERSION"i

list_services: LIST SERVICES ";"
show_service: SHOW SERVICE NUMBER ";"

@@ -120,6 +123,8 @@ revoke_permission: REVOKE action_list ON identifier FROM ROLE identifier ";"
alter_user_role: ALTER USER quoted_string SET ROLE identifier ";"
show_user_permission: SHOW USER PERMISSION quoted_string ";"

+ show_version: SHOW VERSION ";"
+
action_list: identifier ("," identifier)*

identifier: WORD

@@ -246,6 +251,9 @@ class AdminTransformer(Transformer):
    user_name = items[3]
    return {"type": "show_user_permission", "user_name": user_name}

+ def show_version(self, items):
+     return {"type": "show_version"}
+
def action_list(self, items):
    return items

@@ -359,7 +367,7 @@ class AdminCLI(Cmd):
if single_command:
    admin_passwd = arguments['password']
else:
-   admin_passwd = input(f"password for {self.admin_account}: ").strip()
+   admin_passwd = getpass.getpass(f"password for {self.admin_account}: ").strip()
try:
    self.admin_password = encrypt(admin_passwd)
    response = self.session.post(url, json={'email': self.admin_account, 'password': self.admin_password})

@@ -555,6 +563,8 @@ class AdminCLI(Cmd):
    self._alter_user_role(command_dict)
case 'show_user_permission':
    self._show_user_permission(command_dict)
+ case 'show_version':
+     self._show_version(command_dict)
case 'meta':
    self._handle_meta_command(command_dict)
case _:

@@ -861,6 +871,16 @@ class AdminCLI(Cmd):
    print(
        f"Fail to show user: {user_name_str} permission, code: {res_json['code']}, message: {res_json['message']}")

+ def _show_version(self, command):
+     print("show_version")
+     url = f'http://{self.host}:{self.port}/api/v1/admin/version'
+     response = self.session.get(url)
+     res_json = response.json()
+     if response.status_code == 200:
+         self._print_table_simple(res_json['data'])
+     else:
+         print(f"Fail to show version, code: {res_json['code']}, message: {res_json['message']}")
+
def _handle_meta_command(self, command):
    meta_command = command['command']
    args = command.get('args', [])
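For context, a minimal sketch (not from the diff) of how a Lark grammar in the style of `GRAMMAR` above turns `show version;` into the command dict the CLI dispatches on. The grammar here is a trimmed, hypothetical subset of the real one:

```python
# Minimal sketch: a trimmed, hypothetical subset of the admin CLI grammar.
from lark import Lark, Transformer

DEMO_GRAMMAR = r"""
start: show_version
show_version: SHOW VERSION ";"
SHOW: "SHOW"i
VERSION: "VERSION"i
%import common.WS
%ignore WS
"""

class DemoTransformer(Transformer):
    def show_version(self, items):
        # Mirrors the handler added in the diff: map the parse tree to a command dict.
        return {"type": "show_version"}

    def start(self, items):
        return items[0]

parser = Lark(DEMO_GRAMMAR)
tree = parser.parse("show version;")
print(DemoTransformer().transform(tree))  # {'type': 'show_version'}
```

Because the `SHOW` and `VERSION` terminals use Lark's `i` flag, the command stays case-insensitive, matching the README's "commands are case-insensitive" rule.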
@@ -31,6 +31,7 @@ from config import load_configurations, SERVICE_CONFIGS
from auth import init_default_admin, setup_auth
from flask_session import Session
from flask_login import LoginManager
+ from common.versions import get_ragflow_version

stop_event = threading.Event()

@@ -52,6 +53,7 @@ if __name__ == '__main__':
    os.environ.get("MAX_CONTENT_LENGTH", 1024 * 1024 * 1024)
)
Session(app)
+ logging.info(f'RAGFlow version: {get_ragflow_version()}')
show_configs()
login_manager = LoginManager()
login_manager.init_app(app)
@@ -24,7 +24,7 @@ from flask_login import current_user, login_user
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer

from api.common.exceptions import AdminException, UserNotFoundError
- from api.db.init_data import encode_to_base64
+ from api.common.base64 import encode_to_base64
from api.db.services import UserService
from common.constants import ActiveEnum, StatusEnum
from api.utils.crypt import decrypt
@@ -24,6 +24,7 @@ from responses import success_response, error_response
from services import UserMgr, ServiceMgr, UserServiceMgr
from roles import RoleMgr
from api.common.exceptions import AdminException
+ from common.versions import get_ragflow_version

admin_bp = Blueprint('admin', __name__, url_prefix='/api/v1/admin')

@@ -369,3 +370,13 @@ def get_user_permission(user_name: str):
        return success_response(res)
    except Exception as e:
        return error_response(str(e), 500)

+ @admin_bp.route('/version', methods=['GET'])
+ @login_required
+ @check_admin_auth
+ def show_version():
+     try:
+         res = {"version": get_ragflow_version()}
+         return success_response(res)
+     except Exception as e:
+         return error_response(str(e), 500)
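A quick sketch of exercising the new endpoint from a client. The host and port are assumptions (the admin service address is not shown in this diff), and the session is assumed to be already authenticated:

```python
# Sketch only: assumes a local admin server and an already-authenticated session.
import requests

session = requests.Session()
# ... log in via the admin auth endpoint first (omitted here) ...
resp = session.get("http://localhost:9381/api/v1/admin/version")  # port is a guess
body = resp.json()
if resp.status_code == 200:
    print(body["data"]["version"])   # e.g. "v0.21.0-241-gc6cf58d5"
else:
    print(body["code"], body["message"])
```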
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

import logging
import os
import importlib
import inspect

@@ -50,9 +50,10 @@ del _package_path, _import_submodules, _extract_classes_from_module

def component_class(class_name):
-   for mdl in ["agent.component", "agent.tools", "rag.flow"]:
+   for module_name in ["agent.component", "agent.tools", "rag.flow"]:
        try:
-           return getattr(importlib.import_module(mdl), class_name)
-       except Exception:
+           return getattr(importlib.import_module(module_name), class_name)
+       except Exception as e:
+           logging.warning(f"Can't import module: {module_name}, error: {e}")
            pass
    assert False, f"Can't import {class_name}"
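The revised `component_class` is an instance of a common lookup-with-fallback import pattern: try each candidate module, log the miss, and move on. A self-contained sketch of the same pattern, raising `ImportError` instead of the `assert False` used in the diff:

```python
# Sketch of the lookup-with-fallback pattern used by component_class.
import importlib
import logging

def find_class(class_name, module_names=("agent.component", "agent.tools", "rag.flow")):
    for module_name in module_names:
        try:
            return getattr(importlib.import_module(module_name), class_name)
        except Exception as e:
            # Log and try the next module instead of failing on the first miss.
            logging.warning("Can't import module: %s, error: %s", module_name, e)
    raise ImportError(f"Can't import {class_name}")
```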
@@ -514,6 +514,7 @@ class ComponentBase(ABC):
def get_param(self, name):
    if hasattr(self._param, name):
        return getattr(self._param, name)
+   return None

def debug(self, **kwargs):
    return self._invoke(**kwargs)

@@ -521,7 +522,7 @@ class ComponentBase(ABC):
def get_parent(self) -> Union[object, None]:
    pid = self._canvas.get_component(self._id).get("parent_id")
    if not pid:
-       return
+       return None
    return self._canvas.get_component(pid)["obj"]

def get_upstream(self) -> List[str]:

@@ -546,7 +547,7 @@ class ComponentBase(ABC):
def exception_handler(self):
    if not self._param.exception_method:
-       return
+       return None
    return {
        "goto": self._param.exception_goto,
        "default_value": self._param.exception_default_value
@@ -34,7 +34,7 @@ class GithubOAuthClient(OAuthClient):

def fetch_user_info(self, access_token, **kwargs):
    """
-   Fetch github user info.
+   Fetch GitHub user info.
    """
    user_info = {}
    try:
@@ -43,7 +43,8 @@ class OIDCClient(OAuthClient):
self.jwks_uri = config['jwks_uri']

- def _load_oidc_metadata(self, issuer):
+ @staticmethod
+ def _load_oidc_metadata(issuer):
    """
    Load OIDC metadata from `/.well-known/openid-configuration`.
    """
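For reference, discovery in the style of `_load_oidc_metadata` usually boils down to one GET against the issuer; the method body is not shown in this hunk, so the following is a hedged sketch rather than the repo's implementation:

```python
# Sketch: OIDC discovery per OpenID Connect Discovery / RFC 8414.
import requests

def load_oidc_metadata(issuer: str) -> dict:
    # The well-known path is standardized; the timeout value is an assumption.
    url = issuer.rstrip("/") + "/.well-known/openid-configuration"
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()
    return resp.json()  # contains authorization_endpoint, token_endpoint, jwks_uri, ...
```

Making it a `@staticmethod`, as the diff does, is the natural choice here since the lookup depends only on the issuer, not on client state.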
@@ -73,7 +73,8 @@ def get_connector(connector_id):
@login_required
def list_logs(connector_id):
    req = request.args.to_dict(flat=True)
-   return get_json_result(data=SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15))))
+   arr, total = SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15)))
+   return get_json_result(data={"total": total, "logs": arr})


@manager.route("/<connector_id>/resume", methods=["PUT"])  # noqa: F821
@@ -38,7 +38,7 @@ from api.utils.api_utils import get_json_result
from rag.nlp import search
from api.constants import DATASET_NAME_LIMIT
from rag.utils.redis_conn import REDIS_CONN
from rag.utils.doc_store_conn import OrderByExpr
from common.constants import RetCode, PipelineTaskType, StatusEnum, VALID_TASK_STATUS, FileSource, LLMType, PAGERANK_FLD
from common import settings

@@ -52,7 +52,7 @@ def create():
        tenant_id = current_user.id,
        parser_id = req.pop("parser_id", None),
        **req
    )
)

try:
    if not KnowledgebaseService.save(**req):

@@ -122,10 +122,9 @@ def update():
    if not e:
        return get_data_error_result(
            message="Database error (Knowledgebase rename)!")
-   if connectors:
-       errors = Connector2KbService.link_connectors(kb.id, [conn["id"] for conn in connectors], current_user.id)
-       if errors:
-           logging.error("Link KB errors: ", errors)
+   errors = Connector2KbService.link_connectors(kb.id, [conn["id"] for conn in connectors], current_user.id)
+   if errors:
+       logging.error("Link KB errors: ", errors)
    kb = kb.to_dict()
    kb.update(req)

@@ -571,7 +570,7 @@ def trace_graphrag():

    ok, task = TaskService.get_by_id(task_id)
    if not ok:
-       return get_error_data_result(message="GraphRAG Task Not Found or Error Occurred")
+       return get_json_result(data={})

    return get_json_result(data=task.to_dict())

@@ -780,14 +779,14 @@ def check_embedding():

def _to_1d(x):
    a = np.asarray(x, dtype=np.float32)
    return a.reshape(-1)

def _cos_sim(a, b, eps=1e-12):
    a = _to_1d(a)
    b = _to_1d(b)
    na = np.linalg.norm(a)
    nb = np.linalg.norm(b)
    if na < eps or nb < eps:
        return 0.0
    return float(np.dot(a, b) / (na * nb))

@@ -825,7 +824,7 @@ def check_embedding():
        indexNames=index_nm, knowledgebaseIds=[kb_id]
    )
    ids = docStoreConn.getChunkIds(res1)
    if not ids:
        continue

    cid = ids[0]

@@ -869,7 +868,7 @@ def check_embedding():
        continue

    try:
        qv, _ = emb_mdl.encode_queries(txt)
        sim = _cos_sim(qv, ck["vector"])
    except Exception:
        return get_error_data_result(message="embedding failure")
@@ -128,7 +128,7 @@ def add_llm():
api_key = req.get("api_key", "x")
llm_name = req.get("llm_name")

- if factory not in get_allowed_llm_factories():
+ if factory not in [f.name for f in get_allowed_llm_factories()]:
    return get_data_error_result(message=f"LLM factory {factory} is not allowed")

def apikey_json(keys):
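The `add_llm` fix addresses a classic membership-test bug: `factory` is a string while `get_allowed_llm_factories()` returns objects, so the unmapped `in` check could never match. A tiny sketch of the failure mode, using a hypothetical stand-in for the factory record type:

```python
# Hypothetical stand-in for the records returned by get_allowed_llm_factories().
from dataclasses import dataclass

@dataclass
class Factory:
    name: str

allowed = [Factory("OpenAI"), Factory("Ollama")]

factory = "OpenAI"
print(factory in allowed)                    # False: string vs Factory objects
print(factory in [f.name for f in allowed])  # True: compare against names
```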
@@ -29,7 +29,7 @@ from api.utils.api_utils import (
    server_error_response,
    generate_confirmation_token,
)
- from api.versions import get_ragflow_version
+ from common.versions import get_ragflow_version
from common.time_utils import current_timestamp, datetime_format
from timeit import default_timer as timer
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
- from api.versions import get_ragflow_version
+ from common.versions import get_ragflow_version
from .reload_config_base import ReloadConfigBase
@@ -15,6 +15,7 @@
#
import logging
from datetime import datetime
+ from typing import Tuple, List

from anthropic import BaseModel
from peewee import SQL, fn

@@ -71,7 +72,7 @@ class SyncLogsService(CommonService):
model = SyncLogs

@classmethod
- def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15):
+ def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15) -> Tuple[List[dict], int]:
    fields = [
        cls.model.id,
        cls.model.connector_id,

@@ -113,10 +114,11 @@ class SyncLogsService(CommonService):
    )

    query = query.distinct().order_by(cls.model.update_time.desc())
+   totbal = query.count()
    if page_number:
        query = query.paginate(page_number, items_per_page)

-   return list(query.dicts())
+   return list(query.dicts()), totbal

@classmethod
def start(cls, id, connector_id):

@@ -130,6 +132,14 @@ class SyncLogsService(CommonService):

@classmethod
def schedule(cls, connector_id, kb_id, poll_range_start=None, reindex=False, total_docs_indexed=0):
+   try:
+       if cls.model.select().where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).count() > 100:
+           rm_ids = [m.id for m in cls.model.select(cls.model.id).where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).order_by(cls.model.update_time.asc()).limit(70)]
+           deleted = cls.model.delete().where(cls.model.id.in_(rm_ids)).execute()
+           logging.info(f"[SyncLogService] Cleaned {deleted} old logs.")
+   except Exception as e:
+       logging.exception(e)
+
    try:
        e = cls.query(kb_id=kb_id, connector_id=connector_id, status=TaskStatus.SCHEDULE)
        if e:

@@ -185,11 +195,10 @@ class SyncLogsService(CommonService):
    doc_ids = []
    err, doc_blob_pairs = FileService.upload_document(kb, files, tenant_id, src)
    errs.extend(err)
-   if not err:
-       kb_table_num_map = {}
-       for doc, _ in doc_blob_pairs:
-           DocumentService.run(tenant_id, doc, kb_table_num_map)
-           doc_ids.append(doc["id"])
+   kb_table_num_map = {}
+   for doc, _ in doc_blob_pairs:
+       DocumentService.run(tenant_id, doc, kb_table_num_map)
+       doc_ids.append(doc["id"])

    return errs, doc_ids
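With `list_sync_tasks` now returning `(rows, total)`, callers such as the `list_logs` route above can report the full row count alongside a single page. A small sketch of the consuming side, with hypothetical field names for the response payload:

```python
# Sketch of consuming the new (rows, total) return shape for paginated listings.
def render_page(list_sync_tasks, connector_id, page, page_size=15):
    rows, total = list_sync_tasks(connector_id, page, page_size)
    return {
        "total": total,                          # overall row count, for the pager
        "page": page,
        "logs": rows,                            # just this page's rows
        "has_more": page * page_size < total,    # whether another page exists
    }
```

Note that the count is taken before `paginate()` is applied, which is what makes the total reflect the whole result set rather than the current page.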
@@ -623,7 +623,8 @@ class DocumentService(CommonService):
cls.update_by_id(
    docid, {"progress": random.random() * 1 / 100.,
            "progress_msg": "Task is queued...",
-           "process_begin_at": get_format_time()
+           "process_begin_at": get_format_time(),
+           "run": TaskStatus.RUNNING.value
            })

@classmethod
@@ -39,7 +39,7 @@ from common.file_utils import get_project_base_directory
from common import settings
from api.db.db_models import init_database_tables as init_web_db
from api.db.init_data import init_web_data
- from api.versions import get_ragflow_version
+ from common.versions import get_ragflow_version
from common.config_utils import show_configs
from rag.utils.mcp_tool_call_conn import shutdown_all_mcp_sessions
from rag.utils.redis_conn import RedisDistributedLock
@@ -189,14 +189,12 @@ def _manage_async_retrieval(
async with Client(intents=intents, proxy=proxy_url) as cli:
    asyncio.create_task(coro=cli.start(token))
    await cli.wait_until_ready()
-   print("connected ...", flush=True)

    filtered_channels: list[TextChannel] = await _fetch_filtered_channels(
        discord_client=cli,
        server_ids=server_ids,
        channel_names=channel_names,
    )
+   print("connected ...", filtered_channels, flush=True)

    for channel in filtered_channels:
        async for doc in _fetch_documents_from_channel(

@@ -204,6 +202,7 @@ def _manage_async_retrieval(
            start_time=start_time,
            end_time=end_time,
        ):
+           print(doc)
            yield doc

def run_and_yield() -> Iterable[Document]:

@@ -257,6 +256,29 @@ class DiscordConnector(LoadConnector, PollConnector):
    end: datetime | None = None,
) -> GenerateDocumentsOutput:
    doc_batch = []
+   def merge_batch():
+       nonlocal doc_batch
+       id = doc_batch[0].id
+       min_updated_at = doc_batch[0].doc_updated_at
+       max_updated_at = doc_batch[-1].doc_updated_at
+       blob = b''
+       size_bytes = 0
+       for d in doc_batch:
+           min_updated_at = min(min_updated_at, d.doc_updated_at)
+           max_updated_at = max(max_updated_at, d.doc_updated_at)
+           blob += b'\n\n' + d.blob
+           size_bytes += d.size_bytes
+
+       return Document(
+           id=id,
+           source=DocumentSource.DISCORD,
+           semantic_identifier=f"{min_updated_at} -> {max_updated_at}",
+           doc_updated_at=max_updated_at,
+           blob=blob,
+           extension="txt",
+           size_bytes=size_bytes,
+       )

    for doc in _manage_async_retrieval(
        token=self.discord_bot_token,
        requested_start_date_string=self.requested_start_date_string,

@@ -267,11 +289,11 @@ class DiscordConnector(LoadConnector, PollConnector):
    ):
        doc_batch.append(doc)
        if len(doc_batch) >= self.batch_size:
-           yield doc_batch
+           yield [merge_batch()]
            doc_batch = []

    if doc_batch:
-       yield doc_batch
+       yield [merge_batch()]

def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None:
    self._discord_bot_token = credentials["discord_bot_token"]
common/exceptions.py (new file, 18 lines)

@@ -0,0 +1,18 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class TaskCanceledException(Exception):
    def __init__(self, msg):
        self.msg = msg
@@ -1,5 +1,5 @@
#
- # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
+ # Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -72,7 +72,7 @@ Commands are case-insensitive and must be terminated with a semicolon(;).

### Service manage commands

`LIST SERVICES;`

- Lists all available services within the RAGFlow system.

@@ -83,6 +83,11 @@ Commands are case-insensitive and must be terminated with a semicolon(;).
- Shows detailed status information for the service identified by **id**.
- [Example](#example-show-service)

`SHOW VERSION;`

- Shows RAGFlow version.
- [Example](#example-show-version)

### User Management Commands

`LIST USERS;`

@@ -232,6 +237,18 @@ Service redis is alive. Detail:
| 0 | 2 | 1 | 10.41 | 7.2.4 | standalone | 10446 | 30.84G | 1.10M |
+-----------------+-------------------+---------------------------+-------------------------+---------------+-------------+--------------------------+---------------------+-------------+
```

<span id="example-show-version"></span>

- Show RAGFlow version

```
admin> show version;
+-----------------------+
| version               |
+-----------------------+
| v0.21.0-241-gc6cf58d5 |
+-----------------------+
```

<span id="example-list-users"></span>
@@ -4,17 +4,17 @@
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "huggingface-hub",
#     "nltk",
# ]
# ///

- from huggingface_hub import snapshot_download
- from typing import Union
- import nltk
- import argparse
import os
import urllib.request
+ import argparse
+ from typing import Union
+
+ import nltk

def get_urls(use_china_mirrors=False) -> list[Union[str, list[str]]]:
    if use_china_mirrors:

@@ -38,25 +38,14 @@ def get_urls(use_china_mirrors=False) -> list[Union[str, list[str]]]:
    ["https://storage.googleapis.com/chrome-for-testing-public/121.0.6167.85/linux64/chromedriver-linux64.zip", "chromedriver-linux64-121-0-6167-85"],
]

- repos = [
-     "InfiniFlow/text_concat_xgb_v1.0",
-     "InfiniFlow/deepdoc",
-     "InfiniFlow/huqie",
- ]
-
- def download_model(repo_id):
-     local_dir = os.path.abspath(os.path.join("huggingface.co", repo_id))
-     os.makedirs(local_dir, exist_ok=True)
-     snapshot_download(repo_id=repo_id, local_dir=local_dir)


if __name__ == "__main__":
-   parser = argparse.ArgumentParser(description='Download dependencies with optional China mirror support')
-   parser.add_argument('--china-mirrors', action='store_true', help='Use China-accessible mirrors for downloads')
+   parser = argparse.ArgumentParser(description="Download dependencies with optional China mirror support")
+   parser.add_argument("--china-mirrors", action="store_true", help="Use China-accessible mirrors for downloads")
    args = parser.parse_args()

    urls = get_urls(args.china_mirrors)

    for url in urls:
        download_url = url[0] if isinstance(url, list) else url
        filename = url[1] if isinstance(url, list) else url.split("/")[-1]

@@ -64,11 +53,7 @@ if __name__ == "__main__":
    if not os.path.exists(filename):
        urllib.request.urlretrieve(download_url, filename)

-   local_dir = os.path.abspath('nltk_data')
-   for data in ['wordnet', 'punkt', 'punkt_tab']:
+   local_dir = os.path.abspath("nltk_data")
+   for data in ["wordnet", "punkt", "punkt_tab"]:
        print(f"Downloading nltk {data}...")
        nltk.download(data, download_dir=local_dir)

-   for repo_id in repos:
-       print(f"Downloading huggingface repo {repo_id}...")
-       download_model(repo_id)
@@ -29,6 +29,8 @@ import editdistance
from graphrag.entity_resolution_prompt import ENTITY_RESOLUTION_PROMPT
from rag.llm.chat_model import Base as CompletionLLM
from graphrag.utils import perform_variable_replacements, chat_limiter, GraphChange
+ from api.db.services.task_service import has_canceled
+ from common.exceptions import TaskCanceledException

DEFAULT_RECORD_DELIMITER = "##"
DEFAULT_ENTITY_INDEX_DELIMITER = "<|>"

@@ -67,7 +69,8 @@ class EntityResolution(Extractor):
async def __call__(self, graph: nx.Graph,
                   subgraph_nodes: set[str],
                   prompt_variables: dict[str, Any] | None = None,
-                  callback: Callable | None = None) -> EntityResolutionResult:
+                  callback: Callable | None = None,
+                  task_id: str = "") -> EntityResolutionResult:
    """Call method definition."""
    if prompt_variables is None:
        prompt_variables = {}

@@ -109,7 +112,7 @@ class EntityResolution(Extractor):
try:
    enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
    with trio.move_on_after(280 if enable_timeout_assertion else 1000000000) as cancel_scope:
-       await self._resolve_candidate(candidate_batch, result_set, result_lock)
+       await self._resolve_candidate(candidate_batch, result_set, result_lock, task_id)
        remain_candidates_to_resolve = remain_candidates_to_resolve - len(candidate_batch[1])
        callback(msg=f"Resolved {len(candidate_batch[1])} pairs, {remain_candidates_to_resolve} are remained to resolve. ")
    if cancel_scope.cancelled_caught:

@@ -136,7 +139,7 @@ class EntityResolution(Extractor):

async def limited_merge_nodes(graph, nodes, change):
    async with semaphore:
-       await self._merge_graph_nodes(graph, nodes, change)
+       await self._merge_graph_nodes(graph, nodes, change, task_id)

async with trio.open_nursery() as nursery:
    for sub_connect_graph in nx.connected_components(connect_graph):

@@ -153,7 +156,12 @@ class EntityResolution(Extractor):
        change=change,
    )

- async def _resolve_candidate(self, candidate_resolution_i: tuple[str, list[tuple[str, str]]], resolution_result: set[str], resolution_result_lock: trio.Lock):
+ async def _resolve_candidate(self, candidate_resolution_i: tuple[str, list[tuple[str, str]]], resolution_result: set[str], resolution_result_lock: trio.Lock, task_id: str = ""):
+   if task_id:
+       if has_canceled(task_id):
+           logging.info(f"Task {task_id} cancelled during entity resolution candidate processing.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    pair_txt = [
        f'When determining whether two {candidate_resolution_i[0]}s are the same, you should only focus on critical properties and overlook noisy factors.\n']
    for index, candidate in enumerate(candidate_resolution_i[1]):

@@ -173,7 +181,7 @@ class EntityResolution(Extractor):
try:
    enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
    with trio.move_on_after(280 if enable_timeout_assertion else 1000000000) as cancel_scope:
-       response = await trio.to_thread.run_sync(self._chat, text, [{"role": "user", "content": "Output:"}], {})
+       response = await trio.to_thread.run_sync(self._chat, text, [{"role": "user", "content": "Output:"}], {}, task_id)
    if cancel_scope.cancelled_caught:
        logging.warning("_resolve_candidate._chat timeout, skipping...")
        return
@@ -14,6 +14,8 @@ from dataclasses import dataclass
import networkx as nx
import pandas as pd

+ from api.db.services.task_service import has_canceled
+ from common.exceptions import TaskCanceledException
from common.connection_utils import timeout
from graphrag.general import leiden
from graphrag.general.community_report_prompt import COMMUNITY_REPORT_PROMPT

@@ -51,7 +53,7 @@ class CommunityReportsExtractor(Extractor):
self._extraction_prompt = COMMUNITY_REPORT_PROMPT
self._max_report_length = max_report_length or 1500

- async def __call__(self, graph: nx.Graph, callback: Callable | None = None):
+ async def __call__(self, graph: nx.Graph, callback: Callable | None = None, task_id: str = ""):
    enable_timeout_assertion = os.environ.get("ENABLE_TIMEOUT_ASSERTION")
    for node_degree in graph.degree:
        graph.nodes[str(node_degree[0])]["rank"] = int(node_degree[1])

@@ -64,6 +66,11 @@ class CommunityReportsExtractor(Extractor):
@timeout(120)
async def extract_community_report(community):
    nonlocal res_str, res_dict, over, token_count
+   if task_id:
+       if has_canceled(task_id):
+           logging.info(f"Task {task_id} cancelled during community report extraction.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    cm_id, cm = community
    weight = cm["weight"]
    ents = cm["nodes"]

@@ -95,7 +102,10 @@ class CommunityReportsExtractor(Extractor):
async with chat_limiter:
    try:
        with trio.move_on_after(180 if enable_timeout_assertion else 1000000000) as cancel_scope:
-           response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], {})
+           if task_id and has_canceled(task_id):
+               logging.info(f"Task {task_id} cancelled before LLM call.")
+               raise TaskCanceledException(f"Task {task_id} was cancelled")
+           response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], {}, task_id)
        if cancel_scope.cancelled_caught:
            logging.warning("extract_community_report._chat timeout, skipping...")
            return

@@ -136,6 +146,9 @@ class CommunityReportsExtractor(Extractor):
for level, comm in communities.items():
    logging.info(f"Level {level}: Community: {len(comm.keys())}")
    for community in comm.items():
+       if task_id and has_canceled(task_id):
+           logging.info(f"Task {task_id} cancelled before community processing.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
        nursery.start_soon(extract_community_report, community)
if callback:
    callback(msg=f"Community reports done in {trio.current_time() - st:.2f}s, used tokens: {token_count}")
@@ -23,7 +23,9 @@ from typing import Callable
import networkx as nx
import trio

+ from api.db.services.task_service import has_canceled
from common.connection_utils import timeout
+ from common.token_utils import truncate
from graphrag.general.graph_prompt import SUMMARIZE_DESCRIPTIONS_PROMPT
from graphrag.utils import (
    GraphChange,

@@ -38,7 +40,7 @@ from graphrag.utils import (
)
from rag.llm.chat_model import Base as CompletionLLM
from rag.prompts.generator import message_fit_in
- from common.token_utils import truncate
+ from common.exceptions import TaskCanceledException

GRAPH_FIELD_SEP = "<SEP>"
DEFAULT_ENTITY_TYPES = ["organization", "person", "geo", "event", "category"]

@@ -60,7 +62,7 @@ class Extractor:
self._entity_types = entity_types or DEFAULT_ENTITY_TYPES

@timeout(60 * 20)
- def _chat(self, system, history, gen_conf={}):
+ def _chat(self, system, history, gen_conf={}, task_id=""):
    hist = deepcopy(history)
    conf = deepcopy(gen_conf)
    response = get_llm_cache(self._llm.llm_name, system, hist, conf)

@@ -69,6 +71,12 @@ class Extractor:
_, system_msg = message_fit_in([{"role": "system", "content": system}], int(self._llm.max_length * 0.92))
response = ""
for attempt in range(3):
+
+   if task_id:
+       if has_canceled(task_id):
+           logging.info(f"Task {task_id} cancelled during entity resolution candidate processing.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    try:
        response = self._llm.chat(system_msg[0]["content"], hist, conf)
        response = re.sub(r"^.*</think>", "", response, flags=re.DOTALL)

@@ -99,25 +107,29 @@ class Extractor:
    maybe_edges[(if_relation["src_id"], if_relation["tgt_id"])].append(if_relation)
return dict(maybe_nodes), dict(maybe_edges)

- async def __call__(self, doc_id: str, chunks: list[str], callback: Callable | None = None):
+ async def __call__(self, doc_id: str, chunks: list[str], callback: Callable | None = None, task_id: str = ""):
    self.callback = callback
    start_ts = trio.current_time()

-   async def extract_all(doc_id, chunks, max_concurrency=MAX_CONCURRENT_PROCESS_AND_EXTRACT_CHUNK):
+   async def extract_all(doc_id, chunks, max_concurrency=MAX_CONCURRENT_PROCESS_AND_EXTRACT_CHUNK, task_id=""):
        out_results = []
        error_count = 0
        max_errors = 3

        limiter = trio.Semaphore(max_concurrency)

-       async def worker(chunk_key_dp: tuple[str, str], idx: int, total: int):
+       async def worker(chunk_key_dp: tuple[str, str], idx: int, total: int, task_id=""):
            nonlocal error_count
            async with limiter:
+               if task_id and has_canceled(task_id):
+                   raise TaskCanceledException(f"Task {task_id} was cancelled during entity extraction")
+
                try:
-                   await self._process_single_content(chunk_key_dp, idx, total, out_results)
+                   await self._process_single_content(chunk_key_dp, idx, total, out_results, task_id)
                except Exception as e:
                    error_count += 1
-                   error_msg = f"Error processing chunk {idx+1}/{total}: {str(e)}"
+                   error_msg = f"Error processing chunk {idx + 1}/{total}: {str(e)}"
                    logging.warning(error_msg)
                    if self.callback:
                        self.callback(msg=error_msg)

@@ -127,7 +139,7 @@ class Extractor:

        async with trio.open_nursery() as nursery:
            for i, ck in enumerate(chunks):
-               nursery.start_soon(worker, (doc_id, ck), i, len(chunks))
+               nursery.start_soon(worker, (doc_id, ck), i, len(chunks), task_id)

        if error_count > 0:
            warning_msg = f"Completed with {error_count} errors (out of {len(chunks)} chunks processed)"

@@ -137,7 +149,13 @@ class Extractor:

        return out_results

-   out_results = await extract_all(doc_id, chunks, max_concurrency=MAX_CONCURRENT_PROCESS_AND_EXTRACT_CHUNK)
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled before entity extraction")
+
+   out_results = await extract_all(doc_id, chunks, max_concurrency=MAX_CONCURRENT_PROCESS_AND_EXTRACT_CHUNK, task_id=task_id)
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled after entity extraction")

    maybe_nodes = defaultdict(list)
    maybe_edges = defaultdict(list)

@@ -154,9 +172,17 @@ class Extractor:
    start_ts = now
    logging.info("Entities merging...")
    all_entities_data = []
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled before nodes merging")
+
    async with trio.open_nursery() as nursery:
        for en_nm, ents in maybe_nodes.items():
-           nursery.start_soon(self._merge_nodes, en_nm, ents, all_entities_data)
+           nursery.start_soon(self._merge_nodes, en_nm, ents, all_entities_data, task_id)
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled after nodes merging")

    now = trio.current_time()
    if self.callback:
        self.callback(msg=f"Entities merging done, {now - start_ts:.2f}s.")

@@ -164,9 +190,17 @@ class Extractor:
    start_ts = now
    logging.info("Relationships merging...")
    all_relationships_data = []
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled before relationships merging")
+
    async with trio.open_nursery() as nursery:
        for (src, tgt), rels in maybe_edges.items():
-           nursery.start_soon(self._merge_edges, src, tgt, rels, all_relationships_data)
+           nursery.start_soon(self._merge_edges, src, tgt, rels, all_relationships_data, task_id)
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled after relationships merging")

    now = trio.current_time()
    if self.callback:
        self.callback(msg=f"Relationships merging done, {now - start_ts:.2f}s.")

@@ -181,7 +215,10 @@ class Extractor:

    return all_entities_data, all_relationships_data

- async def _merge_nodes(self, entity_name: str, entities: list[dict], all_relationships_data):
+ async def _merge_nodes(self, entity_name: str, entities: list[dict], all_relationships_data, task_id=""):
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled during merge nodes")
+
    if not entities:
        return
    entity_type = sorted(

@@ -191,7 +228,7 @@ class Extractor:
    )[0][0]
    description = GRAPH_FIELD_SEP.join(sorted(set([dp["description"] for dp in entities])))
    already_source_ids = flat_uniq_list(entities, "source_id")
-   description = await self._handle_entity_relation_summary(entity_name, description)
+   description = await self._handle_entity_relation_summary(entity_name, description, task_id=task_id)
    node_data = dict(
        entity_type=entity_type,
        description=description,

@@ -200,18 +237,21 @@ class Extractor:
    node_data["entity_name"] = entity_name
    all_relationships_data.append(node_data)

- async def _merge_edges(self, src_id: str, tgt_id: str, edges_data: list[dict], all_relationships_data=None):
+ async def _merge_edges(self, src_id: str, tgt_id: str, edges_data: list[dict], all_relationships_data=None, task_id=""):
    if not edges_data:
        return
    weight = sum([edge["weight"] for edge in edges_data])
    description = GRAPH_FIELD_SEP.join(sorted(set([edge["description"] for edge in edges_data])))
-   description = await self._handle_entity_relation_summary(f"{src_id} -> {tgt_id}", description)
+   description = await self._handle_entity_relation_summary(f"{src_id} -> {tgt_id}", description, task_id=task_id)
    keywords = flat_uniq_list(edges_data, "keywords")
    source_id = flat_uniq_list(edges_data, "source_id")
    edge_data = dict(src_id=src_id, tgt_id=tgt_id, description=description, keywords=keywords, weight=weight, source_id=source_id)
    all_relationships_data.append(edge_data)

- async def _merge_graph_nodes(self, graph: nx.Graph, nodes: list[str], change: GraphChange):
+ async def _merge_graph_nodes(self, graph: nx.Graph, nodes: list[str], change: GraphChange, task_id=""):
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled during merge graph nodes")
+
    if len(nodes) <= 1:
        return
    change.added_updated_nodes.add(nodes[0])

@@ -220,6 +260,9 @@ class Extractor:
    node0_attrs = graph.nodes[nodes[0]]
    node0_neighbors = set(graph.neighbors(nodes[0]))
    for node1 in nodes[1:]:
+       if task_id and has_canceled(task_id):
+           raise TaskCanceledException(f"Task {task_id} was cancelled during merge_graph nodes")
+
        # Merge two nodes, keep "entity_name", "entity_type", "page_rank" unchanged.
        node1_attrs = graph.nodes[node1]
        node0_attrs["description"] += f"{GRAPH_FIELD_SEP}{node1_attrs['description']}"

@@ -236,15 +279,18 @@ class Extractor:
            edge0_attrs["description"] += f"{GRAPH_FIELD_SEP}{edge1_attrs['description']}"
            for attr in ["keywords", "source_id"]:
                edge0_attrs[attr] = sorted(set(edge0_attrs[attr] + edge1_attrs[attr]))
-           edge0_attrs["description"] = await self._handle_entity_relation_summary(f"({nodes[0]}, {neighbor})", edge0_attrs["description"])
+           edge0_attrs["description"] = await self._handle_entity_relation_summary(f"({nodes[0]}, {neighbor})", edge0_attrs["description"], task_id=task_id)
            graph.add_edge(nodes[0], neighbor, **edge0_attrs)
        else:
            graph.add_edge(nodes[0], neighbor, **edge1_attrs)
        graph.remove_node(node1)
-   node0_attrs["description"] = await self._handle_entity_relation_summary(nodes[0], node0_attrs["description"])
+   node0_attrs["description"] = await self._handle_entity_relation_summary(nodes[0], node0_attrs["description"], task_id=task_id)
    graph.nodes[nodes[0]].update(node0_attrs)

- async def _handle_entity_relation_summary(self, entity_or_relation_name: str, description: str) -> str:
+ async def _handle_entity_relation_summary(self, entity_or_relation_name: str, description: str, task_id="") -> str:
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled during summary handling")
+
    summary_max_tokens = 512
    use_description = truncate(description, summary_max_tokens)
    description_list = use_description.split(GRAPH_FIELD_SEP)

@@ -258,6 +304,10 @@ class Extractor:
    )
    use_prompt = prompt_template.format(**context_base)
    logging.info(f"Trigger summary: {entity_or_relation_name}")
+
+   if task_id and has_canceled(task_id):
+       raise TaskCanceledException(f"Task {task_id} was cancelled during summary handling")
+
    async with chat_limiter:
-       summary = await trio.to_thread.run_sync(self._chat, "", [{"role": "user", "content": use_prompt}])
+       summary = await trio.to_thread.run_sync(self._chat, "", [{"role": "user", "content": use_prompt}], {}, task_id)
    return summary
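The recurring edit across these graphrag hunks is one cooperative-cancellation pattern: long-running workers poll `has_canceled(task_id)` at natural pause points (per chunk, before each LLM call, around merge phases) and unwind via `TaskCanceledException`. A condensed sketch of the pattern; the checker is stubbed here, whereas the diff uses the real `api.db.services.task_service.has_canceled`:

```python
# Condensed sketch of the cooperative cancellation threaded through the extractors.
import logging

class TaskCanceledException(Exception):
    def __init__(self, msg):
        self.msg = msg

def has_canceled(task_id: str) -> bool:
    return False  # stub; the real check reads task state from persistent storage

def check_canceled(task_id: str, where: str) -> None:
    # Call this at every natural pause point: per chunk, before each LLM call.
    if task_id and has_canceled(task_id):
        logging.info("Task %s cancelled during %s.", task_id, where)
        raise TaskCanceledException(f"Task {task_id} was cancelled")

def process_chunks(task_id: str, chunks: list[str]) -> None:
    for i, chunk in enumerate(chunks):
        check_canceled(task_id, f"chunk {i + 1}/{len(chunks)}")
        ...  # expensive LLM work here
```

Polling rather than forcibly killing threads keeps partially merged graph state consistent: each worker exits only at a boundary it chose itself.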
@@ -97,7 +97,7 @@ class GraphExtractor(Extractor):
    self._entity_types_key: ",".join(entity_types),
}

- async def _process_single_content(self, chunk_key_dp: tuple[str, str], chunk_seq: int, num_chunks: int, out_results):
+ async def _process_single_content(self, chunk_key_dp: tuple[str, str], chunk_seq: int, num_chunks: int, out_results, task_id=""):
    token_count = 0
    chunk_key = chunk_key_dp[0]
    content = chunk_key_dp[1]

@@ -107,7 +107,7 @@ class GraphExtractor(Extractor):
    }
    hint_prompt = perform_variable_replacements(self._extraction_prompt, variables=variables)
    async with chat_limiter:
-       response = await trio.to_thread.run_sync(lambda: self._chat(hint_prompt, [{"role": "user", "content": "Output:"}], {}))
+       response = await trio.to_thread.run_sync(self._chat, hint_prompt, [{"role": "user", "content": "Output:"}], {}, task_id)
    token_count += num_tokens_from_string(hint_prompt + response)

    results = response or ""
@@ -21,6 +21,8 @@ import networkx as nx
import trio

from api.db.services.document_service import DocumentService
+ from api.db.services.task_service import has_canceled
+ from common.exceptions import TaskCanceledException
from common.misc_utils import get_uuid
from common.connection_utils import timeout
from graphrag.entity_resolution import EntityResolution

@@ -106,6 +108,7 @@ async def run_graphrag(
    chat_model,
    embedding_model,
    callback,
+   task_id=row["id"],
)
if with_community:
    await graphrag_task_lock.spin_acquire()

@@ -118,6 +121,7 @@ async def run_graphrag(
    chat_model,
    embedding_model,
    callback,
+   task_id=row["id"],
)
finally:
    graphrag_task_lock.release()

@@ -207,6 +211,10 @@ async def run_graphrag_for_kb(
failed_docs: list[tuple[str, str]] = []  # (doc_id, error)

async def build_one(doc_id: str):
+   if has_canceled(row["id"]):
+       callback(msg=f"Task {row['id']} cancelled, stopping execution.")
+       raise TaskCanceledException(f"Task {row['id']} was cancelled")
+
    chunks = all_doc_chunks.get(doc_id, [])
    if not chunks:
        callback(msg=f"[GraphRAG] doc:{doc_id} has no available chunks, skip generation.")

@@ -232,6 +240,7 @@ async def run_graphrag_for_kb(
    chat_model,
    embedding_model,
    callback,
+   task_id=row["id"]
)
if sg:
    subgraphs[doc_id] = sg

@@ -239,14 +248,24 @@ async def run_graphrag_for_kb(
    else:
        failed_docs.append((doc_id, "subgraph is empty"))
        callback(msg=f"{msg} empty")
+   except TaskCanceledException as canceled:
+       callback(msg=f"[GraphRAG] build_subgraph doc:{doc_id} FAILED: {canceled}")
    except Exception as e:
        failed_docs.append((doc_id, repr(e)))
        callback(msg=f"[GraphRAG] build_subgraph doc:{doc_id} FAILED: {e!r}")

+ if has_canceled(row["id"]):
+     callback(msg=f"Task {row['id']} cancelled before processing documents.")
+     raise TaskCanceledException(f"Task {row['id']} was cancelled")
+
async with trio.open_nursery() as nursery:
    for doc_id in doc_ids:
        nursery.start_soon(build_one, doc_id)

+ if has_canceled(row["id"]):
+     callback(msg=f"Task {row['id']} cancelled after document processing.")
+     raise TaskCanceledException(f"Task {row['id']} was cancelled")
+
ok_docs = [d for d in doc_ids if d in subgraphs]
if not ok_docs:
    callback(msg=f"[GraphRAG] kb:{kb_id} no subgraphs generated successfully, end.")

@@ -257,6 +276,10 @@ async def run_graphrag_for_kb(
await kb_lock.spin_acquire()
callback(msg=f"[GraphRAG] kb:{kb_id} merge lock acquired")

+ if has_canceled(row["id"]):
+     callback(msg=f"Task {row['id']} cancelled before merging subgraphs.")
+     raise TaskCanceledException(f"Task {row['id']} was cancelled")
+
try:
    union_nodes: set = set()
    final_graph = None

@@ -288,6 +311,10 @@ async def run_graphrag_for_kb(
    callback(msg=f"[GraphRAG] KB merge done in {now - start:.2f}s. ok={len(ok_docs)} / total={len(doc_ids)}")
    return {"ok_docs": ok_docs, "failed_docs": failed_docs, "total_docs": len(doc_ids), "total_chunks": total_chunks, "seconds": now - start}

+ if has_canceled(row["id"]):
+     callback(msg=f"Task {row['id']} cancelled before resolution/community extraction.")
+     raise TaskCanceledException(f"Task {row['id']} was cancelled")
+
await kb_lock.spin_acquire()
callback(msg=f"[GraphRAG] kb:{kb_id} post-merge lock acquired for resolution/community")

@@ -306,6 +333,7 @@ async def run_graphrag_for_kb(
    chat_model,
    embedding_model,
    callback,
+   task_id=row["id"],
)

if with_community:

@@ -317,6 +345,7 @@ async def run_graphrag_for_kb(
    chat_model,
    embedding_model,
    callback,
+   task_id=row["id"],
)
finally:
    kb_lock.release()

@@ -343,7 +372,12 @@ async def generate_subgraph(
    llm_bdl,
    embed_bdl,
    callback,
+   task_id: str = "",
):
+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled during subgraph generation for doc {doc_id}.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    contains = await does_graph_contains(tenant_id, kb_id, doc_id)
    if contains:
        callback(msg=f"Graph already contains {doc_id}")

@@ -354,15 +388,24 @@ async def generate_subgraph(
        language=language,
        entity_types=entity_types,
    )
-   ents, rels = await ext(doc_id, chunks, callback)
+   ents, rels = await ext(doc_id, chunks, callback, task_id=task_id)
    subgraph = nx.Graph()

    for ent in ents:
+       if task_id and has_canceled(task_id):
+           callback(msg=f"Task {task_id} cancelled during entity processing for doc {doc_id}.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
+
        assert "description" in ent, f"entity {ent} does not have description"
        ent["source_id"] = [doc_id]
        subgraph.add_node(ent["entity_name"], **ent)

    ignored_rels = 0
    for rel in rels:
+       if task_id and has_canceled(task_id):
+           callback(msg=f"Task {task_id} cancelled during relationship processing for doc {doc_id}.")
+           raise TaskCanceledException(f"Task {task_id} was cancelled")
+
        assert "description" in rel, f"relation {rel} does not have description"
        if not subgraph.has_node(rel["src_id"]) or not subgraph.has_node(rel["tgt_id"]):
            ignored_rels += 1

@@ -434,17 +477,27 @@ async def resolve_entities(
    llm_bdl,
    embed_bdl,
    callback,
+   task_id: str = "",
):
+   # Check if task has been canceled before resolution
+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled during entity resolution.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    start = trio.current_time()
    er = EntityResolution(
        llm_bdl,
    )
-   reso = await er(graph, subgraph_nodes, callback=callback)
+   reso = await er(graph, subgraph_nodes, callback=callback, task_id=task_id)
    graph = reso.graph
    change = reso.change
    callback(msg=f"Graph resolution removed {len(change.removed_nodes)} nodes and {len(change.removed_edges)} edges.")
    callback(msg="Graph resolution updated pagerank.")

+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled after entity resolution.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    await set_graph(tenant_id, kb_id, embed_bdl, graph, change, callback)
    now = trio.current_time()
    callback(msg=f"Graph resolution done in {now - start:.2f}s.")

@@ -459,12 +512,22 @@ async def extract_community(
    llm_bdl,
    embed_bdl,
    callback,
+   task_id: str = "",
):
+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled before community extraction.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    start = trio.current_time()
    ext = CommunityReportsExtractor(
        llm_bdl,
    )
-   cr = await ext(graph, callback=callback)
+   cr = await ext(graph, callback=callback, task_id=task_id)

+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled during community extraction.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    community_structure = cr.structured_output
    community_reports = cr.output
    doc_ids = graph.graph["source_id"]

@@ -472,6 +535,10 @@ async def extract_community(
    now = trio.current_time()
    callback(msg=f"Graph extracted {len(cr.structured_output)} communities in {now - start:.2f}s.")
    start = now
+
+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled during community indexing.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    chunks = []
    for stru, rep in zip(community_structure, community_reports):
        obj = {

@@ -509,6 +576,10 @@ async def extract_community(
        error_message = f"Insert chunk error: {doc_store_result}, please check log file and Elasticsearch/Infinity status!"
        raise Exception(error_message)

+   if task_id and has_canceled(task_id):
+       callback(msg=f"Task {task_id} cancelled after community indexing.")
+       raise TaskCanceledException(f"Task {task_id} was cancelled")
+
    now = trio.current_time()
    callback(msg=f"Graph indexed {len(cr.structured_output)} communities in {now - start:.2f}s.")
    return community_structure, community_reports
@@ -71,7 +71,7 @@ class GraphExtractor(Extractor):
self._left_token_count = llm_invoker.max_length - num_tokens_from_string(self._entity_extract_prompt.format(**self._context_base, input_text=""))
self._left_token_count = max(llm_invoker.max_length * 0.6, self._left_token_count)

- async def _process_single_content(self, chunk_key_dp: tuple[str, str], chunk_seq: int, num_chunks: int, out_results):
+ async def _process_single_content(self, chunk_key_dp: tuple[str, str], chunk_seq: int, num_chunks: int, out_results, task_id=""):
    token_count = 0
    chunk_key = chunk_key_dp[0]
    content = chunk_key_dp[1]

@@ -86,13 +86,13 @@ class GraphExtractor(Extractor):
    if self.callback:
        self.callback(msg=f"Start processing for {chunk_key}: {content[:25]}...")
    async with chat_limiter:
-       final_result = await trio.to_thread.run_sync(self._chat, "", [{"role": "user", "content": hint_prompt}], gen_conf)
+       final_result = await trio.to_thread.run_sync(self._chat, "", [{"role": "user", "content": hint_prompt}], gen_conf, task_id)
    token_count += num_tokens_from_string(hint_prompt + final_result)
    history = pack_user_ass_to_openai_messages(hint_prompt, final_result, self._continue_prompt)
    for now_glean_index in range(self._max_gleanings):
        async with chat_limiter:
            # glean_result = await trio.to_thread.run_sync(lambda: self._chat(hint_prompt, history, gen_conf))
-           glean_result = await trio.to_thread.run_sync(self._chat, "", history, gen_conf)
+           glean_result = await trio.to_thread.run_sync(self._chat, "", history, gen_conf, task_id)
        history.extend([{"role": "assistant", "content": glean_result}])
        token_count += num_tokens_from_string("\n".join([m["content"] for m in history]) + hint_prompt + self._continue_prompt)
        final_result += glean_result

@@ -101,7 +101,7 @@ class GraphExtractor(Extractor):

    history.extend([{"role": "user", "content": self._if_loop_prompt}])
    async with chat_limiter:
-       if_loop_result = await trio.to_thread.run_sync(self._chat, "", history, gen_conf)
+       if_loop_result = await trio.to_thread.run_sync(self._chat, "", history, gen_conf, task_id)
    token_count += num_tokens_from_string("\n".join([m["content"] for m in history]) + if_loop_result + self._if_loop_prompt)
    if_loop_result = if_loop_result.strip().strip('"').strip("'").lower()
    if if_loop_result != "yes":
@@ -20,7 +20,7 @@ from io import BytesIO

from deepdoc.parser.utils import get_text
from rag.app import naive
- from rag.app.naive import plaintext_parser, PARSERS
+ from rag.app.naive import by_plaintext, PARSERS
from rag.nlp import bullets_category, is_english,remove_contents_table, \
    hierarchical_merge, make_colon_as_title, naive_merge, random_choices, tokenize_table, \
    tokenize_chunks

@@ -102,10 +102,10 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

name = layout_recognizer.strip().lower()
- parser = PARSERS.get(name, plaintext_parser)
+ parser = PARSERS.get(name, by_plaintext)
callback(0.1, "Start to parse.")

- sections, tables, _ = parser(
+ sections, tables, pdf_parser = parser(
    filename = filename,
    binary = binary,
    from_page = from_page,
@@ -25,7 +25,7 @@ from rag.nlp import bullets_category, remove_contents_table, \
    make_colon_as_title, tokenize_chunks, docx_question_level, tree_merge
from rag.nlp import rag_tokenizer, Node
from deepdoc.parser import PdfParser, DocxParser, HtmlParser
- from rag.app.naive import plaintext_parser, PARSERS
+ from rag.app.naive import by_plaintext, PARSERS


@@ -161,10 +161,10 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

name = layout_recognizer.strip().lower()
- parser = PARSERS.get(name, plaintext_parser)
+ parser = PARSERS.get(name, by_plaintext)
callback(0.1, "Start to parse.")

- raw_sections, tables, _ = parser(
+ raw_sections, tables, pdf_parser = parser(
    filename = filename,
    binary = binary,
    from_page = from_page,
@@ -26,7 +26,7 @@ from deepdoc.parser import PdfParser, DocxParser
from deepdoc.parser.figure_parser import vision_figure_parser_pdf_wrapper,vision_figure_parser_docx_wrapper
from docx import Document
from PIL import Image
- from rag.app.naive import plaintext_parser, PARSERS
+ from rag.app.naive import by_plaintext, PARSERS

class Pdf(PdfParser):
    def __init__(self):

@@ -202,7 +202,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

name = layout_recognizer.strip().lower()
- pdf_parser = PARSERS.get(name, plaintext_parser)
+ pdf_parser = PARSERS.get(name, by_plaintext)
callback(0.1, "Start to parse.")

sections, tbls, pdf_parser = pdf_parser(
@@ -38,7 +38,7 @@ from deepdoc.parser.docling_parser import DoclingParser
 from deepdoc.parser.tcadp_parser import TCADPParser
 from rag.nlp import concat_img, find_codec, naive_merge, naive_merge_with_images, naive_merge_docx, rag_tokenizer, tokenize_chunks, tokenize_chunks_with_images, tokenize_table

-def DeepDOC_parser(filename, binary=None, from_page=0, to_page=100000, callback=None, pdf_cls = None ,**kwargs):
+def by_deepdoc(filename, binary=None, from_page=0, to_page=100000, lang="Chinese", callback=None, pdf_cls = None ,**kwargs):
     callback = callback
     binary = binary
     pdf_parser = pdf_cls() if pdf_cls else Pdf()
@@ -48,13 +48,14 @@ def DeepDOC_parser(filename, binary=None, from_page=0, to_page=100000, callback=
         to_page=to_page,
         callback=callback
     )

     tables = vision_figure_parser_pdf_wrapper(tbls=tables,
                                               callback=callback,
                                               **kwargs)
     return sections, tables, pdf_parser


-def MinerU_parser(filename, binary=None, callback=None, **kwargs):
+def by_mineru(filename, binary=None, from_page=0, to_page=100000, lang="Chinese", callback=None, pdf_cls = None ,**kwargs):
     mineru_executable = os.environ.get("MINERU_EXECUTABLE", "mineru")
     mineru_api = os.environ.get("MINERU_APISERVER", "http://host.docker.internal:9987")
     pdf_parser = MinerUParser(mineru_path=mineru_executable, mineru_api=mineru_api)
@@ -74,7 +75,7 @@ def MinerU_parser(filename, binary=None, callback=None, **kwargs):
     return sections, tables, pdf_parser


-def Docling_parser(filename, binary=None, callback=None, **kwargs):
+def by_docling(filename, binary=None, from_page=0, to_page=100000, lang="Chinese", callback=None, pdf_cls = None ,**kwargs):
     pdf_parser = DoclingParser()

     if not pdf_parser.check_installation():
@@ -91,7 +92,7 @@ def Docling_parser(filename, binary=None, callback=None, **kwargs):
     return sections, tables, pdf_parser


-def TCADP_parser(filename, binary=None, callback=None, **kwargs):
+def by_tcadp(filename, binary=None, from_page=0, to_page=100000, lang="Chinese", callback=None, pdf_cls = None ,**kwargs):
     tcadp_parser = TCADPParser()

     if not tcadp_parser.check_installation():
@@ -108,7 +109,7 @@ def TCADP_parser(filename, binary=None, callback=None, **kwargs):
     return sections, tables, tcadp_parser


-def plaintext_parser(filename, binary=None, from_page=0, to_page=100000, callback=None, **kwargs):
+def by_plaintext(filename, binary=None, from_page=0, to_page=100000, callback=None, **kwargs):
     if kwargs.get("layout_recognizer", "") == "Plain Text":
         pdf_parser = PlainParser()
     else:
@@ -125,11 +126,11 @@ def plaintext_parser(filename, binary=None, from_page=0, to_page=100000, callbac

 PARSERS = {
-    "deepdoc": DeepDOC_parser,
-    "mineru": MinerU_parser,
-    "docling": Docling_parser,
-    "tcadp": TCADP_parser,
-    "plaintext": plaintext_parser, # default
+    "deepdoc": by_deepdoc,
+    "mineru": by_mineru,
+    "docling": by_docling,
+    "tcadp": by_tcadp,
+    "plaintext": by_plaintext, # default
 }

@@ -630,10 +631,10 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
     layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

     name = layout_recognizer.strip().lower()
-    parser = PARSERS.get(name, plaintext_parser)
+    parser = PARSERS.get(name, by_plaintext)
     callback(0.1, "Start to parse.")

-    sections, tables, _ = parser(
+    sections, tables, pdf_parser = parser(
         filename = filename,
         binary = binary,
         from_page = from_page,
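The hunks above give every parsing backend one shared keyword signature (filename, binary, from_page, to_page, lang, callback, pdf_cls, **kwargs), which is what lets chunk() dispatch through the PARSERS registry and now bind the returned pdf_parser handle instead of discarding it. A minimal sketch of the dispatch pattern, with hypothetical stand-in parser bodies:

# Sketch of the PARSERS registry dispatch; the parser bodies here are
# hypothetical stand-ins, not RAGFlow code.

def by_deepdoc(filename, binary=None, from_page=0, to_page=100000,
               lang="Chinese", callback=None, pdf_cls=None, **kwargs):
    # The real version runs the DeepDOC layout pipeline.
    return [("section text", "")], [], object()

def by_plaintext(filename, binary=None, from_page=0, to_page=100000,
                 callback=None, **kwargs):
    # Fallback backend: plain text extraction, no layout model.
    return [("plain text", "")], [], None

PARSERS = {"deepdoc": by_deepdoc, "plaintext": by_plaintext}

def dispatch(layout_recognizer, **kwargs):
    # "Plain Text".strip().lower() is "plain text", which is not a
    # registry key, so it falls through to the by_plaintext default.
    name = (layout_recognizer or "Plain Text").strip().lower()
    parser = PARSERS.get(name, by_plaintext)
    # Every backend returns the same triple, so callers can keep the
    # pdf_parser handle instead of throwing it away with `_`.
    sections, tables, pdf_parser = parser(**kwargs)
    return sections, tables, pdf_parser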
@@ -23,7 +23,7 @@ from rag.app import naive
 from rag.nlp import rag_tokenizer, tokenize
 from deepdoc.parser import PdfParser, ExcelParser, HtmlParser
 from deepdoc.parser.figure_parser import vision_figure_parser_docx_wrapper
-from rag.app.naive import plaintext_parser, PARSERS
+from rag.app.naive import by_plaintext, PARSERS

 class Pdf(PdfParser):
     def __call__(self, filename, binary=None, from_page=0,
@@ -88,10 +88,10 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
     layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

     name = layout_recognizer.strip().lower()
-    parser = PARSERS.get(name, plaintext_parser)
+    parser = PARSERS.get(name, by_plaintext)
     callback(0.1, "Start to parse.")

-    sections, tbls, _ = parser(
+    sections, tbls, pdf_parser = parser(
         filename = filename,
         binary = binary,
         from_page = from_page,
@@ -24,7 +24,7 @@ from rag.nlp import tokenize, is_english
 from rag.nlp import rag_tokenizer
 from deepdoc.parser import PdfParser, PptParser, PlainParser
 from PyPDF2 import PdfReader as pdf2_read
-from rag.app.naive import plaintext_parser, PARSERS
+from rag.app.naive import by_plaintext, PARSERS

 class Ppt(PptParser):
     def __call__(self, fnm, from_page, to_page, callback=None):
@@ -131,7 +131,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
     layout_recognizer = "DeepDOC" if layout_recognizer else "Plain Text"

     name = layout_recognizer.strip().lower()
-    parser = PARSERS.get(name, plaintext_parser)
+    parser = PARSERS.get(name, by_plaintext)
     callback(0.1, "Start to parse.")

     sections, _, _ = parser(
@@ -41,7 +41,7 @@ class Pipeline(Graph):
         self._doc_id = None

     def callback(self, component_name: str, progress: float | int | None = None, message: str = "") -> None:
-        from rag.svr.task_executor import TaskCanceledException
+        from common.exceptions import TaskCanceledException
         log_key = f"{self._flow_id}-{self.task_id}-logs"
         timestamp = timer()
         if has_canceled(self.task_id):
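Importing TaskCanceledException from common.exceptions rather than from rag.svr.task_executor breaks the pipeline-to-executor import cycle. A plausible shape for the shared module, assuming it simply hosts the class that a later hunk deletes from task_executor.py:

# common/exceptions.py -- sketch; assumes the module mirrors the
# TaskCanceledException class removed from rag/svr/task_executor.py below.

class TaskCanceledException(Exception):
    def __init__(self, msg):
        self.msg = msg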
@@ -20,7 +20,9 @@ import numpy as np
 from sklearn.mixture import GaussianMixture
 import trio

+from api.db.services.task_service import has_canceled
 from common.connection_utils import timeout
+from common.exceptions import TaskCanceledException
 from graphrag.utils import (
     get_llm_cache,
     get_embed_cache,
@@ -75,18 +77,24 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
             await trio.to_thread.run_sync(lambda: set_embed_cache(self._embd_model.llm_name, txt, embds))
         return embds

-    def _get_optimal_clusters(self, embeddings: np.ndarray, random_state: int):
+    def _get_optimal_clusters(self, embeddings: np.ndarray, random_state: int, task_id: str = ""):
         max_clusters = min(self._max_cluster, len(embeddings))
         n_clusters = np.arange(1, max_clusters)
         bics = []
         for n in n_clusters:
+
+            if task_id:
+                if has_canceled(task_id):
+                    logging.info(f"Task {task_id} cancelled during get optimal clusters.")
+                    raise TaskCanceledException(f"Task {task_id} was cancelled")
+
             gm = GaussianMixture(n_components=n, random_state=random_state)
             gm.fit(embeddings)
             bics.append(gm.bic(embeddings))
         optimal_clusters = n_clusters[np.argmin(bics)]
         return optimal_clusters

-    async def __call__(self, chunks, random_state, callback=None):
+    async def __call__(self, chunks, random_state, callback=None, task_id: str = ""):
         if len(chunks) <= 1:
             return []
         chunks = [(s, a) for s, a in chunks if s and len(a) > 0]
@@ -96,6 +104,12 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
         @timeout(60*20)
         async def summarize(ck_idx: list[int]):
             nonlocal chunks
+
+            if task_id:
+                if has_canceled(task_id):
+                    logging.info(f"Task {task_id} cancelled during RAPTOR summarization.")
+                    raise TaskCanceledException(f"Task {task_id} was cancelled")
+
             texts = [chunks[i][0] for i in ck_idx]
             len_per_chunk = int(
                 (self._llm_model.max_length - self._max_token) / len(texts)
@@ -104,6 +118,11 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
                 [truncate(t, max(1, len_per_chunk)) for t in texts]
             )
             async with chat_limiter:
+
+                if task_id and has_canceled(task_id):
+                    logging.info(f"Task {task_id} cancelled before RAPTOR LLM call.")
+                    raise TaskCanceledException(f"Task {task_id} was cancelled")
+
                 cnt = await self._chat(
                     "You're a helpful assistant.",
                     [
@@ -122,11 +141,22 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
                 cnt,
             )
             logging.debug(f"SUM: {cnt}")
+
+            if task_id and has_canceled(task_id):
+                logging.info(f"Task {task_id} cancelled before RAPTOR embedding.")
+                raise TaskCanceledException(f"Task {task_id} was cancelled")
+
             embds = await self._embedding_encode(cnt)
             chunks.append((cnt, embds))

         labels = []
         while end - start > 1:
+
+            if task_id:
+                if has_canceled(task_id):
+                    logging.info(f"Task {task_id} cancelled during RAPTOR layer processing.")
+                    raise TaskCanceledException(f"Task {task_id} was cancelled")
+
             embeddings = [embd for _, embd in chunks[start:end]]
             if len(embeddings) == 2:
                 await summarize([start, start + 1])
@@ -148,7 +178,7 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
                     n_components=min(12, len(embeddings) - 2),
                     metric="cosine",
                 ).fit_transform(embeddings)
-            n_clusters = self._get_optimal_clusters(reduced_embeddings, random_state)
+            n_clusters = self._get_optimal_clusters(reduced_embeddings, random_state, task_id=task_id)
             if n_clusters == 1:
                 lbls = [0 for _ in range(len(reduced_embeddings))]
             else:
@@ -162,6 +192,11 @@ class RecursiveAbstractiveProcessing4TreeOrganizedRetrieval:
             for c in range(n_clusters):
                 ck_idx = [i + start for i in range(len(lbls)) if lbls[i] == c]
                 assert len(ck_idx) > 0
+
+                if task_id and has_canceled(task_id):
+                    logging.info(f"Task {task_id} cancelled before RAPTOR cluster processing.")
+                    raise TaskCanceledException(f"Task {task_id} was cancelled")
+
                 nursery.start_soon(summarize, ck_idx)

         assert len(chunks) - end == n_clusters, "{} vs. {}".format(
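The same four-line cancellation check now guards every expensive RAPTOR step: the BIC cluster search, summarization, the LLM call, and embedding. A helper expressing the pattern; _raise_if_canceled is an illustrative name, not a function introduced by the patch:

import logging

from api.db.services.task_service import has_canceled
from common.exceptions import TaskCanceledException


def _raise_if_canceled(task_id: str, stage: str) -> None:
    # Cooperative cancellation between expensive steps. task_id defaults
    # to "" throughout the patch, and a falsy ID disables the check.
    if task_id and has_canceled(task_id):
        logging.info(f"Task {task_id} cancelled during {stage}.")
        raise TaskCanceledException(f"Task {task_id} was cancelled")

# Equivalent to the inlined checks above, e.g.:
# _raise_if_canceled(task_id, "RAPTOR summarization")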
@@ -28,7 +28,7 @@ from api.db.services.connector_service import SyncLogsService
 from api.db.services.knowledgebase_service import KnowledgebaseService
 from common.log_utils import init_root_logger
 from common.config_utils import show_configs
-from common.data_source import BlobStorageConnector
+from common.data_source import BlobStorageConnector, NotionConnector, DiscordConnector
 import logging
 import os
 from datetime import datetime, timezone
@@ -37,7 +37,7 @@ import trio
 import faulthandler
 from common.constants import FileSource, TaskStatus
 from common import settings
-from api.versions import get_ragflow_version
+from common.versions import get_ragflow_version
 from common.data_source.confluence_connector import ConfluenceConnector
 from common.data_source.utils import load_all_docs_from_checkpoint_connector
 from common.signal_utils import start_tracemalloc_and_snapshot, stop_tracemalloc
@@ -47,6 +47,8 @@ task_limiter = trio.Semaphore(MAX_CONCURRENT_TASKS)


 class SyncBase:
+    SOURCE_NAME: str = None
+
     def __init__(self, conf: dict) -> None:
         self.conf = conf
@@ -67,7 +69,7 @@ class SyncBase:
             docs = [{
                 "id": doc.id,
                 "connector_id": task["connector_id"],
-                "source": FileSource.S3,
+                "source": self.SOURCE_NAME,
                 "semantic_identifier": doc.semantic_identifier,
                 "extension": doc.extension,
                 "size_bytes": doc.size_bytes,
@@ -76,7 +78,7 @@ class SyncBase:
             } for doc in document_batch]

             e, kb = KnowledgebaseService.get_by_id(task["kb_id"])
-            err, dids = SyncLogsService.duplicate_and_parse(kb, docs, task["tenant_id"], f"{FileSource.S3}/{task['connector_id']}")
+            err, dids = SyncLogsService.duplicate_and_parse(kb, docs, task["tenant_id"], f"{self.SOURCE_NAME}/{task['connector_id']}")
             SyncLogsService.increase_docs(task["id"], min_update, max_update, len(docs), "\n".join(err), len(err))
             doc_num += len(docs)
@@ -98,6 +100,8 @@ class SyncBase:


 class S3(SyncBase):
+    SOURCE_NAME: str = FileSource.S3
+
     async def _generate(self, task: dict):
         self.connector = BlobStorageConnector(
             bucket_type=self.conf.get("bucket_type", "s3"),
@@ -109,14 +113,17 @@ class S3(SyncBase):
             else self.connector.poll_source(task["poll_range_start"].timestamp(), datetime.now(timezone.utc).timestamp())

         begin_info = "totally" if task["reindex"]=="1" or not task["poll_range_start"] else "from {}".format(task["poll_range_start"])
-        logging.info("Connect to {}: {} {}".format(self.conf.get("bucket_type", "s3"),
+        logging.info("Connect to {}: {}(prefix/{}) {}".format(self.conf.get("bucket_type", "s3"),
                                                    self.conf["bucket_name"],
+                                                   self.conf.get("prefix", ""),
                                                    begin_info
                                                    ))
         return document_batch_generator


 class Confluence(SyncBase):
+    SOURCE_NAME: str = FileSource.CONFLUENCE
+
     async def _generate(self, task: dict):
         from common.data_source.interfaces import StaticCredentialsProvider
         from common.data_source.config import DocumentSource
@@ -131,10 +138,7 @@ class Confluence(SyncBase):
         credentials_provider = StaticCredentialsProvider(
             tenant_id=task["tenant_id"],
             connector_name=DocumentSource.CONFLUENCE,
-            credential_json={
-                "confluence_username": self.conf["username"],
-                "confluence_access_token": self.conf["access_token"],
-            },
+            credential_json=self.conf["credentials"]
         )
         self.connector.set_credentials_provider(credentials_provider)

@@ -155,52 +159,83 @@ class Confluence(SyncBase):
         )

         logging.info("Connect to Confluence: {} {}".format(self.conf["wiki_base"], begin_info))
-        return document_generator
+        return [document_generator]


 class Notion(SyncBase):
+    SOURCE_NAME: str = FileSource.NOTION
+
     async def _generate(self, task: dict):
-        pass
+        self.connector = NotionConnector(root_page_id=self.conf["root_page_id"])
+        self.connector.load_credentials(self.conf["credentials"])
+        document_generator = self.connector.load_from_state() if task["reindex"]=="1" or not task["poll_range_start"] \
+            else self.connector.poll_source(task["poll_range_start"].timestamp(), datetime.now(timezone.utc).timestamp())
+
+        begin_info = "totally" if task["reindex"]=="1" or not task["poll_range_start"] else "from {}".format(task["poll_range_start"])
+        logging.info("Connect to Notion: root({}) {}".format(self.conf["root_page_id"], begin_info))
+        return document_generator


 class Discord(SyncBase):
+    SOURCE_NAME: str = FileSource.DISCORD
+
     async def _generate(self, task: dict):
-        pass
+        server_ids: str | None = self.conf.get("server_ids", None)
+        # "channel1,channel2"
+        channel_names: str | None = self.conf.get("channel_names", None)
+
+        self.connector = DiscordConnector(
+            server_ids=server_ids.split(",") if server_ids else [],
+            channel_names=channel_names.split(",") if channel_names else [],
+            start_date=datetime(1970, 1, 1, tzinfo=timezone.utc).strftime("%Y-%m-%d"),
+            batch_size=self.conf.get("batch_size", 1024)
+        )
+        self.connector.load_credentials(self.conf["credentials"])
+        document_generator = self.connector.load_from_state() if task["reindex"]=="1" or not task["poll_range_start"] \
+            else self.connector.poll_source(task["poll_range_start"].timestamp(), datetime.now(timezone.utc).timestamp())
+
+        begin_info = "totally" if task["reindex"]=="1" or not task["poll_range_start"] else "from {}".format(task["poll_range_start"])
+        logging.info("Connect to Discord: servers({}), channel({}) {}".format(server_ids, channel_names, begin_info))
+        return document_generator


 class Gmail(SyncBase):
+    SOURCE_NAME: str = FileSource.GMAIL
+
     async def _generate(self, task: dict):
         pass


 class GoogleDriver(SyncBase):
+    SOURCE_NAME: str = FileSource.GOOGLE_DRIVER
+
     async def _generate(self, task: dict):
         pass


 class Jira(SyncBase):
+    SOURCE_NAME: str = FileSource.JIRA
+
     async def _generate(self, task: dict):
         pass


 class SharePoint(SyncBase):
+    SOURCE_NAME: str = FileSource.SHAREPOINT
+
     async def _generate(self, task: dict):
         pass


 class Slack(SyncBase):
+    SOURCE_NAME: str = FileSource.SLACK
+
     async def _generate(self, task: dict):
         pass


 class Teams(SyncBase):
+    SOURCE_NAME: str = FileSource.TEAMS
+
     async def _generate(self, task: dict):
         pass
@@ -221,7 +256,7 @@ func_factory = {

 async def dispatch_tasks():
     async with trio.open_nursery() as nursery:
-        for task in SyncLogsService.list_sync_tasks():
+        for task in SyncLogsService.list_sync_tasks()[0]:
             if task["poll_range_start"]:
                 task["poll_range_start"] = task["poll_range_start"].astimezone(timezone.utc)
             if task["poll_range_end"]:
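Two patterns recur in the connector changes above: every subclass declares SOURCE_NAME so SyncBase can tag documents and build the duplicate_and_parse location generically, and every implemented _generate picks between a full load and an incremental poll with the same two-branch expression. A condensed sketch; _pick_generator is an illustrative helper, not part of the patch:

from datetime import datetime, timezone

class SyncBase:
    # Subclasses override this with their FileSource value, which is why
    # the hard-coded FileSource.S3 above became self.SOURCE_NAME.
    SOURCE_NAME: str = None

    def _pick_generator(self, connector, task: dict):
        # Full re-index when explicitly requested or when no sync
        # watermark exists; otherwise poll only the window since the
        # last successful sync.
        if task["reindex"] == "1" or not task["poll_range_start"]:
            return connector.load_from_state()
        return connector.poll_source(
            task["poll_range_start"].timestamp(),
            datetime.now(timezone.utc).timestamp(),
        )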
@ -55,7 +55,7 @@ from api.db.services.document_service import DocumentService
|
||||
from api.db.services.llm_service import LLMBundle
|
||||
from api.db.services.task_service import TaskService, has_canceled, CANVAS_DEBUG_DOC_ID, GRAPH_RAPTOR_FAKE_DOC_ID
|
||||
from api.db.services.file2document_service import File2DocumentService
|
||||
from api.versions import get_ragflow_version
|
||||
from common.versions import get_ragflow_version
|
||||
from api.db.db_models import close_connection
|
||||
from rag.app import laws, paper, presentation, manual, qa, table, book, resume, picture, naive, one, audio, \
|
||||
email, tag
|
||||
@ -65,6 +65,7 @@ from common.token_utils import num_tokens_from_string, truncate
|
||||
from rag.utils.redis_conn import REDIS_CONN, RedisDistributedLock
|
||||
from graphrag.utils import chat_limiter
|
||||
from common.signal_utils import start_tracemalloc_and_snapshot, stop_tracemalloc
|
||||
from common.exceptions import TaskCanceledException
|
||||
from common import settings
|
||||
from common.constants import PAGERANK_FLD, TAG_FLD, SVR_CONSUMER_GROUP_NAME
|
||||
|
||||
@ -127,9 +128,7 @@ def signal_handler(sig, frame):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
class TaskCanceledException(Exception):
|
||||
def __init__(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
|
||||
|
||||
def set_progress(task_id, from_page=0, to_page=-1, prog=None, msg="Processing..."):
|
||||
@ -660,7 +659,7 @@ async def run_raptor_for_kb(row, kb_parser_config, chat_mdl, embd_mdl, vector_si
|
||||
raptor_config["threshold"],
|
||||
)
|
||||
original_length = len(chunks)
|
||||
chunks = await raptor(chunks, kb_parser_config["raptor"]["random_seed"], callback)
|
||||
chunks = await raptor(chunks, kb_parser_config["raptor"]["random_seed"], callback, row["id"])
|
||||
doc = {
|
||||
"doc_id": fake_doc_id,
|
||||
"kb_id": [str(row["kb_id"])],
|
||||
@ -815,6 +814,8 @@ async def do_handle_task(task):
|
||||
callback=progress_callback,
|
||||
doc_ids=task.get("doc_ids", []),
|
||||
)
|
||||
if fake_doc_ids := task.get("doc_ids", []):
|
||||
task_doc_id = fake_doc_ids[0] # use the first document ID to represent this task for logging purposes
|
||||
# Either using graphrag or Standard chunking methods
|
||||
elif task_type == "graphrag":
|
||||
ok, kb = KnowledgebaseService.get_by_id(task_dataset_id)
|
||||
|
||||
web/src/assets/svg/data-source/discord.svg (new file, 14 lines)
@@ -0,0 +1,14 @@
+<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g id="discord">
+<mask id="mask0_826_561469" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="24" height="24">
+<rect id="矩形" width="24" height="24" fill="white"/>
+</mask>
+<g mask="url(#mask0_826_561469)">
+</g>
+<g id="Discord Icon SVG Vector Icon 1">
+<g id="Group">
+<path id="Vector" d="M18.2477 6.17085C17.0825 5.6257 15.8367 5.2295 14.5342 5.00391C14.3742 5.29311 14.1873 5.68211 14.0585 5.99155C12.6739 5.78332 11.302 5.78332 9.94287 5.99155C9.81404 5.68211 9.62292 5.29311 9.46152 5.00391C8.1576 5.2295 6.91032 5.62716 5.74514 6.17374C3.39498 9.72515 2.75789 13.1883 3.07644 16.6024C4.63519 17.7664 6.14581 18.4735 7.63093 18.9362C7.99762 18.4316 8.32465 17.8951 8.60638 17.3297C8.06981 17.1258 7.5559 16.8742 7.07031 16.5821C7.19913 16.4867 7.32514 16.3869 7.44689 16.2842C10.4086 17.6695 13.6267 17.6695 16.5531 16.2842C16.6762 16.3869 16.8022 16.4867 16.9296 16.5821C16.4426 16.8756 15.9273 17.1273 15.3907 17.3312C15.6724 17.8951 15.9981 18.433 16.3662 18.9377C17.8527 18.4749 19.3647 17.7678 20.9235 16.6024C21.2973 12.6446 20.285 9.21325 18.2477 6.17085ZM9.00988 14.5028C8.12079 14.5028 7.39166 13.6727 7.39166 12.662C7.39166 11.6512 8.10522 10.8198 9.00988 10.8198C9.91457 10.8198 10.6437 11.6498 10.6281 12.662C10.6295 13.6727 9.91457 14.5028 9.00988 14.5028ZM14.9901 14.5028C14.101 14.5028 13.3718 13.6727 13.3718 12.662C13.3718 11.6512 14.0854 10.8198 14.9901 10.8198C15.8947 10.8198 16.6238 11.6498 16.6083 12.662C16.6083 13.6727 15.8947 14.5028 14.9901 14.5028Z" fill="#5865F2"/>
+</g>
+</g>
+</g>
+</svg>
web/src/assets/svg/data-source/notion.svg (new file, 19 lines)
@@ -0,0 +1,19 @@
+<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g id="notion">
+<rect id="矩形" width="12" height="12" fill="#D8D8D8" fill-opacity="0.01"/>
+<mask id="mask0_826_561463" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="24" height="24">
+<rect id="矩形_2" width="24" height="24" fill="white"/>
+</mask>
+<g mask="url(#mask0_826_561463)">
+</g>
+<g id="Log in out – Notion Help Center 1" clip-path="url(#clip0_826_561463)">
+<path id="Vector" d="M4.6861 3.77652L14.6677 3.04092C15.8935 2.93592 16.2085 3.00672 16.9789 3.56592L20.1655 5.80572C20.6917 6.19092 20.8669 6.29592 20.8669 6.71532V18.9991C20.8669 19.7689 20.5861 20.2243 19.6057 20.2939L8.0143 20.9941C7.2775 21.0289 6.9277 20.9239 6.5419 20.4337L4.1959 17.3893C3.7741 16.8289 3.6001 16.4095 3.6001 15.9193V5.00052C3.6001 4.37112 3.8809 3.84612 4.6861 3.77652Z" fill="white"/>
+<path id="Vector_2" fill-rule="evenodd" clip-rule="evenodd" d="M14.6683 3.04092L4.6849 3.77652C3.8809 3.84612 3.6001 4.37112 3.6001 5.00052V15.9193C3.6001 16.4095 3.7747 16.8289 4.1953 17.3893L6.5419 20.4337C6.9277 20.9239 7.2775 21.0289 8.0137 20.9941L19.6063 20.2939C20.5867 20.2243 20.8675 19.7689 20.8675 18.9991V6.71532C20.8675 6.31752 20.7097 6.20292 20.2453 5.86332L20.1661 5.80572L16.9801 3.56592C16.2091 3.00672 15.8941 2.93592 14.6677 3.04092H14.6683ZM8.2759 6.51432C7.3297 6.57792 7.1143 6.59232 6.5773 6.15612L5.2111 5.07132C5.0713 4.93092 5.1415 4.75572 5.4913 4.72092L15.0883 4.02132C15.8935 3.95112 16.3141 4.23132 16.6291 4.47612L18.2755 5.66592C18.3457 5.70132 18.5203 5.91072 18.3103 5.91072L8.3983 6.50592L8.2759 6.51432ZM7.1725 18.8941V8.46612C7.1725 8.01072 7.3129 7.80072 7.7329 7.76532L19.1155 7.10052C19.5013 7.06572 19.6765 7.31052 19.6765 7.76532V18.1237C19.6765 18.5791 19.6063 18.9643 18.9757 18.9991L8.0833 19.6291C7.4533 19.6639 7.1725 19.4545 7.1725 18.8941ZM17.9257 9.02532C17.9953 9.34032 17.9257 9.65532 17.6095 9.69132L17.0851 9.79512V17.4943C16.6291 17.7391 16.2091 17.8789 15.8587 17.8789C15.2983 17.8789 15.1579 17.7037 14.7379 17.1793L11.3053 11.7901V17.0041L12.3913 17.2495C12.3913 17.2495 12.3913 17.8795 11.5153 17.8795L9.0991 18.0193C9.0289 17.8789 9.0991 17.5291 9.3439 17.4595L9.9745 17.2849V10.3909L9.0991 10.3201C9.0289 10.0051 9.2035 9.55032 9.6943 9.51552L12.2863 9.34032L15.8587 14.8003V9.97032L14.9479 9.86592C14.8783 9.48012 15.1579 9.20052 15.5083 9.16572L17.9257 9.02532Z" fill="black"/>
+</g>
+</g>
+<defs>
+<clipPath id="clip0_826_561463">
+<rect width="17.4" height="18" fill="white" transform="translate(3.6001 3)"/>
+</clipPath>
+</defs>
+</svg>
web/src/assets/svg/data-source/s3.svg (new file, 29 lines)
@@ -0,0 +1,29 @@
+<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g id="s3">
+<mask id="mask0_826_561478" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="0" y="0" width="24" height="24">
+<rect id="矩形" width="24" height="24" fill="white"/>
+</mask>
+<g mask="url(#mask0_826_561478)">
+</g>
+<g id="FileAmazon-S3-Logo - Wikimedia Commons 2" clip-path="url(#clip0_826_561478)">
+<path id="Vector" fill-rule="evenodd" clip-rule="evenodd" d="M6.63563 6.48047L9.35059 12.0352L6.63563 17.5898L5.52348 16.9219V7.14844L6.63563 6.48047Z" fill="#E25444"/>
+<path id="Vector_2" fill-rule="evenodd" clip-rule="evenodd" d="M6.63566 6.48047L12.0656 7.14844L14.8296 12.0352L12.0656 16.9219L6.63566 17.5898V6.48047Z" fill="#7B1D13"/>
+<path id="Vector_3" fill-rule="evenodd" clip-rule="evenodd" d="M17.5934 6.48047L18.4766 6.90234V17.168L17.5934 17.5898L12.0653 12.0352L17.5934 6.48047Z" fill="#58150D"/>
+<path id="Vector_4" fill-rule="evenodd" clip-rule="evenodd" d="M17.6055 6.46875L12.0582 8.125V16.0937L17.6055 17.5937V6.46875Z" fill="#E25444"/>
+<path id="Vector_5" fill-rule="evenodd" clip-rule="evenodd" d="M12.0552 9.00002L14.4106 8.59376L12.0552 5.71875L9.70566 8.59376L12.0552 9.00002Z" fill="#58150D"/>
+<path id="Vector_6" fill-rule="evenodd" clip-rule="evenodd" d="M9.70566 8.59376L12.0582 9.00625L14.4106 8.59376V5.71875" fill="#58150D"/>
+<path id="Vector_7" fill-rule="evenodd" clip-rule="evenodd" d="M12.0552 15.0312L14.4106 15.5L12.0552 17.9688L9.70566 15.5L12.0552 15.0312Z" fill="#58150D"/>
+<path id="Vector_8" fill-rule="evenodd" clip-rule="evenodd" d="M12.0653 3.5625L14.4204 4.89844V8.58984L12.0579 7.87501L12.0653 3.5625Z" fill="#7B1D13"/>
+<path id="Vector_9" fill-rule="evenodd" clip-rule="evenodd" d="M12.0582 9.90625L14.4106 10.1813V13.867L12.0582 14.1563V9.90625Z" fill="#7B1D13"/>
+<path id="Vector_10" fill-rule="evenodd" clip-rule="evenodd" d="M12.0582 16.1249L14.4106 15.491V19.1143L12.0582 20.4375V16.1249Z" fill="#7B1D13"/>
+<path id="Vector_11" fill-rule="evenodd" clip-rule="evenodd" d="M9.70561 15.491L12.0581 16.1251V20.4375L9.70561 19.1143V15.491Z" fill="#E25444"/>
+<path id="Vector_12" fill-rule="evenodd" clip-rule="evenodd" d="M12.0581 9.90625L9.70561 10.1813V13.867L12.0581 14.1563V9.90625Z" fill="#E25444"/>
+<path id="Vector_13" fill-rule="evenodd" clip-rule="evenodd" d="M12.0654 3.5625L9.71029 4.89844V8.58984L12.0654 7.88672V3.5625Z" fill="#E25444"/>
+</g>
+</g>
+<defs>
+<clipPath id="clip0_826_561478">
+<rect width="14" height="18" fill="white" transform="matrix(-1 0 0 1 19 3)"/>
+</clipPath>
+</defs>
+</svg>
web/src/components/back-button/index.tsx (new file, 41 lines)
@@ -0,0 +1,41 @@
+import { cn } from '@/lib/utils';
+import { ArrowBigLeft } from 'lucide-react';
+import React from 'react';
+import { useNavigate } from 'umi';
+import { Button } from '../ui/button';
+
+interface BackButtonProps
+  extends React.ButtonHTMLAttributes<HTMLButtonElement> {
+  to?: string;
+}
+
+const BackButton: React.FC<BackButtonProps> = ({
+  to,
+  className,
+  children,
+  ...props
+}) => {
+  const navigate = useNavigate();
+
+  const handleClick = () => {
+    if (to) {
+      navigate(to);
+    } else {
+      navigate(-1);
+    }
+  };
+
+  return (
+    <Button
+      variant="ghost"
+      className={cn('gap-2 bg-bg-card border border-border-default', className)}
+      onClick={handleClick}
+      {...props}
+    >
+      <ArrowBigLeft className="h-4 w-4" />
+      {children || 'Back'}
+    </Button>
+  );
+};
+
+export default BackButton;
@@ -8,9 +8,10 @@ interface StatusBadgeProps {
   // status: 'Success' | 'Failed' | 'Running' | 'Pending';
   status: RunningStatus;
   name?: string;
+  className?: string;
 }

-const FileStatusBadge: FC<StatusBadgeProps> = ({ status, name }) => {
+const FileStatusBadge: FC<StatusBadgeProps> = ({ status, name, className }) => {
   const getStatusColor = () => {
     // #3ba05c → rgb(59, 160, 92) // state-success
     // #d8494b → rgb(216, 73, 75) // state-error
@@ -51,7 +52,7 @@ const FileStatusBadge: FC<StatusBadgeProps> = ({ status, name }) => {

   return (
     <span
-      className={`inline-flex items-center w-[75px] px-2 py-1 rounded-full text-xs font-medium ${getStatusColor()}`}
+      className={`inline-flex items-center w-[75px] px-2 py-1 rounded-full text-xs font-medium ${getStatusColor()} ${className}`}
    >
      <div className={`w-1 h-1 mr-1 rounded-full ${getBgStatusColor()}`}></div>
      {name || ''}
@@ -39,7 +39,7 @@ export function RAGFlowFormItem({
     <FormItem
       className={cn(
         {
-          'flex items-center': horizontal,
+          'flex items-center w-full': horizontal,
         },
         className,
       )}
@@ -76,7 +76,6 @@ export enum Operator {
   Relevant = 'Relevant',
   RewriteQuestion = 'RewriteQuestion',
   KeywordExtract = 'KeywordExtract',
-  Baidu = 'Baidu',
   DuckDuckGo = 'DuckDuckGo',
   Wikipedia = 'Wikipedia',
   PubMed = 'PubMed',
@@ -85,7 +84,6 @@ export enum Operator {
   Bing = 'Bing',
   GoogleScholar = 'GoogleScholar',
   GitHub = 'GitHub',
-  BaiduFanyi = 'BaiduFanyi',
   QWeather = 'QWeather',
   ExeSQL = 'ExeSQL',
   Switch = 'Switch',
@@ -111,6 +109,8 @@ export enum Operator {
   SearXNG = 'SearXNG',
   Placeholder = 'Placeholder',
   DataOperations = 'DataOperations',
+  VariableAssigner = 'VariableAssigner',
+  VariableAggregator = 'VariableAggregator',
   File = 'File', // pipeline
   Parser = 'Parser',
   Tokenizer = 'Tokenizer',
@@ -13,6 +13,7 @@ export enum RunningStatus {
   CANCEL = '2', // need to refresh
   DONE = '3', // need to refresh
   FAIL = '4', // need to refresh
+  SCHEDULE = '5',
 }

 export const RunningStatusMap = {
@@ -21,6 +22,7 @@ export const RunningStatusMap = {
   [RunningStatus.CANCEL]: 'Cancel',
   [RunningStatus.DONE]: 'Success',
   [RunningStatus.FAIL]: 'Failed',
+  [RunningStatus.SCHEDULE]: 'Schedule',
 };

 export enum ModelVariableType {
@@ -141,12 +141,21 @@ export const useNavigatePage = () => {
     [navigate],
   );

+  const navigateToDataSourceDetail = useCallback(
+    (id?: string) => {
+      navigate(
+        `${Routes.UserSetting}${Routes.DataSource}${Routes.DataSourceDetailPage}?id=${id}`,
+      );
+    },
+    [navigate],
+  );
+
   const navigateToDataflowResult = useCallback(
     (props: NavigateToDataflowResultProps) => () => {
       let params: string[] = [];
       Object.keys(props).forEach((key) => {
-        if (props[key]) {
-          params.push(`${key}=${props[key]}`);
+        if (props[key as keyof typeof props]) {
+          params.push(`${key}=${props[key as keyof typeof props]}`);
         }
       });
       navigate(
@@ -179,5 +188,6 @@ export const useNavigatePage = () => {
     navigateToOldProfile,
     navigateToDataflowResult,
     navigateToDataFile,
+    navigateToDataSourceDetail,
   };
 };
@@ -27,9 +27,12 @@ export function useBuildSwitchOperatorOptions(
   const { t } = useTranslation();

   const switchOperatorOptions = useMemo(() => {
-    return SwitchOperatorOptions.filter((x) =>
-      subset.some((y) => y === x.value),
-    ).map((x) => ({
+    const filteredOptions =
+      subset.length > 0
+        ? SwitchOperatorOptions.filter((x) => subset.some((y) => y === x.value))
+        : SwitchOperatorOptions;
+
+    return filteredOptions.map((x) => ({
       value: x.value,
       icon: (
         <LogicalOperatorIcon
@@ -39,7 +42,7 @@ export function useBuildSwitchOperatorOptions(
       ),
       label: t(`flow.switchOperatorOptions.${x.label}`),
     }));
-  }, [t]);
+  }, [subset, t]);

   return switchOperatorOptions;
 }
@@ -1,6 +1,12 @@
 import { RunningStatus } from '@/constants/knowledge';
+import { DataSourceKey } from '@/pages/user-setting/data-source/contant';
 import { TreeData } from '@antv/g6/lib/types';

+export interface IConnector {
+  id: string;
+  name: string;
+  status: RunningStatus;
+  source: DataSourceKey;
+}
 // knowledge base
 export interface IKnowledge {
   avatar?: any;
@@ -35,6 +41,7 @@ export interface IKnowledge {
   mindmap_task_id?: string;
   graphrag_task_finish_at: string;
   graphrag_task_id: string;
+  connectors: IConnector[];
 }

 export interface IKnowledgeResult {
@ -274,6 +274,9 @@ export default {
|
||||
reRankModelWaring: 'Re-rank model is very time consuming.',
|
||||
},
|
||||
knowledgeConfiguration: {
|
||||
dataSource: 'Data Source',
|
||||
linkSourceSetTip: 'Manage data source linkage with this dataset',
|
||||
linkDataSource: 'Link Data Source',
|
||||
tocExtraction: 'TOC Enhance',
|
||||
tocExtractionTip:
|
||||
" For existing chunks, generate a hierarchical table of contents (one directory per file). During queries, when Directory Enhancement is activated, the system will use a large model to determine which directory items are relevant to the user's question, thereby identifying the relevant chunks.",
|
||||
@ -680,6 +683,19 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
|
||||
tocEnhanceTip: ` During the parsing of the document, table of contents information was generated (see the 'Enable Table of Contents Extraction' option in the General method). This allows the large model to return table of contents items relevant to the user's query, thereby using these items to retrieve related chunks and apply weighting to these chunks during the sorting process. This approach is derived from mimicking the behavioral logic of how humans search for knowledge in books.`,
|
||||
},
|
||||
setting: {
|
||||
errorMsg: 'Error message',
|
||||
newDocs: 'New Docs',
|
||||
timeStarted: 'Time started',
|
||||
log: 'Log',
|
||||
s3Description:
|
||||
'Connect to your AWS S3 bucket to import and sync stored files.',
|
||||
discordDescription:
|
||||
'Link your Discord server to access and analyze chat data.',
|
||||
notionDescription:
|
||||
'Sync pages and databases from Notion for knowledge retrieval.',
|
||||
availableSourcesDescription: 'Select a data source to add',
|
||||
availableSources: 'Available Sources',
|
||||
datasourceDescription: 'Manage your data source and connections',
|
||||
save: 'Save',
|
||||
search: 'Search',
|
||||
availableModels: 'Available models',
|
||||
@ -697,6 +713,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
|
||||
'Please enter your current password to change your password.',
|
||||
model: 'Model providers',
|
||||
systemModelDescription: 'Please complete these settings before beginning',
|
||||
dataSources: 'Data Sources',
|
||||
team: 'Team',
|
||||
system: 'System',
|
||||
logout: 'Log out',
|
||||
@ -1530,6 +1547,13 @@ This delimiter is used to split the input text into several text pieces echo of
|
||||
codeDescription: 'It allows developers to write custom Python logic.',
|
||||
dataOperations: 'Data operations',
|
||||
dataOperationsDescription: 'Perform various operations on a Data object.',
|
||||
variableAssigner: 'Variable assigner',
|
||||
variableAssignerDescription:
|
||||
'This component performs operations on Data objects, including extracting, filtering, and editing keys and values in the Data.',
|
||||
variableAggregator: 'Variable aggregator',
|
||||
variableAggregatorDescription: `This process aggregates variables from multiple branches into a single variable to achieve unified configuration for downstream nodes.
|
||||
|
||||
The variable aggregation node (originally the variable assignment node) is a crucial node in the workflow. It is responsible for integrating the output results of different branches, ensuring that regardless of which branch is executed, its result can be referenced and accessed through a unified variable. This is extremely useful in multi-branch scenarios, as it maps variables with the same function across different branches to a single output variable, avoiding redundant definitions in downstream nodes.`,
|
||||
inputVariables: 'Input variables',
|
||||
runningHintText: 'is running...🕞',
|
||||
openingSwitch: 'Opening switch',
|
||||
@ -1837,12 +1861,16 @@ Important structured information may include: names, dates, locations, events, k
|
||||
changeStepModalConfirmText: 'Switch Anyway',
|
||||
changeStepModalCancelText: 'Cancel',
|
||||
unlinkPipelineModalTitle: 'Unlink Ingestion pipeline',
|
||||
unlinkPipelineModalConfirmText: 'Unlink',
|
||||
unlinkPipelineModalContent: `
|
||||
<p>Once unlinked, this Dataset will no longer be connected to the current Ingestion pipeline.</p>
|
||||
<p>Files that are already being parsed will continue until completion</p>
|
||||
<p>Files that are not yet parsed will no longer be processed</p> <br/>
|
||||
<p>Are you sure you want to proceed?</p> `,
|
||||
unlinkPipelineModalConfirmText: 'Unlink',
|
||||
unlinkSourceModalTitle: 'Unlink data source',
|
||||
unlinkSourceModalContent: `
|
||||
<p>Are you sure to unlink this data source ?</p>`,
|
||||
unlinkSourceModalConfirmText: 'Unlink',
|
||||
},
|
||||
datasetOverview: {
|
||||
downloadTip: 'Files being downloaded from data sources. ',
|
||||
|
@@ -260,6 +260,9 @@ export default {
     theDocumentBeingParsedCannotBeDeleted: '正在解析的文档不能被删除',
   },
   knowledgeConfiguration: {
+    dataSource: '数据源',
+    linkSourceSetTip: '管理与此数据集的数据源链接',
+    linkDataSource: '链接数据源',
     tocExtractionTip:
       '对于已有的chunk生成层级结构的目录信息(每个文件一个目录)。在查询时,激活`目录增强`后,系统会用大模型去判断用户问题和哪些目录项相关,从而找到相关的chunk。',
     deleteGenerateModalContent: `
@@ -671,6 +674,16 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
     tocEnhanceTip: `解析文档时生成了目录信息(见General方法的'启用目录抽取'),让大模型返回和用户问题相关的目录项,从而利用目录项拿到相关chunk,对这些chunk在排序中进行加权。这种方法来源于模仿人类查询书本中知识的行为逻辑`,
   },
   setting: {
+    errorMsg: '错误信息',
+    newDocs: '新文档',
+    timeStarted: '开始时间',
+    log: '日志',
+    s3Description: ' 连接你的 AWS S3 存储桶以导入和同步文件。',
+    discordDescription: ' 连接你的 Discord 服务器以访问和分析聊天数据。',
+    notionDescription: ' 同步 Notion 页面与数据库,用于知识检索。',
+    availableSourcesDescription: '选择要添加的数据源',
+    availableSources: '可用数据源',
+    datasourceDescription: '管理您的数据源和连接',
     save: '保存',
     search: '搜索',
     availableModels: '可选模型',
@@ -688,6 +701,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
     passwordDescription: '请输入您当前的密码以更改您的密码。',
     model: '模型提供商',
     systemModelDescription: '请在开始之前完成这些设置',
+    dataSources: '数据源',
     team: '团队',
     system: '系统',
     logout: '登出',
@@ -1454,6 +1468,12 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
     codeDescription: '它允许开发人员编写自定义 Python 逻辑。',
     dataOperations: '数据操作',
     dataOperationsDescription: '对数据对象执行各种操作。',
+    variableAssigner: '变量赋值器',
+    variableAssignerDescription:
+      '此组件对数据对象执行操作,包括提取、筛选和编辑数据中的键和值。',
+    variableAggregator: '变量聚合',
+    variableAggregatorDescription: `将多路分支的变量聚合为一个变量,以实现下游节点统一配置。
+变量聚合节点(原变量赋值节点)是工作流程中的一个关键节点,它负责整合不同分支的输出结果,确保无论哪个分支被执行,其结果都能通过一个统一的变量来引用和访问。这在多分支的情况下非常有用,可将不同分支下相同作用的变量映射为一个输出变量,避免下游节点重复定义。`,
     inputVariables: '输入变量',
     addVariable: '新增变量',
     runningHintText: '正在运行中...🕞',
@@ -1731,6 +1751,10 @@ Tokenizer 会根据所选方式将内容存储为对应的数据结构。`,
 <p>尚未解析的文件将不再被处理。</p> <br/>
 <p>你确定要继续吗?</p> `,
+    unlinkPipelineModalConfirmText: '解绑',
+    unlinkSourceModalTitle: '取消链接数据源',
+    unlinkSourceModalContent: `
+<p>您确定要取消链接此数据源吗?</p>`,
+    unlinkSourceModalConfirmText: '取消链接',
   },
   datasetOverview: {
     downloadTip: '正在从数据源下载文件。',
@@ -72,6 +72,8 @@ import { SwitchNode } from './node/switch-node';
 import { TemplateNode } from './node/template-node';
 import TokenizerNode from './node/tokenizer-node';
 import { ToolNode } from './node/tool-node';
+import { VariableAggregatorNode } from './node/variable-aggregator-node';
+import { VariableAssignerNode } from './node/variable-assigner-node';

 export const nodeTypes: NodeTypes = {
   ragNode: RagNode,
@@ -98,6 +100,8 @@ export const nodeTypes: NodeTypes = {
   splitterNode: SplitterNode,
   contextNode: ExtractorNode,
   dataOperationsNode: DataOperationsNode,
+  variableAssignerNode: VariableAssignerNode,
+  variableAggregatorNode: VariableAggregatorNode,
 };

 const edgeTypes = {
@@ -79,6 +79,8 @@ export function AccordionOperators({
         Operator.Code,
         Operator.StringTransform,
         Operator.DataOperations,
+        Operator.VariableAssigner,
+        Operator.VariableAggregator,
       ]}
       isCustomDropdown={isCustomDropdown}
       mousePosition={mousePosition}
@ -58,6 +58,7 @@ function InnerToolNode({
|
||||
const mcp = x as unknown as IAgentForm['mcp'][number];
|
||||
return (
|
||||
<ToolCard
|
||||
key={mcp.mcp_id}
|
||||
onClick={handleClick(mcp.mcp_id)}
|
||||
className="cursor-pointer"
|
||||
data-tool={x.mcp_id}
|
||||
@ -70,6 +71,7 @@ function InnerToolNode({
|
||||
const tool = x as unknown as IAgentForm['tools'][number];
|
||||
return (
|
||||
<ToolCard
|
||||
key={tool.component_name}
|
||||
onClick={handleClick(tool.component_name)}
|
||||
className="cursor-pointer"
|
||||
data-tool={tool.component_name}
|
||||
|
||||
web/src/pages/agent/canvas/node/variable-aggregator-node.tsx (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { IRagNode } from '@/interfaces/database/agent';
+import { NodeProps } from '@xyflow/react';
+import { RagNode } from '.';
+
+export function VariableAggregatorNode({ ...props }: NodeProps<IRagNode>) {
+  return (
+    <RagNode {...props}>
+      <section>VariableAggregatorNode</section>
+    </RagNode>
+  );
+}

web/src/pages/agent/canvas/node/variable-assigner-node.tsx (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { IRagNode } from '@/interfaces/database/agent';
+import { NodeProps } from '@xyflow/react';
+import { RagNode } from '.';
+
+export function VariableAssignerNode({ ...props }: NodeProps<IRagNode>) {
+  return (
+    <RagNode {...props}>
+      <section>select</section>
+    </RagNode>
+  );
+}
@@ -22,7 +22,6 @@ export enum AgentDialogueMode {
 }

 import { ModelVariableType } from '@/constants/knowledge';
-import i18n from '@/locales/config';
 import { t } from 'i18next';

 // DuckDuckGo's channel options
@@ -66,106 +65,6 @@ export const AgentOperatorList = [
   Operator.Agent,
 ];

-export const componentMenuList = [
-  {
-    name: Operator.Retrieval,
-  },
-  {
-    name: Operator.Categorize,
-  },
-  {
-    name: Operator.Message,
-  },
-
-  {
-    name: Operator.RewriteQuestion,
-  },
-  {
-    name: Operator.KeywordExtract,
-  },
-  {
-    name: Operator.Switch,
-  },
-  {
-    name: Operator.Iteration,
-  },
-  {
-    name: Operator.Code,
-  },
-  {
-    name: Operator.WaitingDialogue,
-  },
-  {
-    name: Operator.Agent,
-  },
-  {
-    name: Operator.Note,
-  },
-  {
-    name: Operator.DuckDuckGo,
-  },
-  {
-    name: Operator.Baidu,
-  },
-  {
-    name: Operator.Wikipedia,
-  },
-  {
-    name: Operator.PubMed,
-  },
-  {
-    name: Operator.ArXiv,
-  },
-  {
-    name: Operator.Google,
-  },
-  {
-    name: Operator.Bing,
-  },
-  {
-    name: Operator.GoogleScholar,
-  },
-  {
-    name: Operator.GitHub,
-  },
-  {
-    name: Operator.BaiduFanyi,
-  },
-  {
-    name: Operator.QWeather,
-  },
-  {
-    name: Operator.ExeSQL,
-  },
-  {
-    name: Operator.WenCai,
-  },
-  {
-    name: Operator.AkShare,
-  },
-  {
-    name: Operator.YahooFinance,
-  },
-  {
-    name: Operator.Jin10,
-  },
-  {
-    name: Operator.TuShare,
-  },
-  {
-    name: Operator.Crawler,
-  },
-  {
-    name: Operator.Invoke,
-  },
-  {
-    name: Operator.Email,
-  },
-  {
-    name: Operator.SearXNG,
-  },
-];
-
 export const DataOperationsOperatorOptions = [
   ComparisonOperator.Equal,
   ComparisonOperator.NotEqual,
@@ -209,14 +108,6 @@ export const initialBeginValues = {
   prologue: `Hi! I'm your assistant. What can I do for you?`,
 };

-export const initialGenerateValues = {
-  ...initialLlmBaseValues,
-  prompt: i18n.t('flow.promptText'),
-  cite: true,
-  message_history_window_size: 12,
-  parameters: [],
-};
-
 export const initialRewriteQuestionValues = {
   ...initialLlmBaseValues,
   language: '',
@@ -281,11 +172,6 @@ export const initialSearXNGValues = {
   },
 };

-export const initialBaiduValues = {
-  top_n: 10,
-  ...initialQueryBaseValues,
-};
-
 export const initialWikipediaValues = {
   top_n: 10,
   language: 'en',
@@ -385,13 +271,6 @@ export const initialGithubValues = {
   },
 };

-export const initialBaiduFanyiValues = {
-  appid: 'xxx',
-  secret_key: 'xxx',
-  trans_type: 'translate',
-  ...initialQueryBaseValues,
-};
-
 export const initialQWeatherValues = {
   web_apikey: 'xxx',
   type: 'weather',
@@ -717,6 +596,10 @@ export const initialDataOperationsValues = {
   },
 };

+export const initialVariableAssignerValues = {};
+
+export const initialVariableAggregatorValues = {};
+
 export const CategorizeAnchorPointPositions = [
   { top: 1, right: 34 },
   { top: 8, right: 18 },
@@ -757,7 +640,6 @@ export const RestrictedUpstreamMap = {
     Operator.Message,
     Operator.Relevant,
   ],
-  [Operator.Baidu]: [Operator.Begin, Operator.Retrieval],
   [Operator.DuckDuckGo]: [Operator.Begin, Operator.Retrieval],
   [Operator.Wikipedia]: [Operator.Begin, Operator.Retrieval],
   [Operator.PubMed]: [Operator.Begin, Operator.Retrieval],
@@ -766,7 +648,6 @@ export const RestrictedUpstreamMap = {
   [Operator.Bing]: [Operator.Begin, Operator.Retrieval],
   [Operator.GoogleScholar]: [Operator.Begin, Operator.Retrieval],
   [Operator.GitHub]: [Operator.Begin, Operator.Retrieval],
-  [Operator.BaiduFanyi]: [Operator.Begin, Operator.Retrieval],
   [Operator.QWeather]: [Operator.Begin, Operator.Retrieval],
   [Operator.SearXNG]: [Operator.Begin, Operator.Retrieval],
   [Operator.ExeSQL]: [Operator.Begin],
@@ -798,6 +679,7 @@ export const RestrictedUpstreamMap = {
   [Operator.Tokenizer]: [Operator.Begin],
   [Operator.Extractor]: [Operator.Begin],
   [Operator.File]: [Operator.Begin],
+  [Operator.VariableAssigner]: [Operator.Begin],
 };

 export const NodeMap = {
@@ -809,7 +691,6 @@ export const NodeMap = {
   [Operator.RewriteQuestion]: 'rewriteNode',
   [Operator.KeywordExtract]: 'keywordNode',
   [Operator.DuckDuckGo]: 'ragNode',
-  [Operator.Baidu]: 'ragNode',
   [Operator.Wikipedia]: 'ragNode',
   [Operator.PubMed]: 'ragNode',
   [Operator.ArXiv]: 'ragNode',
@@ -817,7 +698,6 @@ export const NodeMap = {
   [Operator.Bing]: 'ragNode',
   [Operator.GoogleScholar]: 'ragNode',
   [Operator.GitHub]: 'ragNode',
-  [Operator.BaiduFanyi]: 'ragNode',
   [Operator.QWeather]: 'ragNode',
   [Operator.SearXNG]: 'ragNode',
   [Operator.ExeSQL]: 'ragNode',
@@ -849,6 +729,8 @@ export const NodeMap = {
   [Operator.HierarchicalMerger]: 'splitterNode',
   [Operator.Extractor]: 'contextNode',
   [Operator.DataOperations]: 'dataOperationsNode',
+  [Operator.VariableAssigner]: 'variableAssignerNode',
+  [Operator.VariableAggregator]: 'variableAggregatorNode',
 };

 export enum BeginQueryType {
@@ -2,8 +2,6 @@ import { Operator } from '../constant';
 import AgentForm from '../form/agent-form';
 import AkShareForm from '../form/akshare-form';
 import ArXivForm from '../form/arxiv-form';
-import BaiduFanyiForm from '../form/baidu-fanyi-form';
-import BaiduForm from '../form/baidu-form';
 import BeginForm from '../form/begin-form';
 import BingForm from '../form/bing-form';
 import CategorizeForm from '../form/categorize-form';
@@ -40,6 +38,7 @@ import TokenizerForm from '../form/tokenizer-form';
 import ToolForm from '../form/tool-form';
 import TuShareForm from '../form/tushare-form';
 import UserFillUpForm from '../form/user-fill-up-form';
+import VariableAssignerForm from '../form/variable-assigner-form';
 import WenCaiForm from '../form/wencai-form';
 import WikipediaForm from '../form/wikipedia-form';
 import YahooFinanceForm from '../form/yahoo-finance-form';
@@ -72,9 +71,6 @@ export const FormConfigMap = {
   [Operator.Agent]: {
     component: AgentForm,
   },
-  [Operator.Baidu]: {
-    component: BaiduForm,
-  },
   [Operator.DuckDuckGo]: {
     component: DuckDuckGoForm,
   },
@@ -102,9 +98,6 @@ export const FormConfigMap = {
   [Operator.GitHub]: {
     component: GithubForm,
   },
-  [Operator.BaiduFanyi]: {
-    component: BaiduFanyiForm,
-  },
   [Operator.QWeather]: {
     component: QWeatherForm,
   },
@@ -190,4 +183,7 @@ export const FormConfigMap = {
   [Operator.DataOperations]: {
     component: DataOperationsForm,
   },
+  [Operator.VariableAssigner]: {
+    component: VariableAssignerForm,
+  },
 };
@ -26,6 +26,7 @@ import { useTranslation } from 'react-i18next';
|
||||
import { z } from 'zod';
|
||||
import {
|
||||
AgentExceptionMethod,
|
||||
JsonSchemaDataType,
|
||||
NodeHandleId,
|
||||
VariableType,
|
||||
initialAgentValues,
|
||||
@ -157,6 +158,7 @@ function AgentForm({ node }: INextOperatorForm) {
|
||||
placeholder={t('flow.messagePlaceholder')}
|
||||
showToolbar={true}
|
||||
extraOptions={extraOptions}
|
||||
types={[JsonSchemaDataType.String]}
|
||||
></PromptEditor>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
@ -174,6 +176,7 @@ function AgentForm({ node }: INextOperatorForm) {
|
||||
<PromptEditor
|
||||
{...field}
|
||||
showToolbar={true}
|
||||
types={[JsonSchemaDataType.String]}
|
||||
></PromptEditor>
|
||||
</section>
|
||||
</FormControl>
|
||||
|
||||
@@ -1,71 +0,0 @@
-import { useTranslate } from '@/hooks/common-hooks';
-import { Form, Input, Select } from 'antd';
-import { useMemo } from 'react';
-import { IOperatorForm } from '../../interface';
-import {
-  BaiduFanyiDomainOptions,
-  BaiduFanyiSourceLangOptions,
-} from '../../options';
-import DynamicInputVariable from '../components/dynamic-input-variable';
-
-const BaiduFanyiForm = ({ onValuesChange, form, node }: IOperatorForm) => {
-  const { t } = useTranslate('flow');
-  const options = useMemo(() => {
-    return ['translate', 'fieldtranslate'].map((x) => ({
-      value: x,
-      label: t(`baiduSecretKeyOptions.${x}`),
-    }));
-  }, [t]);
-
-  const baiduFanyiOptions = useMemo(() => {
-    return BaiduFanyiDomainOptions.map((x) => ({
-      value: x,
-      label: t(`baiduDomainOptions.${x}`),
-    }));
-  }, [t]);
-
-  const baiduFanyiSourceLangOptions = useMemo(() => {
-    return BaiduFanyiSourceLangOptions.map((x) => ({
-      value: x,
-      label: t(`baiduSourceLangOptions.${x}`),
-    }));
-  }, [t]);
-
-  return (
-    <Form
-      name="basic"
-      autoComplete="off"
-      form={form}
-      onValuesChange={onValuesChange}
-      layout={'vertical'}
-    >
-      <DynamicInputVariable node={node}></DynamicInputVariable>
-      <Form.Item label={t('appid')} name={'appid'}>
-        <Input></Input>
-      </Form.Item>
-      <Form.Item label={t('secretKey')} name={'secret_key'}>
-        <Input></Input>
-      </Form.Item>
-      <Form.Item label={t('transType')} name={'trans_type'}>
-        <Select options={options}></Select>
-      </Form.Item>
-      <Form.Item noStyle dependencies={['model_type']}>
-        {({ getFieldValue }) =>
-          getFieldValue('trans_type') === 'fieldtranslate' && (
-            <Form.Item label={t('domain')} name={'domain'}>
-              <Select options={baiduFanyiOptions}></Select>
-            </Form.Item>
-          )
-        }
-      </Form.Item>
-      <Form.Item label={t('sourceLang')} name={'source_lang'}>
-        <Select options={baiduFanyiSourceLangOptions}></Select>
-      </Form.Item>
-      <Form.Item label={t('targetLang')} name={'target_lang'}>
-        <Select options={baiduFanyiSourceLangOptions}></Select>
-      </Form.Item>
-    </Form>
-  );
-};
-
-export default BaiduFanyiForm;
@@ -1,22 +0,0 @@
-import { TopNFormField } from '@/components/top-n-item';
-import { Form } from '@/components/ui/form';
-import { INextOperatorForm } from '../../interface';
-import { DynamicInputVariable } from '../components/next-dynamic-input-variable';
-
-const BaiduForm = ({ form, node }: INextOperatorForm) => {
-  return (
-    <Form {...form}>
-      <form
-        className="space-y-6"
-        onSubmit={(e) => {
-          e.preventDefault();
-        }}
-      >
-        <DynamicInputVariable node={node}></DynamicInputVariable>
-        <TopNFormField></TopNFormField>
-      </form>
-    </Form>
-  );
-};
-
-export default BaiduForm;
@@ -21,6 +21,7 @@ import {
   TooltipTrigger,
 } from '@/components/ui/tooltip';
 import { cn } from '@/lib/utils';
+import { JsonSchemaDataType } from '@/pages/agent/constant';
 import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext';
 import { Variable } from 'lucide-react';
 import { ReactNode, useCallback, useState } from 'react';
@@ -54,6 +55,7 @@ type IProps = {
   value?: string;
   onChange?: (value?: string) => void;
   placeholder?: ReactNode;
+  types?: JsonSchemaDataType[];
 } & PromptContentProps &
   Pick<VariablePickerMenuPluginProps, 'extraOptions' | 'baseOptions'>;

@@ -127,6 +129,7 @@ export function PromptEditor({
   multiLine = true,
   extraOptions,
   baseOptions,
+  types,
 }: IProps) {
   const { t } = useTranslation();
   const initialConfig: InitialConfigType = {
@@ -179,6 +182,7 @@ export function PromptEditor({
           value={value}
           extraOptions={extraOptions}
           baseOptions={baseOptions}
+          types={types}
         ></VariablePickerMenuPlugin>
         <PasteHandlerPlugin />
         <VariableOnChangePlugin
@@ -30,11 +30,12 @@ import * as ReactDOM from 'react-dom';

 import { $createVariableNode } from './variable-node';

+import { JsonSchemaDataType } from '@/pages/agent/constant';
 import {
   useFindAgentStructuredOutputLabel,
   useShowSecondaryMenu,
 } from '@/pages/agent/hooks/use-build-structured-output';
-import { useBuildQueryVariableOptions } from '@/pages/agent/hooks/use-get-begin-query';
+import { useFilterQueryVariableOptionsByTypes } from '@/pages/agent/hooks/use-get-begin-query';
 import { PromptIdentity } from '../../agent-form/use-build-prompt-options';
 import { StructuredOutputSecondaryMenu } from '../structured-output-secondary-menu';
 import { ProgrammaticTag } from './constant';
@@ -80,9 +81,11 @@ function VariablePickerMenuItem({
   index,
   option,
   selectOptionAndCleanUp,
+  types,
 }: {
   index: number;
   option: VariableOption;
+  types?: JsonSchemaDataType[];
   selectOptionAndCleanUp: (
     option: VariableOption | VariableInnerOption,
   ) => void;
@@ -108,6 +111,7 @@ function VariablePickerMenuItem({
             <StructuredOutputSecondaryMenu
               key={x.value}
               data={x}
+              types={types}
               click={(y) =>
                 selectOptionAndCleanUp({
                   ...x,
@@ -149,11 +153,13 @@ export type VariablePickerMenuPluginProps = {
   value?: string;
   extraOptions?: VariablePickerMenuOptionType[];
   baseOptions?: VariablePickerMenuOptionType[];
+  types?: JsonSchemaDataType[];
 };
 export default function VariablePickerMenuPlugin({
   value,
   extraOptions,
   baseOptions,
+  types,
 }: VariablePickerMenuPluginProps): JSX.Element {
   const [editor] = useLexicalComposerContext();

@@ -180,7 +186,7 @@ export default function VariablePickerMenuPlugin({

   const [queryString, setQueryString] = React.useState<string | null>('');

-  let options = useBuildQueryVariableOptions();
+  let options = useFilterQueryVariableOptionsByTypes(types);

   if (baseOptions) {
     options = baseOptions as typeof options;
@@ -379,6 +385,7 @@ export default function VariablePickerMenuPlugin({
               index={i}
               key={option.key}
               option={option}
+              types={types}
               selectOptionAndCleanUp={selectOptionAndCleanUp}
             />
           ))}
@ -5,12 +5,11 @@ import {
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from '@/components/ui/form';
|
||||
import { isEmpty, toLower } from 'lodash';
|
||||
import { ReactNode, useMemo } from 'react';
|
||||
import { ReactNode } from 'react';
|
||||
import { useFormContext } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { JsonSchemaDataType } from '../../constant';
|
||||
import { useBuildQueryVariableOptions } from '../../hooks/use-get-begin-query';
|
||||
import { useFilterQueryVariableOptionsByTypes } from '../../hooks/use-get-begin-query';
|
||||
import { GroupedSelectWithSecondaryMenu } from './select-with-secondary-menu';
|
||||
|
||||
type QueryVariableProps = {
|
||||
@ -31,22 +30,7 @@ export function QueryVariable({
|
||||
const { t } = useTranslation();
|
||||
const form = useFormContext();
|
||||
|
||||
const nextOptions = useBuildQueryVariableOptions();
|
||||
|
||||
const finalOptions = useMemo(() => {
|
||||
return !isEmpty(types)
|
||||
? nextOptions.map((x) => {
|
||||
return {
|
||||
...x,
|
||||
options: x.options.filter(
|
||||
(y) =>
|
||||
types?.some((x) => toLower(y.type).includes(x)) ||
|
||||
y.type === undefined, // agent structured output
|
||||
),
|
||||
};
|
||||
})
|
||||
: nextOptions;
|
||||
}, [nextOptions, types]);
|
||||
const finalOptions = useFilterQueryVariableOptionsByTypes(types);
|
||||
|
||||
return (
|
||||
<FormField
|
||||
|
||||
@ -14,6 +14,7 @@ import { memo } from 'react';
|
||||
import { useFieldArray, useForm } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { z } from 'zod';
|
||||
import { JsonSchemaDataType } from '../../constant';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { PromptEditor } from '../components/prompt-editor';
|
||||
@ -66,6 +67,7 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
<PromptEditor
|
||||
{...field}
|
||||
placeholder={t('flow.messagePlaceholder')}
|
||||
types={[JsonSchemaDataType.String]}
|
||||
></PromptEditor>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
|
||||
web/src/pages/agent/form/variable-assigner-form/index.tsx (new file, 100 lines)
@@ -0,0 +1,100 @@
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { Form } from '@/components/ui/form';
import { Separator } from '@/components/ui/separator';
import { buildOptions } from '@/utils/form';
import { zodResolver } from '@hookform/resolvers/zod';
import { memo } from 'react';
import { useForm } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import { z } from 'zod';
import {
  JsonSchemaDataType,
  Operations,
  initialDataOperationsValues,
} from '../../constant';
import { useFormValues } from '../../hooks/use-form-values';
import { useWatchFormChange } from '../../hooks/use-watch-form-change';
import { INextOperatorForm } from '../../interface';
import { buildOutputList } from '../../utils/build-output-list';
import { FormWrapper } from '../components/form-wrapper';
import { Output, OutputSchema } from '../components/output';
import { QueryVariableList } from '../components/query-variable-list';

export const RetrievalPartialSchema = {
  query: z.array(z.object({ input: z.string().optional() })),
  operations: z.string(),
  select_keys: z.array(z.object({ name: z.string().optional() })).optional(),
  remove_keys: z.array(z.object({ name: z.string().optional() })).optional(),
  updates: z
    .array(
      z.object({ key: z.string().optional(), value: z.string().optional() }),
    )
    .optional(),
  rename_keys: z
    .array(
      z.object({
        old_key: z.string().optional(),
        new_key: z.string().optional(),
      }),
    )
    .optional(),
  filter_values: z
    .array(
      z.object({
        key: z.string().optional(),
        value: z.string().optional(),
        operator: z.string().optional(),
      }),
    )
    .optional(),
  ...OutputSchema,
};

export const FormSchema = z.object(RetrievalPartialSchema);

export type DataOperationsFormSchemaType = z.infer<typeof FormSchema>;

const outputList = buildOutputList(initialDataOperationsValues.outputs);

function VariableAssignerForm({ node }: INextOperatorForm) {
  const { t } = useTranslation();

  const defaultValues = useFormValues(initialDataOperationsValues, node);

  const form = useForm<DataOperationsFormSchemaType>({
    defaultValues: defaultValues,
    mode: 'onChange',
    resolver: zodResolver(FormSchema),
    shouldUnregister: true,
  });

  const OperationsOptions = buildOptions(
    Operations,
    t,
    `flow.operationsOptions`,
    true,
  );

  useWatchFormChange(node?.id, form, true);

  return (
    <Form {...form}>
      <FormWrapper>
        <QueryVariableList
          tooltip={t('flow.queryTip')}
          label={t('flow.query')}
          types={[JsonSchemaDataType.Array, JsonSchemaDataType.Object]}
        ></QueryVariableList>
        <Separator />
        <RAGFlowFormItem name="operations" label={t('flow.operations')}>
          <SelectWithSearch options={OperationsOptions} allowClear />
        </RAGFlowFormItem>

        <Output list={outputList} isFormRequired></Output>
      </FormWrapper>
    </Form>
  );
}

export default memo(VariableAssignerForm);
@@ -12,8 +12,6 @@ import {
  initialAgentValues,
  initialAkShareValues,
  initialArXivValues,
  initialBaiduFanyiValues,
  initialBaiduValues,
  initialBeginValues,
  initialBingValues,
  initialCategorizeValues,
@@ -50,6 +48,8 @@ import {
  initialTokenizerValues,
  initialTuShareValues,
  initialUserFillUpValues,
  initialVariableAggregatorValues,
  initialVariableAssignerValues,
  initialWaitingDialogueValues,
  initialWenCaiValues,
  initialWikipediaValues,
@@ -86,7 +86,6 @@ export const useInitializeOperatorParams = () => {
        llm_id: llmId,
      },
      [Operator.DuckDuckGo]: initialDuckValues,
      [Operator.Baidu]: initialBaiduValues,
      [Operator.Wikipedia]: initialWikipediaValues,
      [Operator.PubMed]: initialPubMedValues,
      [Operator.ArXiv]: initialArXivValues,
@@ -95,7 +94,6 @@ export const useInitializeOperatorParams = () => {
      [Operator.GoogleScholar]: initialGoogleScholarValues,
      [Operator.SearXNG]: initialSearXNGValues,
      [Operator.GitHub]: initialGithubValues,
      [Operator.BaiduFanyi]: initialBaiduFanyiValues,
      [Operator.QWeather]: initialQWeatherValues,
      [Operator.ExeSQL]: initialExeSqlValues,
      [Operator.Switch]: initialSwitchValues,
@@ -131,6 +129,8 @@ export const useInitializeOperatorParams = () => {
        prompts: t('flow.prompts.user.summary'),
      },
      [Operator.DataOperations]: initialDataOperationsValues,
      [Operator.VariableAssigner]: initialVariableAssignerValues,
      [Operator.VariableAggregator]: initialVariableAggregatorValues,
    };
  }, [llmId]);

@@ -4,12 +4,14 @@ import { RAGFlowNodeType } from '@/interfaces/database/flow';
import { buildNodeOutputOptions } from '@/utils/canvas-util';
import { DefaultOptionType } from 'antd/es/select';
import { t } from 'i18next';
import { isEmpty, toLower } from 'lodash';
import get from 'lodash/get';
import { useCallback, useContext, useEffect, useMemo, useState } from 'react';
import {
  AgentDialogueMode,
  BeginId,
  BeginQueryType,
  JsonSchemaDataType,
  Operator,
  VariableType,
} from '../constant';
@@ -171,6 +173,29 @@ export function useBuildQueryVariableOptions(n?: RAGFlowNodeType) {
  return nextOptions;
}

export function useFilterQueryVariableOptionsByTypes(
  types?: JsonSchemaDataType[],
) {
  const nextOptions = useBuildQueryVariableOptions();

  const filteredOptions = useMemo(() => {
    return !isEmpty(types)
      ? nextOptions.map((x) => {
          return {
            ...x,
            options: x.options.filter(
              (y) =>
                types?.some((x) => toLower(y.type).includes(x)) ||
                y.type === undefined, // agent structured output
            ),
          };
        })
      : nextOptions;
  }, [nextOptions, types]);

  return filteredOptions;
}
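The hook above lifts the type filter that QueryVariable previously inlined into a shared, memoized helper: options whose type contains one of the requested JsonSchemaDataType values are kept, and options with an undefined type (agent structured output) always pass. A minimal usage sketch; the consuming component, import path, and option fields are assumed for illustration:

import { JsonSchemaDataType } from '../constant';
import { useFilterQueryVariableOptionsByTypes } from './use-get-begin-query';

// Offer only string-typed upstream variables to a text-only consumer.
export function StringVariableOptionList() {
  const groups = useFilterQueryVariableOptionsByTypes([
    JsonSchemaDataType.String,
  ]);

  return (
    <ul>
      {groups.map((group) =>
        group.options.map((option) => (
          <li key={option.value}>{option.label}</li>
        )),
      )}
    </ul>
  );
}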
export function useBuildComponentIdOptions(nodeId?: string, parentId?: string) {
  const nodes = useGraphStore((state) => state.nodes);

@@ -25,7 +25,10 @@ export function LogSheet({
}: LogSheetProps) {
  return (
    <Sheet open onOpenChange={hideModal} modal={false}>
      <SheetContent className={cn('top-20 right-[620px]')}>
      <SheetContent
        className={cn('top-20 right-[620px]')}
        onInteractOutside={(e) => e.preventDefault()}
      >
        <SheetHeader>
          <SheetTitle className="flex items-center gap-1">
            <NotebookText className="size-4" />

@@ -14,7 +14,7 @@ import { ReactComponent as YahooFinanceIcon } from '@/assets/svg/yahoo-finance.s

import { IconFont } from '@/components/icon-font';
import { cn } from '@/lib/utils';
import { FileCode, HousePlus } from 'lucide-react';
import { Equal, FileCode, HousePlus, Variable } from 'lucide-react';
import { Operator } from './constant';

interface IProps {
@@ -55,9 +55,10 @@ export const SVGIconMap = {
  [Operator.WenCai]: WenCaiIcon,
  [Operator.Crawler]: CrawlerIcon,
};

export const LucideIconMap = {
  [Operator.DataOperations]: FileCode,
  [Operator.VariableAssigner]: Equal,
  [Operator.VariableAggregator]: Variable,
};

const Empty = () => {
@@ -0,0 +1,97 @@
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { cn } from '@/lib/utils';
import {
  IDataSorceInfo,
  IDataSourceBase,
} from '@/pages/user-setting/data-source/interface';
import { Check } from 'lucide-react';
import { useMemo } from 'react';

export type IAddedSourceCardProps = IDataSorceInfo & {
  filterString: string;
  list: IDataSourceBase[];
  selectedList: IDataSourceBase[];
  setSelectedList: (list: IDataSourceBase[]) => void;
};
export const AddedSourceCard = (props: IAddedSourceCardProps) => {
  const {
    list: originList,
    name,
    icon,
    filterString,
    selectedList,
    setSelectedList,
  } = props;

  const list = useMemo(() => {
    return originList.map((item) => {
      const checked = selectedList?.some((i) => i.id === item.id) || false;
      return {
        ...item,
        checked: checked,
      };
    });
  }, [originList, selectedList]);

  const filterList = useMemo(
    () => list.filter((item) => item.name.indexOf(filterString) > -1),
    [filterString, list],
  );

  // const { navigateToDataSourceDetail } = useNavigatePage();
  // const toDetail = (id: string) => {
  //   navigateToDataSourceDetail(id);
  // };

  const onCheck = (item: IDataSourceBase & { checked: boolean }) => {
    if (item.checked) {
      setSelectedList(selectedList.filter((i) => i.id !== item.id));
    } else {
      setSelectedList([...(selectedList || []), item]);
    }
  };
  return (
    <>
      {filterList.length > 0 && (
        <Card className="bg-transparent border border-border-button px-5 pt-[10px] pb-5 rounded-md">
          <CardHeader className="flex flex-row items-center justify-between space-y-0 p-0 pb-3">
            {/* <Users className="mr-2 h-5 w-5 text-[#1677ff]" /> */}
            <CardTitle className="text-base flex gap-1 font-normal">
              {icon}
              {name}
            </CardTitle>
          </CardHeader>
          <CardContent className="p-2 flex flex-col gap-2">
            {filterList.map((item) => (
              <div
                key={item.id}
                className={cn(
                  'flex flex-row items-center justify-between rounded-md bg-bg-input px-2 py-1 cursor-pointer',
                  // { hidden: item.name.indexOf(filterString) <= -1 },
                )}
                onClick={() => {
                  console.log('item--->', item);
                  // toDetail(item.id);
                  onCheck(item);
                }}
              >
                <div className="text-sm text-text-secondary ">{item.name}</div>
                <div className="text-sm text-text-secondary flex gap-2">
                  {item.checked && (
                    <Check
                      className="cursor-pointer"
                      size={14}
                      // onClick={() => {
                      //   toDetail(item.id);
                      // }}
                    />
                  )}
                </div>
              </div>
            ))}
          </CardContent>
        </Card>
      )}
    </>
  );
};
@@ -0,0 +1,86 @@
import { Button } from '@/components/ui/button';
import { SearchInput } from '@/components/ui/input';
import { Modal } from '@/components/ui/modal/modal';
import { IConnector } from '@/interfaces/database/knowledge';
import { useListDataSource } from '@/pages/user-setting/data-source/hooks';
import { IDataSourceBase } from '@/pages/user-setting/data-source/interface';
import { t } from 'i18next';
import { useEffect, useState } from 'react';
import { AddedSourceCard } from './added-source-card';

const LinkDataSourceModal = ({
  selectedList,
  open,
  setOpen,
  onSubmit,
}: {
  selectedList: IConnector[];
  open: boolean;
  setOpen: (open: boolean) => void;
  onSubmit?: (list: IDataSourceBase[] | undefined) => void;
}) => {
  const [list, setList] = useState<IDataSourceBase[]>();
  const [filterString, setFilterString] = useState('');

  useEffect(() => {
    setList(selectedList);
  }, [selectedList]);

  const { categorizedList } = useListDataSource();
  const handleFormSubmit = (values: any) => {
    console.log(values, selectedList);
    onSubmit?.(list);
  };
  return (
    <Modal
      className="!w-[560px]"
      title={t('knowledgeConfiguration.linkDataSource')}
      open={open}
      onCancel={() => {
        setList(selectedList);
      }}
      onOpenChange={setOpen}
      showfooter={false}
    >
      <div className="flex flex-col gap-4 ">
        {/* {JSON.stringify(selectedList)} */}
        <SearchInput
          value={filterString}
          onChange={(e) => setFilterString(e.target.value)}
        />
        <div className="flex flex-col gap-3">
          {categorizedList.map((item, index) => (
            <AddedSourceCard
              key={index}
              selectedList={list as IDataSourceBase[]}
              setSelectedList={(list) => setList(list)}
              filterString={filterString}
              {...item}
            />
          ))}
        </div>
        <div className="flex justify-end gap-1">
          <Button
            type="button"
            variant={'outline'}
            className="btn-primary"
            onClick={() => {
              setOpen(false);
            }}
          >
            {t('modal.cancelText')}
          </Button>
          <Button
            type="button"
            variant={'default'}
            className="btn-primary"
            onClick={handleFormSubmit}
          >
            {t('modal.okText')}
          </Button>
        </div>
      </div>
    </Modal>
  );
};
export default LinkDataSourceModal;
@@ -0,0 +1,193 @@
import { Button } from '@/components/ui/button';
import { Modal } from '@/components/ui/modal/modal';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { IConnector } from '@/interfaces/database/knowledge';
import { DataSourceInfo } from '@/pages/user-setting/data-source/contant';
import { IDataSourceBase } from '@/pages/user-setting/data-source/interface';
import { Link, Settings, Unlink } from 'lucide-react';
import { useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import LinkDataSourceModal from './link-data-source-modal';

export type IDataSourceNodeProps = IConnector & {
  icon: React.ReactNode;
};

export interface ILinkDataSourceProps {
  data?: IConnector[];
  handleLinkOrEditSubmit?: (data: IDataSourceBase[] | undefined) => void;
  unbindFunc?: (item: DataSourceItemProps) => void;
}

interface DataSourceItemProps extends IDataSourceNodeProps {
  openLinkModalFunc?: (open: boolean, data?: IDataSourceNodeProps) => void;
  unbindFunc?: (item: DataSourceItemProps) => void;
}

const DataSourceItem = (props: DataSourceItemProps) => {
  const { t } = useTranslation();
  const { id, name, icon, openLinkModalFunc, unbindFunc } = props;

  const { navigateToDataSourceDetail } = useNavigatePage();
  const toDetail = (id: string) => {
    navigateToDataSourceDetail(id);
  };
  const openUnlinkModal = () => {
    Modal.show({
      visible: true,
      className: '!w-[560px]',
      title: t('dataflowParser.unlinkSourceModalTitle'),
      children: (
        <div
          className="text-sm text-text-secondary"
          dangerouslySetInnerHTML={{
            __html: t('dataflowParser.unlinkSourceModalContent'),
          }}
        ></div>
      ),
      onVisibleChange: () => {
        Modal.hide();
      },
      footer: (
        <div className="flex justify-end gap-2">
          <Button variant={'outline'} onClick={() => Modal.hide()}>
            {t('dataflowParser.changeStepModalCancelText')}
          </Button>
          <Button
            variant={'secondary'}
            className="!bg-state-error text-bg-base"
            onClick={() => {
              unbindFunc?.(props);
              Modal.hide();
            }}
          >
            {t('dataflowParser.unlinkSourceModalConfirmText')}
          </Button>
        </div>
      ),
    });
  };

  return (
    <div className="flex items-center justify-between gap-1 px-2 rounded-md border ">
      <div className="flex items-center gap-1">
        {icon}
        <div>{name}</div>
      </div>
      <div className="flex gap-1 items-center">
        <Button
          variant={'transparent'}
          className="border-none"
          type="button"
          onClick={() => {
            toDetail(id);
          }}
          // onClick={() =>
          //   openLinkModalFunc?.(true, { ...omit(props, ['openLinkModalFunc']) })
          // }
        >
          <Settings />
        </Button>
        <>
          <Button
            type="button"
            variant={'transparent'}
            className="border-none"
            onClick={() => {
              openUnlinkModal();
            }}
          >
            <Unlink />
          </Button>
        </>
      </div>
    </div>
  );
};

const LinkDataSource = (props: ILinkDataSourceProps) => {
  const { data, handleLinkOrEditSubmit: submit, unbindFunc } = props;
  const { t } = useTranslation();
  const [openLinkModal, setOpenLinkModal] = useState(false);

  const pipelineNode: IDataSourceNodeProps[] = useMemo(() => {
    if (data && data.length > 0) {
      return data.map((item) => {
        return {
          ...item,
          id: item?.id,
          name: item?.name,
          icon:
            DataSourceInfo[item?.source as keyof typeof DataSourceInfo]?.icon ||
            '',
        } as IDataSourceNodeProps;
      });
    }
    return [];
  }, [data]);

  const openLinkModalFunc = (open: boolean, data?: IDataSourceNodeProps) => {
    console.log('open', open, data);
    setOpenLinkModal(open);
    // if (data) {
    //   setCurrentDataSource(data);
    // } else {
    //   setCurrentDataSource(undefined);
    // }
  };

  const handleLinkOrEditSubmit = (data: IDataSourceBase[] | undefined) => {
    console.log('handleLinkOrEditSubmit', data);
    submit?.(data);
    setOpenLinkModal(false);
  };

  return (
    <div className="flex flex-col gap-2">
      <section className="flex flex-col">
        <div className="flex items-center gap-1 text-text-primary text-sm">
          {t('knowledgeConfiguration.dataSource')}
        </div>
        <div className="flex justify-between items-center">
          <div className="text-center text-xs text-text-secondary">
            {t('knowledgeConfiguration.linkSourceSetTip')}
          </div>
          <Button
            type="button"
            variant={'transparent'}
            onClick={() => {
              openLinkModalFunc?.(true);
            }}
          >
            <Link />
            <span className="text-xs text-text-primary">
              {t('knowledgeConfiguration.linkDataSource')}
            </span>
          </Button>
        </div>
      </section>
      <section className="flex flex-col gap-2">
        {pipelineNode.map(
          (item) =>
            item.id && (
              <DataSourceItem
                key={item.id}
                openLinkModalFunc={openLinkModalFunc}
                unbindFunc={unbindFunc}
                {...item}
              />
            ),
        )}
      </section>
      <LinkDataSourceModal
        selectedList={data as IConnector[]}
        open={openLinkModal}
        setOpen={(open: boolean) => {
          openLinkModalFunc(open);
        }}
        onSubmit={handleLinkOrEditSubmit}
      />
    </div>
  );
};
export default LinkDataSource;
@@ -76,6 +76,16 @@ export const formSchema = z
      })
      .optional(),
    pagerank: z.number(),
    connectors: z
      .array(
        z.object({
          id: z.string().optional(),
          name: z.string().optional(),
          source: z.string().optional(),
          status: z.string().optional(),
        }),
      )
      .optional(),
    // icon: z.array(z.instanceof(File)),
  })
  .superRefine((data, ctx) => {
@@ -7,6 +7,8 @@ import { Form } from '@/components/ui/form';
import { FormLayout } from '@/constants/form';
import { DocumentParserType } from '@/constants/knowledge';
import { PermissionRole } from '@/constants/permission';
import { DataSourceInfo } from '@/pages/user-setting/data-source/contant';
import { IDataSourceBase } from '@/pages/user-setting/data-source/interface';
import { zodResolver } from '@hookform/resolvers/zod';
import { useEffect, useState } from 'react';
import { useForm, useWatch } from 'react-hook-form';
@@ -19,6 +21,9 @@ import {
} from '../dataset/generate-button/generate';
import { ChunkMethodForm } from './chunk-method-form';
import ChunkMethodLearnMore from './chunk-method-learn-more';
import LinkDataSource, {
  IDataSourceNodeProps,
} from './components/link-data-source';
import { MainContainer } from './configuration-form-container';
import { ChunkMethodItem, ParseTypeItem } from './configuration/common-item';
import { formSchema } from './form-schema';
@@ -78,10 +83,12 @@ export default function DatasetSettings() {
      pipeline_id: '',
      parseType: 1,
      pagerank: 0,
      connectors: [],
    },
  });
  const knowledgeDetails = useFetchKnowledgeConfigurationOnMount(form);
  // const [pipelineData, setPipelineData] = useState<IDataPipelineNodeProps>();
  const [sourceData, setSourceData] = useState<IDataSourceNodeProps[]>();
  const [graphRagGenerateData, setGraphRagGenerateData] =
    useState<IGenerateLogButtonProps>();
  const [raptorGenerateData, setRaptorGenerateData] =
@@ -97,6 +104,19 @@ export default function DatasetSettings() {
    //   linked: true,
    // };
    // setPipelineData(data);

    const source_data: IDataSourceNodeProps[] =
      knowledgeDetails?.connectors?.map((connector) => {
        return {
          ...connector,
          icon:
            DataSourceInfo[connector.source as keyof typeof DataSourceInfo]
              ?.icon || '',
        };
      });

    setSourceData(source_data);

    setGraphRagGenerateData({
      finish_at: knowledgeDetails.graphrag_task_finish_at,
      task_id: knowledgeDetails.graphrag_task_id,
@@ -129,6 +149,23 @@ export default function DatasetSettings() {
  //   }
  // };

  const handleLinkOrEditSubmit = (data: IDataSourceBase[] | undefined) => {
    if (data) {
      const connectors = data.map((connector) => {
        return {
          ...connector,
          icon:
            DataSourceInfo[connector.source as keyof typeof DataSourceInfo]
              ?.icon || '',
        };
      });
      setSourceData(connectors as IDataSourceNodeProps[]);
      form.setValue('connectors', connectors || []);
      // form.setValue('pipeline_name', data.name || '');
      // form.setValue('pipeline_avatar', data.avatar || '');
    }
  };

  const handleDeletePipelineTask = (type: GenerateType) => {
    if (type === GenerateType.KnowledgeGraph) {
      setGraphRagGenerateData({
@@ -158,6 +195,19 @@ export default function DatasetSettings() {
    }
    console.log('parseType', parseType);
  }, [parseType, form]);

  const unbindFunc = (data: IDataSourceBase) => {
    if (data) {
      const connectors = sourceData?.filter((connector) => {
        return connector.id !== data.id;
      });
      console.log('🚀 ~ DatasetSettings ~ connectors:', connectors);
      setSourceData(connectors as IDataSourceNodeProps[]);
      form.setValue('connectors', connectors || []);
      // form.setValue('pipeline_name', data.name || '');
      // form.setValue('pipeline_avatar', data.avatar || '');
    }
  };
  return (
    <section className="p-5 h-full flex flex-col">
      <TopTitle
@@ -205,6 +255,13 @@ export default function DatasetSettings() {
            data={pipelineData}
            handleLinkOrEditSubmit={handleLinkOrEditSubmit}
          /> */}

          <Divider />
          <LinkDataSource
            data={sourceData}
            handleLinkOrEditSubmit={handleLinkOrEditSubmit}
            unbindFunc={unbindFunc}
          />
        </MainContainer>
      </div>
      <div className="text-right items-center flex justify-end gap-3 w-[768px]">
@@ -1,129 +0,0 @@
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { Button } from '@/components/ui/button';
import {
  DropdownMenu,
  DropdownMenuContent,
  DropdownMenuItem,
  DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
import {
  FormControl,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { Separator } from '@/components/ui/separator';
import { SwitchOperatorOptions } from '@/constants/agent';
import { useBuildSwitchOperatorOptions } from '@/hooks/logic-hooks/use-build-operator-options';
import { useFetchKnowledgeMetadata } from '@/hooks/use-knowledge-request';
import { Plus, X } from 'lucide-react';
import { useCallback } from 'react';
import { useFieldArray, useFormContext } from 'react-hook-form';
import { useTranslation } from 'react-i18next';

export function MetadataFilterConditions({ kbIds }: { kbIds: string[] }) {
  const { t } = useTranslation();
  const form = useFormContext();
  const name = 'meta_data_filter.manual';
  const metadata = useFetchKnowledgeMetadata(kbIds);

  const switchOperatorOptions = useBuildSwitchOperatorOptions();

  const { fields, remove, append } = useFieldArray({
    name,
    control: form.control,
  });

  const add = useCallback(
    (key: string) => () => {
      append({
        key,
        value: '',
        op: SwitchOperatorOptions[0].value,
      });
    },
    [append],
  );

  return (
    <section className="flex flex-col gap-2">
      <div className="flex items-center justify-between">
        <FormLabel>{t('chat.conditions')}</FormLabel>
        <DropdownMenu>
          <DropdownMenuTrigger>
            <Button variant={'ghost'} type="button">
              <Plus />
            </Button>
          </DropdownMenuTrigger>
          <DropdownMenuContent>
            {Object.keys(metadata.data).map((key, idx) => {
              return (
                <DropdownMenuItem key={idx} onClick={add(key)}>
                  {key}
                </DropdownMenuItem>
              );
            })}
          </DropdownMenuContent>
        </DropdownMenu>
      </div>
      <div className="space-y-5">
        {fields.map((field, index) => {
          const typeField = `${name}.${index}.key`;
          return (
            <div key={field.id} className="flex w-full items-center gap-2">
              <FormField
                control={form.control}
                name={typeField}
                render={({ field }) => (
                  <FormItem className="flex-1 overflow-hidden">
                    <FormControl>
                      <Input
                        {...field}
                        placeholder={t('common.pleaseInput')}
                      ></Input>
                    </FormControl>
                    <FormMessage />
                  </FormItem>
                )}
              />
              <Separator className="w-3 text-text-secondary" />
              <FormField
                control={form.control}
                name={`${name}.${index}.op`}
                render={({ field }) => (
                  <FormItem className="flex-1 overflow-hidden">
                    <FormControl>
                      <SelectWithSearch
                        {...field}
                        options={switchOperatorOptions}
                      ></SelectWithSearch>
                    </FormControl>
                    <FormMessage />
                  </FormItem>
                )}
              />
              <Separator className="w-3 text-text-secondary" />
              <FormField
                control={form.control}
                name={`${name}.${index}.value`}
                render={({ field }) => (
                  <FormItem className="flex-1 overflow-hidden">
                    <FormControl>
                      <Input placeholder={t('common.pleaseInput')} {...field} />
                    </FormControl>
                    <FormMessage />
                  </FormItem>
                )}
              />
              <Button variant={'ghost'} onClick={() => remove(index)}>
                <X className="text-text-sub-title-invert " />
              </Button>
            </div>
          );
        })}
      </div>
    </section>
  );
}
@@ -0,0 +1,80 @@
import { Modal } from '@/components/ui/modal/modal';
import { IModalProps } from '@/interfaces/common';
import { useEffect, useState } from 'react';
import { FieldValues } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import { DynamicForm, FormFieldConfig } from './component/dynamic-form';
import {
  DataSourceFormBaseFields,
  DataSourceFormDefaultValues,
  DataSourceFormFields,
} from './contant';
import { IDataSorceInfo } from './interface';

const AddDataSourceModal = ({
  visible,
  hideModal,
  loading,
  sourceData,
  onOk,
}: IModalProps<FieldValues> & { sourceData?: IDataSorceInfo }) => {
  const { t } = useTranslation();
  const [fields, setFields] = useState<FormFieldConfig[]>([]);

  useEffect(() => {
    if (sourceData) {
      setFields([
        ...DataSourceFormBaseFields,
        ...DataSourceFormFields[
          sourceData.id as keyof typeof DataSourceFormFields
        ],
      ] as FormFieldConfig[]);
    }
  }, [sourceData]);

  const handleOk = async (values?: FieldValues) => {
    await onOk?.(values);
    hideModal?.();
  };

  return (
    <Modal
      title={t('setting.add')}
      open={visible || false}
      onOpenChange={(open) => !open && hideModal?.()}
      // onOk={() => handleOk()}
      okText={t('common.ok')}
      cancelText={t('common.cancel')}
      showfooter={false}
    >
      <DynamicForm.Root
        fields={fields}
        onSubmit={(data) => {
          console.log(data);
        }}
        defaultValues={
          DataSourceFormDefaultValues[
            sourceData?.id as keyof typeof DataSourceFormDefaultValues
          ] as FieldValues
        }
      >
        <div className="flex items-center justify-end w-full gap-2">
          <DynamicForm.CancelButton
            handleCancel={() => {
              hideModal?.();
            }}
          />
          <DynamicForm.SavingButton
            submitLoading={loading || false}
            buttonText={t('common.ok')}
            submitFunc={(values: FieldValues) => {
              handleOk(values);
            }}
          />
        </div>
      </DynamicForm.Root>
    </Modal>
  );
};

export default AddDataSourceModal;
@@ -0,0 +1,51 @@
import { ConfirmDeleteDialog } from '@/components/confirm-delete-dialog';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { Settings, Trash2 } from 'lucide-react';
import { useDeleteDataSource } from '../hooks';
import { IDataSorceInfo, IDataSourceBase } from '../interface';

export type IAddedSourceCardProps = IDataSorceInfo & {
  list: IDataSourceBase[];
};
export const AddedSourceCard = (props: IAddedSourceCardProps) => {
  const { list, name, icon } = props;
  const { handleDelete } = useDeleteDataSource();
  const { navigateToDataSourceDetail } = useNavigatePage();
  const toDetail = (id: string) => {
    navigateToDataSourceDetail(id);
  };
  return (
    <Card className="bg-transparent border border-border-button px-5 pt-[10px] pb-5 rounded-md">
      <CardHeader className="flex flex-row items-center justify-between space-y-0 p-0 pb-3">
        {/* <Users className="mr-2 h-5 w-5 text-[#1677ff]" /> */}
        <CardTitle className="text-base flex gap-1 font-normal">
          {icon}
          {name}
        </CardTitle>
      </CardHeader>
      <CardContent className="p-2 flex flex-col gap-2">
        {list.map((item) => (
          <div
            key={item.id}
            className="flex flex-row items-center justify-between rounded-md bg-bg-input px-[10px] py-4"
          >
            <div className="text-sm text-text-secondary ">{item.name}</div>
            <div className="text-sm text-text-secondary flex gap-2">
              <Settings
                className="cursor-pointer"
                size={14}
                onClick={() => {
                  toDetail(item.id);
                }}
              />
              <ConfirmDeleteDialog onOk={() => handleDelete(item)}>
                <Trash2 className="cursor-pointer" size={14} />
              </ConfirmDeleteDialog>
            </div>
          </div>
        ))}
      </CardContent>
    </Card>
  );
};
@@ -0,0 +1,725 @@
import { zodResolver } from '@hookform/resolvers/zod';
import { forwardRef, useEffect, useImperativeHandle, useMemo } from 'react';
import {
  DefaultValues,
  FieldValues,
  SubmitHandler,
  useForm,
  useFormContext,
} from 'react-hook-form';
import { ZodSchema, z } from 'zod';

import EditTag from '@/components/edit-tag';
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { Checkbox } from '@/components/ui/checkbox';
import {
  Form,
  FormControl,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { Textarea } from '@/components/ui/textarea';
import { cn } from '@/lib/utils';
import { t } from 'i18next';
import { Loader } from 'lucide-react';

// Field type enumeration
export enum FormFieldType {
  Text = 'text',
  Email = 'email',
  Password = 'password',
  Number = 'number',
  Textarea = 'textarea',
  Select = 'select',
  Checkbox = 'checkbox',
  Tag = 'tag',
}

// Field configuration interface
export interface FormFieldConfig {
  name: string;
  label: string;
  type: FormFieldType;
  hidden?: boolean;
  required?: boolean;
  placeholder?: string;
  options?: { label: string; value: string }[];
  defaultValue?: any;
  validation?: {
    pattern?: RegExp;
    minLength?: number;
    maxLength?: number;
    min?: number;
    max?: number;
    message?: string;
  };
  render?: (fieldProps: any) => React.ReactNode;
  horizontal?: boolean;
  onChange?: (value: any) => void;
}

// Component props interface
interface DynamicFormProps<T extends FieldValues> {
  fields: FormFieldConfig[];
  onSubmit: SubmitHandler<T>;
  className?: string;
  children?: React.ReactNode;
  defaultValues?: DefaultValues<T>;
}

// Form ref interface
export interface DynamicFormRef {
  submit: () => void;
  getValues: () => any;
  reset: (values?: any) => void;
}

// Generate Zod validation schema based on field configurations
const generateSchema = (fields: FormFieldConfig[]): ZodSchema<any> => {
  const schema: Record<string, ZodSchema> = {};
  const nestedSchemas: Record<string, Record<string, ZodSchema>> = {};

  fields.forEach((field) => {
    let fieldSchema: ZodSchema;

    // Create base validation schema based on field type
    switch (field.type) {
      case FormFieldType.Email:
        fieldSchema = z.string().email('Please enter a valid email address');
        break;
      case FormFieldType.Number:
        fieldSchema = z.coerce.number();
        if (field.validation?.min !== undefined) {
          fieldSchema = (fieldSchema as z.ZodNumber).min(
            field.validation.min,
            field.validation.message ||
              `Value cannot be less than ${field.validation.min}`,
          );
        }
        if (field.validation?.max !== undefined) {
          fieldSchema = (fieldSchema as z.ZodNumber).max(
            field.validation.max,
            field.validation.message ||
              `Value cannot be greater than ${field.validation.max}`,
          );
        }
        break;
      case FormFieldType.Checkbox:
        fieldSchema = z.boolean();
        break;
      case FormFieldType.Tag:
        fieldSchema = z.array(z.string());
        break;
      default:
        fieldSchema = z.string();
        break;
    }

    // Handle required fields
    if (field.required) {
      if (field.type === FormFieldType.Checkbox) {
        fieldSchema = (fieldSchema as z.ZodBoolean).refine(
          (val) => val === true,
          {
            message: `${field.label} is required`,
          },
        );
      } else if (field.type === FormFieldType.Tag) {
        fieldSchema = (fieldSchema as z.ZodArray<z.ZodString>).min(1, {
          message: `${field.label} is required`,
        });
      } else {
        fieldSchema = (fieldSchema as z.ZodString).min(1, {
          message: `${field.label} is required`,
        });
      }
    }

    if (!field.required) {
      fieldSchema = fieldSchema.optional();
    }

    // Handle other validation rules
    if (
      field.type !== FormFieldType.Number &&
      field.type !== FormFieldType.Checkbox &&
      field.type !== FormFieldType.Tag &&
      field.required
    ) {
      fieldSchema = fieldSchema as z.ZodString;

      if (field.validation?.minLength !== undefined) {
        fieldSchema = (fieldSchema as z.ZodString).min(
          field.validation.minLength,
          field.validation.message ||
            `Enter at least ${field.validation.minLength} characters`,
        );
      }

      if (field.validation?.maxLength !== undefined) {
        fieldSchema = (fieldSchema as z.ZodString).max(
          field.validation.maxLength,
          field.validation.message ||
            `Enter up to ${field.validation.maxLength} characters`,
        );
      }

      if (field.validation?.pattern) {
        fieldSchema = (fieldSchema as z.ZodString).regex(
          field.validation.pattern,
          field.validation.message || 'Invalid input format',
        );
      }
    }

    if (field.name.includes('.')) {
      const keys = field.name.split('.');
      const firstKey = keys[0];

      if (!nestedSchemas[firstKey]) {
        nestedSchemas[firstKey] = {};
      }

      let currentSchema = nestedSchemas[firstKey];
      for (let i = 1; i < keys.length - 1; i++) {
        const key = keys[i];
        if (!currentSchema[key]) {
          currentSchema[key] = {};
        }
        currentSchema = currentSchema[key];
      }

      const lastKey = keys[keys.length - 1];
      currentSchema[lastKey] = fieldSchema;
    } else {
      schema[field.name] = fieldSchema;
    }
  });

  Object.keys(nestedSchemas).forEach((key) => {
    const buildNestedSchema = (obj: Record<string, any>): ZodSchema => {
      const nestedSchema: Record<string, ZodSchema> = {};
      Object.keys(obj).forEach((subKey) => {
        if (
          typeof obj[subKey] === 'object' &&
          !(obj[subKey] instanceof z.ZodType)
        ) {
          nestedSchema[subKey] = buildNestedSchema(obj[subKey]);
        } else {
          nestedSchema[subKey] = obj[subKey];
        }
      });
      return z.object(nestedSchema);
    };

    schema[key] = buildNestedSchema(nestedSchemas[key]);
  });
  return z.object(schema);
};
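A worked illustration of what generateSchema produces: a dotted field name is folded into a nested z.object, and a required text field gets a min-length check. The field config here is hypothetical and relies on the definitions above:

const bucketFields: FormFieldConfig[] = [
  {
    name: 'config.bucket_name',
    label: 'Bucket Name',
    type: FormFieldType.Text,
    required: true,
  },
];

// Behaves like:
// z.object({
//   config: z.object({
//     bucket_name: z.string().min(1, { message: 'Bucket Name is required' }),
//   }),
// })
const bucketSchema = generateSchema(bucketFields);
console.log(bucketSchema.safeParse({ config: { bucket_name: '' } }).success); // false
console.log(bucketSchema.safeParse({ config: { bucket_name: 'docs' } }).success); // true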
// Generate default values based on field configurations
const generateDefaultValues = <T extends FieldValues>(
  fields: FormFieldConfig[],
): DefaultValues<T> => {
  const defaultValues: Record<string, any> = {};

  fields.forEach((field) => {
    if (field.name.includes('.')) {
      const keys = field.name.split('.');
      let current = defaultValues;

      for (let i = 0; i < keys.length - 1; i++) {
        const key = keys[i];
        if (!current[key]) {
          current[key] = {};
        }
        current = current[key];
      }

      const lastKey = keys[keys.length - 1];
      if (field.defaultValue !== undefined) {
        current[lastKey] = field.defaultValue;
      } else if (field.type === FormFieldType.Checkbox) {
        current[lastKey] = false;
      } else if (field.type === FormFieldType.Tag) {
        current[lastKey] = [];
      } else {
        current[lastKey] = '';
      }
    } else {
      if (field.defaultValue !== undefined) {
        defaultValues[field.name] = field.defaultValue;
      } else if (field.type === FormFieldType.Checkbox) {
        defaultValues[field.name] = false;
      } else if (field.type === FormFieldType.Tag) {
        defaultValues[field.name] = [];
      } else {
        defaultValues[field.name] = '';
      }
    }
  });

  return defaultValues as DefaultValues<T>;
};
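generateDefaultValues expands dotted names the same way, so the defaults always match the shape the generated schema expects; a small sketch with hypothetical fields:

const exampleFields: FormFieldConfig[] = [
  { name: 'name', label: 'Name', type: FormFieldType.Text },
  { name: 'config.prefix', label: 'Prefix', type: FormFieldType.Text },
  { name: 'config.tags', label: 'Tags', type: FormFieldType.Tag },
];

// => { name: '', config: { prefix: '', tags: [] } }
console.log(generateDefaultValues(exampleFields));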
// Dynamic form component
const DynamicForm = {
  Root: forwardRef(
    <T extends FieldValues>(
      {
        fields,
        onSubmit,
        className = '',
        children,
        defaultValues: formDefaultValues = {} as DefaultValues<T>,
      }: DynamicFormProps<T>,
      ref: React.Ref<any>,
    ) => {
      // Generate validation schema and default values
      const schema = useMemo(() => generateSchema(fields), [fields]);

      const defaultValues = useMemo(() => {
        const value = {
          ...generateDefaultValues(fields),
          ...formDefaultValues,
        };
        console.log('generateDefaultValues', fields, value);
        return value;
      }, [fields, formDefaultValues]);

      // Initialize form
      const form = useForm<T>({
        resolver: zodResolver(schema),
        defaultValues,
      });

      // Expose form methods via ref
      useImperativeHandle(ref, () => ({
        submit: () => form.handleSubmit(onSubmit)(),
        getValues: () => form.getValues(),
        reset: (values?: T) => {
          if (values) {
            form.reset(values);
          } else {
            form.reset();
          }
        },
        setError: form.setError,
        clearErrors: form.clearErrors,
        trigger: form.trigger,
      }));
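      // The imperative handle mirrors DynamicFormRef, letting a parent drive
      // validation and submission without owning form state. A hypothetical
      // caller (names illustrative), kept here as a comment to stay inside
      // the component body:
      //
      //   const formRef = useRef<DynamicFormRef>(null);
      //   <DynamicForm.Root
      //     ref={formRef}
      //     fields={[{ name: 'name', label: 'Name', type: FormFieldType.Text, required: true }]}
      //     onSubmit={(values) => console.log('submitted', values)}
      //   />
      //   <button type="button" onClick={() => formRef.current?.submit()}>
      //     Save
      //   </button>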
|
||||
|
||||
useEffect(() => {
|
||||
if (formDefaultValues && Object.keys(formDefaultValues).length > 0) {
|
||||
form.reset({
|
||||
...generateDefaultValues(fields),
|
||||
...formDefaultValues,
|
||||
});
|
||||
}
|
||||
}, [form, formDefaultValues, fields]);
|
||||
|
||||
// Submit handler
|
||||
// const handleSubmit = form.handleSubmit(onSubmit);
|
||||
|
||||
// Render form fields
|
||||
const renderField = (field: FormFieldConfig) => {
|
||||
if (field.render) {
|
||||
return (
|
||||
<RAGFlowFormItem
|
||||
name={field.name}
|
||||
label={field.label}
|
||||
required={field.required}
|
||||
horizontal={field.horizontal}
|
||||
>
|
||||
{(fieldProps) => {
|
||||
const finalFieldProps = field.onChange
|
||||
? {
|
||||
...fieldProps,
|
||||
onChange: (e: any) => {
|
||||
fieldProps.onChange(e);
|
||||
field.onChange?.(e.target?.value ?? e);
|
||||
},
|
||||
}
|
||||
: fieldProps;
|
||||
return field.render?.(finalFieldProps);
|
||||
}}
|
||||
</RAGFlowFormItem>
|
||||
);
|
||||
}
|
||||
switch (field.type) {
|
||||
case FormFieldType.Textarea:
|
||||
return (
|
||||
<RAGFlowFormItem
|
||||
name={field.name}
|
||||
label={field.label}
|
||||
required={field.required}
|
||||
horizontal={field.horizontal}
|
||||
>
|
||||
{(fieldProps) => {
|
||||
const finalFieldProps = field.onChange
|
||||
? {
|
||||
...fieldProps,
|
||||
onChange: (e: any) => {
|
||||
fieldProps.onChange(e);
|
||||
field.onChange?.(e.target.value);
|
||||
},
|
||||
}
|
||||
: fieldProps;
|
||||
return (
|
||||
<Textarea
|
||||
{...finalFieldProps}
|
||||
placeholder={field.placeholder}
|
||||
className="resize-none"
|
||||
/>
|
||||
);
|
||||
}}
|
||||
</RAGFlowFormItem>
|
||||
);
|
||||
|
||||
case FormFieldType.Select:
|
||||
return (
|
||||
<RAGFlowFormItem
|
||||
name={field.name}
|
||||
label={field.label}
|
||||
required={field.required}
|
||||
horizontal={field.horizontal}
|
||||
>
|
||||
{(fieldProps) => {
|
||||
const finalFieldProps = field.onChange
|
||||
? {
|
||||
...fieldProps,
|
||||
onChange: (value: string) => {
|
||||
console.log('select value', value);
|
||||
if (fieldProps.onChange) {
|
||||
fieldProps.onChange(value);
|
||||
}
|
||||
field.onChange?.(value);
|
||||
},
|
||||
}
|
||||
: fieldProps;
|
||||
return (
|
||||
<SelectWithSearch
|
||||
triggerClassName="!shrink"
|
||||
{...finalFieldProps}
|
||||
options={field.options}
|
||||
/>
|
||||
);
|
||||
}}
|
||||
</RAGFlowFormItem>
|
||||
);
|
||||
|
||||
case FormFieldType.Checkbox:
|
||||
return (
|
||||
<FormField
|
||||
control={form.control}
|
||||
name={field.name as any}
|
||||
render={({ field: formField }) => (
|
||||
<FormItem
|
||||
className={cn('flex items-center', {
|
||||
'flex-row items-start space-x-3 space-y-0':
|
||||
!field.horizontal,
|
||||
})}
|
||||
>
|
||||
{field.label && !field.horizontal && (
|
||||
<div className="space-y-1 leading-none">
|
||||
<FormLabel className="font-normal">
|
||||
{field.label}{' '}
|
||||
{field.required && (
|
||||
<span className="text-destructive">*</span>
|
||||
)}
|
||||
</FormLabel>
|
||||
</div>
|
||||
)}
|
||||
<FormControl>
|
||||
<Checkbox
|
||||
checked={formField.value}
|
||||
onCheckedChange={(checked) => {
|
||||
formField.onChange(checked);
|
||||
field.onChange?.(checked);
|
||||
}}
|
||||
/>
|
||||
</FormControl>
|
||||
{field.label && field.horizontal && (
|
||||
<div className="space-y-1 leading-none">
|
||||
<FormLabel className="font-normal">
|
||||
{field.label}{' '}
|
||||
{field.required && (
|
||||
<span className="text-destructive">*</span>
|
||||
)}
|
||||
</FormLabel>
|
||||
</div>
|
||||
)}
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
);
|
||||
|
||||
case FormFieldType.Tag:
|
||||
return (
|
||||
<RAGFlowFormItem
|
||||
name={field.name}
|
||||
label={field.label}
|
||||
required={field.required}
|
||||
horizontal={field.horizontal}
|
||||
>
|
||||
{(fieldProps) => {
|
||||
const finalFieldProps = field.onChange
|
||||
? {
|
||||
...fieldProps,
|
||||
onChange: (value: string[]) => {
|
||||
fieldProps.onChange(value);
|
||||
field.onChange?.(value);
|
||||
},
|
||||
}
|
||||
: fieldProps;
|
||||
return (
|
||||
// <TagInput {...fieldProps} placeholder={field.placeholder} />
|
||||
<div className="w-full">
|
||||
<EditTag {...finalFieldProps}></EditTag>
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
</RAGFlowFormItem>
|
||||
);
|
||||
|
||||
default:
|
||||
return (
|
||||
<RAGFlowFormItem
|
||||
name={field.name}
|
||||
label={field.label}
|
||||
required={field.required}
|
||||
horizontal={field.horizontal}
|
||||
>
|
||||
{(fieldProps) => {
|
||||
const finalFieldProps = field.onChange
|
||||
? {
|
||||
...fieldProps,
|
||||
onChange: (e: any) => {
|
||||
fieldProps.onChange(e);
|
||||
field.onChange?.(e.target.value);
|
||||
},
|
||||
}
|
||||
: fieldProps;
|
||||
return (
|
||||
<Input
|
||||
{...finalFieldProps}
|
||||
type={field.type}
|
||||
placeholder={field.placeholder}
|
||||
/>
|
||||
);
|
||||
}}
|
||||
</RAGFlowFormItem>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className={`space-y-6 ${className}`}
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
form.handleSubmit(onSubmit)(e);
|
||||
}}
|
||||
>
|
||||
<>
|
||||
{fields.map((field) => (
|
||||
<div key={field.name} className={cn({ hidden: field.hidden })}>
|
||||
{renderField(field)}
|
||||
</div>
|
||||
))}
|
||||
{children}
|
||||
</>
|
||||
</form>
|
||||
</Form>
|
||||
);
|
||||
},
|
||||
) as <T extends FieldValues>(
|
||||
props: DynamicFormProps<T> & { ref?: React.Ref<DynamicFormRef> },
|
||||
) => React.ReactElement,
|
||||
|
||||
SavingButton: ({
|
||||
submitLoading,
|
||||
buttonText,
|
||||
submitFunc,
|
||||
}: {
|
||||
submitLoading: boolean;
|
||||
buttonText?: string;
|
||||
submitFunc?: (values: FieldValues) => void;
|
||||
}) => {
|
||||
const form = useFormContext();
|
||||
return (
|
||||
<button
|
||||
type="button"
|
||||
disabled={submitLoading}
|
||||
onClick={() => {
|
||||
console.log('form submit');
|
||||
(async () => {
|
||||
console.log('form submit2');
|
||||
try {
|
||||
let beValid = await form.formControl.trigger();
|
||||
console.log('form valid', beValid, form, form.formControl);
|
||||
if (beValid) {
|
            form.handleSubmit(async (values) => {
              console.log('form values', values);
              submitFunc?.(values);
            })();
          }
        } catch (e) {
          console.error(e);
        } finally {
          console.log('form submit3');
        }
      })();
    }}
    className={cn(
      'px-2 py-1 bg-primary text-primary-foreground rounded-md hover:bg-primary/90',
    )}
  >
    {submitLoading && (
      <Loader className="inline-block mr-2 h-4 w-4 animate-spin" />
    )}
    {buttonText ?? t('modal.okText')}
  </button>
);
},

  CancelButton: ({
    handleCancel,
    cancelText,
  }: {
    handleCancel: () => void;
    cancelText?: string;
  }) => {
    return (
      <button
        type="button"
        onClick={() => handleCancel()}
        className="px-2 py-1 border border-input rounded-md hover:bg-muted"
      >
        {cancelText ?? t('modal.cancelText')}
      </button>
    );
  },
};

export { DynamicForm };

/**
 * Usage Example 1: Basic Form
 *
 * <DynamicForm
 *   fields={[
 *     {
 *       name: "username",
 *       label: "Username",
 *       type: FormFieldType.Text,
 *       required: true,
 *       placeholder: "Please enter username"
 *     },
 *     {
 *       name: "email",
 *       label: "Email",
 *       type: FormFieldType.Email,
 *       required: true,
 *       placeholder: "Please enter email address"
 *     }
 *   ]}
 *   onSubmit={(data) => {
 *     console.log(data); // { username: "...", email: "..." }
 *   }}
 * />
 *
 * Usage Example 2: Nested Object Form
 *
 * <DynamicForm
 *   fields={[
 *     {
 *       name: "user.name",
 *       label: "Name",
 *       type: FormFieldType.Text,
 *       required: true,
 *       placeholder: "Please enter name"
 *     },
 *     {
 *       name: "user.email",
 *       label: "Email",
 *       type: FormFieldType.Email,
 *       required: true,
 *       placeholder: "Please enter email address"
 *     },
 *     {
 *       name: "user.profile.age",
 *       label: "Age",
 *       type: FormFieldType.Number,
 *       required: true,
 *       validation: {
 *         min: 18,
 *         max: 100,
 *         message: "Age must be between 18 and 100"
 *       }
 *     },
 *     {
 *       name: "user.profile.bio",
 *       label: "Bio",
 *       type: FormFieldType.Textarea,
 *       placeholder: "Please briefly introduce yourself"
 *     },
 *     {
 *       name: "settings.notifications",
 *       label: "Enable Notifications",
 *       type: FormFieldType.Checkbox
 *     }
 *   ]}
 *   onSubmit={(data) => {
 *     console.log(data);
 *     // {
 *     //   user: {
 *     //     name: "...",
 *     //     email: "...",
 *     //     profile: {
 *     //       age: ...,
 *     //       bio: "..."
 *     //     }
 *     //   },
 *     //   settings: {
 *     //     notifications: true/false
 *     //   }
 *     // }
 *   }}
 * />
 *
 * Usage Example 3: Tag Type Form
 *
 * <DynamicForm
 *   fields={[
 *     {
 *       name: "skills",
 *       label: "Skill Tags",
 *       type: FormFieldType.Tag,
 *       required: true,
 *       placeholder: "Enter skill and press Enter to add tag"
 *     },
 *     {
 *       name: "user.hobbies",
 *       label: "Hobbies",
 *       type: FormFieldType.Tag,
 *       placeholder: "Enter hobby and press Enter to add tag"
 *     }
 *   ]}
 *   onSubmit={(data) => {
 *     console.log(data);
 *     // {
 *     //   skills: ["JavaScript", "React", "TypeScript"],
 *     //   user: {
 *     //     hobbies: ["Reading", "Swimming", "Travel"]
 *     //   }
 *     // }
 *   }}
 * />
 */
web/src/pages/user-setting/data-source/contant.tsx (new file, 173 lines)
@ -0,0 +1,173 @@
import SvgIcon from '@/components/svg-icon';
import { t } from 'i18next';
import { FormFieldType } from './component/dynamic-form';

export enum DataSourceKey {
  S3 = 's3',
  NOTION = 'notion',
  DISCORD = 'discord',
  // CONFLUENCE = 'confluence',
  // GMAIL = 'gmail',
  // GOOGLE_DRIVER = 'google_driver',
  // JIRA = 'jira',
  // SHAREPOINT = 'sharepoint',
  // SLACK = 'slack',
  // TEAMS = 'teams',
}

export const DataSourceInfo = {
  [DataSourceKey.S3]: {
    name: 'S3',
    description: t(`setting.${DataSourceKey.S3}Description`),
    icon: <SvgIcon name={'data-source/s3'} width={28} />,
  },
  [DataSourceKey.NOTION]: {
    name: 'Notion',
    description: t(`setting.${DataSourceKey.NOTION}Description`),
    icon: <SvgIcon name={'data-source/notion'} width={28} />,
  },
  [DataSourceKey.DISCORD]: {
    name: 'Discord',
    description: t(`setting.${DataSourceKey.DISCORD}Description`),
    icon: <SvgIcon name={'data-source/discord'} width={28} />,
  },
};

export const DataSourceFormBaseFields = [
  {
    id: 'Id',
    name: 'id',
    type: FormFieldType.Text,
    required: false,
    hidden: true,
  },
  {
    label: 'Name',
    name: 'name',
    type: FormFieldType.Text,
    required: true,
  },
  {
    label: 'Source',
    name: 'source',
    type: FormFieldType.Select,
    required: true,
    hidden: true,
    options: Object.keys(DataSourceKey).map((item) => ({
      label: item,
      value: DataSourceKey[item as keyof typeof DataSourceKey],
    })),
  },
];

export const DataSourceFormFields = {
  [DataSourceKey.S3]: [
    {
      label: 'AWS Access Key ID',
      name: 'config.credentials.aws_access_key_id',
      type: FormFieldType.Text,
      required: true,
    },
    {
      label: 'AWS Secret Access Key',
      name: 'config.credentials.aws_secret_access_key',
      type: FormFieldType.Text,
      required: true,
    },
    {
      label: 'Bucket Name',
      name: 'config.bucket_name',
      type: FormFieldType.Text,
      required: true,
    },
    {
      label: 'Bucket Type',
      name: 'config.bucket_type',
      type: FormFieldType.Select,
      options: [
        { label: 'S3', value: 's3' },
        { label: 'R2', value: 'r2' },
        { label: 'Google Cloud Storage', value: 'google_cloud_storage' },
        { label: 'OCI Storage', value: 'oci_storage' },
      ],
      required: true,
    },
    {
      label: 'Prefix',
      name: 'config.prefix',
      type: FormFieldType.Text,
      required: false,
    },
  ],
  [DataSourceKey.NOTION]: [
    {
      label: 'Notion Integration Token',
      name: 'config.credentials.notion_integration_token',
      type: FormFieldType.Text,
      required: true,
    },
    {
      label: 'Root Page Id',
      name: 'config.root_page_id',
      type: FormFieldType.Text,
      required: false,
    },
  ],
  [DataSourceKey.DISCORD]: [
    {
      label: 'Discord Bot Token',
      name: 'config.credentials.discord_bot_token',
      type: FormFieldType.Text,
      required: true,
    },
    {
      label: 'Server IDs',
      name: 'config.server_ids',
      type: FormFieldType.Tag,
      required: false,
    },
    {
      label: 'Channels',
      name: 'config.channels',
      type: FormFieldType.Tag,
      required: false,
    },
  ],
};

export const DataSourceFormDefaultValues = {
  [DataSourceKey.S3]: {
    name: '',
    source: DataSourceKey.S3,
    config: {
      bucket_name: '',
      bucket_type: 's3',
      prefix: '',
      credentials: {
        aws_access_key_id: '',
        aws_secret_access_key: '',
      },
    },
  },
  [DataSourceKey.NOTION]: {
    name: '',
    source: DataSourceKey.NOTION,
    config: {
      root_page_id: '',
      credentials: {
        notion_integration_token: '',
      },
    },
  },
  [DataSourceKey.DISCORD]: {
    name: '',
    source: DataSourceKey.DISCORD,
    config: {
      server_ids: [],
      channels: [],
      credentials: {
        discord_bot_token: '',
      },
    },
  },
};
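For orientation, a minimal sketch of how the config objects above feed the form component, mirroring the DynamicForm.Root usage in the detail page later in this diff. S3SourceForm and its submit handler are illustrative assumptions, not part of the changeset:

// Illustrative only: combines the base fields with the S3-specific fields.
// Assumes DynamicForm.Root accepts fields/defaultValues/onSubmit as used below.
import { DynamicForm } from './component/dynamic-form';
import {
  DataSourceFormBaseFields,
  DataSourceFormDefaultValues,
  DataSourceFormFields,
  DataSourceKey,
} from './contant';

const S3SourceForm = () => (
  <DynamicForm.Root
    fields={[
      ...DataSourceFormBaseFields,
      ...DataSourceFormFields[DataSourceKey.S3],
    ]}
    defaultValues={DataSourceFormDefaultValues[DataSourceKey.S3]}
    onSubmit={(data) => {
      // Dotted names such as 'config.credentials.aws_access_key_id' come back
      // as a nested object: { config: { credentials: { ... } } }
      console.log(data);
    }}
  />
);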
@ -0,0 +1,193 @@
import BackButton from '@/components/back-button';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Separator } from '@/components/ui/separator';
import { RunningStatus } from '@/constants/knowledge';
import { t } from 'i18next';
import { debounce } from 'lodash';
import { CirclePause, Repeat } from 'lucide-react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { FieldValues } from 'react-hook-form';
import {
  DynamicForm,
  DynamicFormRef,
  FormFieldConfig,
  FormFieldType,
} from '../component/dynamic-form';
import {
  DataSourceFormBaseFields,
  DataSourceFormDefaultValues,
  DataSourceFormFields,
  DataSourceInfo,
} from '../contant';
import {
  useAddDataSource,
  useDataSourceResume,
  useFetchDataSourceDetail,
} from '../hooks';
import { DataSourceLogsTable } from './log-table';

const SourceDetailPage = () => {
  const formRef = useRef<DynamicFormRef>(null);

  const { data: detail } = useFetchDataSourceDetail();
  const { handleResume } = useDataSourceResume();

  const detailInfo = useMemo(() => {
    if (detail) {
      return DataSourceInfo[detail.source];
    }
  }, [detail]);

  const [fields, setFields] = useState<FormFieldConfig[]>([]);
  const [defaultValues, setDefaultValues] = useState<FieldValues>(
    DataSourceFormDefaultValues[
      detail?.source as keyof typeof DataSourceFormDefaultValues
    ] as FieldValues,
  );

  const runSchedule = useCallback(() => {
    handleResume({
      resume:
        detail?.status === RunningStatus.RUNNING ||
        detail?.status === RunningStatus.SCHEDULE
          ? false
          : true,
    });
  }, [detail, handleResume]);

  const customFields = useMemo(() => {
    return [
      {
        label: 'Refresh Freq',
        name: 'refresh_freq',
        type: FormFieldType.Number,
        required: false,
        render: (fieldProps: FormFieldConfig) => (
          <div className="flex items-center gap-1 w-full relative">
            <Input {...fieldProps} type={FormFieldType.Number} />
            <span className="absolute right-0 -translate-x-12 text-text-secondary italic">
              minutes
            </span>
            <button
              type="button"
              className="text-text-secondary bg-bg-input rounded-sm text-xs h-full p-2 border border-border-button"
              onClick={() => {
                runSchedule();
              }}
            >
              {detail?.status === RunningStatus.RUNNING ||
              detail?.status === RunningStatus.SCHEDULE ? (
                <CirclePause size={12} />
              ) : (
                <Repeat size={12} />
              )}
            </button>
          </div>
        ),
      },
      {
        label: 'Prune Freq',
        name: 'prune_freq',
        type: FormFieldType.Number,
        required: false,
        hidden: true,
        render: (fieldProps: FormFieldConfig) => {
          return (
            <div className="flex items-center gap-1 w-full relative">
              <Input {...fieldProps} type={FormFieldType.Number} />
              <span className="absolute right-0 -translate-x-3 text-text-secondary italic">
                hours
              </span>
            </div>
          );
        },
      },
      {
        label: 'Timeout Secs',
        name: 'timeout_secs',
        type: FormFieldType.Number,
        required: false,
        render: (fieldProps: FormFieldConfig) => (
          <div className="flex items-center gap-1 w-full relative">
            <Input {...fieldProps} type={FormFieldType.Number} />
            <span className="absolute right-0 -translate-x-3 text-text-secondary italic">
              minutes
            </span>
          </div>
        ),
      },
    ];
  }, [detail, runSchedule]);

  const { handleAddOk } = useAddDataSource();

  const onSubmit = useCallback(() => {
    formRef?.current?.submit();
  }, [formRef]);

  useEffect(() => {
    if (detail) {
      const fields = [
        ...DataSourceFormBaseFields,
        ...DataSourceFormFields[
          detail.source as keyof typeof DataSourceFormFields
        ],
        ...customFields,
      ] as FormFieldConfig[];

      const newFields = fields.map((field) => {
        return {
          ...field,
          horizontal: true,
          onChange: () => {
            onSubmit();
          },
        };
      });
      setFields(newFields);

      const defaultValueTemp = {
        ...(DataSourceFormDefaultValues[
          detail?.source as keyof typeof DataSourceFormDefaultValues
        ] as FieldValues),
        ...detail,
      };
      console.log('defaultValue', defaultValueTemp);
      setDefaultValues(defaultValueTemp);
    }
  }, [detail, customFields, onSubmit]);

  return (
    <div className="px-10 py-5">
      <BackButton />
      <Card className="bg-transparent border border-border-button px-5 pt-[10px] pb-5 rounded-md mt-5">
        <CardHeader className="flex flex-row items-center justify-between space-y-0 p-0 pb-3">
          {/* <Users className="mr-2 h-5 w-5 text-[#1677ff]" /> */}
          <CardTitle className="text-2xl text-text-primary flex gap-1 items-center font-normal pb-3">
            {detailInfo?.icon}
            {detail?.name}
          </CardTitle>
        </CardHeader>
        <Separator className="border-border-button bg-border-button w-[calc(100%+2rem)] -translate-x-4 -translate-y-4" />
        <CardContent className="p-2 flex flex-col gap-2 max-h-[calc(100vh-190px)] overflow-y-auto scrollbar-auto">
          <div className="max-w-[1200px]">
            <DynamicForm.Root
              ref={formRef}
              fields={fields}
              onSubmit={debounce((data) => {
                handleAddOk(data);
              }, 500)}
              defaultValues={defaultValues}
            />
          </div>
          <section className="flex flex-col gap-2 mt-6">
            <div className="text-2xl text-text-primary">{t('setting.log')}</div>
            <DataSourceLogsTable />
          </section>
        </CardContent>
      </Card>
    </div>
  );
};
export default SourceDetailPage;
@ -0,0 +1,240 @@
import FileStatusBadge from '@/components/file-status-badge';
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import { Button } from '@/components/ui/button';
import { RAGFlowPagination } from '@/components/ui/ragflow-pagination';
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from '@/components/ui/table';
import { RunningStatusMap } from '@/constants/knowledge';
import { RunningStatus } from '@/pages/dataset/dataset/constant';
import { Routes } from '@/routes';
import { formatDate } from '@/utils/date';
import {
  HoverCard,
  HoverCardContent,
  HoverCardTrigger,
} from '@radix-ui/react-hover-card';
import {
  ColumnDef,
  flexRender,
  getCoreRowModel,
  getFilteredRowModel,
  getPaginationRowModel,
  getSortedRowModel,
  useReactTable,
} from '@tanstack/react-table';
import { t } from 'i18next';
import { pick } from 'lodash';
import { Eye } from 'lucide-react';
import { useCallback, useMemo } from 'react';
import { useNavigate } from 'umi';
import { useLogListDataSource } from '../hooks';

const columns = ({
  handleToDataSetDetail,
}: {
  handleToDataSetDetail: (id: string) => void;
}) => {
  return [
    {
      accessorKey: 'update_date',
      header: t('setting.timeStarted'),
      cell: ({ row }) => (
        <div className="flex items-center gap-2 text-text-primary">
          {row.original.update_date
            ? formatDate(row.original.update_date)
            : '-'}
        </div>
      ),
    },
    {
      accessorKey: 'status',
      header: t('knowledgeDetails.status'),
      cell: ({ row }) => (
        <FileStatusBadge
          status={row.original.status as RunningStatus}
          name={RunningStatusMap[row.original.status as RunningStatus]}
          className="!w-20"
        />
      ),
    },
    {
      accessorKey: 'kb_name',
      header: t('knowledgeDetails.dataset'),
      cell: ({ row }) => {
        return (
          <div
            className="flex items-center gap-2 text-text-primary cursor-pointer"
            onClick={() => {
              console.log('handleToDataSetDetail', row.original.kb_id);
              handleToDataSetDetail(row.original.kb_id);
            }}
          >
            <RAGFlowAvatar
              avatar={row.original.avatar}
              name={row.original.kb_name}
              className="size-4"
            />
            {row.original.kb_name}
          </div>
        );
      },
    },
    {
      accessorKey: 'new_docs_indexed',
      header: t('setting.newDocs'),
    },

    {
      id: 'operations',
      header: t('setting.errorMsg'),
      cell: ({ row }) => (
        <div className="flex gap-1 items-center">
          {row.original.error_msg}
          {row.original.error_msg && (
            <div className="flex justify-start space-x-2 opacity-0 group-hover:opacity-100 transition-opacity">
              <HoverCard>
                <HoverCardTrigger>
                  <Button
                    variant="ghost"
                    size="sm"
                    className="p-1"
                    // onClick={() => {
                    //   showLog(row, LogTabs.FILE_LOGS);
                    // }}
                  >
                    <Eye />
                  </Button>
                </HoverCardTrigger>
                <HoverCardContent className="w-[40vw] max-h-[40vh] overflow-auto bg-bg-base z-[999] px-3 py-2 rounded-md border border-border-default">
                  <div className="space-y-2">
                    {row.original.full_exception_trace}
                  </div>
                </HoverCardContent>
              </HoverCard>
            </div>
          )}
        </div>
      ),
    },
  ] as ColumnDef<any>[];
};

// const paginationInit = {
//   current: 1,
//   pageSize: 10,
//   total: 0,
// };
export const DataSourceLogsTable = () => {
  // const [pagination, setPagination] = useState(paginationInit);
  const { data, pagination, setPagination } = useLogListDataSource();
  const navigate = useNavigate();
  const currentPagination = useMemo(
    () => ({
      pageIndex: (pagination.current || 1) - 1,
      pageSize: pagination.pageSize || 10,
    }),
    [pagination],
  );

  const handleToDataSetDetail = useCallback(
    (id: string) => {
      console.log('handleToDataSetDetail', id);
      navigate(`${Routes.DatasetBase}${Routes.DataSetSetting}/${id}`);
    },
    [navigate],
  );

  const table = useReactTable<any>({
    data: data || [],
    columns: columns({ handleToDataSetDetail }),
    manualPagination: true,
    getCoreRowModel: getCoreRowModel(),
    getPaginationRowModel: getPaginationRowModel(),
    getSortedRowModel: getSortedRowModel(),
    getFilteredRowModel: getFilteredRowModel(),
    // onSortingChange: setSorting,
    // onColumnFiltersChange: setColumnFilters,
    // onRowSelectionChange: setRowSelection,
    state: {
      // sorting,
      // columnFilters,
      // rowSelection,
      pagination: currentPagination,
    },
    // pageCount: pagination.total
    //   ? Math.ceil(pagination.total / pagination.pageSize)
    //   : 0,
    rowCount: pagination.total ?? 0,
  });

  return (
    // <div className="w-full h-[calc(100vh-360px)]">
    //   <Table rootClassName="max-h-[calc(100vh-380px)]">
    <div className="w-full">
      <Table>
        <TableHeader>
          {table.getHeaderGroups().map((headerGroup) => (
            <TableRow key={headerGroup.id}>
              {headerGroup.headers.map((header) => (
                <TableHead key={header.id}>
                  {flexRender(
                    header.column.columnDef.header,
                    header.getContext(),
                  )}
                </TableHead>
              ))}
            </TableRow>
          ))}
        </TableHeader>
        <TableBody className="relative min-w-[1280px] overflow-auto">
          {table.getRowModel().rows?.length ? (
            table.getRowModel().rows.map((row) => (
              <TableRow
                key={row.id}
                data-state={row.getIsSelected() && 'selected'}
                className="group"
              >
                {row.getVisibleCells().map((cell) => (
                  <TableCell
                    key={cell.id}
                    className={cell.column.columnDef.meta?.cellClassName}
                  >
                    {flexRender(cell.column.columnDef.cell, cell.getContext())}
                  </TableCell>
                ))}
              </TableRow>
            ))
          ) : (
            <TableRow>
              {/* `columns` here is a factory function, so `columns.length` is
                  its parameter count (1), not the column count; span the
                  table's actual columns instead */}
              <TableCell
                colSpan={table.getAllColumns().length}
                className="h-24 text-center"
              >
                No results.
              </TableCell>
            </TableRow>
          )}
        </TableBody>
      </Table>
      <div className="flex items-center justify-end mt-4">
        <div className="space-x-2">
          {/* <RAGFlowPagination
            {...{ current: pagination.current, pageSize: pagination.pageSize }}
            total={pagination.total}
            onChange={(page, pageSize) => setPagination({ page, pageSize })}
          /> */}
          <RAGFlowPagination
            {...pick(pagination, 'current', 'pageSize')}
            total={pagination.total}
            onChange={(page, pageSize) => {
              setPagination({ page, pageSize });
            }}
          ></RAGFlowPagination>
        </div>
      </div>
    </div>
  );
};
web/src/pages/user-setting/data-source/hooks.ts (new file, 188 lines)
@ -0,0 +1,188 @@
import message from '@/components/ui/message';
import { useSetModalState } from '@/hooks/common-hooks';
import { useGetPaginationWithRouter } from '@/hooks/logic-hooks';
import dataSourceService, {
  dataSourceResume,
  deleteDataSource,
  featchDataSourceDetail,
  getDataSourceLogs,
} from '@/services/data-source-service';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { t } from 'i18next';
import { useCallback, useMemo, useState } from 'react';
import { useSearchParams } from 'umi';
import { DataSourceInfo, DataSourceKey } from './contant';
import { IDataSorceInfo, IDataSource, IDataSourceBase } from './interface';

export const useListDataSource = () => {
  const { data: list, isFetching } = useQuery<IDataSource[]>({
    queryKey: ['data-source'],
    queryFn: async () => {
      const { data } = await dataSourceService.dataSourceList();
      return data.data;
    },
  });

  const categorizeDataBySource = (data: IDataSourceBase[]) => {
    const categorizedData: Record<DataSourceKey, any[]> = {} as Record<
      DataSourceKey,
      any[]
    >;

    data.forEach((item) => {
      const source = item.source;
      if (!categorizedData[source]) {
        categorizedData[source] = [];
      }
      categorizedData[source].push({
        ...item,
      });
    });

    return categorizedData;
  };

  const updatedDataSourceTemplates = useMemo(() => {
    const categorizedData = categorizeDataBySource(list || []);
    const sourcelist: Array<IDataSorceInfo & { list: Array<IDataSourceBase> }> =
      [];
    Object.keys(categorizedData).forEach((key: string) => {
      const k = key as DataSourceKey;
      sourcelist.push({
        id: k,
        name: DataSourceInfo[k].name,
        description: DataSourceInfo[k].description,
        icon: DataSourceInfo[k].icon,
        list: categorizedData[k] || [],
      });
    });

    console.log('🚀 ~ useListDataSource ~ sourcelist:', sourcelist);
    return sourcelist;
  }, [list]);

  return { list, categorizedList: updatedDataSourceTemplates, isFetching };
};

export const useAddDataSource = () => {
  const [addSource, setAddSource] = useState<IDataSorceInfo | undefined>(
    undefined,
  );
  const [addLoading, setAddLoading] = useState<boolean>(false);
  const {
    visible: addingModalVisible,
    hideModal: hideAddingModal,
    showModal,
  } = useSetModalState();
  const showAddingModal = useCallback(
    (data: IDataSorceInfo) => {
      setAddSource(data);
      showModal();
    },
    [showModal],
  );
  const queryClient = useQueryClient();

  const handleAddOk = useCallback(
    async (data: any) => {
      setAddLoading(true);
      const { data: res } = await dataSourceService.dataSourceSet(data);
      console.log('🚀 ~ handleAddOk ~ code:', res.code);
      if (res.code === 0) {
        queryClient.invalidateQueries({ queryKey: ['data-source'] });
        message.success(t(`message.operated`));
        hideAddingModal();
      }
      setAddLoading(false);
    },
    [hideAddingModal, queryClient],
  );

  return {
    addSource,
    addLoading,
    setAddSource,
    addingModalVisible,
    hideAddingModal,
    showAddingModal,
    handleAddOk,
  };
};

export const useLogListDataSource = () => {
  const { pagination, setPagination } = useGetPaginationWithRouter();
  const [currentQueryParameters] = useSearchParams();
  const id = currentQueryParameters.get('id');

  const { data, isFetching } = useQuery<{ logs: IDataSource[]; total: number }>(
    {
      queryKey: ['data-source-logs', id, pagination],
      queryFn: async () => {
        const { data } = await getDataSourceLogs(id as string, {
          page_size: pagination.pageSize,
          page: pagination.current,
        });
        return data.data;
      },
    },
  );
  return {
    data: data?.logs,
    isFetching,
    pagination: { ...pagination, total: data?.total },
    setPagination,
  };
};

export const useDeleteDataSource = () => {
  const [deleteLoading, setDeleteLoading] = useState<boolean>(false);
  const { hideModal, showModal } = useSetModalState();
  const queryClient = useQueryClient();
  const handleDelete = useCallback(
    async ({ id }: { id: string }) => {
      setDeleteLoading(true);
      const { data } = await deleteDataSource(id);
      if (data.code === 0) {
        message.success(t(`message.deleted`));
        queryClient.invalidateQueries({ queryKey: ['data-source'] });
      }
      setDeleteLoading(false);
    },
    [setDeleteLoading, queryClient],
  );
  return { deleteLoading, hideModal, showModal, handleDelete };
};

export const useFetchDataSourceDetail = () => {
  const [currentQueryParameters] = useSearchParams();
  const id = currentQueryParameters.get('id');
  const { data } = useQuery<IDataSource>({
    queryKey: ['data-source-detail', id],
    enabled: !!id,
    queryFn: async () => {
      const { data } = await featchDataSourceDetail(id as string);
      // if (data.code === 0) {

      // }
      return data.data;
    },
  });
  return { data };
};

export const useDataSourceResume = () => {
  const [currentQueryParameters] = useSearchParams();
  const id = currentQueryParameters.get('id');
  const queryClient = useQueryClient();
  const handleResume = useCallback(
    async (param: { resume: boolean }) => {
      const { data } = await dataSourceResume(id as string, param);
      if (data.code === 0) {
        queryClient.invalidateQueries({ queryKey: ['data-source-detail', id] });
        message.success(t(`message.operated`));
      }
    },
    [id, queryClient],
  );
  return { handleResume };
};
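For orientation, a minimal sketch of how the list and delete hooks above compose in a consuming component. DataSourceOverview and its markup are illustrative assumptions, not part of this changeset:

// Hypothetical consumer of the hooks above; not part of this changeset.
import { useDeleteDataSource, useListDataSource } from './hooks';

const DataSourceOverview = () => {
  const { categorizedList, isFetching } = useListDataSource();
  const { handleDelete, deleteLoading } = useDeleteDataSource();

  if (isFetching) return <div>Loading…</div>;
  return (
    <ul>
      {categorizedList.map((source) => (
        <li key={source.id}>
          {source.name} ({source.list.length} connected)
          {source.list.map((item) => (
            <button
              key={item.id}
              disabled={deleteLoading}
              onClick={() => handleDelete({ id: item.id })}
            >
              remove
            </button>
          ))}
        </li>
      ))}
    </ul>
  );
};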
web/src/pages/user-setting/data-source/index.tsx (new file, 151 lines)
@ -0,0 +1,151 @@
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { useTranslation } from 'react-i18next';

import Spotlight from '@/components/spotlight';
import { Button } from '@/components/ui/button';
import { Separator } from '@/components/ui/separator';
import { Plus } from 'lucide-react';
import AddDataSourceModal from './add-datasource-modal';
import { AddedSourceCard } from './component/added-source-card';
import { DataSourceInfo, DataSourceKey } from './contant';
import { useAddDataSource, useListDataSource } from './hooks';
import { IDataSorceInfo } from './interface';
const dataSourceTemplates = [
  {
    id: DataSourceKey.S3,
    name: DataSourceInfo[DataSourceKey.S3].name,
    description: DataSourceInfo[DataSourceKey.S3].description,
    icon: DataSourceInfo[DataSourceKey.S3].icon,
    list: [
      {
        id: '1',
        name: 'S3 Bucket 1',
      },
      {
        id: '2',
        name: 'S3 Bucket 1',
      },
    ],
  },
  {
    id: DataSourceKey.DISCORD,
    name: DataSourceInfo[DataSourceKey.DISCORD].name,
    description: DataSourceInfo[DataSourceKey.DISCORD].description,
    icon: DataSourceInfo[DataSourceKey.DISCORD].icon,
  },
  {
    id: DataSourceKey.NOTION,
    name: DataSourceInfo[DataSourceKey.NOTION].name,
    description: DataSourceInfo[DataSourceKey.NOTION].description,
    icon: DataSourceInfo[DataSourceKey.NOTION].icon,
  },
];
const DataSource = () => {
  const { t } = useTranslation();

  // useListTenantUser();
  const { categorizedList } = useListDataSource();

  const {
    addSource,
    addLoading,
    addingModalVisible,
    handleAddOk,
    hideAddingModal,
    showAddingModal,
  } = useAddDataSource();

  const AvailableSourceCard = ({
    id,
    name,
    description,
    icon,
  }: IDataSorceInfo) => {
    return (
      <div className="p-[10px] border border-border-button rounded-lg relative group hover:bg-bg-card">
        <div className="flex gap-2">
          <div className="w-6 h-6">{icon}</div>
          <div className="flex flex-1 flex-col items-start gap-2">
            <div className="text-base text-text-primary">{name}</div>
            <div className="text-xs text-text-secondary">{description}</div>
          </div>
        </div>
        <div className="absolute top-2 right-2">
          <Button
            onClick={() =>
              showAddingModal({
                id,
                name,
                description,
                icon,
              })
            }
            className="rounded-md px-1 text-bg-base gap-1 bg-text-primary text-xs py-0 h-6 items-center hidden group-hover:flex"
          >
            <Plus size={12} />
            {t('setting.add')}
          </Button>
        </div>
      </div>
    );
  };

  return (
    <div className="w-full flex flex-col gap-4 relative">
      <Spotlight />

      <Card className="bg-transparent border-none px-0">
        <CardHeader className="flex flex-row items-center justify-between space-y-0 px-4 pt-4 pb-0">
          <CardTitle className="text-2xl font-medium">
            {t('setting.dataSources')}
            <div className="text-sm text-text-secondary">
              {t('setting.datasourceDescription')}
            </div>
          </CardTitle>
        </CardHeader>
      </Card>
      <Separator className="border-border-button bg-border-button" />
      <div className="flex flex-col gap-4 p-4 max-h-[calc(100vh-120px)] overflow-y-auto overflow-x-hidden scrollbar-auto">
        <div className="flex flex-col gap-3">
          {categorizedList.map((item, index) => (
            <AddedSourceCard key={index} {...item} />
          ))}
        </div>
        <Card className="bg-transparent border-none mt-8">
          <CardHeader className="flex flex-row items-center justify-between space-y-0 p-0 pb-4">
            {/* <Users className="mr-2 h-5 w-5 text-[#1677ff]" /> */}
            <CardTitle className="text-2xl font-semibold">
              {t('setting.availableSources')}
              <div className="text-sm text-text-secondary font-normal">
                {t('setting.availableSourcesDescription')}
              </div>
            </CardTitle>
          </CardHeader>
          <CardContent className="p-0">
            {/* <TenantTable searchTerm={searchTerm}></TenantTable> */}
            <div className="grid sm:grid-cols-1 lg:grid-cols-2 xl:grid-cols-2 2xl:grid-cols-4 3xl:grid-cols-4 gap-4">
              {dataSourceTemplates.map((item, index) => (
                <AvailableSourceCard {...item} key={index} />
              ))}
            </div>
          </CardContent>
        </Card>
      </div>

      {addingModalVisible && (
        <AddDataSourceModal
          visible
          loading={addLoading}
          hideModal={hideAddingModal}
          onOk={(data) => {
            console.log(data);
            handleAddOk(data);
          }}
          sourceData={addSource}
        ></AddDataSourceModal>
      )}
    </div>
  );
};

export default DataSource;
web/src/pages/user-setting/data-source/interface.ts (new file, 45 lines)
@ -0,0 +1,45 @@
import { RunningStatus } from '@/constants/knowledge';
import { DataSourceKey } from './contant';

export interface IDataSorceInfo {
  id: DataSourceKey;
  name: string;
  description: string;
  icon: React.ReactNode;
}

export type IDataSource = IDataSourceBase & {
  config: any;
  indexing_start: null | string;
  input_type: string;
  prune_freq: number;
  refresh_freq: number;
  status: string;
  tenant_id: string;
  update_date: string;
  update_time: number;
};

export interface IDataSourceBase {
  id: string;
  name: string;
  source: DataSourceKey;
}

export interface IDataSourceLog {
  connector_id: string;
  error_count: number;
  error_msg: string;
  id: string;
  kb_id: string;
  kb_name: string;
  name: string;
  new_docs_indexed: number;
  poll_range_end: null | string;
  poll_range_start: null | string;
  reindex: string;
  source: DataSourceKey;
  status: RunningStatus;
  tenant_id: string;
  timeout_secs: number;
}
@ -27,6 +27,7 @@ interface IProps {
const SystemSetting = ({ onOk, loading }: IProps) => {
  const { systemSetting: initialValues, allOptions } =
    useFetchSystemModelSettingOnMount();
  const { t: tcommon } = useTranslate('common');
  const { t } = useTranslate('setting');

  const [formData, setFormData] = useState({
@ -159,7 +160,7 @@ const SystemSetting = ({ onOk, loading }: IProps) => {
          value={value}
          options={options}
          onChange={(value) => handleFieldChange(id, value)}
          placeholder={t('common:selectPlaceholder')}
          placeholder={tcommon('selectPlaceholder')}
        />
      </div>
    );
@ -1,4 +1,5 @@
import { LlmItem, useSelectLlmList } from '@/hooks/llm-hooks';
import { t } from 'i18next';
import { ModelProviderCard } from './modal-card';

export const UsedModel = ({
@ -11,7 +12,9 @@ export const UsedModel = ({
  const { factoryList, myLlmList: llmList, loading } = useSelectLlmList();
  return (
    <div className="flex flex-col w-full gap-4 mb-4">
      <div className="text-text-primary text-2xl mb-2 mt-4">Added models</div>
      <div className="text-text-primary text-2xl font-semibold mb-2 mt-4">
        {t('setting.addedModels')}
      </div>
      {llmList.map((llm) => {
        return (
          <ModelProviderCard
@ -11,22 +11,24 @@ import {
} from '@/hooks/use-user-setting-request';
import { cn } from '@/lib/utils';
import { Routes } from '@/routes';
import { Banknote, Box, Cog, Unplug, User, Users } from 'lucide-react';
import { t } from 'i18next';
import { Banknote, Box, Cog, Server, Unplug, User, Users } from 'lucide-react';
import { useEffect } from 'react';
import { useHandleMenuClick } from './hooks';

const menuItems = [
  { icon: User, label: 'Profile', key: Routes.Profile },
  { icon: Users, label: 'Team', key: Routes.Team },
  { icon: Box, label: 'Model Providers', key: Routes.Model },
  { icon: Unplug, label: 'API', key: Routes.Api },
  { icon: User, label: t('setting.profile'), key: Routes.Profile },
  { icon: Users, label: t('setting.team'), key: Routes.Team },
  { icon: Box, label: t('setting.model'), key: Routes.Model },
  { icon: Unplug, label: t('setting.api'), key: Routes.Api },
  // {
  //   icon: MessageSquareQuote,
  //   label: 'Prompt Templates',
  //   key: Routes.Profile,
  // },
  // { icon: TextSearch, label: 'Retrieval Templates', key: Routes.Profile },
  { icon: Cog, label: 'System', key: Routes.System },
  { icon: Server, label: t('setting.dataSources'), key: Routes.DataSource },
  { icon: Cog, label: t('setting.system'), key: Routes.System },
  // { icon: Banknote, label: 'Plan', key: Routes.Plan },
  { icon: Banknote, label: 'MCP', key: Routes.Mcp },
];
@ -101,7 +103,7 @@ export function SideBar() {
          logout();
        }}
      >
        Log Out
        {t('setting.logout')}
      </Button>
    </div>
  </aside>
@ -27,6 +27,8 @@ export enum Routes {
  System = '/system',
  Model = '/model',
  Prompt = '/prompt',
  DataSource = '/data-source',
  DataSourceDetailPage = '/data-source-detail-page',
  ProfileMcp = `${ProfileSetting}${Mcp}`,
  ProfileTeam = `${ProfileSetting}${Team}`,
  ProfilePlan = `${ProfileSetting}${Plan}`,
@ -400,9 +402,19 @@ const routes = [
        path: `/user-setting${Routes.Mcp}`,
        component: `@/pages${Routes.ProfileMcp}`,
      },
      {
        path: `/user-setting${Routes.DataSource}`,
        component: `@/pages/user-setting${Routes.DataSource}`,
      },
    ],
  },

  {
    path: `/user-setting${Routes.DataSource}${Routes.DataSourceDetailPage}`,
    component: `@/pages/user-setting${Routes.DataSource}${Routes.DataSourceDetailPage}`,
    layout: false,
  },

  // Admin routes
  {
    path: Routes.Admin,
web/src/services/data-source-service.ts (new file, 33 lines)
@ -0,0 +1,33 @@
import api from '@/utils/api';
import registerServer from '@/utils/register-server';
import request from '@/utils/request';

const { dataSourceSet, dataSourceList } = api;
const methods = {
  dataSourceSet: {
    url: dataSourceSet,
    method: 'post',
  },
  dataSourceList: {
    url: dataSourceList,
    method: 'get',
  },
} as const;
const dataSourceService = registerServer<keyof typeof methods>(
  methods,
  request,
);

export const deleteDataSource = (id: string) =>
  request.post(api.dataSourceDel(id));
export const dataSourceResume = (id: string, data: { resume: boolean }) => {
  console.log('api.dataSourceResume(id)', data);
  return request.put(api.dataSourceResume(id), { data });
};

export const getDataSourceLogs = (id: string, params?: any) =>
  request.get(api.dataSourceLogs(id), { params });
export const featchDataSourceDetail = (id: string) =>
  request.get(api.dataSourceDetail(id));

export default dataSourceService;
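For reference, a hypothetical call site for the exported helpers, following the same response envelope ({ code, data }) that hooks.ts destructures. The pauseAndInspect function is an illustrative name, not part of this changeset:

// Hypothetical call sites; not part of this changeset.
import {
  dataSourceResume,
  getDataSourceLogs,
} from '@/services/data-source-service';

async function pauseAndInspect(connectorId: string) {
  // PUT /connector/:id/resume with { resume: false } pauses the connector.
  const { data: resumeRes } = await dataSourceResume(connectorId, {
    resume: false,
  });
  if (resumeRes.code !== 0) return;

  // GET /connector/:id/logs, paginated the same way useLogListDataSource does.
  const { data: logsRes } = await getDataSourceLogs(connectorId, {
    page: 1,
    page_size: 10,
  });
  console.log(logsRes.data.logs, logsRes.data.total);
}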
@ -34,6 +34,14 @@ export default {
  enable_llm: `${api_host}/llm/enable_llm`,
  deleteFactory: `${api_host}/llm/delete_factory`,

  // data source
  dataSourceSet: `${api_host}/connector/set`,
  dataSourceList: `${api_host}/connector/list`,
  dataSourceDel: (id: string) => `${api_host}/connector/${id}/rm`,
  dataSourceResume: (id: string) => `${api_host}/connector/${id}/resume`,
  dataSourceLogs: (id: string) => `${api_host}/connector/${id}/logs`,
  dataSourceDetail: (id: string) => `${api_host}/connector/${id}`,

  // plugin
  llm_tools: `${api_host}/plugin/llm_tools`,