Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-04 03:25:30 +08:00.

Compare commits: 113 commits on `pipeline`, head commit `f4324e89d9`.
| SHA1 |
|---|
| f4324e89d9 |
| f04c9e2937 |
| 1fc2889f98 |
| ee0c38da66 |
| c1806e1ab2 |
| 66d0d44a00 |
| 2078d88c28 |
| 7734ad7fcd |
| 1a47e136e3 |
| cbf04ee470 |
| ef0aecea3b |
| dfc5fa1f4d |
| f341dc03b8 |
| 4585edc20e |
| dba9158f9a |
| 82f572ff95 |
| 518a00630e |
| aa61ae24dc |
| fb950079ef |
| aec8c15e7e |
| 7c620bdc69 |
| e7dde69584 |
| d6eded1959 |
| 80f851922a |
| 17757930a3 |
| a8883905a7 |
| 8426cbbd02 |
| 0b759f559c |
| 2d5d10ecbf |
| 954bd5a1c2 |
| ccb1c269e8 |
| 6dfb0c245c |
| 72d1047a8f |
| bece37e6c8 |
| 59cb0eb8bc |
| fc56217eb3 |
| 723cf9443e |
| bd94b5dfb5 |
| ef59c5bab9 |
| 62b7c655c5 |
| b0b866c8fd |
| 3a831d0c28 |
| 9e323a9351 |
| 7ac95b759b |
| daea357940 |
| 4aa1abd8e5 |
| 922b5c652d |
| aaa97874c6 |
| 193d93d820 |
| 4058715df7 |
| 3f595029d7 |
| e8f5a4da56 |
| a9472e3652 |
| 4dd48b60f3 |
| e4ab8ba2de |
| a1f848bfe0 |
| f2309ff93e |
| 38be53cf31 |
| 65a06d62d8 |
| 10cbbb76f8 |
| 1c84d1b562 |
| 4eb7659499 |
| 46a61e5aff |
| da82566304 |
| c8b79dfed4 |
| da80fa40bc |
| 94dbd4aac9 |
| ca9f30e1a1 |
| 2e4295d5ca |
| d11b1628a1 |
| 45f9f428db |
| 902703d145 |
| 7ccca2143c |
| 70ce02faf4 |
| 3f1741c8c6 |
| 6c24ad7966 |
| 4846589599 |
| a24547aa66 |
| a04c5247ab |
| ed6a76dcc0 |
| a0ccbec8bd |
| 4693c5382a |
| ff3b4d0dcd |
| 62d35b1b73 |
| 91b609447d |
| c353840244 |
| f12b9fdcd4 |
| 80ede65bbe |
| 52cf186028 |
| ea0f1d47a5 |
| 9fe7c92217 |
| d353f7f7f8 |
| f3738b06f1 |
| 5a8bc88147 |
| 04ef5b2783 |
| c9ea22ef69 |
| 152111fd9d |
| 86f6da2f74 |
| 8c00cbc87a |
| 41e808f4e6 |
| bc0281040b |
| 341a7b1473 |
| c29c395390 |
| a23a0f230c |
| 2a88ce6be1 |
| 664b781d62 |
| 65571e5254 |
| aa30f20730 |
| b9b278d441 |
| e1d86cfee3 |
| 8ebd07337f |
| dd584d57b0 |
| 3d39b96c6f |
.github/workflows/release.yml (vendored, 4 lines changed)

```diff
@@ -25,7 +25,7 @@ jobs:
       - name: Check out code
         uses: actions/checkout@v4
         with:
-          token: ${{ secrets.MY_GITHUB_TOKEN }} # Use the secret as an environment variable
+          token: ${{ secrets.GITHUB_TOKEN }} # Use the secret as an environment variable
           fetch-depth: 0
           fetch-tags: true

@@ -69,7 +69,7 @@ jobs:
         # https://github.com/actions/upload-release-asset has been replaced by https://github.com/softprops/action-gh-release
         uses: softprops/action-gh-release@v2
         with:
-          token: ${{ secrets.MY_GITHUB_TOKEN }} # Use the secret as an environment variable
+          token: ${{ secrets.GITHUB_TOKEN }} # Use the secret as an environment variable
          prerelease: ${{ env.PRERELEASE }}
          tag_name: ${{ env.RELEASE_TAG }}
          # The body field does not support environment variable substitution directly.
```
.github/workflows/tests.yml (vendored, 48 lines changed)

```diff
@@ -34,12 +34,10 @@ jobs:
       # https://github.com/hmarr/debug-action
       #- uses: hmarr/debug-action@v2

-      - name: Show who triggered this workflow
-        run: |
-          echo "Workflow triggered by ${{ github.event_name }}"
-
-      - name: Ensure workspace ownership
-        run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
+      - name: Ensure workspace ownership
+        run: |
+          echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE

       # https://github.com/actions/checkout/issues/1781
       - name: Check out code
@@ -48,6 +46,44 @@ jobs:
           fetch-depth: 0
           fetch-tags: true

+      - name: Check workflow duplication
+        if: ${{ !cancelled() && !failure() && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci')) }}
+        run: |
+          if [[ ${{ github.event_name }} != 'pull_request' ]]; then
+            HEAD=$(git rev-parse HEAD)
+            # Find a PR that introduced a given commit
+            gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"
+            PR_NUMBER=$(gh pr list --search ${HEAD} --state merged --json number --jq .[0].number)
+            echo "HEAD=${HEAD}"
+            echo "PR_NUMBER=${PR_NUMBER}"
+            if [[ -n ${PR_NUMBER} ]]; then
+              PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
+              if [[ -f ${PR_SHA_FP} ]]; then
+                read -r PR_SHA PR_RUN_ID < "${PR_SHA_FP}"
+                # Calculate the hash of the current workspace content
+                HEAD_SHA=$(git rev-parse HEAD^{tree})
+                if [[ ${HEAD_SHA} == ${PR_SHA} ]]; then
+                  echo "Cancel myself since the workspace content hash is the same with PR #${PR_NUMBER} merged. See ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${PR_RUN_ID} for details."
+                  gh run cancel ${GITHUB_RUN_ID}
+                  while true; do
+                    status=$(gh run view ${GITHUB_RUN_ID} --json status -q .status)
+                    [ "$status" = "completed" ] && break
+                    sleep 5
+                  done
+                  exit 1
+                fi
+              fi
+            fi
+          else
+            PR_NUMBER=${{ github.event.pull_request.number }}
+            PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
+            # Calculate the hash of the current workspace content
+            PR_SHA=$(git rev-parse HEAD^{tree})
+            echo "PR #${PR_NUMBER} workspace content hash: ${PR_SHA}"
+            mkdir -p ${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}
+            echo "${PR_SHA} ${GITHUB_RUN_ID}" > ${PR_SHA_FP}
+          fi

       # https://github.com/astral-sh/ruff-action
       - name: Static check with Ruff
         uses: astral-sh/ruff-action@v3
@@ -59,11 +95,11 @@ jobs:
         run: |
           RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
           sudo docker pull ubuntu:22.04
-          sudo docker build --progress=plain --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
+          sudo DOCKER_BUILDKIT=1 docker build --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .

       - name: Build ragflow:nightly
         run: |
-          sudo docker build --progress=plain --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
+          sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .

       - name: Start ragflow:nightly-slim
         run: |
```
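The duplication check above keys on the git *tree* hash rather than the commit hash: a merge that lands byte-identical content gets a new commit id but the same tree id. A minimal standalone sketch of that distinction (Python calling git; not part of the workflow itself):

```python
import subprocess

def rev_parse(ref: str) -> str:
    # Ask git to resolve a revision to its object id.
    return subprocess.check_output(["git", "rev-parse", ref], text=True).strip()

commit_hash = rev_parse("HEAD")        # changes whenever history changes (e.g. a merge commit)
tree_hash = rev_parse("HEAD^{tree}")   # stable as long as the checked-out content is identical

# Two runs with the same tree hash have byte-identical workspaces, so the
# workflow can cancel itself when a merged PR already tested this content.
print(commit_hash, tree_hash)
```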
````diff
@@ -341,11 +341,13 @@ docker build --platform linux/amd64 -f Dockerfile -t infiniflow/ragflow:nightly
 5. If your operating system does not have jemalloc, please install it as follows:

    ```bash
-   # ubuntu
+   # Ubuntu
    sudo apt-get install libjemalloc-dev
-   # centos
+   # CentOS
    sudo yum install jemalloc
-   # mac
+   # OpenSUSE
+   sudo zypper install jemalloc
+   # macOS
    sudo brew install jemalloc
    ```
````
```diff
@@ -390,6 +390,22 @@ class AdminCLI:
         service_id: int = command['number']
         print(f"Showing service: {service_id}")

+        url = f'http://{self.host}:{self.port}/api/v1/admin/services/{service_id}'
+        response = requests.get(url, auth=HTTPBasicAuth(self.admin_account, self.admin_password))
+        res_json = response.json()
+        if response.status_code == 200:
+            res_data = res_json['data']
+            if res_data['alive']:
+                print(f"Service {res_data['service_name']} is alive. Detail:")
+                if isinstance(res_data['message'], str):
+                    print(res_data['message'])
+                else:
+                    self._print_table_simple(res_data['message'])
+            else:
+                print(f"Service {res_data['service_name']} is down. Detail: {res_data['message']}")
+        else:
+            print(f"Fail to show service, code: {res_json['code']}, message: {res_json['message']}")
+
     def _handle_restart_service(self, command):
         service_id: int = command['number']
         print(f"Restart service {service_id}")
```
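The same detail endpoint the CLI calls above can be queried directly. A minimal sketch, assuming a local Admin Service on its default port 9381; the account name and password here are hypothetical placeholders for your superuser credentials:

```python
import requests
from requests.auth import HTTPBasicAuth

# Hypothetical local setup: Admin Service on 0.0.0.0:9381, default superuser password "admin".
url = "http://0.0.0.0:9381/api/v1/admin/services/0"
response = requests.get(url, auth=HTTPBasicAuth("admin@ragflow.io", "admin"))
payload = response.json()

if response.status_code == 200 and payload["data"]["alive"]:
    # "message" is either a plain string or a list of rows, as the CLI branch above shows.
    print(payload["data"]["service_name"], "is alive:", payload["data"]["message"])
else:
    print("code:", payload.get("code"), "message:", payload.get("message"))
```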
```diff
@@ -3,7 +3,7 @@ import uuid
 from functools import wraps
 from flask import request, jsonify

-from exceptions import AdminException
+from api.common.exceptions import AdminException
 from api.db.init_data import encode_to_base64
 from api.db.services import UserService
```
```diff
@@ -32,6 +32,7 @@ class BaseConfig(BaseModel):
     host: str
     port: int
     service_type: str
+    detail_func_name: str

     def to_dict(self) -> dict[str, Any]:
         return {'id': self.id, 'name': self.name, 'host': self.host, 'port': self.port, 'service_type': self.service_type}
@@ -209,7 +210,8 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 name: str = f'ragflow_{ragflow_count}'
                 host: str = v['host']
                 http_port: int = v['http_port']
-                config = RAGFlowServerConfig(id=id_count, name=name, host=host, port=http_port, service_type="ragflow_server")
+                config = RAGFlowServerConfig(id=id_count, name=name, host=host, port=http_port,
+                                             service_type="ragflow_server", detail_func_name="check_ragflow_server_alive")
                 configurations.append(config)
                 id_count += 1
             case "es":
@@ -222,7 +224,8 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 password: str = v.get('password')
                 config = ElasticsearchConfig(id=id_count, name=name, host=host, port=port, service_type="retrieval",
                                              retrieval_type="elasticsearch",
-                                             username=username, password=password)
+                                             username=username, password=password,
+                                             detail_func_name="get_es_cluster_stats")
                 configurations.append(config)
                 id_count += 1
@@ -234,7 +237,7 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 port = int(parts[1])
                 database: str = v.get('db_name', 'default_db')
                 config = InfinityConfig(id=id_count, name=name, host=host, port=port, service_type="retrieval", retrieval_type="infinity",
-                                        db_name=database)
+                                        db_name=database, detail_func_name="get_infinity_status")
                 configurations.append(config)
                 id_count += 1
             case "minio":
@@ -246,7 +249,7 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 user = v.get('user')
                 password = v.get('password')
                 config = MinioConfig(id=id_count, name=name, host=host, port=port, user=user, password=password, service_type="file_store",
-                                     store_type="minio")
+                                     store_type="minio", detail_func_name="check_minio_alive")
                 configurations.append(config)
                 id_count += 1
             case "redis":
@@ -258,7 +261,7 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 password = v.get('password')
                 db: int = v.get('db')
                 config = RedisConfig(id=id_count, name=name, host=host, port=port, password=password, database=db,
-                                     service_type="message_queue", mq_type="redis")
+                                     service_type="message_queue", mq_type="redis", detail_func_name="get_redis_info")
                 configurations.append(config)
                 id_count += 1
             case "mysql":
@@ -268,7 +271,7 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
                 username = v.get('user')
                 password = v.get('password')
                 config = MySQLConfig(id=id_count, name=name, host=host, port=port, username=username, password=password,
-                                     service_type="meta_data", meta_type="mysql")
+                                     service_type="meta_data", meta_type="mysql", detail_func_name="get_mysql_status")
                 configurations.append(config)
                 id_count += 1
             case "admin":
```
```diff
@@ -3,7 +3,7 @@ from flask import Blueprint, request
 from auth import login_verify
 from responses import success_response, error_response
 from services import UserMgr, ServiceMgr, UserServiceMgr
-from exceptions import AdminException
+from api.common.exceptions import AdminException

 admin_bp = Blueprint('admin', __name__, url_prefix='/api/v1/admin')
```
```diff
@@ -7,7 +7,9 @@ from api.db.services.canvas_service import UserCanvasService
 from api.db.services.user_service import TenantService
 from api.db.services.knowledgebase_service import KnowledgebaseService
 from api.utils.crypt import decrypt
-from exceptions import AdminException, UserAlreadyExistsError, UserNotFoundError
+from api.utils import health_utils
+from api.common.exceptions import AdminException, UserAlreadyExistsError, UserNotFoundError
+from config import SERVICE_CONFIGS

 class UserMgr:
@@ -164,7 +166,22 @@ class ServiceMgr:

     @staticmethod
     def get_service_details(service_id: int):
-        raise AdminException("get_service_details: not implemented")
+        service_id = int(service_id)
+        configs = SERVICE_CONFIGS.configs
+        service_config_mapping = {
+            c.id: {
+                'name': c.name,
+                'detail_func_name': c.detail_func_name
+            } for c in configs
+        }
+        service_info = service_config_mapping.get(service_id, {})
+        if not service_info:
+            raise AdminException(f"Invalid service_id: {service_id}")
+
+        detail_func = getattr(health_utils, service_info.get('detail_func_name'))
+        res = detail_func()
+        res.update({'service_name': service_info.get('name')})
+        return res

     @staticmethod
     def shutdown_service(service_id: int):
```
```diff
@@ -346,3 +346,7 @@ Respond immediately with your final comprehensive answer.

         return "Error occurred."
+
+    def reset(self):
+        for k, cpn in self.tools.items():
+            cpn.reset()
```
```diff
@@ -19,11 +19,12 @@ import os
 import re
 import time
 from abc import ABC

 import requests

+from agent.component.base import ComponentBase, ComponentParamBase
 from api.utils.api_utils import timeout
 from deepdoc.parser import HtmlParser
-from agent.component.base import ComponentBase, ComponentParamBase


 class InvokeParam(ComponentParamBase):
@@ -43,11 +44,11 @@ class InvokeParam(ComponentParamBase):
         self.datatype = "json"  # New parameter to determine data posting type

     def check(self):
-        self.check_valid_value(self.method.lower(), "Type of content from the crawler", ['get', 'post', 'put'])
+        self.check_valid_value(self.method.lower(), "Type of content from the crawler", ["get", "post", "put"])
         self.check_empty(self.url, "End point URL")
         self.check_positive_integer(self.timeout, "Timeout time in second")
         self.check_boolean(self.clean_html, "Clean HTML")
-        self.check_valid_value(self.datatype.lower(), "Data post type", ['json', 'formdata'])  # Check for valid datapost value
+        self.check_valid_value(self.datatype.lower(), "Data post type", ["json", "formdata"])  # Check for valid datapost value


 class Invoke(ComponentBase, ABC):
@@ -63,6 +64,18 @@ class Invoke(ComponentBase, ABC):
             args[para["key"]] = self._canvas.get_variable_value(para["ref"])

         url = self._param.url.strip()

+        def replace_variable(match):
+            var_name = match.group(1)
+            try:
+                value = self._canvas.get_variable_value(var_name)
+                return str(value or "")
+            except Exception:
+                return ""
+
+        # {base_url} or {component_id@variable_name}
+        url = re.sub(r"\{([a-zA-Z_][a-zA-Z0-9_.@-]*)\}", replace_variable, url)
+
         if url.find("http") != 0:
             url = "http://" + url
@@ -75,52 +88,32 @@ class Invoke(ComponentBase, ABC):
             proxies = {"http": self._param.proxy, "https": self._param.proxy}

         last_e = ""
-        for _ in range(self._param.max_retries+1):
+        for _ in range(self._param.max_retries + 1):
             try:
-                if method == 'get':
-                    response = requests.get(url=url,
-                                            params=args,
-                                            headers=headers,
-                                            proxies=proxies,
-                                            timeout=self._param.timeout)
+                if method == "get":
+                    response = requests.get(url=url, params=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
                     if self._param.clean_html:
                         sections = HtmlParser()(None, response.content)
                         self.set_output("result", "\n".join(sections))
                     else:
                         self.set_output("result", response.text)

-                if method == 'put':
-                    if self._param.datatype.lower() == 'json':
-                        response = requests.put(url=url,
-                                                json=args,
-                                                headers=headers,
-                                                proxies=proxies,
-                                                timeout=self._param.timeout)
+                if method == "put":
+                    if self._param.datatype.lower() == "json":
+                        response = requests.put(url=url, json=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
                     else:
-                        response = requests.put(url=url,
-                                                data=args,
-                                                headers=headers,
-                                                proxies=proxies,
-                                                timeout=self._param.timeout)
+                        response = requests.put(url=url, data=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
                     if self._param.clean_html:
                         sections = HtmlParser()(None, response.content)
                         self.set_output("result", "\n".join(sections))
                     else:
                         self.set_output("result", response.text)

-                if method == 'post':
-                    if self._param.datatype.lower() == 'json':
-                        response = requests.post(url=url,
-                                                 json=args,
-                                                 headers=headers,
-                                                 proxies=proxies,
-                                                 timeout=self._param.timeout)
+                if method == "post":
+                    if self._param.datatype.lower() == "json":
+                        response = requests.post(url=url, json=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
                     else:
-                        response = requests.post(url=url,
-                                                 data=args,
-                                                 headers=headers,
-                                                 proxies=proxies,
-                                                 timeout=self._param.timeout)
+                        response = requests.post(url=url, data=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
                     if self._param.clean_html:
                         self.set_output("result", "\n".join(sections))
                     else:
```
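The placeholder substitution added above resolves both plain names like `{base_url}` and component references like `{component_id@variable_name}`. A standalone sketch of the same regex, with a plain dict of hypothetical values standing in for the canvas lookup:

```python
import re

PATTERN = r"\{([a-zA-Z_][a-zA-Z0-9_.@-]*)\}"  # same pattern as in the diff above

variables = {"base_url": "api.example.com", "search_0@query": "ragflow"}  # hypothetical values

def replace_variable(match: re.Match) -> str:
    # Unknown variables collapse to "", mirroring the component's except-branch.
    return str(variables.get(match.group(1), ""))

url = "http://{base_url}/v1/search?q={search_0@query}"
print(re.sub(PATTERN, replace_variable, url))
# -> http://api.example.com/v1/search?q=ragflow
```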
```diff
@@ -156,8 +156,8 @@ class CodeExec(ToolBase, ABC):
             self.set_output("_ERROR", "construct code request error: " + str(e))

         try:
-            resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60))
-            logging.info(f"http://{settings.SANDBOX_HOST}:9385/run", code_req, resp.status_code)
+            resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
+            logging.info(f"http://{settings.SANDBOX_HOST}:9385/run, code_req: {code_req}, resp.status_code {resp.status_code}:")
             if resp.status_code != 200:
                 resp.raise_for_status()
             body = resp.json()
```
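The `int(...)` wrapper in the fix matters because environment variables are always strings, while `requests` expects a numeric timeout. A quick standalone illustration (assumed, not part of the change itself):

```python
import os

os.environ["COMPONENT_EXEC_TIMEOUT"] = "600"           # env values are always str
timeout = os.environ.get("COMPONENT_EXEC_TIMEOUT", 10 * 60)
print(type(timeout))       # <class 'str'> when the variable is set (the default would be int)
print(type(int(timeout)))  # <class 'int'>, which requests' timeout parameter accepts
```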
```diff
@@ -85,7 +85,7 @@ class SearXNG(ToolBase, ABC):
             self.set_output("formalized_content", "")
             return ""

-        searxng_url = (kwargs.get("searxng_url") or getattr(self._param, "searxng_url", "") or "").strip()
+        searxng_url = (getattr(self._param, "searxng_url", "") or kwargs.get("searxng_url") or "").strip()
         # In try-run, if no URL configured, just return empty instead of raising
         if not searxng_url:
             self.set_output("formalized_content", "")
```
```diff
@@ -24,6 +24,7 @@ from flask import request
 from flask_login import current_user, login_required

 from api import settings
+from api.common.check_team_permission import check_kb_team_permission
 from api.constants import FILE_NAME_LEN_LIMIT, IMG_BASE64_PREFIX
 from api.db import VALID_FILE_TYPES, VALID_TASK_STATUS, FileSource, FileType, ParserType, TaskStatus
 from api.db.db_models import File, Task
@@ -68,8 +69,10 @@ def upload():
     e, kb = KnowledgebaseService.get_by_id(kb_id)
     if not e:
         raise LookupError("Can't find this knowledgebase!")
-    err, files = FileService.upload_document(kb, file_objs, current_user.id)
+    if not check_kb_team_permission(kb, current_user.id):
+        return get_json_result(data=False, message="No authorization.", code=settings.RetCode.AUTHENTICATION_ERROR)
+
+    err, files = FileService.upload_document(kb, file_objs, current_user.id)
     if err:
         return get_json_result(data=files, message="\n".join(err), code=settings.RetCode.SERVER_ERROR)
@@ -94,6 +97,8 @@ def web_crawl():
     e, kb = KnowledgebaseService.get_by_id(kb_id)
     if not e:
         raise LookupError("Can't find this knowledgebase!")
+    if check_kb_team_permission(kb, current_user.id):
+        return get_json_result(data=False, message="No authorization.", code=settings.RetCode.AUTHENTICATION_ERROR)

     blob = html2pdf(url)
     if not blob:
@@ -552,8 +557,8 @@ def get(doc_id):
 @login_required
 @validate_request("doc_id")
 def change_parser():
-    req = request.json

+    req = request.json
     if not DocumentService.accessible(req["doc_id"], current_user.id):
         return get_json_result(data=False, message="No authorization.", code=settings.RetCode.AUTHENTICATION_ERROR)
@@ -577,7 +582,7 @@ def change_parser():
         settings.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)

     try:
-        if "pipeline_id" in req:
+        if "pipeline_id" in req and req["pipeline_id"] != "":
             if doc.pipeline_id == req["pipeline_id"]:
                 return get_json_result(data=True)
             DocumentService.update_by_id(doc.id, {"pipeline_id": req["pipeline_id"]})
```
```diff
@@ -21,6 +21,7 @@ import flask
 from flask import request
 from flask_login import login_required, current_user

+from api.common.check_team_permission import check_file_team_permission
 from api.db.services.document_service import DocumentService
 from api.db.services.file2document_service import File2DocumentService
 from api.utils.api_utils import server_error_response, get_data_error_result, validate_request
@@ -246,7 +247,7 @@ def rm():
             return get_data_error_result(message="File or Folder not found!")
         if not file.tenant_id:
             return get_data_error_result(message="Tenant not found!")
-        if file.tenant_id != current_user.id:
+        if not check_file_team_permission(file, current_user.id):
             return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)
         if file.source_type == FileSource.KNOWLEDGEBASE:
             continue
@@ -294,7 +295,7 @@ def rename():
     e, file = FileService.get_by_id(req["file_id"])
     if not e:
         return get_data_error_result(message="File not found!")
-    if file.tenant_id != current_user.id:
+    if not check_file_team_permission(file, current_user.id):
         return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)
     if file.type != FileType.FOLDER.value \
             and pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
@@ -332,7 +333,7 @@ def get(file_id):
         e, file = FileService.get_by_id(file_id)
         if not e:
             return get_data_error_result(message="Document not found!")
-        if file.tenant_id != current_user.id:
+        if not check_file_team_permission(file, current_user.id):
             return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)

         blob = STORAGE_IMPL.get(file.parent_id, file.location)
@@ -373,7 +374,7 @@ def move():
             return get_data_error_result(message="File or Folder not found!")
         if not file.tenant_id:
             return get_data_error_result(message="Tenant not found!")
-        if file.tenant_id != current_user.id:
+        if not check_file_team_permission(file, current_user.id):
             return get_json_result(data=False, message='No authorization.', code=settings.RetCode.AUTHENTICATION_ERROR)
         fe, _ = FileService.get_by_id(parent_id)
         if not fe:
```
```diff
@@ -38,6 +38,7 @@ from api.constants import DATASET_NAME_LIMIT
 from rag.settings import PAGERANK_FLD
 from rag.utils.storage_factory import STORAGE_IMPL


 @manager.route('/create', methods=['post'])  # noqa: F821
 @login_required
 @validate_request("name")
```
```diff
@@ -39,7 +39,6 @@ from rag.utils.redis_conn import REDIS_CONN
 from flask import jsonify
 from api.utils.health_utils import run_health_checks


 @manager.route("/version", methods=["GET"])  # noqa: F821
 @login_required
 def version():
@@ -178,6 +177,11 @@ def healthz():
     return jsonify(result), (200 if all_ok else 500)


+@manager.route("/ping", methods=["GET"])  # noqa: F821
+def ping():
+    return "pong", 200
+
+
 @manager.route("/new_token", methods=["POST"])  # noqa: F821
 @login_required
 def new_token():
```
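The new `/ping` route is an unauthenticated liveness probe; it is what `check_ragflow_server_alive` in the health utilities further below builds its URL against. A minimal sketch of probing it, assuming a local server on the default 9380 port:

```python
import requests

# Hypothetical local address; the health check composes the same URL from settings.HOST_IP/HOST_PORT.
resp = requests.get("http://127.0.0.1:9380/v1/system/ping", timeout=5)
print(resp.status_code, resp.text)  # expected: 200 pong
```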
api/common/check_team_permission.py (new file, 59 lines)

```python
#
#  Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#


from api.db import TenantPermission
from api.db.db_models import File, Knowledgebase
from api.db.services.file_service import FileService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.user_service import TenantService


def check_kb_team_permission(kb: dict | Knowledgebase, other: str) -> bool:
    kb = kb.to_dict() if isinstance(kb, Knowledgebase) else kb

    kb_tenant_id = kb["tenant_id"]

    if kb_tenant_id == other:
        return True

    if kb["permission"] != TenantPermission.TEAM:
        return False

    joined_tenants = TenantService.get_joined_tenants_by_user_id(other)
    return any(tenant["tenant_id"] == kb_tenant_id for tenant in joined_tenants)


def check_file_team_permission(file: dict | File, other: str) -> bool:
    file = file.to_dict() if isinstance(file, File) else file

    file_tenant_id = file["tenant_id"]
    if file_tenant_id == other:
        return True

    file_id = file["id"]

    kb_ids = [kb_info["kb_id"] for kb_info in FileService.get_kb_id_by_file_id(file_id)]

    for kb_id in kb_ids:
        ok, kb = KnowledgebaseService.get_by_id(kb_id)
        if not ok:
            continue

        if check_kb_team_permission(kb, other):
            return True

    return False
```
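A short usage sketch of the helper above, mirroring how the document routes call it; the knowledge base id and user id here are hypothetical:

```python
from api.common.check_team_permission import check_kb_team_permission
from api.db.services.knowledgebase_service import KnowledgebaseService

ok, kb = KnowledgebaseService.get_by_id("kb_id_123")  # hypothetical id
if ok and not check_kb_team_permission(kb, "user_id_456"):
    print("No authorization.")  # the routes return an AUTHENTICATION_ERROR here
```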
api/common/exceptions.py (new file, 21 lines)

```python
class AdminException(Exception):
    def __init__(self, message, code=400):
        super().__init__(message)
        self.type = "admin"
        self.code = code
        self.message = message


class UserNotFoundError(AdminException):
    def __init__(self, username):
        super().__init__(f"User '{username}' not found", 404)


class UserAlreadyExistsError(AdminException):
    def __init__(self, username):
        super().__init__(f"User '{username}' already exists", 409)


class CannotDeleteAdminError(AdminException):
    def __init__(self):
        super().__init__("Cannot delete admin account", 403)
```
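Since each exception carries an HTTP-style `code`, a handler can map it straight to a response payload. A minimal sketch, assuming just the classes above:

```python
from api.common.exceptions import AdminException, UserNotFoundError

try:
    raise UserNotFoundError("example@ragflow.io")  # hypothetical username
except AdminException as e:
    # e.code is 404 here; e.message keeps the human-readable text.
    print({"code": e.code, "message": e.message})
```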
```diff
@@ -315,4 +315,4 @@ class UserTenantService(CommonService):
             ).first()
             return user_tenant
         except peewee.DoesNotExist:
             return None
```
```diff
@@ -21,3 +21,26 @@ def string_to_bytes(string):

 def bytes_to_string(byte):
     return byte.decode(encoding="utf-8")
+
+
+def convert_bytes(size_in_bytes: int) -> str:
+    """
+    Format size in bytes.
+    """
+    if size_in_bytes == 0:
+        return "0 B"
+
+    units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
+    i = 0
+    size = float(size_in_bytes)
+
+    while size >= 1024 and i < len(units) - 1:
+        size /= 1024
+        i += 1
+
+    if i == 0 or size >= 100:
+        return f"{size:.0f} {units[i]}"
+    elif size >= 10:
+        return f"{size:.1f} {units[i]}"
+    else:
+        return f"{size:.2f} {units[i]}"
```
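The three precision tiers keep the output at roughly three significant digits. Expected behavior of the helper above (a quick check, not part of the change):

```python
# assuming convert_bytes from the utility module added above
print(convert_bytes(0))          # 0 B
print(convert_bytes(512))        # 512 B   (i == 0 -> no decimals)
print(convert_bytes(5 * 1024))   # 5.00 KB (size < 10 -> two decimals)
print(convert_bytes(15 * 1024))  # 15.0 KB (10 <= size < 100 -> one decimal)
print(convert_bytes(123456789))  # 118 MB  (size >= 100 -> no decimals)
```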
```diff
@@ -13,14 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #

 import os
 import requests
 from timeit import default_timer as timer

 from api import settings
 from api.db.db_models import DB
 from rag import settings as rag_settings
 from rag.utils.redis_conn import REDIS_CONN
 from rag.utils.storage_factory import STORAGE_IMPL
 from rag.utils.es_conn import ESConnection
 from rag.utils.infinity_conn import InfinityConnection


 def _ok_nok(ok: bool) -> str:
@@ -65,6 +68,96 @@ def check_storage() -> tuple[bool, dict]:
         return False, {"elapsed": f"{(timer() - st) * 1000.0:.1f}", "error": str(e)}


+def get_es_cluster_stats() -> dict:
+    doc_engine = os.getenv('DOC_ENGINE', 'elasticsearch')
+    if doc_engine != 'elasticsearch':
+        raise Exception("Elasticsearch is not in use.")
+    try:
+        return {
+            "alive": True,
+            "message": ESConnection().get_cluster_stats()
+        }
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
+def get_infinity_status():
+    doc_engine = os.getenv('DOC_ENGINE', 'elasticsearch')
+    if doc_engine != 'infinity':
+        raise Exception("Infinity is not in use.")
+    try:
+        return {
+            "alive": True,
+            "message": InfinityConnection().health()
+        }
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
+def get_mysql_status():
+    try:
+        cursor = DB.execute_sql("SHOW PROCESSLIST;")
+        res_rows = cursor.fetchall()
+        headers = ['id', 'user', 'host', 'db', 'command', 'time', 'state', 'info']
+        cursor.close()
+        return {
+            "alive": True,
+            "message": [dict(zip(headers, r)) for r in res_rows]
+        }
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
+def check_minio_alive():
+    start_time = timer()
+    try:
+        response = requests.get(f'http://{rag_settings.MINIO["host"]}/minio/health/live')
+        if response.status_code == 200:
+            return {'alive': True, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+        else:
+            return {'alive': False, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
+def get_redis_info():
+    try:
+        return {
+            "alive": True,
+            "message": REDIS_CONN.info()
+        }
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
+def check_ragflow_server_alive():
+    start_time = timer()
+    try:
+        response = requests.get(f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping')
+        if response.status_code == 200:
+            return {'alive': True, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+        else:
+            return {'alive': False, "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
+    except Exception as e:
+        return {
+            "alive": False,
+            "message": f"error: {str(e)}",
+        }
+
+
 def run_health_checks() -> tuple[dict, bool]:
```
```diff
@@ -3533,6 +3533,13 @@
             "model_type": "chat",
             "is_tools": true
         },
+        {
+            "llm_name": "claude-sonnet-4-5-20250929",
+            "tags": "LLM,CHAT,IMAGE2TEXT,200k",
+            "max_tokens": 204800,
+            "model_type": "chat",
+            "is_tools": true
+        },
         {
             "llm_name": "claude-sonnet-4-20250514",
             "tags": "LLM,CHAT,IMAGE2TEXT,200k",
@@ -4862,6 +4869,280 @@
             "max_tokens": 8000,
             "model_type": "chat",
             "is_tools": true
+        },
+        {
+            "llm_name": "LongCat-Flash-Thinking",
+            "tags": "LLM,CHAT,8000",
+            "max_tokens": 8000,
+            "model_type": "chat",
+            "is_tools": true
         }
     ]
 },
+{
+    "name": "DeerAPI",
+    "logo": "",
+    "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
+    "status": "1",
+    "llm": [
+        {"llm_name": "gpt-5-chat-latest", "tags": "LLM,CHAT,400k", "max_tokens": 400000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "chatgpt-4o-latest", "tags": "LLM,CHAT,128k", "max_tokens": 128000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-5-mini", "tags": "LLM,CHAT,400k", "max_tokens": 400000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-5-nano", "tags": "LLM,CHAT,400k", "max_tokens": 400000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-5", "tags": "LLM,CHAT,400k", "max_tokens": 400000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-4.1-mini", "tags": "LLM,CHAT,1M", "max_tokens": 1047576, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-4.1-nano", "tags": "LLM,CHAT,1M", "max_tokens": 1047576, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-4.1", "tags": "LLM,CHAT,1M", "max_tokens": 1047576, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gpt-4o-mini", "tags": "LLM,CHAT,128k", "max_tokens": 128000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "o4-mini-2025-04-16", "tags": "LLM,CHAT,200k", "max_tokens": 200000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "o3-pro-2025-06-10", "tags": "LLM,CHAT,200k", "max_tokens": 200000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "claude-opus-4-1-20250805", "tags": "LLM,CHAT,200k,IMAGE2TEXT", "max_tokens": 200000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "claude-opus-4-1-20250805-thinking", "tags": "LLM,CHAT,200k,IMAGE2TEXT", "max_tokens": 200000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "claude-sonnet-4-20250514", "tags": "LLM,CHAT,200k,IMAGE2TEXT", "max_tokens": 200000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "claude-sonnet-4-20250514-thinking", "tags": "LLM,CHAT,200k,IMAGE2TEXT", "max_tokens": 200000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "claude-3-7-sonnet-latest", "tags": "LLM,CHAT,200k", "max_tokens": 200000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "claude-3-5-haiku-latest", "tags": "LLM,CHAT,200k", "max_tokens": 200000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "gemini-2.5-pro", "tags": "LLM,CHAT,1M,IMAGE2TEXT", "max_tokens": 1000000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "gemini-2.5-flash", "tags": "LLM,CHAT,1M,IMAGE2TEXT", "max_tokens": 1000000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "gemini-2.5-flash-lite", "tags": "LLM,CHAT,1M,IMAGE2TEXT", "max_tokens": 1000000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "gemini-2.0-flash", "tags": "LLM,CHAT,1M,IMAGE2TEXT", "max_tokens": 1000000, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "grok-4-0709", "tags": "LLM,CHAT,131k", "max_tokens": 131072, "model_type": "chat", "is_tools": true},
+        {"llm_name": "grok-3", "tags": "LLM,CHAT,131k", "max_tokens": 131072, "model_type": "chat", "is_tools": true},
+        {"llm_name": "grok-3-mini", "tags": "LLM,CHAT,131k", "max_tokens": 131072, "model_type": "chat", "is_tools": true},
+        {"llm_name": "grok-2-image-1212", "tags": "LLM,CHAT,32k,IMAGE2TEXT", "max_tokens": 32768, "model_type": "image2text", "is_tools": true},
+        {"llm_name": "deepseek-v3.1", "tags": "LLM,CHAT,64k", "max_tokens": 64000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "deepseek-v3", "tags": "LLM,CHAT,64k", "max_tokens": 64000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "deepseek-r1-0528", "tags": "LLM,CHAT,164k", "max_tokens": 164000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "deepseek-chat", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "deepseek-reasoner", "tags": "LLM,CHAT,64k", "max_tokens": 64000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "qwen3-30b-a3b", "tags": "LLM,CHAT,128k", "max_tokens": 128000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "qwen3-coder-plus-2025-07-22", "tags": "LLM,CHAT,128k", "max_tokens": 128000, "model_type": "chat", "is_tools": true},
+        {"llm_name": "text-embedding-ada-002", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": "embedding", "is_tools": false},
+        {"llm_name": "text-embedding-3-small", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": "embedding", "is_tools": false},
+        {"llm_name": "text-embedding-3-large", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": "embedding", "is_tools": false},
+        {"llm_name": "whisper-1", "tags": "SPEECH2TEXT", "max_tokens": 26214400, "model_type": "speech2text", "is_tools": false},
+        {"llm_name": "tts-1", "tags": "TTS", "max_tokens": 2048, "model_type": "tts", "is_tools": false}
+    ]
+}
```
```diff
@@ -1129,7 +1129,7 @@ class RAGFlowPdfParser:
         for tag in re.findall(r"@@[0-9-]+\t[0-9.\t]+##", txt):
             pn, left, right, top, bottom = tag.strip("#").strip("@").split("\t")
             left, right, top, bottom = float(left), float(right), float(top), float(bottom)
-            poss.append(([int(p) - 1 for p in pn.split("-")], int(left), int(right), int(top), int(bottom)))
+            poss.append(([int(p) - 1 for p in pn.split("-")], left, right, top, bottom))
         return poss

     def crop(self, text, ZM=3, need_position=False):
@@ -1274,12 +1274,16 @@ class VisionParser(RAGFlowPdfParser):
                     prompt=vision_llm_describe_prompt(page=pdf_page_num + 1),
                     callback=callback,
                 )

                 if kwargs.get("callback"):
                     kwargs["callback"](idx * 1.0 / len(self.page_images), f"Processed: {idx + 1}/{len(self.page_images)}")

                 if text:
                     width, height = self.page_images[idx].size
-                    all_docs.append((text, f"{pdf_page_num + 1} 0 {width / zoomin} 0 {height / zoomin}"))
+                    all_docs.append((
+                        text,
+                        f"@@{pdf_page_num + 1}\t{0.0:.1f}\t{width / zoomin:.1f}\t{0.0:.1f}\t{height / zoomin:.1f}##"
+                    ))
         return all_docs, []
```
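The VisionParser fix above switches its position strings to the same `@@page\tleft\tright\ttop\tbottom##` tag format that `crop` already parses. A standalone sketch round-tripping one tag with the regex from the parser; the coordinate values are hypothetical:

```python
import re

tag = "@@3\t0.0\t612.0\t0.0\t792.0##"  # hypothetical tag: page 3, full US-letter page

for m in re.findall(r"@@[0-9-]+\t[0-9.\t]+##", tag):  # same pattern as RAGFlowPdfParser
    pn, left, right, top, bottom = m.strip("#").strip("@").split("\t")
    pages = [int(p) - 1 for p in pn.split("-")]       # 1-based page list -> 0-based
    print(pages, float(left), float(right), float(top), float(bottom))
# -> [2] 0.0 612.0 0.0 792.0
```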
```diff
@@ -98,7 +98,7 @@ Where:

 - `mcp-host`: The MCP server's host address.
 - `mcp-port`: The MCP server's listening port.
-- `mcp-base_url`: The address of the running RAGFlow server.
+- `mcp-base-url`: The address of the running RAGFlow server.
 - `mcp-script-path`: The file path to the MCP server’s main script.
 - `mcp-mode`: The launch mode.
   - `self-host`: (default) self-host mode.
```
docs/guides/manage_users_and_services.md (new file, 360 lines)
# Admin CLI and Admin Service

The Admin CLI and Admin Service form a client-server suite for RAGFlow system administration. The Admin CLI is an interactive command-line client that sends instructions to the Admin Service and displays the results in real time. Together they enable real-time monitoring of system status, with visibility into the RAGFlow server and its dependent components, including MySQL, Elasticsearch, Redis, and MinIO. In administrator mode, they also provide user management: you can list users and perform critical operations, such as user creation, password updates, activation changes, and full deletion of a user's data, even when the corresponding web-interface features are disabled.
## Starting the Admin Service

1. Before starting the Admin Service, make sure the RAGFlow system is already running.
2. Switch to the ragflow/ directory and run the service script:

```bash
source .venv/bin/activate
export PYTHONPATH=$(pwd)
python admin/admin_server.py
```

The service starts and listens for incoming CLI connections on the configured port. The default port is 9381.
## Using the Admin CLI

1. Ensure the Admin Service is running.
2. Launch the CLI client:

```bash
source .venv/bin/activate
export PYTHONPATH=$(pwd)
python admin/admin_client.py -h 0.0.0.0 -p 9381
```

Enter the superuser's password to log in. The default password is `admin`.
## Supported Commands

Commands are case-insensitive and must be terminated with a semicolon (`;`).

### Service Management Commands

`LIST SERVICES;`

- Lists all available services within the RAGFlow system.
- [Example](#example-list-services)

`SHOW SERVICE <id>;`

- Shows detailed status information for the service identified by `<id>`.
- [Example](#example-show-service)

### User Management Commands

`LIST USERS;`

- Lists all users known to the system.
- [Example](#example-list-users)

`SHOW USER <username>;`

- Shows details and permissions for the user specified by **email**. The username must be enclosed in single or double quotes.
- [Example](#example-show-user)

`CREATE USER <username> <password>;`

- Creates a user with the given username and password. The username and password must be enclosed in single or double quotes.
- [Example](#example-create-user)

`DROP USER <username>;`

- Removes the specified user from the system. Use with caution.
- [Example](#example-drop-user)

`ALTER USER PASSWORD <username> <new_password>;`

- Changes the password for the specified user.
- [Example](#example-alter-user-password)

`ALTER USER ACTIVE <username> <on/off>;`

- Sets the user to active or inactive.
- [Example](#example-alter-user-active)

### Data and Agent Commands

`LIST DATASETS OF <username>;`

- Lists the datasets associated with the specified user.
- [Example](#example-list-datasets-of-user)

`LIST AGENTS OF <username>;`

- Lists the agents associated with the specified user.
- [Example](#example-list-agents-of-user)

### Meta-Commands

- \? or \help
  Shows help information for the available commands.
- \q or \quit
  Exits the CLI application.
- [Example](#example-meta-commands)
### Examples
|
||||
|
||||
<span id="example-list-services"></span>
|
||||
|
||||
- List all available services.
|
||||
|
||||
```
|
||||
admin> list services;
|
||||
command: list services;
|
||||
Listing all services
|
||||
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
|
||||
| extra | host | id | name | port | service_type |
|
||||
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
|
||||
| {} | 0.0.0.0 | 0 | ragflow_0 | 9380 | ragflow_server |
|
||||
| {'meta_type': 'mysql', 'password': 'infini_rag_flow', 'username': 'root'} | localhost | 1 | mysql | 5455 | meta_data |
|
||||
| {'password': 'infini_rag_flow', 'store_type': 'minio', 'user': 'rag_flow'} | localhost | 2 | minio | 9000 | file_store |
|
||||
| {'password': 'infini_rag_flow', 'retrieval_type': 'elasticsearch', 'username': 'elastic'} | localhost | 3 | elasticsearch | 1200 | retrieval |
|
||||
| {'db_name': 'default_db', 'retrieval_type': 'infinity'} | localhost | 4 | infinity | 23817 | retrieval |
|
||||
| {'database': 1, 'mq_type': 'redis', 'password': 'infini_rag_flow'} | localhost | 5 | redis | 6379 | message_queue |
|
||||
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
|
||||
|
||||
```
|
||||
|
||||
<span id="example-show-service"></span>
|
||||
|
||||
- Show ragflow_server.
|
||||
|
||||
```
|
||||
admin> show service 0;
|
||||
command: show service 0;
|
||||
Showing service: 0
|
||||
Service ragflow_0 is alive. Detail:
|
||||
Confirm elapsed: 26.0 ms.
|
||||
```
|
||||
|
||||
- Show mysql.
|
||||
|
||||
```
|
||||
admin> show service 1;
|
||||
command: show service 1;
|
||||
Showing service: 1
|
||||
Service mysql is alive. Detail:
|
||||
+---------+----------+------------------+------+------------------+------------------------+-------+-----------------+
|
||||
| command | db | host | id | info | state | time | user |
|
||||
+---------+----------+------------------+------+------------------+------------------------+-------+-----------------+
|
||||
| Daemon | None | localhost | 5 | None | Waiting on empty queue | 16111 | event_scheduler |
|
||||
| Sleep | rag_flow | 172.18.0.1:40046 | 1610 | None | | 2 | root |
|
||||
| Query | rag_flow | 172.18.0.1:35882 | 1629 | SHOW PROCESSLIST | init | 0 | root |
|
||||
+---------+----------+------------------+------+------------------+------------------------+-------+-----------------+
|
||||
```
|
||||
|
||||
- Show minio.
|
||||
|
||||
```
|
||||
admin> show service 2;
|
||||
command: show service 2;
|
||||
Showing service: 2
|
||||
Service minio is alive. Detail:
|
||||
Confirm elapsed: 2.1 ms.
|
||||
```
|
||||
|
||||
- Show elasticsearch.
|
||||
|
||||
```
|
||||
admin> show service 3;
|
||||
command: show service 3;
|
||||
Showing service: 3
|
||||
Service elasticsearch is alive. Detail:
|
||||
+----------------+------+--------------+---------+----------------+--------------+---------------+--------------+------------------------------+----------------------------+-----------------+-------+---------------+---------+-------------+---------------------+--------+------------+--------------------+
|
||||
| cluster_name | docs | docs_deleted | indices | indices_shards | jvm_heap_max | jvm_heap_used | jvm_versions | mappings_deduplicated_fields | mappings_deduplicated_size | mappings_fields | nodes | nodes_version | os_mem | os_mem_used | os_mem_used_percent | status | store_size | total_dataset_size |
|
||||
+----------------+------+--------------+---------+----------------+--------------+---------------+--------------+------------------------------+----------------------------+-----------------+-------+---------------+---------+-------------+---------------------+--------+------------+--------------------+
|
||||
| docker-cluster | 717 | 86 | 37 | 42 | 3.76 GB | 1.74 GB | 21.0.1+12-29 | 6575 | 48.0 KB | 8521 | 1 | ['8.11.3'] | 7.52 GB | 4.55 GB | 61 | green | 4.60 MB | 4.60 MB |
|
||||
+----------------+------+--------------+---------+----------------+--------------+---------------+--------------+------------------------------+----------------------------+-----------------+-------+---------------+---------+-------------+---------------------+--------+------------+--------------------+
|
||||
```
|
||||
|
||||
- Show infinity.
|
||||
|
||||
```
|
||||
admin> show service 4;
|
||||
command: show service 4;
|
||||
Showing service: 4
|
||||
Fail to show service, code: 500, message: Infinity is not in use.
|
||||
```
|
||||
|
||||
- Show redis.
|
||||
|
||||
```
|
||||
admin> show service 5;
|
||||
command: show service 5;
|
||||
Showing service: 5
|
||||
Service redis is alive. Detail:
|
||||
+-----------------+-------------------+---------------------------+-------------------------+---------------+-------------+--------------------------+---------------------+-------------+
|
||||
| blocked_clients | connected_clients | instantaneous_ops_per_sec | mem_fragmentation_ratio | redis_version | server_mode | total_commands_processed | total_system_memory | used_memory |
|
||||
+-----------------+-------------------+---------------------------+-------------------------+---------------+-------------+--------------------------+---------------------+-------------+
|
||||
| 0 | 2 | 1 | 10.41 | 7.2.4 | standalone | 10446 | 30.84G | 1.10M |
|
||||
+-----------------+-------------------+---------------------------+-------------------------+---------------+-------------+--------------------------+---------------------+-------------+
|
||||
```
|
||||
|
||||
<span id="example-list-users"></span>
|
||||
|
||||
- List all user.
|
||||
|
||||
```
|
||||
admin> list users;
|
||||
command: list users;
|
||||
Listing all users
|
||||
+-------------------------------+----------------------+-----------+----------+
|
||||
| create_date | email | is_active | nickname |
|
||||
+-------------------------------+----------------------+-----------+----------+
|
||||
| Mon, 22 Sep 2025 10:59:04 GMT | admin@ragflow.io | 1 | admin |
|
||||
| Sun, 14 Sep 2025 17:36:27 GMT | lynn_inf@hotmail.com | 1 | Lynn |
|
||||
+-------------------------------+----------------------+-----------+----------+
|
||||
```
|
||||
|
||||
<span id="example-show-user"></span>
|
||||
|
||||
- Show specified user.
|
||||
|
||||
```
|
||||
admin> show user "admin@ragflow.io";
|
||||
command: show user "admin@ragflow.io";
|
||||
Showing user: admin@ragflow.io
|
||||
+-------------------------------+------------------+-----------+--------------+------------------+--------------+----------+-----------------+---------------+--------+-------------------------------+
|
||||
| create_date | email | is_active | is_anonymous | is_authenticated | is_superuser | language | last_login_time | login_channel | status | update_date |
|
||||
+-------------------------------+------------------+-----------+--------------+------------------+--------------+----------+-----------------+---------------+--------+-------------------------------+
|
||||
| Mon, 22 Sep 2025 10:59:04 GMT | admin@ragflow.io | 1 | 0 | 1 | True | Chinese | None | None | 1 | Mon, 22 Sep 2025 10:59:04 GMT |
|
||||
+-------------------------------+------------------+-----------+--------------+------------------+--------------+----------+-----------------+---------------+--------+-------------------------------+
|
||||
```
|
||||
|
||||
<span id="example-create-user"></span>
|
||||
|
||||
- Create new user.
|
||||
|
||||
```
|
||||
admin> create user "example@ragflow.io" "psw";
|
||||
command: create user "example@ragflow.io" "psw";
|
||||
Create user: example@ragflow.io, password: psw, role: user
|
||||
+----------------------------------+--------------------+----------------------------------+--------------+---------------+----------+
|
||||
| access_token | email | id | is_superuser | login_channel | nickname |
|
||||
+----------------------------------+--------------------+----------------------------------+--------------+---------------+----------+
|
||||
| 5cdc6d1e9df111f099b543aee592c6bf | example@ragflow.io | 5cdc6ca69df111f099b543aee592c6bf | False | password | |
|
||||
+----------------------------------+--------------------+----------------------------------+--------------+---------------+----------+
|
||||
```
|
||||
|
||||
<span id="example-alter-user-password"></span>
|
||||
|
||||
- Alter user password.
|
||||
|
||||
```
|
||||
admin> alter user password "example@ragflow.io" "newpsw";
|
||||
command: alter user password "example@ragflow.io" "newpsw";
|
||||
Alter user: example@ragflow.io, password: newpsw
|
||||
Password updated successfully!
|
||||
```
|
||||
|
||||
<span id="example-alter-user-active"></span>
|
||||
|
||||
- Alter user active, turn off.
|
||||
|
||||
```
|
||||
admin> alter user active "example@ragflow.io" off;
|
||||
command: alter user active "example@ragflow.io" off;
|
||||
Alter user example@ragflow.io activate status, turn off.
|
||||
Turn off user activate status successfully!
|
||||
```
|
||||
|
||||
<span id="example-drop-user"></span>
|
||||
|
||||
- Drop user.
|
||||
|
||||
```
|
||||
admin> Drop user "example@ragflow.io";
|
||||
command: Drop user "example@ragflow.io";
|
||||
Drop user: example@ragflow.io
|
||||
Successfully deleted user. Details:
|
||||
Start to delete owned tenant.
|
||||
- Deleted 2 tenant-LLM records.
|
||||
- Deleted 0 langfuse records.
|
||||
- Deleted 1 tenant.
|
||||
- Deleted 1 user-tenant records.
|
||||
- Deleted 1 user.
|
||||
Delete done!
|
||||
```
|
||||
|
||||
Delete user's data at the same time.
|
||||
|
||||
<span id="example-list-datasets-of-user"></span>
|
||||
|
||||
- List the specified user's dataset.
|
||||
|
||||
```
|
||||
admin> list datasets of "lynn_inf@hotmail.com";
|
||||
command: list datasets of "lynn_inf@hotmail.com";
|
||||
Listing all datasets of user: lynn_inf@hotmail.com
|
||||
+-----------+-------------------------------+---------+----------+---------------+------------+--------+-----------+-------------------------------+
|
||||
| chunk_num | create_date | doc_num | language | name | permission | status | token_num | update_date |
|
||||
+-----------+-------------------------------+---------+----------+---------------+------------+--------+-----------+-------------------------------+
|
||||
| 29 | Mon, 15 Sep 2025 11:56:59 GMT | 12 | Chinese | test_dataset | me | 1 | 12896 | Fri, 19 Sep 2025 17:50:58 GMT |
|
||||
| 4 | Sun, 28 Sep 2025 11:49:31 GMT | 6 | Chinese | dataset_share | team | 1 | 1121 | Sun, 28 Sep 2025 14:41:03 GMT |
|
||||
+-----------+-------------------------------+---------+----------+---------------+------------+--------+-----------+-------------------------------+
|
||||
```
|
||||
|
||||
<span id="example-list-agents-of-user"></span>
|
||||
|
||||
- List the specified user's agents.
|
||||
|
||||
```
|
||||
admin> list agents of "lynn_inf@hotmail.com";
|
||||
command: list agents of "lynn_inf@hotmail.com";
|
||||
Listing all agents of user: lynn_inf@hotmail.com
|
||||
+-----------------+-------------+------------+-----------------+
|
||||
| canvas_category | canvas_type | permission | title |
|
||||
+-----------------+-------------+------------+-----------------+
|
||||
| agent_canvas | None | team | research_helper |
|
||||
+-----------------+-------------+------------+-----------------+
|
||||
```
|
||||
|
||||
<span id="example-meta-commands"></span>
|
||||
|
||||
- Show help infomation.
|
||||
|
||||
```
|
||||
admin> \help
|
||||
command: \help
|
||||
|
||||
Commands:
|
||||
LIST SERVICES
|
||||
SHOW SERVICE <service>
|
||||
STARTUP SERVICE <service>
|
||||
SHUTDOWN SERVICE <service>
|
||||
RESTART SERVICE <service>
|
||||
LIST USERS
|
||||
SHOW USER <user>
|
||||
DROP USER <user>
|
||||
CREATE USER <user> <password>
|
||||
ALTER USER PASSWORD <user> <new_password>
|
||||
ALTER USER ACTIVE <user> <on/off>
|
||||
LIST DATASETS OF <user>
|
||||
LIST AGENTS OF <user>
|
||||
|
||||
Meta Commands:
|
||||
\?, \h, \help Show this help
|
||||
\q, \quit, \exit Quit the CLI
|
||||
```
|
||||
|
||||
- Exit

```
admin> \q
command: \q
Goodbye!
```

@@ -66,6 +66,7 @@ A complete list of models supported by RAGFlow, which will continue to expand.
| DeepInfra | :heavy_check_mark: | :heavy_check_mark: | | | :heavy_check_mark: | :heavy_check_mark: |
| 302.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
| CometAPI | :heavy_check_mark: | :heavy_check_mark: | | | | |
| DeerAPI | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | :heavy_check_mark: |

```mdx-code-block
</APITable>

@@ -6,7 +6,6 @@
# dependencies = [
#     "huggingface-hub",
#     "nltk",
#     "argparse",
# ]
# ///

@@ -119,7 +119,7 @@ dependencies = [
    "graspologic>=3.4.1,<4.0.0",
    "mini-racer>=0.12.4,<0.13.0",
    "pyodbc>=5.2.0,<6.0.0",
    "pyicu>=2.13.1,<3.0.0",
    "pyicu>=2.15.3,<3.0.0",
    "flasgger>=0.9.7.1,<0.10.0",
    "xxhash>=3.5.0,<4.0.0",
    "trio>=0.29.0",

@@ -1,15 +0,0 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

@@ -1,299 +0,0 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import random
import trio
from api.db import LLMType
from api.db.services.llm_service import LLMBundle
from deepdoc.parser.pdf_parser import RAGFlowPdfParser
from graphrag.utils import chat_limiter, get_llm_cache, set_llm_cache
from rag.flow.base import ProcessBase, ProcessParamBase
from rag.flow.chunker.schema import ChunkerFromUpstream
from rag.nlp import naive_merge, naive_merge_with_images, concat_img
from rag.prompts.prompts import keyword_extraction, question_proposal, detect_table_of_contents, \
    table_of_contents_index, toc_transformer
from rag.utils import num_tokens_from_string


class ChunkerParam(ProcessParamBase):
    def __init__(self):
        super().__init__()
        self.method_options = [
            # General
            "general",
            "onetable",
            # Customer Service
            "q&a",
            "manual",
            # Recruitment
            "resume",
            # Education & Research
            "book",
            "paper",
            "laws",
            "presentation",
            "toc"  # table of contents
            # Other
            # "Tag"  # TODO: Other method
        ]
        self.method = "general"
        self.chunk_token_size = 512
        self.delimiter = "\n"
        self.overlapped_percent = 0
        self.page_rank = 0
        self.auto_keywords = 0
        self.auto_questions = 0
        self.tag_sets = []
        self.llm_setting = {"llm_id": "", "lang": "Chinese"}

    def check(self):
        self.check_valid_value(self.method.lower(), "Chunk method abnormal.", self.method_options)
        self.check_positive_integer(self.chunk_token_size, "Chunk token size.")
        self.check_nonnegative_number(self.page_rank, "Page rank value: (0, 10]")
        self.check_nonnegative_number(self.auto_keywords, "Auto-keyword value: (0, 10]")
        self.check_nonnegative_number(self.auto_questions, "Auto-question value: (0, 10]")
        self.check_decimal_float(self.overlapped_percent, "Overlapped percentage: [0, 1)")

    def get_input_form(self) -> dict[str, dict]:
        return {}


class Chunker(ProcessBase):
    component_name = "Chunker"

    def _general(self, from_upstream: ChunkerFromUpstream):
        self.callback(random.randint(1, 5) / 100.0, "Start to chunk via `General`.")
        if from_upstream.output_format in ["markdown", "text", "html"]:
            if from_upstream.output_format == "markdown":
                payload = from_upstream.markdown_result
            elif from_upstream.output_format == "text":
                payload = from_upstream.text_result
            else:  # == "html"
                payload = from_upstream.html_result

            if not payload:
                payload = ""

            cks = naive_merge(
                payload,
                self._param.chunk_token_size,
                self._param.delimiter,
                self._param.overlapped_percent,
            )
            return [{"text": c} for c in cks]

        # json
        sections, section_images = [], []
        for o in from_upstream.json_result or []:
            sections.append((o.get("text", ""), o.get("position_tag", "")))
            section_images.append(o.get("image"))

        chunks, images = naive_merge_with_images(
            sections,
            section_images,
            self._param.chunk_token_size,
            self._param.delimiter,
            self._param.overlapped_percent,
        )

        return [
            {
                "text": RAGFlowPdfParser.remove_tag(c),
                "image": img,
                "positions": RAGFlowPdfParser.extract_positions(c),
            }
            for c, img in zip(chunks, images)
        ]

    def _q_and_a(self, from_upstream: ChunkerFromUpstream):
        pass

    def _resume(self, from_upstream: ChunkerFromUpstream):
        pass

    def _manual(self, from_upstream: ChunkerFromUpstream):
        pass

    def _table(self, from_upstream: ChunkerFromUpstream):
        pass

    def _paper(self, from_upstream: ChunkerFromUpstream):
        pass

    def _book(self, from_upstream: ChunkerFromUpstream):
        pass

    def _laws(self, from_upstream: ChunkerFromUpstream):
        pass

    def _presentation(self, from_upstream: ChunkerFromUpstream):
        pass

    def _one(self, from_upstream: ChunkerFromUpstream):
        pass

    def _toc(self, from_upstream: ChunkerFromUpstream):
        self.callback(random.randint(1, 5) / 100.0, "Start to chunk via `ToC`.")
        if from_upstream.output_format in ["markdown", "text", "html"]:
            return

        # json
        sections, section_images, page_1024, tc_arr = [], [], [""], [0]
        for o in from_upstream.json_result or []:
            txt = o.get("text", "")
            tc = num_tokens_from_string(txt)
            page_1024[-1] += "\n" + txt
            tc_arr[-1] += tc
            if tc_arr[-1] > 1024:
                page_1024.append("")
                tc_arr.append(0)
            sections.append((o.get("text", ""), o.get("position_tag", "")))
            section_images.append(o.get("image"))
            print(len(sections), o)

        llm_setting = self._param.llm_setting
        chat_mdl = LLMBundle(self._canvas._tenant_id, LLMType.CHAT, llm_name=llm_setting["llm_id"], lang=llm_setting["lang"])
        self.callback(random.randint(5, 15) / 100.0, "Start to detect table of contents...")
        toc_secs = detect_table_of_contents(page_1024, chat_mdl)
        if toc_secs:
            self.callback(random.randint(25, 35) / 100.0, "Start to extract table of contents...")
            toc_arr = toc_transformer(toc_secs, chat_mdl)
            toc_arr = [it for it in toc_arr if it.get("structure")]
            print(json.dumps(toc_arr, ensure_ascii=False, indent=2), flush=True)
            self.callback(random.randint(35, 75) / 100.0, "Start to link table of contents...")
            toc_arr = table_of_contents_index(toc_arr, [t for t, _ in sections], chat_mdl)
            for i in range(len(toc_arr)-1):
                if not toc_arr[i].get("indices"):
                    continue

                for j in range(i+1, len(toc_arr)):
                    if toc_arr[j].get("indices"):
                        if toc_arr[j]["indices"][0] - toc_arr[i]["indices"][-1] > 1:
                            toc_arr[i]["indices"].extend([x for x in range(toc_arr[i]["indices"][-1]+1, toc_arr[j]["indices"][0])])
                        break
            # put all sections ahead of toc_arr[0] into it
            # for i in range(len(toc_arr)):
            #     if toc_arr[i].get("indices") and toc_arr[i]["indices"][0]:
            #         toc_arr[i]["indices"] = [x for x in range(toc_arr[i]["indices"][-1]+1)]
            #         break
            # put all sections after toc_arr[-1] into it
            for i in range(len(toc_arr)-1, -1, -1):
                if toc_arr[i].get("indices") and toc_arr[i]["indices"][-1]:
                    toc_arr[i]["indices"] = [x for x in range(toc_arr[i]["indices"][0], len(sections))]
                    break
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n", json.dumps(toc_arr, ensure_ascii=False, indent=2), flush=True)

            chunks, images = [], []
            for it in toc_arr:
                if not it.get("indices"):
                    continue
                txt = ""
                img = None
                for i in it["indices"]:
                    idx = i
                    txt += "\n" + sections[idx][0] + "\t" + sections[idx][1]
                    if img and section_images[idx]:
                        img = concat_img(img, section_images[idx])
                    elif section_images[idx]:
                        img = section_images[idx]

                it["indices"] = []
                if not txt:
                    continue
                it["indices"] = [len(chunks)]
                print(it, "KKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK\n", txt)
                chunks.append(txt)
                images.append(img)
            self.callback(1, "Done")
            return [
                {
                    "text": RAGFlowPdfParser.remove_tag(c),
                    "image": img,
                    "positions": RAGFlowPdfParser.extract_positions(c),
                }
                for c, img in zip(chunks, images)
            ]

        self.callback(message="No table of contents detected.")

    async def _invoke(self, **kwargs):
        function_map = {
            "general": self._general,
            "q&a": self._q_and_a,
            "resume": self._resume,
            "manual": self._manual,
            "table": self._table,
            "paper": self._paper,
            "book": self._book,
            "laws": self._laws,
            "presentation": self._presentation,
            "one": self._one,
        }

        try:
            from_upstream = ChunkerFromUpstream.model_validate(kwargs)
        except Exception as e:
            self.set_output("_ERROR", f"Input error: {str(e)}")
            return

        chunks = function_map[self._param.method](from_upstream)
        llm_setting = self._param.llm_setting

        async def auto_keywords():
            nonlocal chunks, llm_setting
            chat_mdl = LLMBundle(self._canvas._tenant_id, LLMType.CHAT, llm_name=llm_setting["llm_id"], lang=llm_setting["lang"])

            async def doc_keyword_extraction(chat_mdl, ck, topn):
                cached = get_llm_cache(chat_mdl.llm_name, ck["text"], "keywords", {"topn": topn})
                if not cached:
                    async with chat_limiter:
                        cached = await trio.to_thread.run_sync(lambda: keyword_extraction(chat_mdl, ck["text"], topn))
                    set_llm_cache(chat_mdl.llm_name, ck["text"], cached, "keywords", {"topn": topn})
                if cached:
                    ck["keywords"] = cached.split(",")

            async with trio.open_nursery() as nursery:
                for ck in chunks:
                    nursery.start_soon(doc_keyword_extraction, chat_mdl, ck, self._param.auto_keywords)

        async def auto_questions():
            nonlocal chunks, llm_setting
            chat_mdl = LLMBundle(self._canvas._tenant_id, LLMType.CHAT, llm_name=llm_setting["llm_id"], lang=llm_setting["lang"])

            async def doc_question_proposal(chat_mdl, d, topn):
                cached = get_llm_cache(chat_mdl.llm_name, ck["text"], "question", {"topn": topn})
                if not cached:
                    async with chat_limiter:
                        cached = await trio.to_thread.run_sync(lambda: question_proposal(chat_mdl, ck["text"], topn))
                    set_llm_cache(chat_mdl.llm_name, ck["text"], cached, "question", {"topn": topn})
                if cached:
                    d["questions"] = cached.split("\n")

            async with trio.open_nursery() as nursery:
                for ck in chunks:
                    nursery.start_soon(doc_question_proposal, chat_mdl, ck, self._param.auto_questions)

        async with trio.open_nursery() as nursery:
            if self._param.auto_questions:
                nursery.start_soon(auto_questions)
            if self._param.auto_keywords:
                nursery.start_soon(auto_keywords)

        if self._param.page_rank:
            for ck in chunks:
                ck["page_rank"] = self._param.page_rank

        self.set_output("chunks", chunks)
@@ -1,37 +0,0 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Literal

from pydantic import BaseModel, ConfigDict, Field


class ChunkerFromUpstream(BaseModel):
    created_time: float | None = Field(default=None, alias="_created_time")
    elapsed_time: float | None = Field(default=None, alias="_elapsed_time")

    name: str
    file: dict | None = Field(default=None)

    output_format: Literal["json", "markdown", "text", "html"] | None = Field(default=None)

    json_result: list[dict[str, Any]] | None = Field(default=None, alias="json")
    markdown_result: str | None = Field(default=None, alias="markdown")
    text_result: str | None = Field(default=None, alias="text")
    html_result: list[str] | None = Field(default=None, alias="html")

    model_config = ConfigDict(populate_by_name=True, extra="forbid")

    # def to_dict(self, *, exclude_none: bool = True) -> dict:
    #     return self.model_dump(by_alias=True, exclude_none=exclude_none)
@@ -133,6 +133,7 @@ class Base(ABC):
            "logprobs",
            "top_logprobs",
            "extra_headers",
            "enable_thinking"
        }

        gen_conf = {k: v for k, v in gen_conf.items() if k in allowed_conf}
@@ -1275,6 +1276,14 @@ class TokenPonyChat(Base):
        if not base_url:
            base_url = "https://ragflow.vip-api.tokenpony.cn/v1"


class DeerAPIChat(Base):
    _FACTORY_NAME = "DeerAPI"

    def __init__(self, key, model_name, base_url="https://api.deerapi.com/v1", **kwargs):
        if not base_url:
            base_url = "https://api.deerapi.com/v1"
        super().__init__(key, model_name, base_url, **kwargs)


class LiteLLMBase(ABC):
    _FACTORY_NAME = [
@@ -26,7 +26,7 @@ from openai.lib.azure import AzureOpenAI
from zhipuai import ZhipuAI
from rag.nlp import is_english
from rag.prompts.generator import vision_llm_describe_prompt
from rag.utils import num_tokens_from_string
from rag.utils import num_tokens_from_string, total_token_count_from_response


class Base(ABC):
@@ -125,7 +125,7 @@ class Base(ABC):
            b64 = base64.b64encode(data).decode("utf-8")
            return f"data:{mime};base64,{b64}"
        with BytesIO() as buffered:
            fmt = "JPEG"
            fmt = "jpeg"
            try:
                image.save(buffered, format="JPEG")
            except Exception:
@@ -133,10 +133,10 @@ class Base(ABC):
                buffered.seek(0)
                buffered.truncate()
                image.save(buffered, format="PNG")
                fmt = "PNG"
                fmt = "png"
            data = buffered.getvalue()
            b64 = base64.b64encode(data).decode("utf-8")
            mime = f"image/{fmt.lower()}"
            mime = f"image/{fmt}"
            return f"data:{mime};base64,{b64}"

    def prompt(self, b64):
@@ -178,7 +178,7 @@ class GptV4(Base):
            model=self.model_name,
            messages=self.prompt(b64),
        )
        return res.choices[0].message.content.strip(), res.usage.total_tokens
        return res.choices[0].message.content.strip(), total_token_count_from_response(res)

    def describe_with_prompt(self, image, prompt=None):
        b64 = self.image2base64(image)
@@ -186,7 +186,7 @@ class GptV4(Base):
            model=self.model_name,
            messages=self.vision_llm_prompt(b64, prompt),
        )
        return res.choices[0].message.content.strip(), res.usage.total_tokens
        return res.choices[0].message.content.strip(), total_token_count_from_response(res)

class AzureGptV4(GptV4):
@@ -522,11 +522,10 @@ class GeminiCV(Base):
        )
        b64 = self.image2base64(image)
        with BytesIO(base64.b64decode(b64)) as bio:
            img = open(bio)
            input = [prompt, img]
            res = self.model.generate_content(input)
            img.close()
            return res.text, res.usage_metadata.total_token_count
            with open(bio) as img:
                input = [prompt, img]
                res = self.model.generate_content(input)
                return res.text, total_token_count_from_response(res)

    def describe_with_prompt(self, image, prompt=None):
        from PIL.Image import open
@@ -534,11 +533,10 @@ class GeminiCV(Base):
        b64 = self.image2base64(image)
        vision_prompt = prompt if prompt else vision_llm_describe_prompt()
        with BytesIO(base64.b64decode(b64)) as bio:
            img = open(bio)
            input = [vision_prompt, img]
            res = self.model.generate_content(input)
            img.close()
            return res.text, res.usage_metadata.total_token_count
            with open(bio) as img:
                input = [vision_prompt, img]
                res = self.model.generate_content(input)
                return res.text, total_token_count_from_response(res)

    def chat(self, system, history, gen_conf, images=[]):
        generation_config = dict(temperature=gen_conf.get("temperature", 0.3), top_p=gen_conf.get("top_p", 0.7))
@@ -547,7 +545,7 @@ class GeminiCV(Base):
                self._form_history(system, history, images),
                generation_config=generation_config)
            ans = response.text
            return ans, response.usage_metadata.total_token_count
            return ans, total_token_count_from_response(ans)
        except Exception as e:
            return "**ERROR**: " + str(e), 0

@@ -570,10 +568,7 @@ class GeminiCV(Base):
        except Exception as e:
            yield ans + "\n**ERROR**: " + str(e)

        if response and hasattr(response, "usage_metadata") and hasattr(response.usage_metadata, "total_token_count"):
            yield response.usage_metadata.total_token_count
        else:
            yield 0
        yield total_token_count_from_response(response)


class NvidiaCV(Base):

@@ -33,7 +33,7 @@ from zhipuai import ZhipuAI
from api import settings
from api.utils.file_utils import get_home_cache_dir
from api.utils.log_utils import log_exception
from rag.utils import num_tokens_from_string, truncate, total_token_count_from_response
from rag.utils import num_tokens_from_string, truncate


class Base(ABC):
@@ -52,7 +52,15 @@ class Base(ABC):
        raise NotImplementedError("Please implement encode method!")

    def total_token_count(self, resp):
        return total_token_count_from_response(resp)
        try:
            return resp.usage.total_tokens
        except Exception:
            pass
        try:
            return resp["usage"]["total_tokens"]
        except Exception:
            pass
        return 0


class DefaultEmbedding(Base):
@@ -936,7 +944,6 @@ class GiteeEmbed(SILICONFLOWEmbed):
        base_url = "https://ai.gitee.com/v1/embeddings"
        super().__init__(key, model_name, base_url)


class DeepInfraEmbed(OpenAIEmbed):
    _FACTORY_NAME = "DeepInfra"

@@ -962,3 +969,11 @@ class CometAPIEmbed(OpenAIEmbed):
        if not base_url:
            base_url = "https://api.cometapi.com/v1"
        super().__init__(key, model_name, base_url)


class DeerAPIEmbed(OpenAIEmbed):
    _FACTORY_NAME = "DeerAPI"

    def __init__(self, key, model_name, base_url="https://api.deerapi.com/v1"):
        if not base_url:
            base_url = "https://api.deerapi.com/v1"
        super().__init__(key, model_name, base_url)

@@ -244,3 +244,12 @@ class CometAPISeq2txt(Base):
        base_url = "https://api.cometapi.com/v1"
        self.client = OpenAI(api_key=key, base_url=base_url)
        self.model_name = model_name


class DeerAPISeq2txt(Base):
    _FACTORY_NAME = "DeerAPI"

    def __init__(self, key, model_name="whisper-1", base_url="https://api.deerapi.com/v1", **kwargs):
        if not base_url:
            base_url = "https://api.deerapi.com/v1"
        self.client = OpenAI(api_key=key, base_url=base_url)
        self.model_name = model_name

@@ -402,3 +402,11 @@ class CometAPITTS(OpenAITTS):
        if not base_url:
            base_url = "https://api.cometapi.com/v1"
        super().__init__(key, model_name, base_url, **kwargs)


class DeerAPITTS(OpenAITTS):
    _FACTORY_NAME = "DeerAPI"

    def __init__(self, key, model_name, base_url="https://api.deerapi.com/v1", **kwargs):
        if not base_url:
            base_url = "https://api.deerapi.com/v1"
        super().__init__(key, model_name, base_url, **kwargs)

53  rag/prompts/assign_toc_levels.md  Normal file
@@ -0,0 +1,53 @@
You are given a JSON array of TOC items. Each item has at least {"title": string} and may include an existing structure.

Task
- For each item, assign a depth label using Arabic numerals only: top-level = 1, second-level = 2, third-level = 3, etc.
- Multiple items may share the same depth (e.g., many 1s, many 2s).
- Do not use dotted numbering (no 1.1/1.2). Use a single digit string per item indicating its depth only.
- Preserve the original item order exactly. Do not insert, delete, or reorder.
- Decide levels yourself to keep a coherent hierarchy. Keep peers at the same depth.

Output
- Return a valid JSON array only (no extra text).
- Each element must be {"structure": "1|2|3", "title": <original title string>}.
- title must be the original title string.

Examples

Example A (chapters with sections)
Input:
["Chapter 1 Methods", "Section 1 Definition", "Section 2 Process", "Chapter 2 Experiment"]

Output:
[
{"structure":"1","title":"Chapter 1 Methods"},
{"structure":"2","title":"Section 1 Definition"},
{"structure":"2","title":"Section 2 Process"},
{"structure":"1","title":"Chapter 2 Experiment"}
]

Example B (parts with chapters)
Input:
["Part I Theory", "Chapter 1 Basics", "Chapter 2 Methods", "Part II Applications", "Chapter 3 Case Studies"]

Output:
[
{"structure":"1","title":"Part I Theory"},
{"structure":"2","title":"Chapter 1 Basics"},
{"structure":"2","title":"Chapter 2 Methods"},
{"structure":"1","title":"Part II Applications"},
{"structure":"2","title":"Chapter 3 Case Studies"}
]

Example C (plain headings)
Input:
["Introduction", "Background and Motivation", "Related Work", "Methodology", "Evaluation"]

Output:
[
{"structure":"1","title":"Introduction"},
{"structure":"2","title":"Background and Motivation"},
{"structure":"2","title":"Related Work"},
{"structure":"1","title":"Methodology"},
{"structure":"1","title":"Evaluation"}
]
@@ -29,7 +29,7 @@ from rag.utils import encoder, num_tokens_from_string

STOP_TOKEN="<|STOP|>"
COMPLETE_TASK="complete_task"

INPUT_UTILIZATION = 0.5

def get_value(d, k1, k2):
    return d.get(k1, d.get(k2))
@@ -439,9 +439,9 @@ def gen_meta_filter(chat_mdl, meta_data:dict, query: str) -> list:
        return []


def gen_json(system_prompt:str, user_prompt:str, chat_mdl):
def gen_json(system_prompt:str, user_prompt:str, chat_mdl, gen_conf = None):
    _, msg = message_fit_in(form_message(system_prompt, user_prompt), chat_mdl.max_length)
    ans = chat_mdl.chat(msg[0]["content"], msg[1:])
    ans = chat_mdl.chat(msg[0]["content"], msg[1:], gen_conf=gen_conf)
    ans = re.sub(r"(^.*</think>|```json\n|```\n*$)", "", ans, flags=re.DOTALL)
    try:
        return json_repair.loads(ans)
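
A quick usage sketch for the extended `gen_json` helper (the prompts and the `chat_mdl` handle below are hypothetical; `gen_conf` is the new optional argument threaded through to `chat_mdl.chat`):

```
# Sketch only: chat_mdl stands in for an LLMBundle chat handle configured elsewhere.
# gen_json strips <think> blocks and ```json fences, then parses the reply with
# json_repair, so slightly malformed JSON from the model is tolerated.
data = gen_json(
    "Reply with a JSON object only.",           # system prompt (hypothetical)
    'Return {"ok": true} and nothing else.',    # user prompt (hypothetical)
    chat_mdl,
    gen_conf={"temperature": 0.0},              # deterministic decoding
)
print(data)  # -> {'ok': True} once the reply has been repaired and parsed
```
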
@@ -649,4 +649,85 @@ def toc_transformer(toc_pages, chat_mdl):
    return last_complete


TOC_LEVELS = load_prompt("assign_toc_levels")
def assign_toc_levels(toc_secs, chat_mdl, gen_conf = {"temperature": 0.2}):
    print("\nBegin TOC level assignment...\n")

    ans = gen_json(
        PROMPT_JINJA_ENV.from_string(TOC_LEVELS).render(),
        str(toc_secs),
        chat_mdl,
        gen_conf
    )

    return ans


TOC_FROM_TEXT_SYSTEM = load_prompt("toc_from_text_system")
TOC_FROM_TEXT_USER = load_prompt("toc_from_text_user")
# Generate TOC from text chunks with text llms
def gen_toc_from_text(text, chat_mdl):
    ans = gen_json(
        PROMPT_JINJA_ENV.from_string(TOC_FROM_TEXT_SYSTEM).render(),
        PROMPT_JINJA_ENV.from_string(TOC_FROM_TEXT_USER).render(text=text),
        chat_mdl,
        gen_conf={"temperature": 0.0, "top_p": 0.9, "enable_thinking": False, }
    )
    return ans


def split_chunks(chunks, max_length: int):
    """
    Pack chunks into batches according to max_length, returning [{"id": idx, "text": chunk_text}, ...].
    Do not split a single chunk, even if it exceeds max_length.
    """

    result = []
    batch, batch_tokens = [], 0

    for idx, chunk in enumerate(chunks):
        t = num_tokens_from_string(chunk)
        if batch_tokens + t > max_length:
            result.append(batch)
            batch, batch_tokens = [], 0
        batch.append({"id": idx, "text": chunk})
        batch_tokens += t
    if batch:
        result.append(batch)
    return result


def run_toc_from_text(chunks, chat_mdl):
    input_budget = int(chat_mdl.max_length * INPUT_UTILIZATION) - num_tokens_from_string(
        TOC_FROM_TEXT_USER + TOC_FROM_TEXT_SYSTEM
    )

    input_budget = 2000 if input_budget > 2000 else input_budget
    chunk_sections = split_chunks(chunks, input_budget)
    res = []

    for chunk in chunk_sections:
        ans = gen_toc_from_text(chunk, chat_mdl)
        res.extend(ans)

    # Filter out entries with title == -1
    filtered = [x for x in res if x.get("title") and x.get("title") != "-1"]

    print("\n\nFiltered TOC sections:\n", filtered)

    # Generate initial structure (structure/title)
    raw_structure = [{"structure": "0", "title": x.get("title", "")} for x in filtered]

    # Assign hierarchy levels using LLM
    toc_with_levels = assign_toc_levels(raw_structure, chat_mdl, {"temperature": 0.0, "top_p": 0.9, "enable_thinking": False})

    # Merge structure and content (by index)
    merged = []
    for _, (toc_item, src_item) in enumerate(zip(toc_with_levels, filtered)):
        merged.append({
            "structure": toc_item.get("structure", "0"),
            "title": toc_item.get("title", ""),
            "content": src_item.get("content", ""),
        })

    return merged
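
Taken together, the additions above form a small pipeline: `split_chunks` packs section texts into token-budgeted batches, `gen_toc_from_text` extracts candidate headings per batch, and `assign_toc_levels` labels their depth. A minimal end-to-end sketch (the chunk texts and `chat_mdl` handle are assumptions, not part of the diff):

```
from rag.prompts.generator import run_toc_from_text

chunks = [                                   # hypothetical parsed sections
    "Chapter 1 Methods. This chapter lays out the approach ...",
    "Section 1 Definition. Key terms are introduced ...",
    "Chapter 2 Experiment. The evaluation setup is ...",
]
toc = run_toc_from_text(chunks, chat_mdl)    # chat_mdl: an LLMBundle chat handle
# Each entry: {"structure": "1" | "2" | ..., "title": <heading>, "content": <chunk id>}
```
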
113  rag/prompts/toc_from_text_system.md  Normal file
@@ -0,0 +1,113 @@
You are a robust Table-of-Contents (TOC) extractor.

GOAL
Given a dictionary of chunks {chunk_id: chunk_text}, extract TOC-like headings and return a strict JSON array of objects:
[
{"title": "<heading or -1>", "content": "<chunk_id>"},
...
]

FIELDS
- "title": the heading text (clean, no page numbers or leader dots).
- If any part of a chunk has no valid heading, output that part as {"title":"-1", ...}.
- "content": the chunk_id (string).
- One chunk can yield multiple JSON objects in order (unmatched text + one or more headings).

RULES
1) Preserve input chunk order strictly.
2) If a chunk contains multiple headings, expand them in order:
- Pre-heading narrative → {"title":"-1","content":chunk_id}
- Then each heading → {"title":"...","content":chunk_id}
3) Do not merge outputs across chunks; each object refers to exactly one chunk_id.
4) "title" must be non-empty (or exactly "-1"). "content" must be a string (chunk_id).
5) When ambiguous, prefer "-1" unless the text strongly looks like a heading.

HEADING DETECTION (cues, not hard rules)
- Appears near line start, short isolated phrase, often followed by content.
- May contain separators: — —— - : : · •
- Numbering styles:
• 第[一二三四五六七八九十百]+(篇|章|节|条)
• [((]?[一二三四五六七八九十]+[))]?
• [((]?[①②③④⑤⑥⑦⑧⑨⑩][))]?
• ^\d+(\.\d+)*[)..]?\s*
• ^[IVXLCDM]+[).]
• ^[A-Z][).]
- Canonical section cues (general only):
Common heading indicators include words such as:
"Overview", "Introduction", "Background", "Purpose", "Scope", "Definition",
"Method", "Procedure", "Result", "Discussion", "Summary", "Conclusion",
"Appendix", "Reference", "Annex", "Acknowledgment", "Disclaimer".
These are soft cues, not strict requirements.
- Length restriction:
• Chinese heading: ≤25 characters
• English heading: ≤80 characters
- Exclude long narrative sentences, continuous prose, or bullet-style lists → output as "-1".

OUTPUT FORMAT
- Return ONLY a valid JSON array of {"title","content"} objects.
- No reasoning or commentary.

EXAMPLES

Example 1 — No heading
Input:
{0: "Copyright page · Publication info (ISBN 123-456). All rights reserved."}
Output:
[
{"title":"-1","content":"0"}
]

Example 2 — One heading
Input:
{1: "Chapter 1: General Provisions This chapter defines the overall rules…"}
Output:
[
{"title":"Chapter 1: General Provisions","content":"1"}
]

Example 3 — Narrative + heading
Input:
{2: "This paragraph introduces the background and goals. Section 2: Definitions Key terms are explained…"}
Output:
[
{"title":"-1","content":"2"},
{"title":"Section 2: Definitions","content":"2"}
]

Example 4 — Multiple headings in one chunk
Input:
{3: "Declarations and Commitments (I) Party B commits… (II) Party C commits… Appendix A Data Specification"}
Output:
[
{"title":"Declarations and Commitments (I)","content":"3"},
{"title":"(II)","content":"3"},
{"title":"Appendix A","content":"3"}
]

Example 5 — Numbering styles
Input:
{4: "1. Scope: Defines boundaries. 2) Definitions: Terms used. III) Methods Overview."}
Output:
[
{"title":"1. Scope","content":"4"},
{"title":"2) Definitions","content":"4"},
{"title":"III) Methods","content":"4"}
]

Example 6 — Long list (NOT headings)
Input:
{5: "Item list: apples, bananas, strawberries, blueberries, mangos, peaches"}
Output:
[
{"title":"-1","content":"5"}
]

Example 7 — Mixed Chinese/English
Input:
{6: "(出版信息略)This standard follows industry practices. Chapter 1: Overview 摘要… 第2节:术语与缩略语"}
Output:
[
{"title":"-1","content":"6"},
{"title":"Chapter 1: Overview","content":"6"},
{"title":"第2节:术语与缩略语","content":"6"}
]
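
For reference, a sketch of the I/O contract this prompt enforces, with shapes inferred from the examples above (the values are illustrative only):

```
# One input batch, as produced by split_chunks() in rag/prompts/generator.py.
batch = [
    {"id": 0, "text": "Copyright page · Publication info (ISBN 123-456)."},
    {"id": 1, "text": "Chapter 1: General Provisions This chapter defines ..."},
]
# Expected model output after gen_json() has repaired and parsed the reply:
expected = [
    {"title": "-1", "content": "0"},
    {"title": "Chapter 1: General Provisions", "content": "1"},
]
```
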
8  rag/prompts/toc_from_text_user.md  Normal file
@@ -0,0 +1,8 @@
OUTPUT FORMAT
- Return ONLY the JSON array.
- Use double quotes.
- No extra commentary.
- Keep language of "title" the same as the input.

INPUT
{{text}}
@@ -32,7 +32,7 @@ from api.utils.log_utils import init_root_logger, get_project_base_directory
from graphrag.general.index import run_graphrag_for_kb
from graphrag.utils import get_llm_cache, set_llm_cache, get_tags_from_cache, set_tags_to_cache
from rag.flow.pipeline import Pipeline
from rag.prompts import keyword_extraction, question_proposal, content_tagging
from rag.prompts.generator import keyword_extraction, question_proposal, content_tagging
import logging
import os
from datetime import datetime
@@ -95,6 +95,12 @@ def total_token_count_from_response(resp):
    except Exception:
        pass

    if hasattr(resp, "usage_metadata") and hasattr(resp.usage_metadata, "total_tokens"):
        try:
            return resp.usage_metadata.total_tokens
        except Exception:
            pass

    if 'usage' in resp and 'total_tokens' in resp['usage']:
        try:
            return resp["usage"]["total_tokens"]
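
The helper probes several response shapes in turn, as the hunk above shows. A sketch of the two fallbacks visible here, using hypothetical stand-ins for SDK response objects:

```
class FakeUsageMetadata:            # hypothetical: mimics a Gemini-style response
    total_tokens = 42

class FakeResponse:
    usage_metadata = FakeUsageMetadata()

# Attribute-style usage metadata is tried first, then the plain-dict shape.
print(total_token_count_from_response(FakeResponse()))                  # 42
print(total_token_count_from_response({"usage": {"total_tokens": 7}}))  # 7
```
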
@@ -28,6 +28,7 @@ from rag import settings
from rag.settings import TAG_FLD, PAGERANK_FLD
from rag.utils import singleton, get_float
from api.utils.file_utils import get_project_base_directory
from api.utils.common import convert_bytes
from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr, MatchTextExpr, MatchDenseExpr, \
    FusionExpr
from rag.nlp import is_english, rag_tokenizer
@@ -579,3 +580,52 @@ class ESConnection(DocStoreConnection):
                break
        logger.error(f"ESConnection.sql timeout for {ATTEMPT_TIME} times!")
        return None

    def get_cluster_stats(self):
        """
        curl -XGET "http://{es_host}/_cluster/stats" -H "kbn-xsrf: reporting" to view raw stats.
        """
        raw_stats = self.es.cluster.stats()
        logger.debug(f"ESConnection.get_cluster_stats: {raw_stats}")
        try:
            res = {
                'cluster_name': raw_stats['cluster_name'],
                'status': raw_stats['status']
            }
            indices_status = raw_stats['indices']
            res.update({
                'indices': indices_status['count'],
                'indices_shards': indices_status['shards']['total']
            })
            doc_info = indices_status['docs']
            res.update({
                'docs': doc_info['count'],
                'docs_deleted': doc_info['deleted']
            })
            store_info = indices_status['store']
            res.update({
                'store_size': convert_bytes(store_info['size_in_bytes']),
                'total_dataset_size': convert_bytes(store_info['total_data_set_size_in_bytes'])
            })
            mappings_info = indices_status['mappings']
            res.update({
                'mappings_fields': mappings_info['total_field_count'],
                'mappings_deduplicated_fields': mappings_info['total_deduplicated_field_count'],
                'mappings_deduplicated_size': convert_bytes(mappings_info['total_deduplicated_mapping_size_in_bytes'])
            })
            node_info = raw_stats['nodes']
            res.update({
                'nodes': node_info['count']['total'],
                'nodes_version': node_info['versions'],
                'os_mem': convert_bytes(node_info['os']['mem']['total_in_bytes']),
                'os_mem_used': convert_bytes(node_info['os']['mem']['used_in_bytes']),
                'os_mem_used_percent': node_info['os']['mem']['used_percent'],
                'jvm_versions': node_info['jvm']['versions'][0]['vm_version'],
                'jvm_heap_used': convert_bytes(node_info['jvm']['mem']['heap_used_in_bytes']),
                'jvm_heap_max': convert_bytes(node_info['jvm']['mem']['heap_max_in_bytes'])
            })
            return res

        except Exception as e:
            logger.exception(f"ESConnection.get_cluster_stats: {e}")
            return None
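
A sketch of consuming the new stats helper (assumes an initialized `ESConnection`; the key names follow the dict assembled above):

```
conn = ESConnection()               # assumption: ES is reachable per rag.settings
stats = conn.get_cluster_stats()    # None on failure, per the except branch above
if stats:
    print(stats["cluster_name"], stats["status"])
    print("indices:", stats["indices"], "docs:", stats["docs"])
    print("store:", stats["store_size"], "heap:", stats["jvm_heap_used"])
```
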
@@ -30,6 +30,7 @@ from rag.settings import PAGERANK_FLD, TAG_FLD
from rag.utils import singleton
import pandas as pd
from api.utils.file_utils import get_project_base_directory
from rag.nlp import is_english

from rag.utils.doc_store_conn import (
    DocStoreConnection,
@@ -40,13 +41,15 @@ from rag.utils.doc_store_conn import (
    OrderByExpr,
)

logger = logging.getLogger('ragflow.infinity_conn')
logger = logging.getLogger("ragflow.infinity_conn")


def field_keyword(field_name: str):
    # The "docnm_kwd" field is always a string, not list.
    if field_name == "source_id" or (field_name.endswith("_kwd") and field_name != "docnm_kwd" and field_name != "knowledge_graph_kwd"):
        return True
    return False
    # The "docnm_kwd" field is always a string, not list.
    if field_name == "source_id" or (field_name.endswith("_kwd") and field_name != "docnm_kwd" and field_name != "knowledge_graph_kwd"):
        return True
    return False


def equivalent_condition_to_str(condition: dict, table_instance=None) -> str | None:
    assert "_id" not in condition
@@ -74,7 +77,7 @@ def equivalent_condition_to_str(condition: dict, table_instance=None) -> str | None:
            inCond = list()
            for item in v:
                if isinstance(item, str):
                    item = item.replace("'","''")
                    item = item.replace("'", "''")
                    inCond.append(f"filter_fulltext('{k}', '{item}')")
            if inCond:
                strInCond = " or ".join(inCond)
@@ -86,7 +89,7 @@ def equivalent_condition_to_str(condition: dict, table_instance=None) -> str | None:
            inCond = list()
            for item in v:
                if isinstance(item, str):
                    item = item.replace("'","''")
                    item = item.replace("'", "''")
                    inCond.append(f"'{item}'")
                else:
                    inCond.append(str(item))
@@ -112,13 +115,13 @@ def concat_dataframes(df_list: list[pd.DataFrame], selectFields: list[str]) -> pd.DataFrame:
    df_list2 = [df for df in df_list if not df.empty]
    if df_list2:
        return pd.concat(df_list2, axis=0).reset_index(drop=True)

    schema = []
    for field_name in selectFields:
        if field_name == 'score()':  # Workaround: fix schema is changed to score()
            schema.append('SCORE')
        elif field_name == 'similarity()':  # Workaround: fix schema is changed to similarity()
            schema.append('SIMILARITY')
        if field_name == "score()":  # Workaround: fix schema is changed to score()
            schema.append("SCORE")
        elif field_name == "similarity()":  # Workaround: fix schema is changed to similarity()
            schema.append("SIMILARITY")
        else:
            schema.append(field_name)
    return pd.DataFrame(columns=schema)
@@ -158,9 +161,7 @@ class InfinityConnection(DocStoreConnection):

    def _migrate_db(self, inf_conn):
        inf_db = inf_conn.create_database(self.dbName, ConflictType.Ignore)
        fp_mapping = os.path.join(
            get_project_base_directory(), "conf", "infinity_mapping.json"
        )
        fp_mapping = os.path.join(get_project_base_directory(), "conf", "infinity_mapping.json")
        if not os.path.exists(fp_mapping):
            raise Exception(f"Mapping file not found at {fp_mapping}")
        schema = json.load(open(fp_mapping))
@@ -178,16 +179,12 @@ class InfinityConnection(DocStoreConnection):
                continue
            res = inf_table.add_columns({field_name: field_info})
            assert res.error_code == infinity.ErrorCode.OK
            logger.info(
                f"INFINITY added following column to table {table_name}: {field_name} {field_info}"
            )
            logger.info(f"INFINITY added following column to table {table_name}: {field_name} {field_info}")
            if field_info["type"] != "varchar" or "analyzer" not in field_info:
                continue
            inf_table.create_index(
                f"text_idx_{field_name}",
                IndexInfo(
                    field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}
                ),
                IndexInfo(field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}),
                ConflictType.Ignore,
            )

@@ -221,9 +218,7 @@ class InfinityConnection(DocStoreConnection):
        inf_conn = self.connPool.get_conn()
        inf_db = inf_conn.create_database(self.dbName, ConflictType.Ignore)

        fp_mapping = os.path.join(
            get_project_base_directory(), "conf", "infinity_mapping.json"
        )
        fp_mapping = os.path.join(get_project_base_directory(), "conf", "infinity_mapping.json")
        if not os.path.exists(fp_mapping):
            raise Exception(f"Mapping file not found at {fp_mapping}")
        schema = json.load(open(fp_mapping))
@@ -253,15 +248,11 @@ class InfinityConnection(DocStoreConnection):
                continue
            inf_table.create_index(
                f"text_idx_{field_name}",
                IndexInfo(
                    field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}
                ),
                IndexInfo(field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}),
                ConflictType.Ignore,
            )
        self.connPool.release_conn(inf_conn)
        logger.info(
            f"INFINITY created table {table_name}, vector size {vectorSize}"
        )
        logger.info(f"INFINITY created table {table_name}, vector size {vectorSize}")

    def deleteIdx(self, indexName: str, knowledgebaseId: str):
        table_name = f"{indexName}_{knowledgebaseId}"
@@ -288,20 +279,21 @@ class InfinityConnection(DocStoreConnection):
        """

    def search(
        self, selectFields: list[str],
        highlightFields: list[str],
        condition: dict,
        matchExprs: list[MatchExpr],
        orderBy: OrderByExpr,
        offset: int,
        limit: int,
        indexNames: str | list[str],
        knowledgebaseIds: list[str],
        aggFields: list[str] = [],
        rank_feature: dict | None = None
        self,
        selectFields: list[str],
        highlightFields: list[str],
        condition: dict,
        matchExprs: list[MatchExpr],
        orderBy: OrderByExpr,
        offset: int,
        limit: int,
        indexNames: str | list[str],
        knowledgebaseIds: list[str],
        aggFields: list[str] = [],
        rank_feature: dict | None = None,
    ) -> tuple[pd.DataFrame, int]:
        """
        TODO: Infinity doesn't provide highlight
        BUG: Infinity returns empty for a highlight field if the query string doesn't use that field.
        """
        if isinstance(indexNames, str):
            indexNames = indexNames.split(",")
@@ -438,9 +430,7 @@ class InfinityConnection(DocStoreConnection):
                    matchExpr.extra_options.copy(),
                )
            elif isinstance(matchExpr, FusionExpr):
                builder = builder.fusion(
                    matchExpr.method, matchExpr.topn, matchExpr.fusion_params
                )
                builder = builder.fusion(matchExpr.method, matchExpr.topn, matchExpr.fusion_params)
        else:
            if filter_cond and len(filter_cond) > 0:
                builder.filter(filter_cond)
@@ -455,15 +445,13 @@ class InfinityConnection(DocStoreConnection):
        self.connPool.release_conn(inf_conn)
        res = concat_dataframes(df_list, output)
        if matchExprs:
            res['Sum'] = res[score_column] + res[PAGERANK_FLD]
            res = res.sort_values(by='Sum', ascending=False).reset_index(drop=True).drop(columns=['Sum'])
            res["Sum"] = res[score_column] + res[PAGERANK_FLD]
            res = res.sort_values(by="Sum", ascending=False).reset_index(drop=True).drop(columns=["Sum"])
        res = res.head(limit)
        logger.debug(f"INFINITY search final result: {str(res)}")
        return res, total_hits_count

    def get(
        self, chunkId: str, indexName: str, knowledgebaseIds: list[str]
    ) -> dict | None:
    def get(self, chunkId: str, indexName: str, knowledgebaseIds: list[str]) -> dict | None:
        inf_conn = self.connPool.get_conn()
        db_instance = inf_conn.get_database(self.dbName)
        df_list = list()
@@ -476,8 +464,7 @@ class InfinityConnection(DocStoreConnection):
            try:
                table_instance = db_instance.get_table(table_name)
            except Exception:
                logger.warning(
                    f"Table not found: {table_name}, this knowledge base isn't created in Infinity. Maybe it is created in other document engine.")
                logger.warning(f"Table not found: {table_name}, this knowledge base isn't created in Infinity. Maybe it is created in other document engine.")
                continue
            kb_res, _ = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_df()
            logger.debug(f"INFINITY get table: {str(table_list)}, result: {str(kb_res)}")
@@ -487,9 +474,7 @@ class InfinityConnection(DocStoreConnection):
        res_fields = self.getFields(res, res.columns.tolist())
        return res_fields.get(chunkId, None)

    def insert(
        self, documents: list[dict], indexName: str, knowledgebaseId: str = None
    ) -> list[str]:
    def insert(self, documents: list[dict], indexName: str, knowledgebaseId: str = None) -> list[str]:
        inf_conn = self.connPool.get_conn()
        db_instance = inf_conn.get_database(self.dbName)
        table_name = f"{indexName}_{knowledgebaseId}"
@@ -532,7 +517,7 @@ class InfinityConnection(DocStoreConnection):
                    d[k] = v
                elif re.search(r"_feas$", k):
                    d[k] = json.dumps(v)
                elif k == 'kb_id':
                elif k == "kb_id":
                    if isinstance(d[k], list):
                        d[k] = d[k][0]  # since d[k] is a list, but we need a str
                elif k == "position_int":
@@ -561,18 +546,16 @@ class InfinityConnection(DocStoreConnection):
        logger.debug(f"INFINITY inserted into {table_name} {str_ids}.")
        return []

    def update(
        self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str
    ) -> bool:
    def update(self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str) -> bool:
        # if 'position_int' in newValue:
        #     logger.info(f"update position_int: {newValue['position_int']}")
        inf_conn = self.connPool.get_conn()
        db_instance = inf_conn.get_database(self.dbName)
        table_name = f"{indexName}_{knowledgebaseId}"
        table_instance = db_instance.get_table(table_name)
        #if "exists" in condition:
        # if "exists" in condition:
        #     del condition["exists"]

        clmns = {}
        if table_instance:
            for n, ty, de, _ in table_instance.show_columns().rows():
@@ -587,7 +570,7 @@ class InfinityConnection(DocStoreConnection):
                newValue[k] = v
            elif re.search(r"_feas$", k):
                newValue[k] = json.dumps(v)
            elif k == 'kb_id':
            elif k == "kb_id":
                if isinstance(newValue[k], list):
                    newValue[k] = newValue[k][0]  # since d[k] is a list, but we need a str
            elif k == "position_int":
@@ -611,11 +594,11 @@ class InfinityConnection(DocStoreConnection):
                del newValue[k]
            else:
                newValue[k] = v

        remove_opt = {}  # "[k,new_value]": [id_to_update, ...]

        remove_opt = {}  # "[k,new_value]": [id_to_update, ...]
        if removeValue:
            col_to_remove = list(removeValue.keys())
            row_to_opt = table_instance.output(col_to_remove + ['id']).filter(filter).to_df()
            row_to_opt = table_instance.output(col_to_remove + ["id"]).filter(filter).to_df()
            logger.debug(f"INFINITY search table {str(table_name)}, filter {filter}, result: {str(row_to_opt[0])}")
            row_to_opt = self.getFields(row_to_opt, col_to_remove)
            for id, old_v in row_to_opt.items():
@@ -632,8 +615,8 @@ class InfinityConnection(DocStoreConnection):
        logger.debug(f"INFINITY update table {table_name}, filter {filter}, newValue {newValue}.")
        for update_kv, ids in remove_opt.items():
            k, v = json.loads(update_kv)
            table_instance.update(filter + " AND id in ({0})".format(",".join([f"'{id}'" for id in ids])), {k:"###".join(v)})

            table_instance.update(filter + " AND id in ({0})".format(",".join([f"'{id}'" for id in ids])), {k: "###".join(v)})

        table_instance.update(filter, newValue)
        self.connPool.release_conn(inf_conn)
        return True
@@ -645,9 +628,7 @@ class InfinityConnection(DocStoreConnection):
        try:
            table_instance = db_instance.get_table(table_name)
        except Exception:
            logger.warning(
                f"Skipped deleting from table {table_name} since the table doesn't exist."
            )
            logger.warning(f"Skipped deleting from table {table_name} since the table doesn't exist.")
            return 0
        filter = equivalent_condition_to_str(condition, table_instance)
        logger.debug(f"INFINITY delete table {table_name}, filter {filter}.")
@@ -675,37 +656,39 @@ class InfinityConnection(DocStoreConnection):
        if not fields:
            return {}
        fieldsAll = fields.copy()
        fieldsAll.append('id')
        fieldsAll.append("id")
        column_map = {col.lower(): col for col in res.columns}
        matched_columns = {column_map[col.lower()]:col for col in set(fieldsAll) if col.lower() in column_map}
        matched_columns = {column_map[col.lower()]: col for col in set(fieldsAll) if col.lower() in column_map}
        none_columns = [col for col in set(fieldsAll) if col.lower() not in column_map]

        res2 = res[matched_columns.keys()]
        res2 = res2.rename(columns=matched_columns)
        res2.drop_duplicates(subset=['id'], inplace=True)
        res2.drop_duplicates(subset=["id"], inplace=True)

        for column in res2.columns:
            k = column.lower()
            if field_keyword(k):
                res2[column] = res2[column].apply(lambda v:[kwd for kwd in v.split("###") if kwd])
                res2[column] = res2[column].apply(lambda v: [kwd for kwd in v.split("###") if kwd])
            elif re.search(r"_feas$", k):
                res2[column] = res2[column].apply(lambda v: json.loads(v) if v else {})
            elif k == "position_int":

                def to_position_int(v):
                    if v:
                        arr = [int(hex_val, 16) for hex_val in v.split('_')]
                        v = [arr[i:i + 5] for i in range(0, len(arr), 5)]
                        arr = [int(hex_val, 16) for hex_val in v.split("_")]
                        v = [arr[i : i + 5] for i in range(0, len(arr), 5)]
                    else:
                        v = []
                    return v

                res2[column] = res2[column].apply(to_position_int)
            elif k in ["page_num_int", "top_int"]:
                res2[column] = res2[column].apply(lambda v:[int(hex_val, 16) for hex_val in v.split('_')] if v else [])
                res2[column] = res2[column].apply(lambda v: [int(hex_val, 16) for hex_val in v.split("_")] if v else [])
            else:
                pass
        for column in none_columns:
            res2[column] = None

        return res2.set_index("id").to_dict(orient="index")

    def getHighlight(self, res: tuple[pd.DataFrame, int] | pd.DataFrame, keywords: list[str], fieldnm: str):
@@ -719,23 +702,35 @@ class InfinityConnection(DocStoreConnection):
        for i in range(num_rows):
            id = column_id[i]
            txt = res[fieldnm][i]
            if re.search(r"<em>[^<>]+</em>", txt, flags=re.IGNORECASE | re.MULTILINE):
                ans[id] = txt
                continue
            txt = re.sub(r"[\r\n]", " ", txt, flags=re.IGNORECASE | re.MULTILINE)
            txts = []
            for t in re.split(r"[.?!;\n]", txt):
                for w in keywords:
                    t = re.sub(
                        r"(^|[ .?/'\"\(\)!,:;-])(%s)([ .?/'\"\(\)!,:;-])"
                        % re.escape(w),
                        r"\1<em>\2</em>\3",
                        t,
                        flags=re.IGNORECASE | re.MULTILINE,
                    )
                if not re.search(
                    r"<em>[^<>]+</em>", t, flags=re.IGNORECASE | re.MULTILINE
                ):
                if is_english([t]):
                    for w in keywords:
                        t = re.sub(
                            r"(^|[ .?/'\"\(\)!,:;-])(%s)([ .?/'\"\(\)!,:;-])" % re.escape(w),
                            r"\1<em>\2</em>\3",
                            t,
                            flags=re.IGNORECASE | re.MULTILINE,
                        )
                else:
                    for w in sorted(keywords, key=len, reverse=True):
                        t = re.sub(
                            re.escape(w),
                            f"<em>{w}</em>",
                            t,
                            flags=re.IGNORECASE | re.MULTILINE,
                        )
                if not re.search(r"<em>[^<>]+</em>", t, flags=re.IGNORECASE | re.MULTILINE):
                    continue
                txts.append(t)
            ans[id] = "...".join(txts)
            if txts:
                ans[id] = "...".join(txts)
            else:
                ans[id] = txt
        return ans

    def getAggregation(self, res: tuple[pd.DataFrame, int] | pd.DataFrame, fieldnm: str):
@@ -91,6 +91,20 @@ class RedisDB:
        if self.REDIS.get(a) == b:
            return True

    def info(self):
        info = self.REDIS.info()
        return {
            'redis_version': info["redis_version"],
            'server_mode': info["server_mode"],
            'used_memory': info["used_memory_human"],
            'total_system_memory': info["total_system_memory_human"],
            'mem_fragmentation_ratio': info["mem_fragmentation_ratio"],
            'connected_clients': info["connected_clients"],
            'blocked_clients': info["blocked_clients"],
            'instantaneous_ops_per_sec': info["instantaneous_ops_per_sec"],
            'total_commands_processed': info["total_commands_processed"]
        }

    def is_alive(self):
        return self.REDIS is not None
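
A sketch of reading the new `info()` summary (assumes a module-level `RedisDB` singleton, here called `REDIS_CONN`; the keys are those returned by the method above):

```
from rag.utils.redis_conn import REDIS_CONN  # assumption: module-level RedisDB singleton

summary = REDIS_CONN.info()
print(summary["redis_version"], summary["server_mode"])
print("memory:", summary["used_memory"], "/", summary["total_system_memory"])
print("clients:", summary["connected_clients"], "ops/s:", summary["instantaneous_ops_per_sec"])
```
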
6  uv.lock  generated
@@ -4854,9 +4854,9 @@ wheels = [

[[package]]
name = "pyicu"
version = "2.15.2"
version = "2.15.3"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/9f/57/9db810ab75133a1c87ac2e327fb59199d78d233f575fbb63bfd3492b769c/pyicu-2.15.2.tar.gz", hash = "sha256:561e77eedff17cec6839f26211f7a5ce3c071b776e8a0ec9d1207f46cbce598f" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/88/b0/c8b61bac55424e2ff80e20d7251c3f002baff3c07c34cee3849e3505d8f5/pyicu-2.15.3.tar.gz", hash = "sha256:f32e78e1cb64d0aeb14f027e037a8944861d3114548818a6adf0081ef51aefc3" }

[[package]]
name = "pyjwt"
@@ -5514,7 +5514,7 @@ requires-dist = [
    { name = "psycopg2-binary", specifier = "==2.9.9" },
    { name = "pyclipper", specifier = "==1.3.0.post5" },
    { name = "pycryptodomex", specifier = "==3.20.0" },
    { name = "pyicu", specifier = ">=2.13.1,<3.0.0" },
    { name = "pyicu", specifier = ">=2.15.3,<3.0.0" },
    { name = "pymysql", specifier = ">=1.1.1,<2.0.0" },
    { name = "pyodbc", specifier = ">=5.2.0,<6.0.0" },
    { name = "pypdf", specifier = "==6.0.0" },

@@ -3,6 +3,7 @@ import path from 'path';

const config: StorybookConfig = {
  stories: ['../src/**/*.mdx', '../src/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
  staticDirs: ['../public'],
  addons: [
    '@storybook/addon-webpack5-compiler-swc',
    '@storybook/addon-docs',
@@ -1,6 +1,7 @@
import '@/locales/config';
import type { Preview } from '@storybook/react-webpack5';
import { createElement } from 'react';
import '../public/iconfont.js';
import { TooltipProvider } from '../src/components/ui/tooltip';

import '../tailwind.css';
@@ -59,6 +59,10 @@
`<symbol id="icon-GitHub" viewBox="0 0 1024 1024">
<path d="M512 42.666667C252.714667 42.666667 42.666667 252.714667 42.666667 512c0 207.658667 134.357333 383.104 320.896 445.269333 23.466667 4.096 32.256-9.941333 32.256-22.272 0-11.178667-0.554667-48.128-0.554667-87.424-117.930667 21.717333-148.437333-28.757333-157.824-55.125333-5.290667-13.525333-28.16-55.168-48.085333-66.304-16.426667-8.832-39.936-30.506667-0.597334-31.104 36.949333-0.597333 63.36 34.005333 72.149334 48.128 42.24 70.954667 109.696 51.029333 136.704 38.698667 4.096-30.506667 16.426667-51.029333 29.909333-62.762667-104.448-11.733333-213.546667-52.224-213.546667-231.765333 0-51.029333 18.176-93.269333 48.128-126.122667-4.736-11.733333-21.162667-59.818667 4.693334-124.373333 0 0 39.296-12.288 129.024 48.128a434.901333 434.901333 0 0 1 117.333333-15.829334c39.936 0 79.829333 5.248 117.333333 15.829334 89.770667-61.013333 129.066667-48.128 129.066667-48.128 25.813333 64.554667 9.429333 112.64 4.736 124.373333 29.909333 32.853333 48.085333 74.538667 48.085333 126.122667 0 180.138667-109.696 220.032-214.144 231.765333 17.024 14.677333 31.701333 42.837333 31.701334 86.826667 0 62.762667-0.597333 113.237333-0.597334 129.066666 0 12.330667 8.789333 26.965333 32.256 22.272C846.976 895.104 981.333333 719.104 981.333333 512c0-259.285333-210.005333-469.333333-469.333333-469.333333z"></path>
</symbol>` +
`<symbol id="icon-more" viewBox="0 0 1024 1024">
<path d="M0 0h1024v1024H0z" opacity=".01"></path>
<path d="M867.072 141.184H156.032a32 32 0 0 0 0 64h711.04a32 32 0 0 0 0-64z m0.832 226.368H403.2a32 32 0 0 0 0 64h464.704a32 32 0 0 0 0-64zM403.2 573.888h464.704a32 32 0 0 1 0 64H403.2a32 32 0 0 1 0-64z m464.704 226.368H156.864a32 32 0 0 0 0 64h711.04a32 32 0 0 0 0-64zM137.472 367.552v270.336l174.528-122.24-174.528-148.096z" ></path>
</symbol>` +
'</svg>'),
((h) => {
var a = (l = (l = document.getElementsByTagName('script'))[
5 web/src/assets/svg/llm/deerapi.svg Normal file
@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Generated by Pixelmator Pro 3.5.5 -->
<svg width="24" height="24" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path id="path1" fill="#447eec" stroke="none" d="M 4.043384 3 C 4.218303 3.131369 4.218303 3.131369 4.245634 3.344843 C 4.235589 3.689968 4.120566 4.019037 4.020153 4.354485 C 3.648397 5.639093 3.772068 7.16324 4.721537 8.292472 C 4.834668 8.405636 4.834668 8.405636 5.092898 8.451808 C 5.099438 8.407101 5.105977 8.362394 5.112713 8.316334 C 5.266371 7.465536 5.643018 6.500875 6.492251 5.890116 C 6.722516 5.834181 6.722516 5.834181 6.929549 5.82443 C 7.088069 5.89422 7.088069 5.89422 7.191927 6.021484 C 7.238566 6.266672 7.182186 6.450504 7.071671 6.678328 C 6.587966 7.729729 6.728653 8.892773 7.366847 9.896866 C 7.683047 10.36898 8.067978 10.728178 8.59128 11.079185 C 8.649906 11.121605 8.70853 11.164025 8.768932 11.207732 C 9.66402 11.807772 10.937032 12.285625 12.156346 12.257288 C 13.377048 12.176717 14.726612 11.651421 15.533382 10.972448 C 15.588738 10.922069 15.644094 10.871689 15.701127 10.819783 C 15.850423 10.685079 15.850423 10.685079 16.08547 10.524973 C 16.383444 10.276995 16.547771 10.004078 16.725018 9.699812 C 16.759743 9.646976 16.794468 9.594141 16.830244 9.539706 C 17.098297 9.063352 17.108864 8.602456 17.113121 8.090544 C 17.114 8.021955 17.114882 7.953365 17.115789 7.882698 C 17.109453 7.451891 17.02879 7.068714 16.870663 6.654499 C 16.778524 6.380194 16.726711 6.084627 16.899937 5.82443 C 17.474003 5.873095 17.771864 6.159414 18.124371 6.481276 C 18.275856 6.67165 18.378973 6.862965 18.474209 7.072435 C 18.510059 7.143305 18.510059 7.143305 18.546637 7.215607 C 18.731159 7.597385 18.826818 7.985786 18.911507 8.386124 C 19.354362 8.235567 19.496134 8.016235 19.671312 7.692331 C 19.697708 7.645218 19.724104 7.598103 19.751299 7.549561 C 19.94206 7.198732 20.071728 6.857859 20.135941 6.481276 C 20.153978 6.410828 20.172018 6.340384 20.190603 6.267801 C 20.311863 5.357117 20.176548 4.488987 19.832565 3.61451 C 19.773235 3.416943 19.797062 3.258942 19.873562 3.065685 C 20.343493 3.074291 20.545094 3.203472 20.855154 3.445213 C 21.137327 3.710249 21.304079 4.011683 21.480631 4.321899 C 21.518852 4.385996 21.557068 4.45009 21.596447 4.51613 C 21.93461 5.092474 21.990692 5.56271 21.994455 6.1898 C 21.997162 6.299852 21.997162 6.299852 21.999922 6.412127 C 22.004002 7.072449 21.84753 7.669481 21.491564 8.275281 C 21.453457 8.340647 21.415352 8.406013 21.376089 8.473361 C 21.342043 8.531276 21.307995 8.58919 21.272915 8.648861 C 21.233231 8.726082 21.193544 8.803303 21.152658 8.882862 C 20.590042 9.776692 19.648251 10.593391 18.513157 11.107153 C 17.735565 11.48311 16.590176 12.039842 16.28772 12.721296 C 16.708294 12.597325 17.125742 12.472708 17.534019 12.327189 C 18.42695 12.030186 19.416105 11.804957 20.398319 11.933083 C 20.650791 12.052649 20.650791 12.052649 20.835617 12.195821 C 20.889799 12.745177 20.433676 13.369287 19.976738 13.786102 C 19.068974 14.416512 17.830204 14.815967 16.637558 15.020251 C 16.405041 15.058423 16.405041 15.058423 16.20026 15.15162 C 16.092342 15.326459 16.092342 15.326459 16.047207 15.537517 C 16.0268 15.614313 16.006393 15.691111 15.985371 15.770234 C 15.941998 15.985131 15.916471 16.195107 15.899619 16.41194 C 15.85338 16.919502 15.635668 17.317726 15.243672 17.729734 C 14.687399 18.330616 14.487684 19.071302 14.284009 19.776215 C 14.139319 20.134537 13.878259 20.395561 13.527279 20.656797 C 13.476657 20.695154 13.426036 20.733509 13.373882 20.773027 C 13.044217 20.98155 12.615493 20.960649 12.204453 20.981113 C 12.136414 20.985601 12.068374 20.990088 11.998274 20.994713 C 11.372272 21.025297 10.929299 20.924742 10.406066 20.652691 C 9.956832 
20.315306 9.780399 19.975595 9.646261 19.51553 C 9.45176 18.889639 9.236951 18.265442 8.766199 17.721523 C 8.338652 17.217646 8.153504 16.619122 7.973939 16.041952 C 7.951638 15.970907 7.951638 15.970907 7.928885 15.898428 C 7.901113 15.807971 7.874589 15.717293 7.84954 15.62639 C 7.766208 15.349351 7.674052 15.189289 7.366847 15.020251 C 7.084245 14.934055 7.084245 14.934055 6.765562 14.868356 C 5.556292 14.586564 4.309259 14.177955 3.62556 13.33837 C 3.34536 12.926235 3.155031 12.517895 3.343707 12.064452 C 3.860229 11.811237 4.491402 11.904243 5.064542 11.990557 C 5.992092 12.14533 6.804541 12.365472 7.629225 12.721296 C 7.549964 12.200433 7.008167 11.934837 6.498059 11.6257 C 6.238734 11.478328 5.973781 11.345892 5.699649 11.21466 C 5.246062 10.99374 4.881122 10.743555 4.511429 10.448256 C 4.305137 10.283882 4.305137 10.283882 4.043384 10.135485 C 3.752214 9.943573 3.574499 9.753546 3.376505 9.506865 C 3.311633 9.428246 3.246722 9.349648 3.181771 9.271068 C 3.150556 9.232649 3.11934 9.194228 3.087179 9.154644 C 2.999125 9.049258 2.904421 8.947053 2.809384 8.845145 C 1.856228 7.713696 1.874341 6.082378 2.206733 4.810427 C 2.455393 4.168301 3.071521 3.532841 3.693546 3.065685 C 3.872223 3.013599 3.872223 3.013599 4.043384 3 Z"/>
</svg>
After Width: | Height: | Size: 4.9 KiB |
@@ -3,9 +3,16 @@ import {
  CollapsibleContent,
  CollapsibleTrigger,
} from '@/components/ui/collapsible';
import { cn } from '@/lib/utils';
import { CollapsibleProps } from '@radix-ui/react-collapsible';
import { ListCollapse } from 'lucide-react';
import { PropsWithChildren, ReactNode } from 'react';
import {
  PropsWithChildren,
  ReactNode,
  useCallback,
  useEffect,
  useState,
} from 'react';
import { IconFontFill } from './icon-font';

type CollapseProps = Omit<CollapsibleProps, 'title'> & {
  title?: ReactNode;
@@ -16,22 +23,42 @@ export function Collapse({
  title,
  children,
  rightContent,
  open,
  open = true,
  defaultOpen = false,
  onOpenChange,
  disabled,
}: CollapseProps) {
  const [currentOpen, setCurrentOpen] = useState(open);

  useEffect(() => {
    setCurrentOpen(open);
  }, [open]);

  const handleOpenChange = useCallback(
    (open: boolean) => {
      setCurrentOpen(open);
      onOpenChange?.(open);
    },
    [onOpenChange],
  );

  return (
    <Collapsible
      defaultOpen={defaultOpen}
      open={open}
      onOpenChange={onOpenChange}
      open={currentOpen}
      onOpenChange={handleOpenChange}
      disabled={disabled}
    >
      <CollapsibleTrigger className="w-full">
        <section className="flex justify-between items-center pb-2">
          <div className="flex items-center gap-1">
            <ListCollapse className="size-4" /> {title}
            <IconFontFill
              name={`more`}
              className={cn('size-4', {
                'rotate-90': !currentOpen,
              })}
            ></IconFontFill>
            {title}
          </div>
          <div>{rightContent}</div>
        </section>
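After this change Collapse keeps an internal currentOpen state that is re-synced whenever the open prop changes, so it works both uncontrolled (it starts open by default) and controlled. A hypothetical usage sketch (the component names and import path are assumptions for illustration):

// Controlled usage: the parent owns the state, and the chevron icon still
// rotates because Collapse mirrors `open` into its internal state.
import { useState } from 'react';
import { Collapse } from '@/components/collapse';

export function AdvancedSettings() {
  const [open, setOpen] = useState(true);

  return (
    <Collapse title="Advanced options" open={open} onOpenChange={setOpen}>
      {/* form fields… */}
    </Collapse>
  );
}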
@@ -233,36 +233,32 @@ function EmbedDialog({
          />
        </form>
      </Form>
      <div>
        <div className="max-h-[350px] overflow-auto">
          <span>{t('embedCode', { keyPrefix: 'search' })}</span>
          <HightLightMarkdown>{text}</HightLightMarkdown>
      <div className="max-h-[350px] overflow-auto">
        <span>{t('embedCode', { keyPrefix: 'search' })}</span>
        <div className="max-h-full overflow-y-auto">
          <HightLightMarkdown>{text}</HightLightMarkdown>
        </div>
      </div>
      <div className=" font-medium mt-4 mb-1">
        {t(isAgent ? 'flow' : 'chat', { keyPrefix: 'header' })}
        <span className="ml-1 inline-block">ID</span>
      </div>
      <div className="bg-bg-card rounded-lg flex justify-between p-2">
        <span>{token} </span>
        <CopyToClipboard text={token}></CopyToClipboard>
      </div>
      <a
        className="cursor-pointer text-accent-primary inline-block"
        href={
          isAgent
            ? 'https://ragflow.io/docs/dev/http_api_reference#create-session-with-agent'
            : 'https://ragflow.io/docs/dev/http_api_reference#create-session-with-chat-assistant'
        }
        target="_blank"
        rel="noreferrer"
      >
        {t('howUseId', { keyPrefix: isAgent ? 'flow' : 'chat' })}
      </a>
        <div className="max-h-full overflow-y-auto">
          <HightLightMarkdown>{text}</HightLightMarkdown>
        </div>
      </div>
      <div className=" font-medium mt-4 mb-1">
        {t(isAgent ? 'flow' : 'chat', { keyPrefix: 'header' })}
        <span className="ml-1 inline-block">ID</span>
      </div>
      <div className="bg-bg-card rounded-lg flex justify-between p-2">
        <span>{token} </span>
        <CopyToClipboard text={token}></CopyToClipboard>
      </div>
      <a
        className="cursor-pointer text-accent-primary inline-block"
        href={
          isAgent
            ? 'https://ragflow.io/docs/dev/http_api_reference#create-session-with-agent'
            : 'https://ragflow.io/docs/dev/http_api_reference#create-session-with-chat-assistant'
        }
        target="_blank"
        rel="noreferrer"
      >
        {t('howUseId', { keyPrefix: isAgent ? 'flow' : 'chat' })}
      </a>
    </section>
  </DialogContent>
</Dialog>
@@ -26,7 +26,7 @@ const FileStatusBadge: FC<StatusBadgeProps> = ({ status, name }) => {
    case RunningStatus.UNSTART:
      return `bg-[rgba(250,173,20,0.1)] text-state-warning`;
    default:
      return 'bg-gray-500/10 text-white';
      return 'bg-gray-500/10 text-text-secondary';
  }
};

@@ -45,7 +45,7 @@ const FileStatusBadge: FC<StatusBadgeProps> = ({ status, name }) => {
    case RunningStatus.UNSTART:
      return `bg-[rgba(250,173,20,1)] text-state-warning`;
    default:
      return 'bg-gray-500/10 text-white';
      return `bg-[rgba(117,120,122,1)] text-text-secondary`;
  }
};
@@ -1,7 +1,7 @@
export type FilterType = {
  id: string;
  label: string;
  count: number;
  label: string | JSX.Element;
  count?: number;
};

export type FilterCollection = {
@@ -4,7 +4,7 @@ import { useTranslation } from 'react-i18next';
import { SelectWithSearch } from '../originui/select-with-search';
import { RAGFlowFormItem } from '../ragflow-form';

type LLMFormFieldProps = {
export type LLMFormFieldProps = {
  options?: any[];
  name?: string;
};
@@ -67,19 +67,6 @@ const RaptorFormFields = ({
  const { t } = useTranslate('knowledgeConfiguration');
  const useRaptor = useWatch({ name: UseRaptorField });

  const changeRaptor = useCallback(
    (isUseRaptor: boolean) => {
      if (isUseRaptor) {
        form.setValue(MaxTokenField, 256);
        form.setValue(ThresholdField, 0.1);
        form.setValue(MaxCluster, 64);
        form.setValue(RandomSeedField, 0);
        form.setValue(Prompt, t('promptText'));
      }
    },
    [form],
  );

  const handleGenerate = useCallback(() => {
    form.setValue(RandomSeedField, random(10000));
  }, [form]);
@@ -90,10 +77,6 @@ const RaptorFormFields = ({
        control={form.control}
        name={UseRaptorField}
        render={({ field }) => {
          // if (typeof field.value === 'undefined') {
          //   // default value set
          //   form.setValue('parser_config.raptor.use_raptor', false);
          // }
          return (
            <FormItem
              defaultChecked={false}
@@ -40,7 +40,7 @@ export function SliderInputFormField({
}: SliderInputFormFieldProps) {
  const form = useFormContext();

  const isHorizontal = useMemo(() => layout === FormLayout.Vertical, [layout]);
  const isHorizontal = useMemo(() => layout !== FormLayout.Vertical, [layout]);

  return (
    <FormField
@@ -26,7 +26,7 @@ Command.displayName = CommandPrimitive.displayName;
const CommandDialog = ({ children, ...props }: DialogProps) => {
  return (
    <Dialog {...props}>
      <DialogContent className="overflow-hidden p-0 shadow-lg">
      <DialogContent className="overflow-auto p-0 shadow-lg">
        <Command className="[&_[cmdk-group-heading]]:px-2 [&_[cmdk-group-heading]]:font-medium [&_[cmdk-group-heading]]:text-muted-foreground [&_[cmdk-group]:not([hidden])_~[cmdk-group]]:pt-0 [&_[cmdk-group]]:px-2 [&_[cmdk-input-wrapper]_svg]:h-5 [&_[cmdk-input-wrapper]_svg]:w-5 [&_[cmdk-input]]:h-12 [&_[cmdk-item]]:px-2 [&_[cmdk-item]]:py-3 [&_[cmdk-item]_svg]:h-5 [&_[cmdk-item]_svg]:w-5">
          {children}
        </Command>
@@ -58,9 +58,18 @@ const CommandList = React.forwardRef<
  React.ElementRef<typeof CommandPrimitive.List>,
  React.ComponentPropsWithoutRef<typeof CommandPrimitive.List>
>(({ className, ...props }, ref) => (
  /**
   * Solve the problem of the scroll wheel not working
   * onWheel={(e) => e.stopPropagation()}
     onMouseEnter={(e) => e.currentTarget.focus()}
     tabIndex={-1}
   */
  <CommandPrimitive.List
    ref={ref}
    className={cn('max-h-[300px] overflow-y-auto overflow-x-hidden', className)}
    onWheel={(e) => e.stopPropagation()}
    onMouseEnter={(e) => e.currentTarget.focus()}
    tabIndex={-1}
    {...props}
  />
));
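The CommandList change works around a nested-scroll problem: inside a dialog or popover the wheel event bubbles up to an outer surface and the list itself never scrolls, so the list now stops propagation and grabs focus on hover. A generic sketch of the same pattern (component and class names are illustrative, not from the codebase):

import { ReactNode } from 'react';

// Stop the wheel event from bubbling to an outer pan/zoom surface so this
// container's own overflow handles the scrolling.
export function ScrollableList({ children }: { children: ReactNode }) {
  return (
    <div
      className="max-h-[300px] overflow-y-auto"
      onWheel={(e) => e.stopPropagation()}
      onMouseEnter={(e) => e.currentTarget.focus()}
      tabIndex={-1}
    >
      {children}
    </div>
  );
}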
@@ -20,7 +20,7 @@ const TooltipContent = React.forwardRef<
    ref={ref}
    sideOffset={sideOffset}
    className={cn(
      'z-50 overflow-hidden rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-w-[20vw]',
      'z-50 overflow-auto scrollbar-auto rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-w-[20vw]',
      className,
    )}
    {...props}
@@ -52,3 +52,13 @@ export enum AgentCategory {
  AgentCanvas = 'agent_canvas',
  DataflowCanvas = 'dataflow_canvas',
}

export enum DataflowOperator {
  Begin = 'File',
  Note = 'Note',
  Parser = 'Parser',
  Tokenizer = 'Tokenizer',
  Splitter = 'Splitter',
  HierarchicalMerger = 'HierarchicalMerger',
  Extractor = 'Extractor',
}
@@ -57,6 +57,7 @@ export enum LLMFactory {
  TokenPony = 'TokenPony',
  Meituan = 'Meituan',
  CometAPI = 'CometAPI',
  DeerAPI = 'DeerAPI',
}

// Please lowercase the file name
@@ -119,4 +120,5 @@ export const IconMap = {
  [LLMFactory.TokenPony]: 'token-pony',
  [LLMFactory.Meituan]: 'longcat',
  [LLMFactory.CometAPI]: 'cometapi',
  [LLMFactory.DeerAPI]: 'deerapi',
};
@@ -3,7 +3,6 @@ import { useHandleFilterSubmit } from '@/components/list-filter-bar/use-handle-f
import message from '@/components/ui/message';
import { AgentGlobals } from '@/constants/agent';
import {
  DSL,
  IAgentLogsRequest,
  IAgentLogsResponse,
  IFlow,
@@ -295,7 +294,7 @@ export const useSetAgent = (showMessage: boolean = true) => {
    mutationFn: async (params: {
      id?: string;
      title?: string;
      dsl?: DSL;
      dsl?: Record<string, any>;
      avatar?: string;
      canvas_category?: string;
    }) => {
@@ -32,6 +32,7 @@ export const enum KnowledgeApiAction {
  FetchKnowledgeGraph = 'fetchKnowledgeGraph',
  FetchMetadata = 'fetchMetadata',
  FetchKnowledgeList = 'fetchKnowledgeList',
  RemoveKnowledgeGraph = 'removeKnowledgeGraph',
}

export const useKnowledgeBaseId = (): string => {
@@ -143,7 +144,7 @@ export const useFetchNextKnowledgeListByPage = () => {

  const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
    (e) => {
      // setPagination({ page: 1 }); // TODO: 这里导致重复请求
      // setPagination({ page: 1 }); // TODO: This results in repeated requests
      handleInputChange(e);
    },
    [handleInputChange],
@@ -322,7 +323,7 @@ export const useRemoveKnowledgeGraph = () => {
    if (data.code === 0) {
      message.success(i18n.t(`message.deleted`));
      queryClient.invalidateQueries({
        queryKey: ['fetchKnowledgeGraph'],
        queryKey: [KnowledgeApiAction.FetchKnowledgeGraph],
      });
    }
    return data?.code;
@@ -1170,6 +1170,8 @@ export default {
    cleanHtml: 'HTML bereinigen',
    cleanHtmlTip:
      'Wenn die Antwort im HTML-Format vorliegt und nur der Hauptinhalt gewünscht wird, schalten Sie dies bitte ein.',
    invalidUrl:
      'Muss eine gültige URL oder eine URL mit Variablenplatzhaltern im Format {Variablenname} oder {Komponente@Variable} sein',
    reference: 'Referenz',
    input: 'Eingabe',
    output: 'Ausgabe',
@@ -1397,6 +1397,8 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
    cleanHtml: 'Clean HTML',
    cleanHtmlTip:
      'If the response is HTML formatted and only the primary content wanted, please toggle it on.',
    invalidUrl:
      'Must be a valid URL or URL with variable placeholders in the format {variable_name} or {component@variable}',
    reference: 'Reference',
    input: 'Input',
    output: 'Output',
@@ -1531,7 +1533,7 @@ This delimiter is used to split the input text into several text pieces echo of
    agentDescription:
      'Builds agent components equipped with reasoning, tool usage, and multi-agent collaboration. ',
    maxRecords: 'Max records',
    createAgent: 'Create Agent',
    createAgent: 'Agent flow',
    stringTransform: 'Text Processing',
    userFillUp: 'Await Response',
    userFillUpDescription: `Pauses the workflow and waits for the user's message before continuing.`,
@@ -1696,6 +1698,7 @@ This delimiter is used to split the input text into several text pieces echo of
    parserDescription:
      'Extracts raw text and structure from files for downstream processing.',
    tokenizer: 'Tokenizer',
    tokenizerRequired: 'Please add the Tokenizer node first',
    tokenizerDescription:
      'Transforms text into the required data structure (e.g., vector embeddings for Embedding Search) depending on the chosen search method.',
    splitter: 'Token Splitter',
@@ -1709,15 +1712,25 @@ This delimiter is used to split the input text into several text pieces echo of
      'Use an LLM to extract structured insights from document chunks—such as summaries, classifications, etc.',
    outputFormat: 'Output format',
    lang: 'Language',
    fileFormats: 'File formats',
    fields: 'Fields',
    fileFormats: 'File format',
    fileFormatOptions: {
      pdf: 'PDF',
      spreadsheet: 'Spreadsheet',
      image: 'Image',
      email: 'Email',
      'text&markdown': 'Text & Markup',
      word: 'Word',
      slides: 'PPT',
      audio: 'Audio',
    },
    fields: 'Field',
    addParser: 'Add Parser',
    hierarchy: 'Hierarchy',
    regularExpressions: 'Regular Expressions',
    overlappedPercent: 'Overlapped percent',
    searchMethod: 'Search method',
    begin: 'File',
    parserMethod: 'Parser method',
    parserMethod: 'Parsing method',
    systemPrompt: 'System Prompt',
    systemPromptPlaceholder:
      'Enter system prompt for image analysis, if empty the system default value will be used',
@@ -1785,11 +1798,14 @@ Important structured information may include: names, dates, locations, events, k
    },
    filenameEmbeddingWeight: 'Filename embedding weight',
    tokenizerFieldsOptions: {
      text: 'Text',
      text: 'Processed Text',
      keywords: 'Keywords',
      questions: 'Questions',
      summary: 'Augmented Context',
    },
    imageParseMethodOptions: {
      ocr: 'OCR',
    },
  },
  datasetOverview: {
    downloadTip: 'Files being downloaded from data sources. ',
@@ -866,6 +866,19 @@ export default {
    noteDescription: 'Nota',
    notePlaceholder: 'Por favor ingresa una nota',
    runningHintText: 'está corriendo...🕞',

    invoke: 'Solicitud HTTP',
    invokeDescription:
      'Un componente capaz de llamar a servicios remotos, utilizando las salidas de otros componentes o constantes como entradas.',
    url: 'Url',
    method: 'Método',
    timeout: 'Tiempo de espera',
    headers: 'Encabezados',
    cleanHtml: 'Limpiar HTML',
    cleanHtmlTip:
      'Si la respuesta está formateada en HTML y solo se desea el contenido principal, actívelo.',
    invalidUrl:
      'Debe ser una URL válida o una URL con marcadores de posición de variables en el formato {nombre_variable} o {componente@variable}',
  },
  footer: {
    profile: 'Todos los derechos reservados @ React',
@@ -1096,6 +1096,8 @@ export default {
    cleanHtml: 'Nettoyer le HTML',
    cleanHtmlTip:
      'Si la réponse est au format HTML et que seul le contenu principal est souhaité, activez cette option.',
    invalidUrl:
      'Doit être une URL valide ou une URL avec des espaces réservés de variables au format {nom_variable} ou {composant@variable}',
    reference: 'Référence',
    input: 'Entrée',
    output: 'Sortie',
@@ -1051,6 +1051,20 @@ export default {
    note: 'Catatan',
    noteDescription: 'Catatan',
    notePlaceholder: 'Silakan masukkan catatan',

    invoke: 'Permintaan HTTP',
    invokeDescription:
      'Komponen yang mampu memanggil layanan remote, menggunakan output komponen lain atau konstanta sebagai input.',
    url: 'Url',
    method: 'Metode',
    timeout: 'Waktu habis',
    headers: 'Header',
    cleanHtml: 'Bersihkan HTML',
    cleanHtmlTip:
      'Jika respons diformat HTML dan hanya ingin konten utama, aktifkan opsi ini.',
    invalidUrl:
      'Harus berupa URL yang valid atau URL dengan placeholder variabel dalam format {nama_variabel} atau {komponen@variabel}',

    prompt: 'Prompt',
    promptTip:
      'Gunakan prompt sistem untuk menjelaskan tugas untuk LLM, tentukan bagaimana harus merespons, dan menguraikan persyaratan lainnya. Prompt sistem sering digunakan bersama dengan kunci (variabel), yang berfungsi sebagai berbagai input data untuk LLM. Gunakan garis miring `/` atau tombol (x) untuk menampilkan kunci yang digunakan.',
@@ -1098,6 +1098,8 @@ export default {
    cleanHtml: 'HTMLをクリーン',
    cleanHtmlTip:
      '応答がHTML形式であり、主要なコンテンツのみが必要な場合は、これをオンにしてください。',
    invalidUrl:
      '有効なURLまたは{variable_name}または{component@variable}形式の変数プレースホルダーを含むURLである必要があります',
    reference: '参照',
    input: '入力',
    output: '出力',
@@ -1066,6 +1066,8 @@ export default {
    cleanHtml: 'Limpar HTML',
    cleanHtmlTip:
      'Se a resposta for formatada em HTML e apenas o conteúdo principal for desejado, ative esta opção.',
    invalidUrl:
      'Deve ser uma URL válida ou uma URL com marcadores de posição de variáveis no formato {nome_variável} ou {componente@variável}',

    reference: 'Referência',
    input: 'Entrada',
@@ -1327,6 +1327,8 @@ export default {
    cleanHtml: 'Очистить HTML',
    cleanHtmlTip:
      'Включите, если нужен только основной контент из HTML-ответа.',
    invalidUrl:
      'Должен быть действительный URL или URL с заполнителями переменных в формате {имя_переменной} или {компонент@переменная}',
    reference: 'Ссылка',
    input: 'Вход',
    output: 'Выход',
@@ -1128,6 +1128,8 @@ export default {
    cleanHtml: 'Làm sạch HTML',
    cleanHtmlTip:
      'Nếu phản hồi được định dạng HTML và chỉ muốn nội dung chính, hãy bật nó lên.',
    invalidUrl:
      'Phải là URL hợp lệ hoặc URL có chứa các biến theo định dạng {ten_bien} hoặc {thanh_phan@bien}',
    reference: 'Tham khảo',
    input: 'Đầu vào',
    output: 'Đầu ra',
@@ -1153,6 +1153,8 @@ export default {
    headers: '請求頭',
    cleanHtml: '清除 HTML',
    cleanHtmlTip: '如果回應是 HTML 格式並且只需要主要內容,請將其開啟。',
    invalidUrl:
      '必須是有效的 URL 或包含變量佔位符的 URL,格式為 {variable_name} 或 {component@variable}',
    reference: '引用',
    input: '輸入',
    output: '輸出',
@@ -1359,6 +1359,8 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
    headers: '请求头',
    cleanHtml: '清除 HTML',
    cleanHtmlTip: '如果响应是 HTML 格式且只需要主要内容,请将其打开。',
    invalidUrl:
      '必须是有效的 URL 或包含变量占位符的 URL,格式为 {variable_name} 或 {component@variable}',
    reference: '引用',
    input: '输入',
    output: '输出',
@@ -1454,7 +1456,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
    addAgent: '添加智能体',
    agentDescription: '构建具备推理、工具调用和多智能体协同的智能体组件。',
    maxRecords: '最大记录数',
    createAgent: '创建智能体',
    createAgent: '智能体流程',
    stringTransform: '文本处理',
    userFillUp: '等待输入',
    userFillUpDescription: `此组件会暂停当前的流程并等待用户发送消息,接收到消息之后再进行之后的流程。`,
@@ -1611,6 +1613,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
    parser: '解析器',
    parserDescription: '从文件中提取原始文本和结构以供下游处理。',
    tokenizer: '分词器',
    tokenizerRequired: '请先添加Tokenizer节点',
    tokenizerDescription:
      '根据所选的搜索方法,将文本转换为所需的数据结构(例如,用于嵌入搜索的向量嵌入)。',
    splitter: '分词器拆分器',
@@ -30,6 +30,10 @@ function InnerButtonEdge({
  sourceHandleId,
}: EdgeProps<Edge<{ isHovered: boolean }>>) {
  const deleteEdgeById = useGraphStore((state) => state.deleteEdgeById);
  const highlightedPlaceholderEdgeId = useGraphStore(
    (state) => state.highlightedPlaceholderEdgeId,
  );

  const [edgePath, labelX, labelY] = getBezierPath({
    sourceX,
    sourceY,
@@ -44,6 +48,13 @@
      : {};
  }, [selected]);

  const placeholderHighlightStyle = useMemo(() => {
    const isHighlighted = highlightedPlaceholderEdgeId === id;
    return isHighlighted
      ? { strokeWidth: 2, stroke: 'var(--accent-primary)' }
      : {};
  }, [highlightedPlaceholderEdgeId, id]);

  const onEdgeClick = () => {
    deleteEdgeById(id);
  };
@@ -81,7 +92,12 @@
      <BaseEdge
        path={edgePath}
        markerEnd={markerEnd}
        style={{ ...style, ...selectedStyle, ...showHighlight }}
        style={{
          ...style,
          ...selectedStyle,
          ...showHighlight,
          ...placeholderHighlightStyle,
        }}
        className={cn('text-text-secondary')}
      />
@@ -182,8 +182,12 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) {

  const { clearActiveDropdown } = useDropdownManager();

  const { removePlaceholderNode, onNodeCreated, setCreatedPlaceholderRef } =
    usePlaceholderManager(reactFlowInstance);
  const {
    removePlaceholderNode,
    onNodeCreated,
    setCreatedPlaceholderRef,
    checkAndRemoveExistingPlaceholder,
  } = usePlaceholderManager(reactFlowInstance);

  const { calculateDropdownPosition } = useDropdownPosition(reactFlowInstance);

@@ -204,6 +208,7 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) {
    calculateDropdownPosition,
    removePlaceholderNode,
    clearActiveDropdown,
    checkAndRemoveExistingPlaceholder,
  );

  const onPaneClick = useCallback(() => {
@@ -107,7 +107,7 @@ function OperatorItemList({
        </DropdownMenuItem>
      )}
      </TooltipTrigger>
      <TooltipContent side="right">
      <TooltipContent side="right" sideOffset={24}>
        <p>{t(`flow.${lowerFirst(operator)}Description`)}</p>
      </TooltipContent>
    </Tooltip>
@@ -127,7 +127,7 @@ function AccordionOperators({
  return (
    <Accordion
      type="multiple"
      className="px-2 text-text-title max-h-[45vh] overflow-auto"
      className="px-2 text-text-title max-h-[45vh] overflow-auto scrollbar-none"
      defaultValue={['item-1', 'item-2', 'item-3', 'item-4', 'item-5']}
    >
      <AccordionItem value="item-1">
@@ -249,7 +249,7 @@ export function InnerNextStepDropdown({
        style={{
          position: 'fixed',
          left: position.x,
          top: position.y + 10,
          top: position.y,
          zIndex: 1000,
        }}
        onClick={(e) => e.stopPropagation()}
@@ -283,3 +283,16 @@
    transform: translateY(0);
  }
}

.hideScrollbar {
  /* Webkit browsers (Chrome, Safari, Edge) */
  &::-webkit-scrollbar {
    display: none;
  }

  /* Firefox */
  scrollbar-width: none;

  /* IE and Edge */
  -ms-overflow-style: none;
}
@@ -1,18 +1,12 @@
import { cn } from '@/lib/utils';
import { NodeProps, Position } from '@xyflow/react';
import { Skeleton } from 'antd';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';
import { NodeHandleId, Operator } from '../../constant';
import OperatorIcon from '../../operator-icon';
import { NodeHandleId } from '../../constant';
import { CommonHandle } from './handle';
import { LeftHandleStyle } from './handle-icon';
import styles from './index.less';
import { NodeWrapper } from './node-wrapper';

function InnerPlaceholderNode({ data, id, selected }: NodeProps) {
  const { t } = useTranslation();

function InnerPlaceholderNode({ id, selected }: NodeProps) {
  return (
    <NodeWrapper selected={selected}>
      <CommonHandle
@@ -25,20 +19,16 @@ function InnerPlaceholderNode({ data, id, selected }: NodeProps) {
      ></CommonHandle>

      <section className="flex items-center gap-2">
        <OperatorIcon name={data.label as Operator}></OperatorIcon>
        <div className="truncate text-center font-semibold text-sm">
          {t(`flow.placeholder`, 'Placeholder')}
        </div>
        <Skeleton.Avatar
          active
          size={24}
          shape="square"
          style={{ backgroundColor: 'rgba(255,255,255,0.05)' }}
        />
      </section>

      <section
        className={cn(styles.generateParameters, 'flex gap-2 flex-col mt-2')}
      >
        <Skeleton active paragraph={{ rows: 2 }} title={false} />
        <div className="flex gap-2">
          <Skeleton.Button active size="small" />
          <Skeleton.Button active size="small" />
        </div>
      <section className={'flex gap-2 flex-col'} style={{ marginTop: 10 }}>
        <Skeleton.Input active style={{ width: '100%', height: 30 }} />
      </section>
    </NodeWrapper>
  );
@@ -943,4 +943,6 @@ export const DROPDOWN_ADDITIONAL_OFFSET = 50;
export const HALF_PLACEHOLDER_NODE_WIDTH = PLACEHOLDER_NODE_WIDTH / 2;
export const HALF_PLACEHOLDER_NODE_HEIGHT =
  PLACEHOLDER_NODE_HEIGHT + DROPDOWN_SPACING + DROPDOWN_ADDITIONAL_OFFSET;
export const DROPDOWN_HORIZONTAL_OFFSET = 28;
export const DROPDOWN_VERTICAL_OFFSET = 74;
export const PREVENT_CLOSE_DELAY = 300;
@@ -26,6 +26,7 @@ import { INextOperatorForm } from '../../interface';
import { buildOutputList } from '../../utils/build-output-list';
import { FormWrapper } from '../components/form-wrapper';
import { Output } from '../components/output';
import { PromptEditor } from '../components/prompt-editor';
import { FormSchema, FormSchemaType } from './schema';
import { useEditVariableRecord } from './use-edit-variable';
import { VariableDialog } from './variable-dialog';
@@ -98,7 +99,13 @@ function InvokeForm({ node }: INextOperatorForm) {
          <FormItem>
            <FormLabel>{t('flow.url')}</FormLabel>
            <FormControl>
              <Input {...field} placeholder="http://" />
              <PromptEditor
                value={field.value}
                onChange={field.onChange}
                placeholder="http://"
                showToolbar={false}
                multiLine={false}
              />
            </FormControl>
            <FormMessage />
          </FormItem>
@@ -6,8 +6,54 @@ export const VariableFormSchema = z.object({
  value: z.string(),
});

// {user_id} or {component@variable}
const placeholderRegex = /\{([a-zA-Z_][a-zA-Z0-9_.@-]*)\}/g;

// URL validation schema that accepts:
// 1. Standard URLs (e.g. https://example.com/api)
// 2. URLs with variable placeholders in curly braces (e.g. https://api/{user_id}/posts)
const urlValidation = z.string().refine(
  (val) => {
    if (!val) return false;

    const hasPlaceholders = val.includes('{') && val.includes('}');
    const matches = [...val.matchAll(placeholderRegex)];

    if (hasPlaceholders) {
      if (
        !matches.length ||
        matches.some((m) => !/^[a-zA-Z_][a-zA-Z0-9_.@-]*$/.test(m[1]))
      )
        return false;

      if ((val.match(/{/g) || []).length !== (val.match(/}/g) || []).length)
        return false;

      const testURL = val.replace(placeholderRegex, 'placeholder');

      return isValidURL(testURL);
    }

    return isValidURL(val);
  },
  {
    message: 'Must be a valid URL or URL with variable placeholders',
  },
);

function isValidURL(str: string): boolean {
  try {
    // Try to construct a full URL; prepend http:// if protocol is missing
    new URL(str.startsWith('http') ? str : `http://${str}`);
    return true;
  } catch {
    // Allow relative paths (e.g. /api/users) if needed
    return /^\/[a-zA-Z0-9]/.test(str);
  }
}

export const FormSchema = z.object({
  url: z.string().url(),
  url: urlValidation,
  method: z.string(),
  timeout: z.number(),
  headers: z.string(),
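A quick sanity check of what the new schema accepts and rejects (the sample strings below are invented for illustration):

// Accepted: a plain URL, and URLs whose path contains {placeholders}.
urlValidation.safeParse('https://example.com/api').success;                 // true
urlValidation.safeParse('https://api.example.com/{user_id}/posts').success; // true
urlValidation.safeParse('http://host/{component@variable}').success;        // true

// Rejected: an empty string, a malformed placeholder name, unbalanced braces.
urlValidation.safeParse('').success;                              // false
urlValidation.safeParse('https://api/{user id}/posts').success;   // false (space in name)
urlValidation.safeParse('https://api/{user_id}}/posts').success;  // false (brace count)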
@@ -1,6 +1,7 @@
import { useFetchAgent } from '@/hooks/use-agent-request';
import { RAGFlowNodeType } from '@/interfaces/database/flow';
import { useCallback } from 'react';
import { Operator } from '../constant';
import useGraphStore from '../store';
import { buildDslComponentsByGraph } from '../utils';

@@ -10,15 +11,35 @@ export const useBuildDslData = () => {

  const buildDslData = useCallback(
    (currentNodes?: RAGFlowNodeType[]) => {
      const nodesToProcess = currentNodes ?? nodes;

      // Filter out placeholder nodes and related edges
      const filteredNodes = nodesToProcess.filter(
        (node) => node.data?.label !== Operator.Placeholder,
      );

      const filteredEdges = edges.filter((edge) => {
        const sourceNode = nodesToProcess.find(
          (node) => node.id === edge.source,
        );
        const targetNode = nodesToProcess.find(
          (node) => node.id === edge.target,
        );
        return (
          sourceNode?.data?.label !== Operator.Placeholder &&
          targetNode?.data?.label !== Operator.Placeholder
        );
      });

      const dslComponents = buildDslComponentsByGraph(
        currentNodes ?? nodes,
        edges,
        filteredNodes,
        filteredEdges,
        data.dsl.components,
      );

      return {
        ...data.dsl,
        graph: { nodes: currentNodes ?? nodes, edges },
        graph: { nodes: filteredNodes, edges: filteredEdges },
        components: dslComponents,
      };
    },
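Note why the edges are filtered alongside the nodes: dropping only the placeholder node would leave edges whose source or target points at a node that no longer exists, and those dangling references would leak into the persisted DSL. A tiny illustration with invented ids:

// With nodes = [agent, placeholder] and this edge:
const danglingEdge = { id: 'e1', source: 'agent-1', target: 'placeholder-1' };
// filtering only the nodes keeps 'e1' pointing at the removed placeholder,
// so any edge touching a placeholder is dropped before the DSL is built.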
@@ -2,6 +2,7 @@ import { Connection, Position } from '@xyflow/react';
import { useCallback, useRef } from 'react';
import { useDropdownManager } from '../canvas/context';
import { Operator, PREVENT_CLOSE_DELAY } from '../constant';
import useGraphStore from '../store';
import { useAddNode } from './use-add-node';

interface ConnectionStartParams {
@@ -26,6 +27,7 @@ export const useConnectionDrag = (
  ) => { x: number; y: number },
  removePlaceholderNode: () => void,
  clearActiveDropdown: () => void,
  checkAndRemoveExistingPlaceholder: () => void,
) => {
  // Reference for whether connection is established
  const isConnectedRef = useRef(false);
@@ -38,6 +40,7 @@

  const { addCanvasNode } = useAddNode(reactFlowInstance);
  const { setActiveDropdown } = useDropdownManager();
  const { setHighlightedPlaceholderEdgeId } = useGraphStore();

  /**
   * Connection start handler function
@@ -81,10 +84,17 @@
      }

      if (isHandleClick) {
        removePlaceholderNode();
        hideModal();
        clearActiveDropdown();
        connectionStartRef.current = null;
        mouseStartPosRef.current = null;
        return;
      }

      // Check and remove existing placeholder-node before creating new one
      checkAndRemoveExistingPlaceholder();

      // Create placeholder node and establish connection
      const mockEvent = { clientX, clientY };
      const contextData = {
@@ -101,9 +111,13 @@
        contextData,
      )(mockEvent);

      // Record the created placeholder node ID
      if (newNodeId) {
        setCreatedPlaceholderRef(newNodeId);

        if (connectionStartRef.current) {
          const edgeId = `xy-edge__${connectionStartRef.current.nodeId}${connectionStartRef.current.handleId}-${newNodeId}end`;
          setHighlightedPlaceholderEdgeId(edgeId);
        }
      }

      // Calculate placeholder node position and display dropdown menu
@@ -140,6 +154,11 @@
      calculateDropdownPosition,
      setActiveDropdown,
      showModal,
      setHighlightedPlaceholderEdgeId,
      checkAndRemoveExistingPlaceholder,
      removePlaceholderNode,
      hideModal,
      clearActiveDropdown,
    ],
  );

@@ -187,7 +206,13 @@
    removePlaceholderNode();
    hideModal();
    clearActiveDropdown();
  }, [removePlaceholderNode, hideModal, clearActiveDropdown]);
    setHighlightedPlaceholderEdgeId(null);
  }, [
    removePlaceholderNode,
    hideModal,
    clearActiveDropdown,
    setHighlightedPlaceholderEdgeId,
  ]);

  return {
    onConnectStart,
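The highlighted edge id is synthesized to match the id React Flow auto-generates for the connection that was just dropped onto the placeholder. For example (node and handle ids invented):

// Drag started at node 'agent-1' on handle 'start'; the placeholder created
// on drop got id 'placeholder-1'. The synthesized id must equal React Flow's
// auto-generated one for that edge:
const edgeId = `xy-edge__${'agent-1'}${'start'}-${'placeholder-1'}end`;
// => 'xy-edge__agent-1start-placeholder-1end'
setHighlightedPlaceholderEdgeId(edgeId);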
@@ -1,6 +1,7 @@
import { useCallback } from 'react';
import {
  HALF_PLACEHOLDER_NODE_HEIGHT,
  DROPDOWN_HORIZONTAL_OFFSET,
  DROPDOWN_VERTICAL_OFFSET,
  HALF_PLACEHOLDER_NODE_WIDTH,
} from '../constant';

@@ -29,8 +30,11 @@ export const useDropdownPosition = (reactFlowInstance: any) => {

  // Calculate dropdown position in flow coordinate system
  const dropdownFlowPosition = {
    x: placeholderNodePosition.x - HALF_PLACEHOLDER_NODE_WIDTH, // Placeholder node left-aligned offset
    y: placeholderNodePosition.y + HALF_PLACEHOLDER_NODE_HEIGHT, // Placeholder node height plus spacing
    x:
      placeholderNodePosition.x +
      HALF_PLACEHOLDER_NODE_WIDTH +
      DROPDOWN_HORIZONTAL_OFFSET,
    y: placeholderNodePosition.y - DROPDOWN_VERTICAL_OFFSET,
  };

  // Convert flow coordinates back to screen coordinates
@@ -1,4 +1,5 @@
import { useCallback, useRef } from 'react';
import { Operator } from '../constant';
import useGraphStore from '../store';

/**
@@ -11,6 +12,46 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
  // Flag indicating whether user has selected a node
  const userSelectedNodeRef = useRef(false);

  /**
   * Check if placeholder node exists and remove it if found
   * Ensures only one placeholder can exist on the panel
   */
  const checkAndRemoveExistingPlaceholder = useCallback(() => {
    const { nodes, edges } = useGraphStore.getState();

    // Find existing placeholder node
    const existingPlaceholder = nodes.find(
      (node) => node.data?.label === Operator.Placeholder,
    );

    if (existingPlaceholder && reactFlowInstance) {
      // Remove edges related to placeholder
      const edgesToRemove = edges.filter(
        (edge) =>
          edge.target === existingPlaceholder.id ||
          edge.source === existingPlaceholder.id,
      );

      // Remove placeholder node
      const nodesToRemove = [existingPlaceholder];

      if (nodesToRemove.length > 0 || edgesToRemove.length > 0) {
        reactFlowInstance.deleteElements({
          nodes: nodesToRemove,
          edges: edgesToRemove,
        });
      }

      // Clear highlighted placeholder edge
      useGraphStore.getState().setHighlightedPlaceholderEdgeId(null);

      // Update ref reference
      if (createdPlaceholderRef.current === existingPlaceholder.id) {
        createdPlaceholderRef.current = null;
      }
    }
  }, [reactFlowInstance]);

  /**
   * Function to remove placeholder node
   * Called when user clicks blank area or cancels operation
@@ -21,7 +62,8 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
      reactFlowInstance &&
      !userSelectedNodeRef.current
    ) {
      const { nodes, edges } = useGraphStore.getState();
      const { nodes, edges, setHighlightedPlaceholderEdgeId } =
        useGraphStore.getState();

      // Remove edges related to placeholder
      const edgesToRemove = edges.filter(
@@ -42,6 +84,8 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
        });
      }

      setHighlightedPlaceholderEdgeId(null);

      createdPlaceholderRef.current = null;
    }

@@ -57,7 +101,13 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
    (newNodeId: string) => {
      // First establish connection between new node and source, then delete placeholder
      if (createdPlaceholderRef.current && reactFlowInstance) {
        const { nodes, edges, addEdge, updateNode } = useGraphStore.getState();
        const {
          nodes,
          edges,
          addEdge,
          updateNode,
          setHighlightedPlaceholderEdgeId,
        } = useGraphStore.getState();

        // Find placeholder node to get its position
        const placeholderNode = nodes.find(
@@ -107,6 +157,8 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
          edges: edgesToRemove,
        });
      }

      setHighlightedPlaceholderEdgeId(null);
    }

    // Mark that user has selected a node
@@ -135,6 +187,7 @@ export const usePlaceholderManager = (reactFlowInstance: any) => {
    onNodeCreated,
    setCreatedPlaceholderRef,
    resetUserSelectedFlag,
    checkAndRemoveExistingPlaceholder,
    createdPlaceholderRef: createdPlaceholderRef.current,
    userSelectedNodeRef: userSelectedNodeRef.current,
  };
@@ -39,6 +39,7 @@ export type RFState = {
  selectedEdgeIds: string[];
  clickedNodeId: string; // currently selected node
  clickedToolId: string; // currently selected tool id
  highlightedPlaceholderEdgeId: string | null;
  onNodesChange: OnNodesChange<RAGFlowNodeType>;
  onEdgesChange: OnEdgesChange;
  onEdgeMouseEnter?: EdgeMouseHandler<Edge>;
@@ -89,6 +90,7 @@ export type RFState = {
  ) => void; // Deleting a condition of a classification operator will delete the related edge
  findAgentToolNodeById: (id: string | null) => string | undefined;
  selectNodeIds: (nodeIds: string[]) => void;
  setHighlightedPlaceholderEdgeId: (edgeId: string | null) => void;
};

// this is our useStore hook that we can use in our components to get parts of the store and call actions
@@ -101,6 +103,7 @@ const useGraphStore = create<RFState>()(
  selectedEdgeIds: [] as string[],
  clickedNodeId: '',
  clickedToolId: '',
  highlightedPlaceholderEdgeId: null,
  onNodesChange: (changes) => {
    set({
      nodes: applyNodeChanges(changes, get().nodes),
@@ -127,8 +130,9 @@
  },
  onConnect: (connection: Connection) => {
    const { updateFormDataOnConnect } = get();
    const newEdges = addEdge(connection, get().edges);
    set({
      edges: addEdge(connection, get().edges),
      edges: newEdges,
    });
    updateFormDataOnConnect(connection);
  },
@@ -526,6 +530,9 @@
    })),
    );
  },
  setHighlightedPlaceholderEdgeId: (edgeId) => {
    set({ highlightedPlaceholderEdgeId: edgeId });
  },
  })),
  { name: 'graph', trace: true },
),
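Because this is a plain zustand store, the new action is reachable both from components (via the hook selector) and from imperative code (via getState()), which is how the placeholder hooks above use it. A short sketch:

// Inside a component: subscribe so the edge re-renders when the id changes.
const highlighted = useGraphStore((s) => s.highlightedPlaceholderEdgeId);

// Outside React (event handlers, utilities): read/write imperatively.
useGraphStore.getState().setHighlightedPlaceholderEdgeId('xy-edge__a-b');
useGraphStore.getState().setHighlightedPlaceholderEdgeId(null);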
@@ -143,7 +143,7 @@ const buildOperatorParams = (operatorName: string) =>
  // initializeOperatorParams(operatorName), // Final processing, for guarantee
  );

const ExcludeOperators = [Operator.Note, Operator.Tool];
const ExcludeOperators = [Operator.Note, Operator.Tool, Operator.Placeholder];

export function isBottomSubAgent(edges: Edge[], nodeId?: string) {
  const edge = edges.find(
@@ -1,13 +1,20 @@
import { useToast } from '@/components/hooks/use-toast';
import message from '@/components/ui/message';
import { AgentCategory, DataflowOperator } from '@/constants/agent';
import { FileMimeType } from '@/constants/common';
import { useSetModalState } from '@/hooks/common-hooks';
import { EmptyDsl, useSetAgent } from '@/hooks/use-agent-request';
import { message } from 'antd';
import { Node } from '@xyflow/react';
import isEmpty from 'lodash/isEmpty';
import { useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { DataflowEmptyDsl } from './hooks/use-create-agent';
import { FormSchemaType } from './upload-agent-dialog/upload-agent-form';

function hasNode(nodes: Node[], operator: DataflowOperator) {
  return nodes.some((x) => x.data.label === operator);
}

export const useHandleImportJsonFile = () => {
  const {
    visible: fileUploadVisible,
@@ -32,8 +39,28 @@ export const useHandleImportJsonFile = () => {
      try {
        const graph = JSON.parse(graphStr);
        if (graphStr && !isEmpty(graph) && Array.isArray(graph?.nodes)) {
          const dsl = { ...EmptyDsl, graph };
          setAgent({ title: name, dsl });
          const nodes: Node[] = graph.nodes;

          let isAgent = true;

          if (
            hasNode(nodes, DataflowOperator.Begin) &&
            hasNode(nodes, DataflowOperator.Parser)
          ) {
            isAgent = false;
          }

          const dsl = isAgent
            ? { ...EmptyDsl, graph }
            : { ...DataflowEmptyDsl, graph };

          setAgent({
            title: name,
            dsl,
            canvas_category: isAgent
              ? AgentCategory.AgentCanvas
              : AgentCategory.DataflowCanvas,
          });
          hideFileUploadModal();
        } else {
          message.error(errorMessage);
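The import path now guesses the canvas type from the uploaded graph: a graph containing both a File (Begin) node and a Parser node is treated as a dataflow, anything else as an agent. A minimal check in the same spirit, with an invented sample graph:

import { Node } from '@xyflow/react';
import { DataflowOperator } from '@/constants/agent';

const graph = {
  nodes: [
    { id: '1', data: { label: 'File' }, position: { x: 0, y: 0 } },
    { id: '2', data: { label: 'Parser' }, position: { x: 200, y: 0 } },
  ] as Node[],
  edges: [],
};

const isDataflow =
  graph.nodes.some((n) => n.data.label === DataflowOperator.Begin) &&
  graph.nodes.some((n) => n.data.label === DataflowOperator.Parser);
// => true here, so the import would use DataflowEmptyDsl and the dataflow canvas.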
@@ -1,10 +1,14 @@
import { ParseDocumentType } from '@/components/layout-recognize-form-field';
import { initialLlmBaseValues } from '@/constants/agent';
import {
  initialLlmBaseValues,
  DataflowOperator as Operator,
} from '@/constants/agent';
import {
  ChatVariableEnabledField,
  variableEnabledFieldMap,
} from '@/constants/chat';
import { setInitialChatVariableEnabledFieldValue } from '@/utils/chat';
export { DataflowOperator as Operator } from '@/constants/agent';

import {
  Circle,
@@ -112,16 +116,6 @@ export enum AgentDialogueMode {

export const BeginId = 'File';

export enum Operator {
  Begin = 'File',
  Note = 'Note',
  Parser = 'Parser',
  Tokenizer = 'Tokenizer',
  Splitter = 'Splitter',
  HierarchicalMerger = 'HierarchicalMerger',
  Extractor = 'Extractor',
}

export const SwitchLogicOperatorOptions = ['and', 'or'];

export const CommonOperatorList = Object.values(Operator).filter(
@@ -1,14 +1,17 @@
import { crossLanguageOptions } from '@/components/cross-language-form-field';
import { LayoutRecognizeFormField } from '@/components/layout-recognize-form-field';
import { LLMFormField } from '@/components/llm-setting-items/llm-form-field';
import {
  LLMFormField,
  LLMFormFieldProps,
} from '@/components/llm-setting-items/llm-form-field';
import {
  SelectWithSearch,
  SelectWithSearchFlagOptionType,
} from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { buildOptions } from '@/utils/form';
import { upperFirst } from 'lodash';
import { useTranslation } from 'react-i18next';
import { FileType, OutputFormatMap } from '../../constant';
import { FileType, OutputFormatMap, PdfOutputFormat } from '../../constant';
import { CommonProps } from './interface';
import { buildFieldNameWithPrefix } from './utils';

@@ -16,7 +19,10 @@ function buildOutputOptionsFormatMap() {
  return Object.entries(OutputFormatMap).reduce<
    Record<string, SelectWithSearchFlagOptionType[]>
  >((pre, [key, value]) => {
    pre[key] = buildOptions(value);
    pre[key] = Object.values(value).map((v) => ({
      label: v === PdfOutputFormat.Json ? 'JSON' : upperFirst(v),
      value: v,
    }));
    return pre;
  }, {});
}
@@ -57,10 +63,14 @@ export function ParserMethodFormField({
  );
}

export function LargeModelFormField({ prefix }: CommonProps) {
export function LargeModelFormField({
  prefix,
  options,
}: CommonProps & Pick<LLMFormFieldProps, 'options'>) {
  return (
    <LLMFormField
      name={buildFieldNameWithPrefix('llm_id', prefix)}
      options={options}
    ></LLMFormField>
  );
}
@@ -11,11 +11,14 @@ import { CommonProps } from './interface';
import { useSetInitialLanguage } from './use-set-initial-language';
import { buildFieldNameWithPrefix } from './utils';

const options = buildOptions(ImageParseMethod);

export function ImageFormFields({ prefix }: CommonProps) {
  const { t } = useTranslation();
  const form = useFormContext();
  const options = buildOptions(
    ImageParseMethod,
    t,
    'dataflow.imageParseMethodOptions',
  );
  const parseMethodName = buildFieldNameWithPrefix('parse_method', prefix);

  const parseMethod = useWatch({
@@ -36,10 +36,6 @@ import { VideoFormFields } from './video-form-fields';

const outputList = buildOutputList(initialParserValues.outputs);

const FileFormatOptions = buildOptions(FileType).filter(
  (x) => x.value !== FileType.Video, // Temporarily hide the video option
);

const FileFormatWidgetMap = {
  [FileType.PDF]: PdfFormFields,
  [FileType.Video]: VideoFormFields,
@@ -83,6 +79,14 @@ function ParserItem({ name, index, fieldLength, remove }: ParserItemProps) {
  const values = form.getValues();
  const parserList = values.setups.slice(); // Adding, deleting, or modifying the parser array will not change the reference.

  const FileFormatOptions = buildOptions(
    FileType,
    t,
    'dataflow.fileFormatOptions',
  ).filter(
    (x) => x.value !== FileType.Video, // Temporarily hide the video option
  );

  const filteredFileFormatOptions = useMemo(() => {
    const otherFileFormatList = parserList
      .filter((_, idx) => idx !== index)
@@ -91,7 +95,7 @@ function ParserItem({ name, index, fieldLength, remove }: ParserItemProps) {
    return FileFormatOptions.filter((x) => {
      return !otherFileFormatList.includes(x.value);
    });
  }, [index, parserList]);
  }, [FileFormatOptions, index, parserList]);

  const Widget =
    typeof fileFormat === 'string' && fileFormat in FileFormatWidgetMap
@@ -1,13 +1,24 @@
import { LlmModelType } from '@/constants/knowledge';
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
import {
  LargeModelFormField,
  OutputFormatFormFieldProps,
} from './common-form-fields';

export function VideoFormFields({ prefix }: OutputFormatFormFieldProps) {
  const modelOptions = useComposeLlmOptionsByModelTypes([
    LlmModelType.Chat,
    LlmModelType.Image2text,
    LlmModelType.Speech2text,
  ]);

  return (
    <>
      {/* Multimodal Model */}
      <LargeModelFormField prefix={prefix}></LargeModelFormField>
      <LargeModelFormField
        prefix={prefix}
        options={modelOptions}
      ></LargeModelFormField>
    </>
  );
}
@ -15,6 +15,7 @@ import {
  DropdownMenuSeparator,
  DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
import message from '@/components/ui/message';
import { useSetModalState } from '@/hooks/common-hooks';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { ReactFlowProvider } from '@xyflow/react';
@ -30,6 +31,7 @@ import { ComponentPropsWithoutRef, useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import DataFlowCanvas from './canvas';
import { DropdownProvider } from './canvas/context';
import { Operator } from './constant';
import { LogContext } from './context';
import { useCancelCurrentDataflow } from './hooks/use-cancel-dataflow';
import { useHandleExportOrImportJsonFile } from './hooks/use-export-json';
@ -42,6 +44,7 @@ import {
} from './hooks/use-save-graph';
import { LogSheet } from './log-sheet';
import { SettingDialog } from './setting-dialog';
import useGraphStore from './store';
import { useAgentHistoryManager } from './use-agent-history-manager';
import { VersionDialog } from './version-dialog';

@ -101,8 +104,14 @@ export default function DataFlow() {

  const [uploadedFileData, setUploadedFileData] =
    useState<Record<string, any>>();
  const findNodeByName = useGraphStore((state) => state.findNodeByName);

  const handleRunAgent = useCallback(() => {
    if (!findNodeByName(Operator.Tokenizer)) {
      message.warning(t('dataflow.tokenizerRequired'));
      return;
    }

    if (isParsing) {
      // show log sheet
      showLogSheet();
@ -110,7 +119,7 @@
      hideLogSheet();
      handleRun();
    }
  }, [handleRun, hideLogSheet, isParsing, showLogSheet]);
  }, [findNodeByName, handleRun, hideLogSheet, isParsing, showLogSheet, t]);

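The tokenizer guard above depends on a findNodeByName selector exposed by useGraphStore, which this diff does not show. A hypothetical zustand-style slice illustrating the assumed contract, where a node's operator name lives in data.label:

// Hypothetical store slice — the real useGraphStore is not part of this diff.
import type { Node } from '@xyflow/react';
import { create } from 'zustand';

interface GraphState {
  nodes: Node[];
  findNodeByName: (name: string) => Node | undefined;
}

const useGraphStore = create<GraphState>((_set, get) => ({
  nodes: [],
  // Assumes each node stores its operator name under data.label.
  findNodeByName: (name) =>
    get().nodes.find((node) => node.data?.label === name),
}));
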
  const { handleCancel } = useCancelCurrentDataflow({
    messageId,
@ -153,9 +162,9 @@
            onClick={handleRunAgent}
            loading={running}
          >
            {running || (
              <CirclePlay className={isParsing ? 'animate-spin' : ''} />
            )}
            <CirclePlay
              className={isParsing || isLogEmpty ? 'animate-spin' : ''}
            />

            {isParsing || running ? t('dataflow.running') : t('flow.run')}
          </ButtonLoading>

@ -61,24 +61,25 @@ export function LogSheet({
      <SheetHeader>
        <SheetTitle className="flex items-center gap-2.5">
          <Logs className="size-4" /> {t('flow.log')}
          <Button
            variant={'ghost'}
            disabled={!isCompleted}
            onClick={navigateToDataflowResult({
              id: messageId, // 'log_id',
              [PipelineResultSearchParams.AgentId]: id, // 'agent_id',
              [PipelineResultSearchParams.DocumentId]: uploadedFileData?.id, //'doc_id',
              [PipelineResultSearchParams.AgentTitle]: agent.title, //'title',
              [PipelineResultSearchParams.IsReadOnly]: 'true',
              [PipelineResultSearchParams.Type]: 'dataflow',
              [PipelineResultSearchParams.CreatedBy]:
                uploadedFileData?.created_by,
              [PipelineResultSearchParams.DocumentExtension]:
                uploadedFileData?.extension,
            })}
          >
            {t('dataflow.viewResult')} <ArrowUpRight />
          </Button>
          {isCompleted && (
            <Button
              variant={'ghost'}
              onClick={navigateToDataflowResult({
                id: messageId, // 'log_id',
                [PipelineResultSearchParams.AgentId]: id, // 'agent_id',
                [PipelineResultSearchParams.DocumentId]: uploadedFileData?.id, //'doc_id',
                [PipelineResultSearchParams.AgentTitle]: agent.title, //'title',
                [PipelineResultSearchParams.IsReadOnly]: 'true',
                [PipelineResultSearchParams.Type]: 'dataflow',
                [PipelineResultSearchParams.CreatedBy]:
                  uploadedFileData?.created_by,
                [PipelineResultSearchParams.DocumentExtension]:
                  uploadedFileData?.extension,
              })}
            >
              {t('dataflow.viewResult')} <ArrowUpRight />
            </Button>
          )}
        </SheetTitle>
      </SheetHeader>
      <section className="max-h-[82vh] overflow-auto mt-6">

|
||||
useEffect(() => {
|
||||
if (activeEditIndex !== undefined && editDivRef.current) {
|
||||
editDivRef.current.focus();
|
||||
editDivRef.current.textContent = content.value;
|
||||
editDivRef.current.textContent = content.value as string;
|
||||
editDivRef.current.style.whiteSpace = 'pre-wrap';
|
||||
}
|
||||
}, [activeEditIndex, content]);
|
||||
}, [activeEditIndex, content, editDivRef]);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@ -33,7 +33,8 @@ const useFetchFileLogList = () => {
|
||||
const [searchParams] = useSearchParams();
|
||||
const { searchString, handleInputChange } = useHandleSearchChange();
|
||||
const { pagination, setPagination } = useGetPaginationWithRouter();
|
||||
const { filterValue, handleFilterSubmit } = useHandleFilterSubmit();
|
||||
const { filterValue, setFilterValue, handleFilterSubmit } =
|
||||
useHandleFilterSubmit();
|
||||
const { id } = useParams();
|
||||
const [active, setActive] = useState<(typeof LogTabs)[keyof typeof LogTabs]>(
|
||||
LogTabs.FILE_LOGS,
|
||||
@ -89,6 +90,7 @@ const useFetchFileLogList = () => {
|
||||
active,
|
||||
setActive,
|
||||
filterValue,
|
||||
setFilterValue,
|
||||
handleFilterSubmit,
|
||||
};
|
||||
};
|
||||
|
||||
@ -1,13 +1,14 @@
|
||||
import FileStatusBadge from '@/components/file-status-badge';
|
||||
import { FilterCollection } from '@/components/list-filter-bar/interface';
|
||||
import SvgIcon from '@/components/svg-icon';
|
||||
import { useIsDarkTheme } from '@/components/theme-provider';
|
||||
import { AntToolTip } from '@/components/ui/tooltip';
|
||||
import { RunningStatusMap } from '@/constants/knowledge';
|
||||
import { useFetchDocumentList } from '@/hooks/use-document-request';
|
||||
import { t } from 'i18next';
|
||||
import { CircleQuestionMark } from 'lucide-react';
|
||||
import { FC, useEffect, useMemo, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { RunningStatus, RunningStatusMap } from '../dataset/constant';
|
||||
import { RunningStatus } from '../dataset/constant';
|
||||
import { LogTabs } from './dataset-common';
|
||||
import { DatasetFilter } from './dataset-filter';
|
||||
import { useFetchFileLogList, useFetchOverviewTital } from './hook';
|
||||
@ -84,34 +85,6 @@ const CardFooterProcess: FC<CardFooterProcessProps> = ({
|
||||
);
|
||||
};
|
||||
|
||||
const filters = [
|
||||
{
|
||||
field: 'operation_status',
|
||||
label: t('knowledgeDetails.status'),
|
||||
list: Object.values(RunningStatus).map((value) => {
|
||||
// const value = key as RunningStatus;
|
||||
console.log(value);
|
||||
return {
|
||||
id: value,
|
||||
label: RunningStatusMap[value].label,
|
||||
};
|
||||
}),
|
||||
},
|
||||
{
|
||||
field: 'types',
|
||||
label: t('knowledgeDetails.task'),
|
||||
list: [
|
||||
{
|
||||
id: 'Parse',
|
||||
label: 'Parse',
|
||||
},
|
||||
{
|
||||
id: 'Download',
|
||||
label: 'Download',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const FileLogsPage: FC = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
@ -169,10 +142,56 @@
    setPagination,
    active,
    filterValue,
    setFilterValue,
    handleFilterSubmit,
    setActive,
  } = useFetchFileLogList();

  const filters = useMemo(() => {
    const filterCollection: FilterCollection[] = [
      {
        field: 'operation_status',
        label: t('knowledgeDetails.status'),
        list: Object.values(RunningStatus).map((value) => {
          // const value = key as RunningStatus;
          console.log(value);
          return {
            id: value,
            // label: RunningStatusMap[value].label,
            label: (
              <FileStatusBadge
                status={value as RunningStatus}
                name={RunningStatusMap[value as RunningStatus]}
              />
            ),
          };
        }),
      },
      // {
      //   field: 'types',
      //   label: t('knowledgeDetails.task'),
      //   list: [
      //     {
      //       id: 'Parse',
      //       label: 'Parse',
      //     },
      //     {
      //       id: 'Download',
      //       label: 'Download',
      //     },
      //   ],
      // },
    ];
    if (active === LogTabs.FILE_LOGS) {
      return filterCollection;
    }
    if (active === LogTabs.DATASET_LOGS) {
      const list = filterCollection.filter((item, index) => index === 0);
      return list;
    }
    return [];
  }, [active, t]);

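Rebuilding filters inside useMemo, rather than at module scope with the global t from i18next as in the deleted block above, lets the labels react to language changes and lets the list vary by tab: the dataset tab keeps only the status filter. The filter((item, index) => index === 0) call is a verbose slice(0, 1); the same selection, condensed as a sketch:

// Condensed tab-dependent selection (a sketch, not the repo's code).
function selectFilters(
  active: (typeof LogTabs)[keyof typeof LogTabs],
  filterCollection: FilterCollection[],
): FilterCollection[] {
  if (active === LogTabs.FILE_LOGS) return filterCollection;
  if (active === LogTabs.DATASET_LOGS) return filterCollection.slice(0, 1);
  return [];
}
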
  const tableList = useMemo(() => {
    console.log('tableList', tableOriginData);
    if (tableOriginData && tableOriginData.logs?.length) {
@ -187,6 +206,7 @@
  }, [tableOriginData]);

  const changeActiveLogs = (active: (typeof LogTabs)[keyof typeof LogTabs]) => {
    setFilterValue({});
    setActive(active);
  };
  const handlePaginationChange = (page: number, pageSize: number) => {

@ -1,157 +0,0 @@
'use client';

import { zodResolver } from '@hookform/resolvers/zod';
import { useForm } from 'react-hook-form';
import { z } from 'zod';

import { Button } from '@/components/ui/button';
import {
  Form,
  FormControl,
  FormDescription,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from '@/components/ui/form';
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from '@/components/ui/select';
import { FormSlider } from '@/components/ui/slider';
import { Textarea } from '@/components/ui/textarea';
import ChunkMethodCard from './chunk-method-card';

const formSchema = z.object({
  parser_id: z.string().min(1, {
    message: 'Username must be at least 2 characters.',
  }),
  a: z.number().min(2, {
    message: 'Username must be at least 2 characters.',
  }),
  b: z.string().min(2, {
    message: 'Username must be at least 2 characters.',
  }),
  c: z.number().min(2, {
    message: 'Username must be at least 2 characters.',
  }),
  d: z.string().min(2, {
    message: 'Username must be at least 2 characters.',
  }),
});

export default function AdvancedSettingForm() {
  const form = useForm<z.infer<typeof formSchema>>({
    resolver: zodResolver(formSchema),
    defaultValues: {
      parser_id: '',
    },
  });

  function onSubmit(values: z.infer<typeof formSchema>) {
    console.log(values);
  }

  return (
    <Form {...form}>
      <form onSubmit={form.handleSubmit(onSubmit)} className="space-y-8">
        <FormField
          control={form.control}
          name="a"
          render={({ field }) => (
            <FormItem className="w-2/5">
              <FormLabel>Username</FormLabel>
              <FormControl>
                <FormSlider {...field}></FormSlider>
              </FormControl>
              <FormDescription>
                This is your public display name.
              </FormDescription>
              <FormMessage />
            </FormItem>
          )}
        />
        <ChunkMethodCard></ChunkMethodCard>
        <FormField
          control={form.control}
          name="a"
          render={({ field }) => (
            <FormItem className="w-2/5">
              <FormLabel>Username</FormLabel>
              <FormControl>
                <FormSlider {...field}></FormSlider>
              </FormControl>
              <FormDescription>
                This is your public display name.
              </FormDescription>
              <FormMessage />
            </FormItem>
          )}
        />
        <FormField
          control={form.control}
          name="b"
          render={({ field }) => (
            <FormItem className="w-2/5">
              <FormLabel>Username</FormLabel>
              <Select onValueChange={field.onChange} defaultValue={field.value}>
                <FormControl>
                  <SelectTrigger>
                    <SelectValue placeholder="Select a verified email to display" />
                  </SelectTrigger>
                </FormControl>
                <SelectContent>
                  <SelectItem value="m@example.com">m@example.com</SelectItem>
                  <SelectItem value="m@google.com">m@google.com</SelectItem>
                  <SelectItem value="m@support.com">m@support.com</SelectItem>
                </SelectContent>
              </Select>
              <FormDescription>
                This is your public display name.
              </FormDescription>
              <FormMessage />
            </FormItem>
          )}
        />
        <FormField
          control={form.control}
          name="c"
          render={({ field }) => (
            <FormItem className="w-2/5">
              <FormLabel>Username</FormLabel>
              <FormControl>
                <FormSlider {...field}></FormSlider>
              </FormControl>
              <FormDescription>
                This is your public display name.
              </FormDescription>
              <FormMessage />
            </FormItem>
          )}
        />
        <FormField
          control={form.control}
          name="d"
          render={({ field }) => (
            <FormItem className="w-2/5">
              <FormLabel>Username</FormLabel>
              <FormControl>
                <Textarea {...field}></Textarea>
              </FormControl>
              <FormDescription>
                This is your public display name.
              </FormDescription>
              <FormMessage />
            </FormItem>
          )}
        />
        <Button size={'sm'} type="submit" className="w-2/5">
          Test
        </Button>
      </form>
    </Form>
  );
}
Some files were not shown because too many files have changed in this diff.