Compare commits

..

2 Commits

Author SHA1 Message Date
a0a7eb9100 Fix unreachable code in loop.py and iteration.py
Co-authored-by: JinHai-CN <33142505+JinHai-CN@users.noreply.github.com>
2026-02-06 02:18:09 +00:00
22f17f6334 Initial plan 2026-02-06 02:13:19 +00:00
21 changed files with 229 additions and 416 deletions

View File

@ -48,22 +48,13 @@ RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
apt install -y libatk-bridge2.0-0 && \
apt install -y libpython3-dev libgtk-4-1 libnss3 xdg-utils libgbm-dev && \
apt install -y libjemalloc-dev && \
apt install -y gnupg unzip curl wget git vim less && \
apt install -y nginx unzip curl wget git vim less && \
apt install -y ghostscript && \
apt install -y pandoc && \
apt install -y texlive && \
apt install -y fonts-freefont-ttf fonts-noto-cjk && \
apt install -y postgresql-client
ARG NGINX_VERSION=1.29.5-1~noble
RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \
mkdir -p /etc/apt/keyrings && \
curl -fsSL https://nginx.org/keys/nginx_signing.key | gpg --dearmor -o /etc/apt/keyrings/nginx-archive-keyring.gpg && \
echo "deb [signed-by=/etc/apt/keyrings/nginx-archive-keyring.gpg] https://nginx.org/packages/mainline/ubuntu/ noble nginx" > /etc/apt/sources.list.d/nginx.list && \
apt update && \
apt install -y nginx=${NGINX_VERSION} && \
apt-mark hold nginx
# Install uv
RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \
if [ "$NEED_MIRROR" == "1" ]; then \

View File

@ -57,12 +57,10 @@ class Iteration(ComponentBase, ABC):
return cid
def _invoke(self, **kwargs):
if self.check_if_canceled("Iteration processing"):
return
arr = self._canvas.get_variable_value(self._param.items_ref)
if not isinstance(arr, list):
self.set_output("_ERROR", self._param.items_ref + " must be an array, but its type is "+str(type(arr)))
if not self.check_if_canceled("Iteration processing"):
arr = self._canvas.get_variable_value(self._param.items_ref)
if not isinstance(arr, list):
self.set_output("_ERROR", self._param.items_ref + " must be an array, but its type is "+str(type(arr)))
def thoughts(self) -> str:
return "Need to process {} items.".format(len(self._canvas.get_variable_value(self._param.items_ref)))

View File

@ -51,29 +51,27 @@ class Loop(ComponentBase, ABC):
return cid
def _invoke(self, **kwargs):
if self.check_if_canceled("Loop processing"):
return
for item in self._param.loop_variables:
if any([not item.get("variable"), not item.get("input_mode"), not item.get("value"),not item.get("type")]):
assert "Loop Variable is not complete."
if item["input_mode"]=="variable":
self.set_output(item["variable"],self._canvas.get_variable_value(item["value"]))
elif item["input_mode"]=="constant":
self.set_output(item["variable"],item["value"])
else:
if item["type"] == "number":
self.set_output(item["variable"], 0)
elif item["type"] == "string":
self.set_output(item["variable"], "")
elif item["type"] == "boolean":
self.set_output(item["variable"], False)
elif item["type"].startswith("object"):
self.set_output(item["variable"], {})
elif item["type"].startswith("array"):
self.set_output(item["variable"], [])
if not self.check_if_canceled("Loop processing"):
for item in self._param.loop_variables:
if any([not item.get("variable"), not item.get("input_mode"), not item.get("value"),not item.get("type")]):
assert "Loop Variable is not complete."
if item["input_mode"]=="variable":
self.set_output(item["variable"],self._canvas.get_variable_value(item["value"]))
elif item["input_mode"]=="constant":
self.set_output(item["variable"],item["value"])
else:
self.set_output(item["variable"], "")
if item["type"] == "number":
self.set_output(item["variable"], 0)
elif item["type"] == "string":
self.set_output(item["variable"], "")
elif item["type"] == "boolean":
self.set_output(item["variable"], False)
elif item["type"].startswith("object"):
self.set_output(item["variable"], {})
elif item["type"].startswith("array"):
self.set_output(item["variable"], [])
else:
self.set_output(item["variable"], "")
def thoughts(self) -> str:

View File

@ -617,9 +617,7 @@ async def run():
return get_data_error_result(message="Document not found!")
if str(req["run"]) == TaskStatus.CANCEL.value:
tasks = list(TaskService.query(doc_id=id))
has_unfinished_task = any((task.progress or 0) < 1 for task in tasks)
if str(doc.run) in [TaskStatus.RUNNING.value, TaskStatus.CANCEL.value] or has_unfinished_task:
if str(doc.run) == TaskStatus.RUNNING.value:
cancel_all_task_of(id)
else:
return get_data_error_result(message="Cannot cancel a task that is not in RUNNING status")

View File

@ -204,9 +204,7 @@ class RDBMSConnector(LoadConnector, PollConnector):
value = row_dict[col]
if isinstance(value, (dict, list)):
value = json.dumps(value, ensure_ascii=False)
# Use brackets around field name to ensure it's distinguishable
# after chunking (TxtParser strips \n delimiters during merge)
content_parts.append(f"{col}】: {value}")
content_parts.append(f"{col}: {value}")
content = "\n".join(content_parts)

View File

@ -138,24 +138,20 @@ def meta_filter(metas: dict, filters: list[dict], logic: str = "and"):
ids.extend(docids)
return ids
for f in filters:
k = f["key"]
if k not in metas:
# Key not found in metas: treat as no match
ids = []
else:
v2docs = metas[k]
for k, v2docs in metas.items():
for f in filters:
if k != f["key"]:
continue
ids = filter_out(v2docs, f["op"], f["value"])
if not doc_ids:
doc_ids = set(ids)
else:
if logic == "and":
doc_ids = doc_ids & set(ids)
if not doc_ids:
return []
if not doc_ids:
doc_ids = set(ids)
else:
doc_ids = doc_ids | set(ids)
if logic == "and":
doc_ids = doc_ids & set(ids)
if not doc_ids:
return []
else:
doc_ids = doc_ids | set(ids)
return list(doc_ids)

View File

@ -156,55 +156,6 @@ class RAGFlowExcelParser:
continue
return raw_items
@staticmethod
def _get_actual_row_count(ws):
max_row = ws.max_row
if not max_row:
return 0
if max_row <= 10000:
return max_row
max_col = min(ws.max_column or 1, 50)
def row_has_data(row_idx):
for col_idx in range(1, max_col + 1):
cell = ws.cell(row=row_idx, column=col_idx)
if cell.value is not None and str(cell.value).strip():
return True
return False
if not any(row_has_data(i) for i in range(1, min(101, max_row + 1))):
return 0
left, right = 1, max_row
last_data_row = 1
while left <= right:
mid = (left + right) // 2
found = False
for r in range(mid, min(mid + 10, max_row + 1)):
if row_has_data(r):
found = True
last_data_row = max(last_data_row, r)
break
if found:
left = mid + 1
else:
right = mid - 1
for r in range(last_data_row, min(last_data_row + 500, max_row + 1)):
if row_has_data(r):
last_data_row = r
return last_data_row
@staticmethod
def _get_rows_limited(ws):
actual_rows = RAGFlowExcelParser._get_actual_row_count(ws)
if actual_rows == 0:
return []
return list(ws.iter_rows(min_row=1, max_row=actual_rows))
def html(self, fnm, chunk_rows=256):
from html import escape
@ -220,7 +171,7 @@ class RAGFlowExcelParser:
for sheetname in wb.sheetnames:
ws = wb[sheetname]
try:
rows = RAGFlowExcelParser._get_rows_limited(ws)
rows = list(ws.rows)
except Exception as e:
logging.warning(f"Skip sheet '{sheetname}' due to rows access error: {e}")
continue
@ -272,7 +223,7 @@ class RAGFlowExcelParser:
for sheetname in wb.sheetnames:
ws = wb[sheetname]
try:
rows = RAGFlowExcelParser._get_rows_limited(ws)
rows = list(ws.rows)
except Exception as e:
logging.warning(f"Skip sheet '{sheetname}' due to rows access error: {e}")
continue
@ -287,8 +238,6 @@ class RAGFlowExcelParser:
t = str(ti[i].value) if i < len(ti) else ""
t += ("" if t else "") + str(c.value)
fields.append(t)
if not fields:
continue
line = "; ".join(fields)
if sheetname.lower().find("sheet") < 0:
line += " ——" + sheetname
@ -300,14 +249,14 @@ class RAGFlowExcelParser:
if fnm.split(".")[-1].lower().find("xls") >= 0:
wb = RAGFlowExcelParser._load_excel_to_workbook(BytesIO(binary))
total = 0
for sheetname in wb.sheetnames:
try:
ws = wb[sheetname]
total += RAGFlowExcelParser._get_actual_row_count(ws)
except Exception as e:
logging.warning(f"Skip sheet '{sheetname}' due to rows access error: {e}")
continue
try:
ws = wb[sheetname]
total += len(list(ws.rows))
except Exception as e:
logging.warning(f"Skip sheet '{sheetname}' due to rows access error: {e}")
continue
return total
if fnm.split(".")[-1].lower() in ["csv", "txt"]:

View File

@ -31,7 +31,7 @@ At its core, an Agent Context Engine is built on a triumvirate of next-generatio
2. The Memory Layer: An Agent's intelligence is defined by its ability to learn from interaction. The Memory Layer is a specialized retrieval system for dynamic, episodic data: conversation history, user preferences, and the agent's own internal state (e.g., "waiting for human input"). It manages the lifecycle of this data—storing raw dialogue, triggering summarization into semantic memory, and retrieving relevant past interactions to provide continuity and personalization. Technologically, it is a close sibling to RAG, but focused on a temporal stream of data.
3. The Tool Orchestrator: As MCP (Model Context Protocol) enables the connection of hundreds of internal services as tools, a new problem arises: tool selection. The Context Engine solves this with Tool Retrieval. Instead of dumping all tool descriptions into the prompt, it maintains an index of tools and—critically—an index of Skills (best practices on when and how to use tools). For a given task, it retrieves only the most relevant tools and instructions, transforming the LLMs job from "searching a haystack" to "following a recipe."
3. The Tool Orchestrator: As MCP (Model Context Protocol) enables the connection of hundreds of internal services as tools, a new problem arises: tool selection. The Context Engine solves this with Tool Retrieval. Instead of dumping all tool descriptions into the prompt, it maintains an index of tools and—critically—an index of Playbooks or Guidelines (best practices on when and how to use tools). For a given task, it retrieves only the most relevant tools and instructions, transforming the LLMs job from "searching a haystack" to "following a recipe."
## Why do we need a dedicated engine? The case for a unified substrate

View File

@ -3,7 +3,7 @@ sidebar_position: 1
slug: /what-is-rag
---
# What is Retrieval-Augmented-Generation (RAG)?
# What is Retreival-Augmented-Generation (RAG)?
Since large language models (LLMs) became the focus of technology, their ability to handle general knowledge has been astonishing. However, when questions shift to internal corporate documents, proprietary knowledge bases, or real-time data, the limitations of LLMs become glaringly apparent: they cannot access private information outside their training data. Retrieval-Augmented Generation (RAG) was born precisely to address this core need. Before an LLM generates an answer, it first retrieves the most relevant context from an external knowledge base and inputs it as "reference material" to the LLM, thereby guiding it to produce accurate answers. In short, RAG elevates LLMs from "relying on memory" to "having evidence to rely on," significantly improving their accuracy and trustworthiness in specialized fields and real-time information queries.

View File

@ -19,10 +19,6 @@ from mcp.client.streamable_http import streamablehttp_client
async def main():
try:
# To access RAGFlow server in `host` mode, you need to attach `api_key` for each request to indicate identification.
# async with streamablehttp_client("http://localhost:9382/mcp/", headers={"api_key": "ragflow-fixS-TicrohljzFkeLLWIaVhW7XlXPXIUW5solFor6o"}) as (read_stream, write_stream, _):
# Or follow the requirements of OAuth 2.1 Section 5 with Authorization header
# async with streamablehttp_client("http://localhost:9382/mcp/", headers={"Authorization": "Bearer ragflow-fixS-TicrohljzFkeLLWIaVhW7XlXPXIUW5solFor6o"}) as (read_stream, write_stream, _):
async with streamablehttp_client("http://localhost:9382/mcp/") as (read_stream, write_stream, _):
async with ClientSession(read_stream, write_stream) as session:
await session.initialize()

View File

@ -22,18 +22,18 @@ from collections import OrderedDict
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from functools import wraps
from typing import Any
import click
import httpx
import mcp.types as types
from mcp.server.lowlevel import Server
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.responses import JSONResponse, Response
from starlette.routing import Mount, Route
from strenum import StrEnum
import mcp.types as types
from mcp.server.lowlevel import Server
class LaunchMode(StrEnum):
SELF_HOST = "self-host"
@ -68,6 +68,10 @@ class RAGFlowConnector:
self.api_url = f"{self.base_url}/api/{self.version}"
self._async_client = None
def bind_api_key(self, api_key: str):
self.api_key = api_key
self.authorization_header = {"Authorization": f"Bearer {self.api_key}"}
async def _get_client(self):
if self._async_client is None:
self._async_client = httpx.AsyncClient(timeout=httpx.Timeout(60.0))
@ -78,18 +82,16 @@ class RAGFlowConnector:
await self._async_client.aclose()
self._async_client = None
async def _post(self, path, json=None, stream=False, files=None, api_key: str = ""):
if not api_key:
async def _post(self, path, json=None, stream=False, files=None):
if not self.api_key:
return None
client = await self._get_client()
res = await client.post(url=self.api_url + path, json=json, headers={"Authorization": f"Bearer {api_key}"})
res = await client.post(url=self.api_url + path, json=json, headers=self.authorization_header)
return res
async def _get(self, path, params=None, api_key: str = ""):
if not api_key:
return None
async def _get(self, path, params=None):
client = await self._get_client()
res = await client.get(url=self.api_url + path, params=params, headers={"Authorization": f"Bearer {api_key}"})
res = await client.get(url=self.api_url + path, params=params, headers=self.authorization_header)
return res
def _is_cache_valid(self, ts):
@ -127,18 +129,8 @@ class RAGFlowConnector:
self._document_metadata_cache[dataset_id] = (doc_id_meta_list, self._get_expiry_timestamp())
self._document_metadata_cache.move_to_end(dataset_id)
async def list_datasets(
self,
*,
api_key: str,
page: int = 1,
page_size: int = 1000,
orderby: str = "create_time",
desc: bool = True,
id: str | None = None,
name: str | None = None,
):
res = await self._get("/datasets", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name}, api_key=api_key)
async def list_datasets(self, page: int = 1, page_size: int = 1000, orderby: str = "create_time", desc: bool = True, id: str | None = None, name: str | None = None):
res = await self._get("/datasets", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
if not res or res.status_code != 200:
raise Exception([types.TextContent(type="text", text="Cannot process this operation.")])
@ -153,8 +145,6 @@ class RAGFlowConnector:
async def retrieval(
self,
*,
api_key: str,
dataset_ids,
document_ids=None,
question="",
@ -172,7 +162,7 @@ class RAGFlowConnector:
# If no dataset_ids provided or empty list, get all available dataset IDs
if not dataset_ids:
dataset_list_str = await self.list_datasets(api_key=api_key)
dataset_list_str = await self.list_datasets()
dataset_ids = []
# Parse the dataset list to extract IDs
@ -199,7 +189,7 @@ class RAGFlowConnector:
"document_ids": document_ids,
}
# Send a POST request to the backend service (using requests library as an example, actual implementation may vary)
res = await self._post("/retrieval", json=data_json, api_key=api_key)
res = await self._post("/retrieval", json=data_json)
if not res or res.status_code != 200:
raise Exception([types.TextContent(type="text", text="Cannot process this operation.")])
@ -209,7 +199,7 @@ class RAGFlowConnector:
chunks = []
# Cache document metadata and dataset information
document_cache, dataset_cache = await self._get_document_metadata_cache(dataset_ids, api_key=api_key, force_refresh=force_refresh)
document_cache, dataset_cache = await self._get_document_metadata_cache(dataset_ids, force_refresh=force_refresh)
# Process chunks with enhanced field mapping including per-chunk metadata
for chunk_data in data.get("chunks", []):
@ -238,7 +228,7 @@ class RAGFlowConnector:
raise Exception([types.TextContent(type="text", text=res.get("message"))])
async def _get_document_metadata_cache(self, dataset_ids, *, api_key: str, force_refresh=False):
async def _get_document_metadata_cache(self, dataset_ids, force_refresh=False):
"""Cache document metadata for all documents in the specified datasets"""
document_cache = {}
dataset_cache = {}
@ -248,7 +238,7 @@ class RAGFlowConnector:
dataset_meta = None if force_refresh else self._get_cached_dataset_metadata(dataset_id)
if not dataset_meta:
# First get dataset info for name
dataset_res = await self._get("/datasets", {"id": dataset_id, "page_size": 1}, api_key=api_key)
dataset_res = await self._get("/datasets", {"id": dataset_id, "page_size": 1})
if dataset_res and dataset_res.status_code == 200:
dataset_data = dataset_res.json()
if dataset_data.get("code") == 0 and dataset_data.get("data"):
@ -265,9 +255,7 @@ class RAGFlowConnector:
doc_id_meta_list = []
docs = {}
while page:
docs_res = await self._get(f"/datasets/{dataset_id}/documents?page={page}", api_key=api_key)
if not docs_res:
break
docs_res = await self._get(f"/datasets/{dataset_id}/documents?page={page}")
docs_data = docs_res.json()
if docs_data.get("code") == 0 and docs_data.get("data", {}).get("docs"):
for doc in docs_data["data"]["docs"]:
@ -347,59 +335,9 @@ async def sse_lifespan(server: Server) -> AsyncIterator[dict]:
app = Server("ragflow-mcp-server", lifespan=sse_lifespan)
AUTH_TOKEN_STATE_KEY = "ragflow_auth_token"
def _to_text(value: Any) -> str:
if isinstance(value, bytes):
return value.decode(errors="ignore")
return str(value)
def _extract_token_from_headers(headers: Any) -> str | None:
if not headers or not hasattr(headers, "get"):
return None
auth_keys = ("authorization", "Authorization", b"authorization", b"Authorization")
for key in auth_keys:
auth = headers.get(key)
if not auth:
continue
auth_text = _to_text(auth).strip()
if auth_text.lower().startswith("bearer "):
token = auth_text[7:].strip()
if token:
return token
api_key_keys = ("api_key", "x-api-key", "Api-Key", "X-API-Key", b"api_key", b"x-api-key", b"Api-Key", b"X-API-Key")
for key in api_key_keys:
token = headers.get(key)
if token:
token_text = _to_text(token).strip()
if token_text:
return token_text
return None
def _extract_token_from_request(request: Any) -> str | None:
if request is None:
return None
state = getattr(request, "state", None)
if state is not None:
token = getattr(state, AUTH_TOKEN_STATE_KEY, None)
if token:
return token
token = _extract_token_from_headers(getattr(request, "headers", None))
if token and state is not None:
setattr(state, AUTH_TOKEN_STATE_KEY, token)
return token
def with_api_key(required: bool = True):
def with_api_key(required=True):
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
@ -409,14 +347,26 @@ def with_api_key(required: bool = True):
raise ValueError("Get RAGFlow Context failed")
connector = ragflow_ctx.conn
api_key = HOST_API_KEY
if MODE == LaunchMode.HOST:
api_key = _extract_token_from_request(getattr(ctx, "request", None)) or ""
if required and not api_key:
headers = ctx.session._init_options.capabilities.experimental.get("headers", {})
token = None
# lower case here, because of Starlette conversion
auth = headers.get("authorization", "")
if auth.startswith("Bearer "):
token = auth.removeprefix("Bearer ").strip()
elif "api_key" in headers:
token = headers["api_key"]
if required and not token:
raise ValueError("RAGFlow API key or Bearer token is required.")
return await func(*args, connector=connector, api_key=api_key, **kwargs)
connector.bind_api_key(token)
else:
connector.bind_api_key(HOST_API_KEY)
return await func(*args, connector=connector, **kwargs)
return wrapper
@ -425,8 +375,8 @@ def with_api_key(required: bool = True):
@app.list_tools()
@with_api_key(required=True)
async def list_tools(*, connector: RAGFlowConnector, api_key: str) -> list[types.Tool]:
dataset_description = await connector.list_datasets(api_key=api_key)
async def list_tools(*, connector) -> list[types.Tool]:
dataset_description = await connector.list_datasets()
return [
types.Tool(
@ -496,13 +446,7 @@ async def list_tools(*, connector: RAGFlowConnector, api_key: str) -> list[types
@app.call_tool()
@with_api_key(required=True)
async def call_tool(
name: str,
arguments: dict,
*,
connector: RAGFlowConnector,
api_key: str,
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
async def call_tool(name: str, arguments: dict, *, connector) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
if name == "ragflow_retrieval":
document_ids = arguments.get("document_ids", [])
dataset_ids = arguments.get("dataset_ids", [])
@ -518,7 +462,7 @@ async def call_tool(
# If no dataset_ids provided or empty list, get all available dataset IDs
if not dataset_ids:
dataset_list_str = await connector.list_datasets(api_key=api_key)
dataset_list_str = await connector.list_datasets()
dataset_ids = []
# Parse the dataset list to extract IDs
@ -533,7 +477,6 @@ async def call_tool(
continue
return await connector.retrieval(
api_key=api_key,
dataset_ids=dataset_ids,
document_ids=document_ids,
question=question,
@ -567,13 +510,17 @@ def create_starlette_app():
path = scope["path"]
if path.startswith("/messages/") or path.startswith("/sse") or path.startswith("/mcp"):
headers = dict(scope["headers"])
token = _extract_token_from_headers(headers)
token = None
auth_header = headers.get(b"authorization")
if auth_header and auth_header.startswith(b"Bearer "):
token = auth_header.removeprefix(b"Bearer ").strip()
elif b"api_key" in headers:
token = headers[b"api_key"]
if not token:
response = JSONResponse({"error": "Missing or invalid authorization header"}, status_code=401)
await response(scope, receive, send)
return
scope.setdefault("state", {})[AUTH_TOKEN_STATE_KEY] = token
await self.app(scope, receive, send)
@ -600,9 +547,10 @@ def create_starlette_app():
# Add streamable HTTP route if enabled
streamablehttp_lifespan = None
if TRANSPORT_STREAMABLE_HTTP_ENABLED:
from mcp.server.streamable_http_manager import StreamableHTTPSessionManager
from starlette.types import Receive, Scope, Send
from mcp.server.streamable_http_manager import StreamableHTTPSessionManager
session_manager = StreamableHTTPSessionManager(
app=app,
event_store=None,
@ -610,11 +558,8 @@ def create_starlette_app():
stateless=True,
)
class StreamableHTTPEntry:
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await session_manager.handle_request(scope, receive, send)
streamable_http_entry = StreamableHTTPEntry()
async def handle_streamable_http(scope: Scope, receive: Receive, send: Send) -> None:
await session_manager.handle_request(scope, receive, send)
@asynccontextmanager
async def streamablehttp_lifespan(app: Starlette) -> AsyncIterator[None]:
@ -625,12 +570,7 @@ def create_starlette_app():
finally:
logging.info("StreamableHTTP application shutting down...")
routes.extend(
[
Route("/mcp", endpoint=streamable_http_entry, methods=["GET", "POST", "DELETE"]),
Mount("/mcp", app=streamable_http_entry),
]
)
routes.append(Mount("/mcp", app=handle_streamable_http))
return Starlette(
debug=True,
@ -691,6 +631,9 @@ def main(base_url, host, port, mode, api_key, transport_sse_enabled, transport_s
if MODE == LaunchMode.SELF_HOST and not HOST_API_KEY:
raise click.UsageError("--api-key is required when --mode is 'self-host'")
if TRANSPORT_STREAMABLE_HTTP_ENABLED and MODE == LaunchMode.HOST:
raise click.UsageError("The --host mode is not supported with streamable-http transport yet.")
if not TRANSPORT_STREAMABLE_HTTP_ENABLED and JSON_RESPONSE:
JSON_RESPONSE = False
@ -747,7 +690,7 @@ if __name__ == "__main__":
--base-url=http://127.0.0.1:9380 \
--mode=self-host --api-key=ragflow-xxxxx
2. Host mode (multi-tenant, clients must provide Authorization headers):
2. Host mode (multi-tenant, self-host only, clients must provide Authorization headers):
uv run mcp/server/server.py --host=127.0.0.1 --port=9382 \
--base-url=http://127.0.0.1:9380 \
--mode=host

View File

@ -21,7 +21,7 @@ dependencies = [
"cn2an==0.5.22",
"cohere==5.6.2",
"Crawl4AI>=0.4.0,<1.0.0",
"dashscope==1.25.11",
"dashscope==1.20.11",
"deepl==1.18.0",
"demjson3==3.0.6",
"discord-py==2.3.2",

View File

@ -44,7 +44,7 @@ class Excel(ExcelParser):
wb = Excel._load_excel_to_workbook(BytesIO(binary))
total = 0
for sheet_name in wb.sheetnames:
total += Excel._get_actual_row_count(wb[sheet_name])
total += len(list(wb[sheet_name].rows))
res, fails, done = [], [], 0
rn = 0
flow_images = []
@ -66,7 +66,7 @@ class Excel(ExcelParser):
flow_images.append(img)
try:
rows = Excel._get_rows_limited(ws)
rows = list(ws.rows)
except Exception as e:
logging.warning(f"Skip sheet '{sheet_name}' due to rows access error: {e}")
continue

View File

@ -165,8 +165,6 @@ def set_progress(task_id, from_page=0, to_page=-1, prog=None, msg="Processing...
if cancel:
raise TaskCanceledException(msg)
logging.info(f"set_progress({task_id}), progress: {prog}, progress_msg: {msg}")
except TaskCanceledException:
raise
except DoesNotExist:
logging.warning(f"set_progress({task_id}) got exception DoesNotExist")
except Exception as e:
@ -695,8 +693,6 @@ async def run_dataflow(task: dict):
for i, ck in enumerate(chunks):
v = vects[i].tolist()
ck["q_%d_vec" % len(v)] = v
except TaskCanceledException:
raise
except Exception as e:
set_progress(task_id, prog=-1, msg=f"[ERROR]: {e}")
PipelineOperationLogService.create(document_id=doc_id, pipeline_id=dataflow_id,
@ -964,9 +960,8 @@ async def do_handle_task(task):
task_tenant_id = task["tenant_id"]
task_embedding_id = task["embd_id"]
task_language = task["language"]
doc_task_llm_id = task["parser_config"].get("llm_id") or task["llm_id"]
kb_task_llm_id = task['kb_parser_config'].get("llm_id") or task["llm_id"]
task['llm_id'] = kb_task_llm_id
task_llm_id = task["parser_config"].get("llm_id") or task["llm_id"]
task["llm_id"] = task_llm_id
task_dataset_id = task["kb_id"]
task_doc_id = task["doc_id"]
task_document_name = task["name"]
@ -1037,7 +1032,7 @@ async def do_handle_task(task):
return
# bind LLM for raptor
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=kb_task_llm_id, lang=task_language)
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
# run RAPTOR
async with kg_limiter:
chunks, token_count = await run_raptor_for_kb(
@ -1081,7 +1076,7 @@ async def do_handle_task(task):
graphrag_conf = kb_parser_config.get("graphrag", {})
start_ts = timer()
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=kb_task_llm_id, lang=task_language)
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
with_resolution = graphrag_conf.get("resolution", False)
with_community = graphrag_conf.get("community", False)
async with kg_limiter:
@ -1106,7 +1101,6 @@ async def do_handle_task(task):
return
else:
# Standard chunking methods
task['llm_id'] = doc_task_llm_id
start_ts = timer()
chunks = await build_chunks(task, progress_callback)
logging.info("Build document {}: {:.2f}s".format(task_document_name, timer() - start_ts))
@ -1117,8 +1111,6 @@ async def do_handle_task(task):
start_ts = timer()
try:
token_count, vector_size = await embedding(chunks, embedding_model, task_parser_config, progress_callback)
except TaskCanceledException:
raise
except Exception as e:
error_message = "Generate embedding error:{}".format(str(e))
progress_callback(-1, error_message)
@ -1136,17 +1128,13 @@ async def do_handle_task(task):
async def _maybe_insert_chunks(_chunks):
if has_canceled(task_id):
progress_callback(-1, msg="Task has been canceled.")
return False
return True
insert_result = await insert_chunks(task_id, task_tenant_id, task_dataset_id, _chunks, progress_callback)
return bool(insert_result)
try:
if not await _maybe_insert_chunks(chunks):
return
if has_canceled(task_id):
progress_callback(-1, msg="Task has been canceled.")
return
logging.info(
"Indexing doc({}), page({}-{}), chunks({}), elapsed: {:.2f}".format(
@ -1215,12 +1203,6 @@ async def handle_task():
DONE_TASKS += 1
CURRENT_TASKS.pop(task_id, None)
logging.info(f"handle_task done for task {json.dumps(task)}")
except TaskCanceledException as e:
DONE_TASKS += 1
CURRENT_TASKS.pop(task_id, None)
logging.info(
f"handle_task canceled for task {task_id}: {getattr(e, 'msg', str(e))}"
)
except Exception as e:
FAILED_TASKS += 1
CURRENT_TASKS.pop(task_id, None)

8
uv.lock generated
View File

@ -1557,17 +1557,15 @@ wheels = [
[[package]]
name = "dashscope"
version = "1.25.11"
version = "1.20.11"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "certifi" },
{ name = "cryptography" },
{ name = "requests" },
{ name = "websocket-client" },
]
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/30/15/35551e6c6d3ea19df754ed32aa5f281b2052ef9e1ff1538f2708f74f3312/dashscope-1.25.11-py3-none-any.whl", hash = "sha256:93e86437f5f30e759e98292f0490e44eff00c337968363f27d29dd42ec7cc07c", size = 1342054, upload-time = "2026-02-03T02:49:48.711Z" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/25/21/0ddfa1aae7f45b3039d10d61ede77dedfc70d24ff946e7d0ecb92e9a2c85/dashscope-1.20.11-py3-none-any.whl", hash = "sha256:7367802c5ae136c6c1f4f8a16f9aba628e97adefae8afdebce6bbf518d0065d1", size = 1264221, upload-time = "2024-10-14T05:30:25.083Z" },
]
[[package]]
@ -6273,7 +6271,7 @@ requires-dist = [
{ name = "cn2an", specifier = "==0.5.22" },
{ name = "cohere", specifier = "==5.6.2" },
{ name = "crawl4ai", specifier = ">=0.4.0,<1.0.0" },
{ name = "dashscope", specifier = "==1.25.11" },
{ name = "dashscope", specifier = "==1.20.11" },
{ name = "deepl", specifier = "==1.18.0" },
{ name = "demjson3", specifier = "==3.0.6" },
{ name = "discord-py", specifier = "==2.3.2" },

View File

@ -73,7 +73,6 @@ if (process.env.NODE_ENV === 'development') {
trackAllPureComponents: true,
trackExtraHooks: [],
logOnDifferentValues: true,
exclude: [/^RouterProvider$/],
});
},
);
@ -151,13 +150,6 @@ const RootProvider = ({ children }: React.PropsWithChildren) => {
);
};
const RouterProviderWrapper: React.FC<{ router: typeof routers }> = ({
router,
}) => {
return <RouterProvider router={router}></RouterProvider>;
};
RouterProviderWrapper.whyDidYouRender = false;
export default function AppContainer() {
// const [router, setRouter] = useState<any>(null);
@ -171,7 +163,8 @@ export default function AppContainer() {
return (
<RootProvider>
<RouterProviderWrapper router={routers} />
<RouterProvider router={routers}></RouterProvider>
{/* <RouterProvider router={router}></RouterProvider> */}
</RootProvider>
);
}

View File

@ -101,6 +101,7 @@ export const RAGFlowAvatar = memo(
}}
className={cn(
'bg-gradient-to-b',
`from-[${from}] to-[${to}]`,
'flex items-center justify-center',
'text-white ',
{ 'rounded-md': !isPerson },

View File

@ -4,7 +4,6 @@ import * as DialogPrimitive from '@radix-ui/react-dialog';
import { Loader, X } from 'lucide-react';
import { FC, ReactNode, useCallback, useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { DialogDescription } from '../dialog';
import { createPortalModal } from './modal-manage';
export interface ModalProps {
@ -185,7 +184,6 @@ const Modal: ModalType = ({
style={style}
onClick={(e) => e.stopPropagation()}
>
<DialogDescription></DialogDescription>
{/* title */}
{title && (
<div

View File

@ -3,7 +3,6 @@ import { ButtonLoading } from '@/components/ui/button';
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
@ -156,20 +155,10 @@ export function DatasetCreatingDialog({
return (
<Dialog open onOpenChange={hideModal}>
<DialogContent
className="sm:max-w-[425px] focus-visible:!outline-none flex flex-col"
onKeyDown={(e) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
const form = document.getElementById(FormId) as HTMLFormElement;
form?.requestSubmit();
}
}}
>
<DialogContent className="sm:max-w-[425px] focus-visible:!outline-none flex flex-col">
<DialogHeader>
<DialogTitle>{t('knowledgeList.createKnowledgeBase')}</DialogTitle>
</DialogHeader>
<DialogDescription></DialogDescription>
<InputForm onOk={onOk}></InputForm>
<DialogFooter>
<ButtonLoading type="submit" form={FormId} loading={loading}>

View File

@ -1,4 +1,4 @@
import { lazy, Suspense } from 'react';
import { lazy } from 'react';
import { createBrowserRouter, Navigate, type RouteObject } from 'react-router';
import FallbackComponent from './components/fallback-component';
import { IS_ENTERPRISE } from './pages/admin/utils';
@ -66,253 +66,252 @@ export enum Routes {
AdminMonitoring = `${Admin}/monitoring`,
}
const defaultRouteFallback = (
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/30 backdrop-blur-[1px]">
<div className="h-8 w-8 animate-spin rounded-full border-2 border-white/70 border-t-transparent" />
</div>
);
type LazyRouteConfig = Omit<RouteObject, 'Component' | 'children'> & {
Component?: () => Promise<{ default: React.ComponentType<any> }>;
children?: LazyRouteConfig[];
};
const withLazyRoute = (
importer: () => Promise<{ default: React.ComponentType<any> }>,
fallback: React.ReactNode = defaultRouteFallback,
) => {
const LazyComponent = lazy(importer);
const Wrapped: React.FC<any> = (props) => (
<Suspense fallback={fallback}>
<LazyComponent {...props} />
</Suspense>
);
Wrapped.displayName = `LazyRoute(${
(LazyComponent as unknown as React.ComponentType<any>).displayName ||
LazyComponent.name ||
'Component'
})`;
return Wrapped;
};
const routeConfigOptions = [
const routeConfig = [
{
path: '/login',
Component: () => import('@/pages/login-next'),
Component: lazy(() => import('@/pages/login-next')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: '/login-next',
Component: () => import('@/pages/login-next'),
Component: lazy(() => import('@/pages/login-next')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.ChatShare,
Component: () => import('@/pages/next-chats/share'),
Component: lazy(() => import('@/pages/next-chats/share')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.AgentShare,
Component: () => import('@/pages/agent/share'),
Component: lazy(() => import('@/pages/agent/share')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.ChatWidget,
Component: () => import('@/pages/next-chats/widget'),
Component: lazy(() => import('@/pages/next-chats/widget')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.AgentList,
Component: () => import('@/pages/agents'),
Component: lazy(() => import('@/pages/agents')),
errorElement: <FallbackComponent />,
},
{
path: '/document/:id',
Component: () => import('@/pages/document-viewer'),
Component: lazy(() => import('@/pages/document-viewer')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: '/*',
Component: () => import('@/pages/404'),
Component: lazy(() => import('@/pages/404')),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.Root,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
wrappers: ['@/wrappers/auth'],
children: [
{
path: Routes.Root,
Component: () => import('@/pages/home'),
Component: lazy(() => import('@/pages/home')),
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.Datasets,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Datasets,
Component: () => import('@/pages/datasets'),
Component: lazy(() => import('@/pages/datasets')),
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.Chats,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Chats,
Component: () => import('@/pages/next-chats'),
Component: lazy(() => import('@/pages/next-chats')),
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.Chat + '/:id',
layout: false,
Component: () => import('@/pages/next-chats/chat'),
Component: lazy(() => import('@/pages/next-chats/chat')),
errorElement: <FallbackComponent />,
},
{
path: Routes.Searches,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Searches,
Component: () => import('@/pages/next-searches'),
Component: lazy(() => import('@/pages/next-searches')),
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.Memories,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Memories,
Component: () => import('@/pages/memories'),
Component: lazy(() => import('@/pages/memories')),
},
],
errorElement: <FallbackComponent />,
},
{
path: `${Routes.Memory}`,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: `${Routes.Memory}`,
layout: false,
Component: () => import('@/pages/memory'),
Component: lazy(() => import('@/pages/memory')),
children: [
{
path: `${Routes.Memory}/${Routes.MemoryMessage}/:id`,
Component: () => import('@/pages/memory/memory-message'),
Component: lazy(() => import('@/pages/memory/memory-message')),
},
{
path: `${Routes.Memory}/${Routes.MemorySetting}/:id`,
Component: () => import('@/pages/memory/memory-setting'),
Component: lazy(() => import('@/pages/memory/memory-setting')),
},
],
},
],
errorElement: <FallbackComponent />,
},
{
path: `${Routes.Search}/:id`,
layout: false,
Component: () => import('@/pages/next-search'),
Component: lazy(() => import('@/pages/next-search')),
errorElement: <FallbackComponent />,
},
{
path: `${Routes.SearchShare}`,
layout: false,
Component: () => import('@/pages/next-search/share'),
Component: lazy(() => import('@/pages/next-search/share')),
errorElement: <FallbackComponent />,
},
{
path: Routes.Agents,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Agents,
Component: () => import('@/pages/agents'),
Component: lazy(() => import('@/pages/agents')),
},
],
errorElement: <FallbackComponent />,
},
{
path: `${Routes.AgentLogPage}/:id`,
layout: false,
Component: () => import('@/pages/agents/agent-log-page'),
Component: lazy(() => import('@/pages/agents/agent-log-page')),
errorElement: <FallbackComponent />,
},
{
path: `${Routes.Agent}/:id`,
layout: false,
Component: () => import('@/pages/agent'),
Component: lazy(() => import('@/pages/agent')),
errorElement: <FallbackComponent />,
},
{
path: Routes.AgentTemplates,
layout: false,
Component: () => import('@/pages/agents/agent-templates'),
Component: lazy(() => import('@/pages/agents/agent-templates')),
errorElement: <FallbackComponent />,
},
{
path: Routes.Files,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.Files,
Component: () => import('@/pages/files'),
Component: lazy(() => import('@/pages/files')),
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.DatasetBase,
layout: false,
Component: () => import('@/layouts/next'),
Component: lazy(() => import('@/layouts/next')),
children: [
{
path: Routes.DatasetBase,
element: <Navigate to={Routes.Dataset} replace />,
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.DatasetBase,
layout: false,
Component: () => import('@/pages/dataset'),
Component: lazy(() => import('@/pages/dataset')),
children: [
{
path: `${Routes.Dataset}/:id`,
Component: () => import('@/pages/dataset/dataset'),
Component: lazy(() => import('@/pages/dataset/dataset')),
},
{
path: `${Routes.DatasetBase}${Routes.DatasetTesting}/:id`,
Component: () => import('@/pages/dataset/testing'),
Component: lazy(() => import('@/pages/dataset/testing')),
},
{
path: `${Routes.DatasetBase}${Routes.KnowledgeGraph}/:id`,
Component: () => import('@/pages/dataset/knowledge-graph'),
Component: lazy(() => import('@/pages/dataset/knowledge-graph')),
},
{
path: `${Routes.DatasetBase}${Routes.DataSetOverview}/:id`,
Component: () => import('@/pages/dataset/dataset-overview'),
Component: lazy(() => import('@/pages/dataset/dataset-overview')),
},
{
path: `${Routes.DatasetBase}${Routes.DataSetSetting}/:id`,
Component: () => import('@/pages/dataset/dataset-setting'),
Component: lazy(() => import('@/pages/dataset/dataset-setting')),
},
],
errorElement: <FallbackComponent />,
},
{
path: `${Routes.DataflowResult}`,
layout: false,
Component: () => import('@/pages/dataflow-result'),
Component: lazy(() => import('@/pages/dataflow-result')),
errorElement: <FallbackComponent />,
},
{
path: `${Routes.ParsedResult}/chunks`,
layout: false,
Component: () =>
import('@/pages/chunk/parsed-result/add-knowledge/components/knowledge-chunk'),
Component: lazy(
() =>
import('@/pages/chunk/parsed-result/add-knowledge/components/knowledge-chunk'),
),
errorElement: <FallbackComponent />,
},
{
path: Routes.Chunk,
@ -320,28 +319,30 @@ const routeConfigOptions = [
children: [
{
path: Routes.Chunk,
Component: () => import('@/pages/chunk'),
Component: lazy(() => import('@/pages/chunk')),
children: [
{
path: `${Routes.ChunkResult}/:id`,
Component: () => import('@/pages/chunk/chunk-result'),
Component: lazy(() => import('@/pages/chunk/chunk-result')),
},
{
path: `${Routes.ResultView}/:id`,
Component: () => import('@/pages/chunk/result-view'),
Component: lazy(() => import('@/pages/chunk/result-view')),
},
],
},
],
errorElement: <FallbackComponent />,
},
{
path: Routes.Chunk,
layout: false,
Component: () => import('@/pages/chunk'),
Component: lazy(() => import('@/pages/chunk')),
errorElement: <FallbackComponent />,
},
{
path: '/user-setting',
Component: () => import('@/pages/user-setting'),
Component: lazy(() => import('@/pages/user-setting')),
layout: false,
children: [
{
@ -350,87 +351,92 @@ const routeConfigOptions = [
},
{
path: '/user-setting/profile',
Component: () => import('@/pages/user-setting/profile'),
Component: lazy(() => import('@/pages/user-setting/profile')),
},
{
path: '/user-setting/locale',
Component: () => import('@/pages/user-setting/setting-locale'),
Component: lazy(() => import('@/pages/user-setting/setting-locale')),
},
{
path: '/user-setting/model',
Component: () => import('@/pages/user-setting/setting-model'),
Component: lazy(() => import('@/pages/user-setting/setting-model')),
},
{
path: '/user-setting/team',
Component: () => import('@/pages/user-setting/setting-team'),
Component: lazy(() => import('@/pages/user-setting/setting-team')),
},
{
path: `/user-setting${Routes.Api}`,
Component: () => import('@/pages/user-setting/setting-api'),
Component: lazy(() => import('@/pages/user-setting/setting-api')),
},
{
path: `/user-setting${Routes.Mcp}`,
Component: () => import('@/pages/user-setting/mcp'),
Component: lazy(() => import('@/pages/user-setting/mcp')),
},
{
path: `/user-setting${Routes.DataSource}`,
Component: () => import('@/pages/user-setting/data-source'),
Component: lazy(() => import('@/pages/user-setting/data-source')),
},
],
errorElement: <FallbackComponent />,
},
{
path: `/user-setting${Routes.DataSource}${Routes.DataSourceDetailPage}`,
Component: () =>
import('@/pages/user-setting/data-source/data-source-detail-page'),
Component: lazy(
() => import('@/pages/user-setting/data-source/data-source-detail-page'),
),
layout: false,
errorElement: <FallbackComponent />,
},
{
path: Routes.Admin,
Component: () => import('@/pages/admin/layouts/root-layout'),
Component: lazy(() => import('@/pages/admin/layouts/root-layout')),
errorElement: <FallbackComponent />,
children: [
{
path: Routes.Admin,
Component: () => import('@/pages/admin/login'),
Component: lazy(() => import('@/pages/admin/login')),
},
{
path: Routes.Admin,
Component: () => import('@/pages/admin/layouts/authorized-layout'),
Component: lazy(
() => import('@/pages/admin/layouts/authorized-layout'),
),
children: [
{
path: `${Routes.AdminUserManagement}/:id`,
Component: () => import('@/pages/admin/user-detail'),
Component: lazy(() => import('@/pages/admin/user-detail')),
},
{
Component: () => import('@/pages/admin/layouts/navigation-layout'),
Component: lazy(
() => import('@/pages/admin/layouts/navigation-layout'),
),
children: [
{
path: Routes.AdminServices,
Component: () => import('@/pages/admin/service-status'),
Component: lazy(() => import('@/pages/admin/service-status')),
},
{
path: Routes.AdminUserManagement,
Component: () => import('@/pages/admin/users'),
Component: lazy(() => import('@/pages/admin/users')),
},
{
path: Routes.AdminSandboxSettings,
Component: () => import('@/pages/admin/sandbox-settings'),
Component: lazy(() => import('@/pages/admin/sandbox-settings')),
},
...(IS_ENTERPRISE
? [
{
path: Routes.AdminWhitelist,
Component: () => import('@/pages/admin/whitelist'),
Component: lazy(() => import('@/pages/admin/whitelist')),
},
{
path: Routes.AdminRoles,
Component: () => import('@/pages/admin/roles'),
Component: lazy(() => import('@/pages/admin/roles')),
},
{
path: Routes.AdminMonitoring,
Component: () => import('@/pages/admin/monitoring'),
Component: lazy(() => import('@/pages/admin/monitoring')),
},
]
: []),
@ -439,24 +445,9 @@ const routeConfigOptions = [
],
},
],
} satisfies LazyRouteConfig,
} satisfies RouteObject,
];
const wrapRoutes = (routes: LazyRouteConfig[]): RouteObject[] =>
routes.map((item) => {
const { Component, children, ...rest } = item;
const next: RouteObject = { ...rest, errorElement: <FallbackComponent /> };
if (Component) {
next.Component = withLazyRoute(Component);
}
if (children) {
next.children = wrapRoutes(children);
}
return next;
});
const routeConfig = wrapRoutes(routeConfigOptions);
const routers = createBrowserRouter(routeConfig, {
basename: import.meta.env.VITE_BASE_URL || '/',
});

View File

@ -101,12 +101,6 @@ export default defineConfig(({ mode, command }) => {
experimentalMinChunkSize: 30 * 1024,
chunkSizeWarningLimit: 1000,
rollupOptions: {
onwarn(warning, warn) {
if (warning.code === 'EMPTY_BUNDLE') {
return;
}
warn(warning);
},
output: {
manualChunks(id) {
// if (id.includes('src/components')) {