Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-01-04 03:25:30 +08:00)

Compare commits: 58836d84fe...v0.21.0 (46 commits)
Commits (SHA1):
fdac4afd10, 769d701f56, 8b512cdadf, 3ae126836a, e8bfda6020, 34c54cd459, 3d873d98fb, fbe25b5add,
0c6c7c8fe7, e266f9a66f, fde6e5ab39, 67529825e2, 738a7d5c24, 83ec915d51, e535099f36, 16b5feadb7,
960f47c4d4, 51139de178, 1f5167f1ca, 578ea34b3e, 5fb3d2f55c, d99d1e3518, 5b387b68ba, f92a45dcc4,
c4b8e4845c, 87659dcd3a, 6fd9508017, 113851a692, 66c69d10fe, 781d49cd0e, aaae938f54, 9e73f799b2,
21a62130c8, 68e47c81d4, f11d8af936, 74ec734d69, 8c75803b70, ff4239c7cf, cf5867b146, 77481ab3ab,
9c53b3336a, 24481f0332, 4e6b84bb41, 65c3f0406c, 7fb8b30cc2, acca3640f7
.github/workflows/release.yml (vendored, 14 changed lines)

@@ -120,3 +120,17 @@ jobs:
          packages-dir: sdk/python/dist/
          password: ${{ secrets.PYPI_API_TOKEN }}
          verbose: true

      - name: Build ragflow-cli
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          cd admin/client && \
          uv build

      - name: Publish client package distributions to PyPI
        if: startsWith(github.ref, 'refs/tags/v')
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: admin/client/dist/
          password: ${{ secrets.PYPI_API_TOKEN }}
          verbose: true
.gitignore (vendored, 2 changed lines)

@@ -149,7 +149,7 @@ out
# Nuxt.js build / generate output
.nuxt
dist

ragflow_cli.egg-info
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js

Dockerfile

@@ -191,6 +191,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
ENV PYTHONPATH=/ragflow/

COPY web web
COPY admin admin
COPY api api
COPY conf conf
COPY deepdoc deepdoc
README.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="520" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -84,8 +84,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-10-15 Supports orchestrable ingestion pipeline.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JavaScript code executor component to Agent.
- 2025-05-05 Supports cross-language query.

@@ -187,7 +187,7 @@ releases! 🌟
> All Docker images are built for x86 platforms. We don't currently offer Docker images for ARM64.
> If you are on an ARM64 platform, follow [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.

> The command below downloads the `v0.20.5-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.20.5-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5` for the full edition `v0.20.5`.
> The command below downloads the `v0.21.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.21.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0` for the full edition `v0.21.0`.

```bash
$ cd ragflow/docker

@@ -200,8 +200,8 @@ releases! 🌟

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
|-------------------|-----------------|-----------------------|--------------------------|
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
README_id.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="520" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Online Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Latest Release">

@@ -80,8 +80,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-10-15 Supports orchestrated data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflows and MCP.
- 2025-05-23 Adds a Python/JS code executor component to the Agent.
- 2025-05-05 Supports cross-language queries.

@@ -181,7 +181,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
> All Docker images are built for x86 platforms. We do not currently offer Docker images for ARM64.
> If you are on an ARM64 platform, [please use this guide to build a Docker image compatible with your system](https://ragflow.io/docs/dev/build_docker_image).

> The command below downloads the v0.20.5-slim edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.20.5-slim, update the RAGFLOW_IMAGE variable in docker/.env before using docker compose to start the server. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5 for the full v0.20.5 edition.
> The command below downloads the v0.21.0-slim edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.21.0-slim, update the RAGFLOW_IMAGE variable in docker/.env before using docker compose to start the server. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0 for the full v0.21.0 edition.

```bash
$ cd ragflow/docker

@@ -194,8 +194,8 @@ $ docker compose -f docker-compose.yml up -d

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
README_ja.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="350" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -60,8 +60,8 @@

## 🔥 Latest Updates

- 2025-10-15 Supports orchestrated data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflows and MCP.
- 2025-05-23 Adds a Python/JS code executor component to the Agent.
- 2025-05-05 Supports cross-language queries.

@@ -160,7 +160,7 @@
> All officially provided Docker images are currently built for the x86 architecture; no ARM64 Docker images are offered.
> If you are on an ARM64 operating system, build the Docker image yourself by following [this document](https://ragflow.io/docs/dev/build_docker_image).

> The command below downloads the v0.20.5-slim edition of the RAGFlow Docker image. See the table below for descriptions of the different RAGFlow editions. To download an edition other than v0.20.5-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5 to download the full v0.20.5 edition.
> The command below downloads the v0.21.0-slim edition of the RAGFlow Docker image. See the table below for descriptions of the different RAGFlow editions. To download an edition other than v0.21.0-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0 to download the full v0.21.0 edition.

```bash
$ cd ragflow/docker

@@ -173,8 +173,8 @@

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
README_ko.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="520" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -60,8 +60,8 @@

## 🔥 Updates

- 2025-10-15 Supports orchestrated data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflows and MCP.
- 2025-05-23 Adds a Python/JS code executor component to the Agent.
- 2025-05-05 Supports cross-language queries.

@@ -160,7 +160,7 @@
> All Docker images are built for the x86 platform. We do not currently offer Docker images for ARM64.
> If you are on an ARM64 platform, [please use this guide to build a Docker image compatible with your system](https://ragflow.io/docs/dev/build_docker_image).

> The command below downloads the v0.20.5-slim edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.20.5-slim, update the RAGFLOW_IMAGE variable in docker/.env accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5 to download the full v0.20.5 edition.
> The command below downloads the v0.21.0-slim edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.21.0-slim, update the RAGFLOW_IMAGE variable in docker/.env accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0 to download the full v0.21.0 edition.

```bash
$ cd ragflow/docker

@@ -173,8 +173,8 @@

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
README_pt_br.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="520" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="520" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Latest Release">

@@ -80,8 +80,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-10-15 Support for orchestrated data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to the Agent.
- 2025-05-05 Supports cross-language queries.

@@ -180,7 +180,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
> All Docker images are built for x86 platforms. We do not currently offer Docker images for ARM64.
> If you are on an ARM64 platform, please use [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.

> The command below downloads the `v0.20.5-slim` edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than `v0.20.5-slim`, update the `RAGFLOW_IMAGE` variable as needed in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5` for the full `v0.20.5` edition.
> The command below downloads the `v0.21.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than `v0.21.0-slim`, update the `RAGFLOW_IMAGE` variable as needed in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0` for the full `v0.21.0` edition.

```bash
$ cd ragflow/docker

@@ -193,8 +193,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ~9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ~2              | ❌                    | Stable release           |
| v0.21.0           | ~9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ~2              | ❌                    | Stable release           |
| nightly           | ~9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ~2              | ❌                    | _Unstable_ nightly build |
README_tzh.md (12 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="350" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -83,8 +83,8 @@

## 🔥 Latest Updates

- 2025-10-15 Supports orchestrable data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Supports models such as Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.

@@ -183,7 +183,7 @@
> All Docker images are built for the x86 platform. We do not currently offer Docker images for ARM64.
> If you are on an ARM64 platform, please use [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image suited to your system.

> Running the following command automatically downloads the RAGFlow slim Docker image `v0.20.5-slim`. See the table below for descriptions of the different Docker editions. To download an image other than `v0.20.5-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before starting the services with `docker compose`. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5` to download the full `v0.20.5` edition of the RAGFlow image.
> Running the following command automatically downloads the RAGFlow slim Docker image `v0.21.0-slim`. See the table below for descriptions of the different Docker editions. To download an image other than `v0.21.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before starting the services with `docker compose`. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0` to download the full `v0.21.0` edition of the RAGFlow image.

```bash
$ cd ragflow/docker

@@ -196,8 +196,8 @@

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
README_zh.md (14 changed lines)

@@ -1,6 +1,6 @@
<div align="center">
<a href="https://demo.ragflow.io/">
<img src="web/src/assets/logo-with-text.png" width="350" alt="ragflow logo">
<img src="web/src/assets/logo-with-text.svg" width="350" alt="ragflow logo">
</a>
</div>

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.20.5">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -83,8 +83,8 @@

## 🔥 Latest Updates

- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-04 Adds support for models including Kimi K2 and Grok 4.
- 2025-10-15 Supports orchestrable data pipelines.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.

@@ -183,7 +183,7 @@
> Note that all officially provided Docker images are built for the x86 architecture; no ARM64-based Docker images are provided.
> If your operating system uses the ARM64 architecture, refer to [this document](https://ragflow.io/docs/dev/build_docker_image) to build the Docker image yourself.

> Running the following command automatically downloads the RAGFlow slim Docker image `v0.20.5-slim`. See the table below for descriptions of the different Docker editions. To download an image other than `v0.20.5-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before starting the services with `docker compose`. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5` to download the full `v0.20.5` edition of the RAGFlow image.
> Running the following command automatically downloads the RAGFlow slim Docker image `v0.21.0-slim`. See the table below for descriptions of the different Docker editions. To download an image other than `v0.21.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before starting the services with `docker compose`. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0` to download the full `v0.21.0` edition of the RAGFlow image.

```bash
$ cd ragflow/docker

@@ -196,8 +196,8 @@

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable?                  |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.20.5           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.20.5-slim      | ≈2              | ❌                    | Stable release           |
| v0.21.0           | ≈9              | :heavy_check_mark:    | Stable release           |
| v0.21.0-slim      | ≈2              | ❌                    | Stable release           |
| nightly           | ≈9              | :heavy_check_mark:    | _Unstable_ nightly build |
| nightly-slim      | ≈2              | ❌                    | _Unstable_ nightly build |
admin/build_cli_release.sh (new executable file, 47 lines)

@@ -0,0 +1,47 @@
#!/bin/bash

set -e

echo "🚀 Start building..."
echo "================================"

PROJECT_NAME="ragflow-cli"

RELEASE_DIR="release"
BUILD_DIR="dist"
SOURCE_DIR="src"
PACKAGE_DIR="ragflow_cli"

echo "🧹 Clean old build folder..."
rm -rf release/

echo "📁 Prepare source code..."
mkdir release/$PROJECT_NAME/$SOURCE_DIR -p
cp pyproject.toml release/$PROJECT_NAME/pyproject.toml
cp README.md release/$PROJECT_NAME/README.md

mkdir release/$PROJECT_NAME/$SOURCE_DIR/$PACKAGE_DIR -p
cp admin_client.py release/$PROJECT_NAME/$SOURCE_DIR/$PACKAGE_DIR/admin_client.py

if [ -d "release/$PROJECT_NAME/$SOURCE_DIR" ]; then
    echo "✅ source dir: release/$PROJECT_NAME/$SOURCE_DIR"
else
    echo "❌ source dir not exist: release/$PROJECT_NAME/$SOURCE_DIR"
    exit 1
fi

echo "🔨 Make build file..."
cd release/$PROJECT_NAME
export PYTHONPATH=$(pwd)
python -m build

echo "✅ check build result..."
if [ -d "$BUILD_DIR" ]; then
    echo "📦 Package generated:"
    ls -la $BUILD_DIR/
else
    echo "❌ Build Failed: $BUILD_DIR not exist."
    exit 1
fi

echo "🎉 Build finished successfully!"
admin/README.md

@@ -15,22 +15,48 @@ It consists of a server-side Service and a command-line client (CLI), both imple
- **Admin Service**: A backend service that interfaces with the RAGFlow system to execute administrative operations and monitor its status.
- **Admin CLI**: A command-line interface that allows users to connect to the Admin Service and issue commands for system management.

### Starting the Admin Service

1. Before starting the Admin Service, make sure the RAGFlow system is already running.

#### Launching from source code

1. Before starting the Admin Service, make sure the RAGFlow system is already running.

2. Launch from source code:

```bash
python admin/server/admin_server.py
```

The service will start and listen for incoming connections from the CLI on the configured port.

#### Using the Docker image

1. Before startup, configure the `docker-compose.yml` file to enable the admin server:

```bash
command:
  - --enable-adminserver
```

2. Start the containers; the service will start and listen for incoming connections from the CLI on the configured port.

2. Run the service script:
```bash
python admin/admin_server.py
```
The service will start and listen for incoming connections from the CLI on the configured port.

### Using the Admin CLI

1. Ensure the Admin Service is running.
2. Launch the CLI client:
2. Install ragflow-cli:
```bash
python admin/admin_client.py -h 0.0.0.0 -p 9381
pip install ragflow-cli
```
3. Launch the CLI client:
```bash
ragflow-cli -h 0.0.0.0 -p 9381
```
Enter the superuser's password to log in. The default password is `admin`.

## Supported Commands

@@ -42,12 +68,7 @@ Commands are case-insensitive and must be terminated with a semicolon (`;`).
  - Lists all available services within the RAGFlow system.
- `SHOW SERVICE <id>;`
  - Shows detailed status information for the service identified by `<id>`.
- `STARTUP SERVICE <id>;`
  - Attempts to start the service identified by `<id>`.
- `SHUTDOWN SERVICE <id>;`
  - Attempts to gracefully shut down the service identified by `<id>`.
- `RESTART SERVICE <id>;`
  - Attempts to restart the service identified by `<id>`.

### User Management Commands

@@ -55,10 +76,17 @@ Commands are case-insensitive and must be terminated with a semicolon (`;`).
  - Lists all users known to the system.
- `SHOW USER '<username>';`
  - Shows details and permissions for the specified user. The username must be enclosed in single or double quotes.

- `CREATE USER <username> <password>;`
  - Creates a user with the given username and password. The username and password must be enclosed in single or double quotes.

- `DROP USER '<username>';`
  - Removes the specified user from the system. Use with caution.
- `ALTER USER PASSWORD '<username>' '<new_password>';`
  - Changes the password for the specified user.
- `ALTER USER ACTIVE <username> <on/off>;`
  - Switches the user between active and inactive.

### Data and Agent Commands
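The command syntax above (case-insensitive keywords terminated by a semicolon) is parsed in the client below with a Lark LALR grammar. A minimal sketch of that parsing style; this toy grammar is invented for illustration and is far smaller than the client's real `GRAMMAR`:

```python
# A toy Lark grammar in the spirit of the Admin CLI's parser (illustrative
# only; the real GRAMMAR covers many more commands and quoting rules).
from lark import Lark

TOY_GRAMMAR = r"""
start: command ";"
command: "list"i "services"i      -> list_services
       | "show"i "service"i INT  -> show_service
%import common.INT
%import common.WS
%ignore WS
"""

parser = Lark(TOY_GRAMMAR, parser="lalr")
print(parser.parse("LIST SERVICES;").children[0].data)   # list_services
print(parser.parse("show service 3;").children[0].data)  # show_service
```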
admin_client.py (the Admin CLI client)

@@ -16,14 +16,14 @@
import argparse
import base64
from cmd import Cmd

from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
from typing import Dict, List, Any
from lark import Lark, Transformer, Tree
from lark import Lark, Transformer, Tree, Token
import requests
from requests.auth import HTTPBasicAuth
from api.common.base64 import encode_to_base64

GRAMMAR = r"""
start: command

@@ -192,12 +192,59 @@ def encrypt(input_string):
    return base64.b64encode(cipher_text).decode("utf-8")


class AdminCommandParser:
def encode_to_base64(input_string):
    base64_encoded = base64.b64encode(input_string.encode('utf-8'))
    return base64_encoded.decode('utf-8')


class AdminCLI(Cmd):
    def __init__(self):
        super().__init__()
        self.parser = Lark(GRAMMAR, start='start', parser='lalr', transformer=AdminTransformer())
        self.command_history = []
        self.is_interactive = False
        self.admin_account = "admin@ragflow.io"
        self.admin_password: str = "admin"
        self.host: str = ""
        self.port: int = 0

    def parse_command(self, command_str: str) -> Dict[str, Any]:
    intro = r"""Type "\h" for help."""
    prompt = "admin> "

    def onecmd(self, command: str) -> bool:
        try:
            # print(f"command: {command}")
            result = self.parse_command(command)

            # if 'type' in result and result.get('type') == 'empty':
            #     return False

            if isinstance(result, dict):
                if 'type' in result and result.get('type') == 'empty':
                    return False

            self.execute_command(result)

            if isinstance(result, Tree):
                return False

            if result.get('type') == 'meta' and result.get('command') in ['q', 'quit', 'exit']:
                return True

        except KeyboardInterrupt:
            print("\nUse '\\q' to quit")
        except EOFError:
            print("\nGoodbye!")
            return True
        return False

    def emptyline(self) -> bool:
        return False

    def default(self, line: str) -> bool:
        return self.onecmd(line)

    def parse_command(self, command_str: str) -> dict[str, str] | Tree[Token]:
        if not command_str.strip():
            return {'type': 'empty'}

@@ -209,16 +256,6 @@ class AdminCommandParser:
        except Exception as e:
            return {'type': 'error', 'message': f'Parse error: {str(e)}'}


class AdminCLI:
    def __init__(self):
        self.parser = AdminCommandParser()
        self.is_interactive = False
        self.admin_account = "admin@ragflow.io"
        self.admin_password: str = "admin"
        self.host: str = ""
        self.port: int = 0

    def verify_admin(self, args):

        conn_info = self._parse_connection_args(args)

@@ -267,10 +304,25 @@ class AdminCLI:
        columns = list(data[0].keys())
        col_widths = {}

        def get_string_width(text):
            half_width_chars = (
                " !\"#$%&'()*+,-./0123456789:;<=>?@"
                "ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`"
                "abcdefghijklmnopqrstuvwxyz{|}~"
                "\t\n\r"
            )
            width = 0
            for char in text:
                if char in half_width_chars:
                    width += 1
                else:
                    width += 2
            return width

        for col in columns:
            max_width = len(str(col))
            max_width = get_string_width(str(col))
            for item in data:
                value_len = len(str(item.get(col, '')))
                value_len = get_string_width(str(item.get(col, '')))
                if value_len > max_width:
                    max_width = value_len
            col_widths[col] = max(2, max_width)

@@ -308,7 +360,7 @@ class AdminCLI:
                continue

            print(f"command: {command}")
            result = self.parser.parse_command(command)
            result = self.parse_command(command)
            self.execute_command(result)

            if isinstance(result, Tree):

@@ -595,10 +647,17 @@ def main():
    /_/ |_/_/ |_\____/_/ /_/\____/|__/|__/ /_/ |_\__,_/_/ /_/ /_/_/_/ /_/
    """)
    if cli.verify_admin(sys.argv):
        cli.run_interactive()
        cli.cmdloop()
    else:
        print(r"""
    ____ ___ ______________ ___ __ _
    / __ \/ | / ____/ ____/ /___ _ __ / | ____/ /___ ___ (_)___
    / /_/ / /| |/ / __/ /_ / / __ \ | /| / / / /| |/ __ / __ `__ \/ / __ \
    / _, _/ ___ / /_/ / __/ / / /_/ / |/ |/ / / ___ / /_/ / / / / / / / / / /
    /_/ |_/_/ |_\____/_/ /_/\____/|__/|__/ /_/ |_\__,_/_/ /_/ /_/_/_/ /_/
    """)
        if cli.verify_admin(sys.argv):
            cli.run_interactive()
            cli.cmdloop()
    # cli.run_single_command(sys.argv[1:])
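For reference, the column-width logic introduced above, extracted as a standalone sketch (the helper mirrors the diff; the demo strings are ours):

```python
def get_string_width(text: str) -> int:
    # ASCII printables plus \t\n\r count as one display column;
    # anything else (e.g. CJK characters) counts as two.
    half_width_chars = (
        " !\"#$%&'()*+,-./0123456789:;<=>?@"
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`"
        "abcdefghijklmnopqrstuvwxyz{|}~"
        "\t\n\r"
    )
    return sum(1 if ch in half_width_chars else 2 for ch in text)

print(get_string_width("ragflow"))  # 7 (all half-width)
print(get_string_width("知识库"))    # 6 (three full-width characters)
```

This is why the table printer switches from `len(...)` to `get_string_width(...)`: `len` undercounts full-width characters and misaligns the column borders.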
admin/client/pyproject.toml (new file, 24 lines)

@@ -0,0 +1,24 @@
[project]
name = "ragflow-cli"
version = "0.21.0.dev5"
description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring."
authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
license = { text = "Apache License, Version 2.0" }
readme = "README.md"
requires-python = ">=3.10,<3.13"
dependencies = [
    "requests>=2.30.0,<3.0.0",
    "beartype>=0.18.5,<0.19.0",
    "pycryptodomex>=3.10.0",
    "lark>=1.1.0",
]

[dependency-groups]
test = [
    "pytest>=8.3.5",
    "requests>=2.32.3",
    "requests-toolbelt>=1.0.0",
]

[project.scripts]
ragflow-cli = "admin_client:main"
admin/pyproject.toml (new file, 24 lines)

@@ -0,0 +1,24 @@
[project]
name = "ragflow-cli"
version = "0.21.0.dev2"
description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring."
authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
license = { text = "Apache License, Version 2.0" }
readme = "README.md"
requires-python = ">=3.10,<3.13"
dependencies = [
    "requests>=2.30.0,<3.0.0",
    "beartype>=0.18.5,<0.19.0",
    "pycryptodomex>=3.10.0",
    "lark>=1.1.0",
]

[dependency-groups]
test = [
    "pytest>=8.3.5",
    "requests>=2.32.3",
    "requests-toolbelt>=1.0.0",
]

[project.scripts]
ragflow-cli = "ragflow_cli.admin_client:main"
admin/server/admin_server.py

@@ -26,7 +26,7 @@ from routes import admin_bp
from api.utils.log_utils import init_root_logger
from api.constants import SERVICE_CONF
from api import settings
from config import load_configurations, SERVICE_CONFIGS
from admin.server.config import load_configurations, SERVICE_CONFIGS

stop_event = threading.Event()

admin/server/routes.py

@@ -17,7 +17,7 @@

from flask import Blueprint, request

from auth import login_verify
from admin.server.auth import login_verify
from responses import success_response, error_response
from services import UserMgr, ServiceMgr, UserServiceMgr
from api.common.exceptions import AdminException

admin/server/services.py

@@ -27,7 +27,7 @@ from api.utils.crypt import decrypt
from api.utils import health_utils

from api.common.exceptions import AdminException, UserAlreadyExistsError, UserNotFoundError
from config import SERVICE_CONFIGS
from admin.server.config import SERVICE_CONFIGS


class UserMgr:

@@ -177,8 +177,8 @@ class ServiceMgr:
    def get_all_services():
        result = []
        configs = SERVICE_CONFIGS.configs
        for config in configs:
            result.append(config.to_dict())
        for service_id, config in enumerate(configs):
            config_dict = config.to_dict()
            try:
                service_detail = ServiceMgr.get_service_details(service_id)
                if service_detail['alive']:
                    config_dict['status'] = 'Alive'
                else:
                    config_dict['status'] = 'Timeout'
            except Exception:
                config_dict['status'] = 'Timeout'
            result.append(config_dict)
        return result

    @staticmethod
agent/templates/advanced_ingestion_pipeline.json (new file, 726 lines): diff suppressed because one or more lines are too long
agent/templates/chunk_summary.json (new file, 493 lines): diff suppressed because one or more lines are too long
agent/templates/stock_research_report.json (new file, 1172 lines): diff suppressed because one or more lines are too long
agent/templates/title_chunker.json (new file, 369 lines): diff suppressed because one or more lines are too long
agent/tools/exesql.py

@@ -53,12 +53,13 @@ class ExeSQLParam(ToolParamBase):
        self.max_records = 1024

    def check(self):
        self.check_valid_value(self.db_type, "Choose DB type", ['mysql', 'postgres', 'mariadb', 'mssql', 'IBM DB2'])
        self.check_valid_value(self.db_type, "Choose DB type", ['mysql', 'postgres', 'mariadb', 'mssql', 'IBM DB2', 'trino'])
        self.check_empty(self.database, "Database name")
        self.check_empty(self.username, "database username")
        self.check_empty(self.host, "IP Address")
        self.check_positive_integer(self.port, "IP Port")
        self.check_empty(self.password, "Database password")
        if self.db_type != "trino":
            self.check_empty(self.password, "Database password")
        self.check_positive_integer(self.max_records, "Maximum number of records")
        if self.database == "rag_flow":
            if self.host == "ragflow-mysql":

@@ -123,6 +124,45 @@ class ExeSQL(ToolBase, ABC):
                r'PWD=' + self._param.password
            )
            db = pyodbc.connect(conn_str)
        elif self._param.db_type == 'trino':
            try:
                import trino
                from trino.auth import BasicAuthentication
            except Exception:
                raise Exception("Missing dependency 'trino'. Please install: pip install trino")

            def _parse_catalog_schema(db: str):
                if not db:
                    return None, None
                if "." in db:
                    c, s = db.split(".", 1)
                elif "/" in db:
                    c, s = db.split("/", 1)
                else:
                    c, s = db, "default"
                return c, s

            catalog, schema = _parse_catalog_schema(self._param.database)
            if not catalog:
                raise Exception("For Trino, `database` must be 'catalog.schema' or at least 'catalog'.")

            http_scheme = "https" if os.environ.get("TRINO_USE_TLS", "0") == "1" else "http"
            auth = None
            if http_scheme == "https" and self._param.password:
                auth = BasicAuthentication(self._param.username, self._param.password)

            try:
                db = trino.dbapi.connect(
                    host=self._param.host,
                    port=int(self._param.port or 8080),
                    user=self._param.username or "ragflow",
                    catalog=catalog,
                    schema=schema or "default",
                    http_scheme=http_scheme,
                    auth=auth
                )
            except Exception as e:
                raise Exception("Database Connection Failed! \n" + str(e))
        elif self._param.db_type == 'IBM DB2':
            import ibm_db
            conn_str = (
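The `_parse_catalog_schema` helper added above (and repeated in the connection test below) accepts `catalog.schema` or `catalog/schema` and falls back to the `default` schema. Restated as a runnable sketch, with example inputs of our own:

```python
def _parse_catalog_schema(db: str):
    # Mirrors the helper in the diff: split on the first "." or "/",
    # defaulting the schema when only a catalog is given.
    if not db:
        return None, None
    if "." in db:
        c, s = db.split(".", 1)
    elif "/" in db:
        c, s = db.split("/", 1)
    else:
        c, s = db, "default"
    return c, s

assert _parse_catalog_schema("hive.sales") == ("hive", "sales")
assert _parse_catalog_schema("hive/sales") == ("hive", "sales")
assert _parse_catalog_schema("hive") == ("hive", "default")
assert _parse_catalog_schema("") == (None, None)
```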
api/apps/canvas_app.py

@@ -51,7 +51,7 @@ from rag.utils.redis_conn import REDIS_CONN
@manager.route('/templates', methods=['GET'])  # noqa: F821
@login_required
def templates():
    return get_json_result(data=[c.to_dict() for c in CanvasTemplateService.query(canvas_category=CanvasCategory.Agent)])
    return get_json_result(data=[c.to_dict() for c in CanvasTemplateService.get_all()])


@manager.route('/rm', methods=['POST'])  # noqa: F821

@@ -409,6 +409,49 @@ def test_db_connect():
        ibm_db.fetch_assoc(stmt)
        ibm_db.close(conn)
        return get_json_result(data="Database Connection Successful!")
    elif req["db_type"] == 'trino':
        def _parse_catalog_schema(db: str):
            if not db:
                return None, None
            if "." in db:
                c, s = db.split(".", 1)
            elif "/" in db:
                c, s = db.split("/", 1)
            else:
                c, s = db, "default"
            return c, s
        try:
            import trino
            import os
            from trino.auth import BasicAuthentication
        except Exception:
            return server_error_response("Missing dependency 'trino'. Please install: pip install trino")

        catalog, schema = _parse_catalog_schema(req["database"])
        if not catalog:
            return server_error_response("For Trino, 'database' must be 'catalog.schema' or at least 'catalog'.")

        http_scheme = "https" if os.environ.get("TRINO_USE_TLS", "0") == "1" else "http"

        auth = None
        if http_scheme == "https" and req.get("password"):
            auth = BasicAuthentication(req.get("username") or "ragflow", req["password"])

        conn = trino.dbapi.connect(
            host=req["host"],
            port=int(req["port"] or 8080),
            user=req["username"] or "ragflow",
            catalog=catalog,
            schema=schema or "default",
            http_scheme=http_scheme,
            auth=auth
        )
        cur = conn.cursor()
        cur.execute("SELECT 1")
        cur.fetchall()
        cur.close()
        conn.close()
        return get_json_result(data="Database Connection Successful!")
    else:
        return server_error_response("Unsupported database type.")
    if req["db_type"] != 'mssql':
api/apps/document_app.py

@@ -568,7 +568,7 @@ def change_parser():

    def reset_doc():
        nonlocal doc
        e = DocumentService.update_by_id(doc.id, {"parser_id": req["parser_id"], "progress": 0, "progress_msg": "", "run": TaskStatus.UNSTART.value})
        e = DocumentService.update_by_id(doc.id, {"pipeline_id": req["pipeline_id"], "parser_id": req["parser_id"], "progress": 0, "progress_msg": "", "run": TaskStatus.UNSTART.value})
        if not e:
            return get_data_error_result(message="Document not found!")
        if doc.token_num > 0:
api/apps/kb_app.py

@@ -36,6 +36,7 @@ from api import settings
from rag.nlp import search
from api.constants import DATASET_NAME_LIMIT
from rag.settings import PAGERANK_FLD
from rag.utils.redis_conn import REDIS_CONN
from rag.utils.storage_factory import STORAGE_IMPL

@@ -187,6 +188,9 @@ def detail():
            return get_data_error_result(
                message="Can't find this knowledgebase!")
        kb["size"] = DocumentService.get_total_size_by_kb_id(kb_id=kb["id"], keywords="", run_status=[], types=[])
        for key in ["graphrag_task_finish_at", "raptor_task_finish_at", "mindmap_task_finish_at"]:
            if finish_at := kb.get(key):
                kb[key] = finish_at.strftime("%Y-%m-%d %H:%M:%S")
        return get_json_result(data=kb)
    except Exception as e:
        return server_error_response(e)

@@ -760,18 +764,25 @@ def delete_kb_task():
    match pipeline_task_type:
        case PipelineTaskType.GRAPH_RAG:
            settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)
            kb_task_id = "graphrag_task_id"
            kb_task_id_field = "graphrag_task_id"
            task_id = kb.graphrag_task_id
            kb_task_finish_at = "graphrag_task_finish_at"
        case PipelineTaskType.RAPTOR:
            kb_task_id = "raptor_task_id"
            kb_task_id_field = "raptor_task_id"
            task_id = kb.raptor_task_id
            kb_task_finish_at = "raptor_task_finish_at"
        case PipelineTaskType.MINDMAP:
            kb_task_id = "mindmap_task_id"
            kb_task_id_field = "mindmap_task_id"
            task_id = kb.mindmap_task_id
            kb_task_finish_at = "mindmap_task_finish_at"
        case _:
            return get_error_data_result(message="Internal Error: Invalid task type")

    ok = KnowledgebaseService.update_by_id(kb_id, {kb_task_id: "", kb_task_finish_at: None})
    def cancel_task(task_id):
        REDIS_CONN.set(f"{task_id}-cancel", "x")
    cancel_task(task_id)

    ok = KnowledgebaseService.update_by_id(kb_id, {kb_task_id_field: "", kb_task_finish_at: None})
    if not ok:
        return server_error_response(f"Internal error: cannot delete task {pipeline_task_type}")
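Note that cancellation above is cooperative: the endpoint only sets a `{task_id}-cancel` key in Redis. A sketch of what the consuming side might look like; the `is_cancelled` helper and the `get` call are our assumption about the wrapper's interface, not code from this changeset:

```python
from rag.utils.redis_conn import REDIS_CONN

def is_cancelled(task_id: str) -> bool:
    # Hypothetical worker-side poll: a long-running task would call this
    # between steps and abort once the endpoint has set the cancel key.
    return REDIS_CONN.get(f"{task_id}-cancel") is not None
```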
api/db/services/canvas_service.py

@@ -143,15 +143,12 @@ class UserCanvasService(CommonService):
        ]
        if keywords:
            agents = cls.model.select(*fields).join(User, on=(cls.model.user_id == User.id)).where(
                cls.model.user_id.in_(joined_tenant_ids),
                fn.LOWER(cls.model.title).contains(keywords.lower())
                #(((cls.model.user_id.in_(joined_tenant_ids)) & (cls.model.permission == TenantPermission.TEAM.value)) | (cls.model.user_id == user_id)),
                #(fn.LOWER(cls.model.title).contains(keywords.lower()))
                (((cls.model.user_id.in_(joined_tenant_ids)) & (cls.model.permission == TenantPermission.TEAM.value)) | (cls.model.user_id == user_id)),
                (fn.LOWER(cls.model.title).contains(keywords.lower()))
            )
        else:
            agents = cls.model.select(*fields).join(User, on=(cls.model.user_id == User.id)).where(
                cls.model.user_id.in_(joined_tenant_ids)
                #(((cls.model.user_id.in_(joined_tenant_ids)) & (cls.model.permission == TenantPermission.TEAM.value)) | (cls.model.user_id == user_id))
                (((cls.model.user_id.in_(joined_tenant_ids)) & (cls.model.permission == TenantPermission.TEAM.value)) | (cls.model.user_id == user_id))
            )
        if canvas_category:
            agents = agents.where(cls.model.canvas_category == canvas_category)
api/db/services/knowledgebase_service.py

@@ -397,9 +397,10 @@ class KnowledgebaseService(CommonService):
        else:
            kbs = kbs.order_by(cls.model.getter_by(orderby).asc())

        total = kbs.count()
        kbs = kbs.paginate(page_number, items_per_page)

        return list(kbs.dicts()), kbs.count()
        return list(kbs.dicts()), total

    @classmethod
    @DB.connection_context()
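The pagination change above fixes a subtle bug: `paginate()` applies LIMIT/OFFSET to the query, so calling `count()` afterwards reports at most one page rather than the full total. A self-contained peewee sketch of the behavior (the table and data are ours):

```python
from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(":memory:")

class Doc(Model):
    name = CharField()
    class Meta:
        database = db

db.create_tables([Doc])
Doc.insert_many([{"name": f"doc{i}"} for i in range(25)]).execute()

query = Doc.select()
total = query.count()          # 25: counted before LIMIT/OFFSET is applied
page = query.paginate(2, 10)   # page 2, 10 rows per page
print(total, len(list(page)))  # 25 10
# Counting the paginated query instead would report the page size,
# which is what the old `return list(kbs.dicts()), kbs.count()` did.
```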
api/utils/api_utils.py

@@ -151,10 +151,12 @@ def get_data_error_result(code=settings.RetCode.DATA_ERROR, message="Sorry! Data
def server_error_response(e):
    logging.exception(e)
    try:
        if e.code == 401:
            return get_json_result(code=401, message=repr(e))
    except BaseException:
        pass
        msg = repr(e).lower()
        if getattr(e, "code", None) == 401 or ("unauthorized" in msg) or ("401" in msg):
            return get_json_result(code=settings.RetCode.UNAUTHORIZED, message=repr(e))
    except Exception as ex:
        logging.warning(f"error checking authorization: {ex}")

    if len(e.args) > 1:
        try:
            serialized_data = serialize_for_json(e.args[1])
conf/llm_factories.json

@@ -803,6 +803,12 @@
                    "tags": "TEXT EMBEDDING",
                    "max_tokens": 512,
                    "model_type": "embedding"
                },
                {
                    "llm_name": "glm-asr",
                    "tags": "SPEECH2TEXT",
                    "max_tokens": 4096,
                    "model_type": "speech2text"
                }
            ]
        },

@@ -5140,4 +5146,4 @@
            ]
        }
    ]
}
}
docker/.env (16 changed lines)

@@ -37,9 +37,12 @@ OPENSEARCH_PASSWORD=infini_rag_flow_OS_01

# The port used to expose the Kibana service to the host machine,
# allowing EXTERNAL access to the service running inside the Docker container.
# To enable Kibana, you need to:
# 1. Ensure that COMPOSE_PROFILES includes kibana, for example: COMPOSE_PROFILES=${DOC_ENGINE},kibana
# 2. Comment out or delete the following settings of the es service in docker-compose-base.yml: xpack.security.enabled, xpack.security.http.ssl.enabled, xpack.security.transport.ssl.enabled (for details: https://www.elastic.co/docs/deploy-manage/security/self-auto-setup#stack-existing-settings-detected)
# 3. Adjust es.hosts in conf/service_config.yaml or docker/service_conf.yaml.template to 'https://localhost:1200'
# 4. After startup succeeds, generate the Kibana enrollment token inside the es container with `bin/elasticsearch-create-enrollment-token -s kibana`; Kibana can then be used normally
KIBANA_PORT=6601
KIBANA_USER=rag_flow
KIBANA_PASSWORD=infini_rag_flow

# The maximum amount of the memory, in bytes, that a specific Docker container can use while running.
# Update it according to the available memory in the host machine.

@@ -91,15 +94,16 @@ REDIS_PASSWORD=infini_rag_flow
# The port used to expose RAGFlow's HTTP API service to the host machine,
# allowing EXTERNAL access to the service running inside the Docker container.
SVR_HTTP_PORT=9380
ADMIN_SVR_HTTP_PORT=9381

# The RAGFlow Docker image to download.
# Defaults to the v0.20.5-slim edition, which is the RAGFlow Docker image without embedding models.
RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5-slim
# Defaults to the v0.21.0-slim edition, which is the RAGFlow Docker image without embedding models.
RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0-slim
#
# To download the RAGFlow Docker image with embedding models, uncomment the following line instead:
# RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5
# RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0
#
# The Docker image of the v0.20.5 edition includes built-in embedding models:
# The Docker image of the v0.21.0 edition includes built-in embedding models:
# - BAAI/bge-large-zh-v1.5
# - maidalun1020/bce-embedding-base_v1
#
docker/README.md

@@ -79,8 +79,8 @@ The [.env](./.env) file contains important environment variables for Docker.
- `RAGFLOW-IMAGE`
  The Docker image edition. Available editions:

  - `infiniflow/ragflow:v0.20.5-slim` (default): The RAGFlow Docker image without embedding models.
  - `infiniflow/ragflow:v0.20.5`: The RAGFlow Docker image with embedding models including:
  - `infiniflow/ragflow:v0.21.0-slim` (default): The RAGFlow Docker image without embedding models.
  - `infiniflow/ragflow:v0.21.0`: The RAGFlow Docker image with embedding models including:
    - Built-in embedding models:
      - `BAAI/bge-large-zh-v1.5`
      - `maidalun1020/bce-embedding-base_v1`
docker/docker-compose-base.yml

@@ -77,7 +77,7 @@ services:
    container_name: ragflow-infinity
    profiles:
      - infinity
    image: infiniflow/infinity:v0.6.0-dev7
    image: infiniflow/infinity:v0.6.0
    volumes:
      - infinity_data:/var/infinity
      - ./infinity_conf.toml:/infinity_conf.toml

@@ -207,6 +207,30 @@ services:
        start_period: 10s


  kibana:
    container_name: ragflow-kibana
    profiles:
      - kibana
    image: kibana:${STACK_VERSION}
    ports:
      - ${KIBANA_PORT-5601}:5601
    env_file: .env
    environment:
      - TZ=${TIMEZONE}
    volumes:
      - kibana_data:/usr/share/kibana/data
    depends_on:
      es01:
        condition: service_started
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5601/api/status"]
      interval: 10s
      timeout: 10s
      retries: 120
    networks:
      - ragflow
    restart: on-failure


volumes:
  esdata01:

@@ -221,6 +245,8 @@ volumes:
    driver: local
  redis_data:
    driver: local
  kibana_data:
    driver: local

networks:
  ragflow:
docker/docker-compose.yml

@@ -22,9 +22,14 @@ services:
      # - --no-transport-sse-enabled              # Disable legacy SSE endpoints (/sse and /messages/)
      # - --no-transport-streamable-http-enabled  # Disable Streamable HTTP transport (/mcp endpoint)
      # - --no-json-response                      # Disable JSON response mode in Streamable HTTP transport (instead of SSE over HTTP)

    # Example configuration to start the Admin server:
    # command:
    #   - --enable-adminserver
    container_name: ragflow-server
    ports:
      - ${SVR_HTTP_PORT}:9380
      - ${ADMIN_SVR_HTTP_PORT}:9381
      - 80:80
      - 443:443
      - 5678:5678
docker/entrypoint.sh

@@ -11,6 +11,7 @@ function usage() {
  echo "  --disable-webserver             Disables the web server (nginx + ragflow_server)."
  echo "  --disable-taskexecutor          Disables task executor workers."
  echo "  --enable-mcpserver              Enables the MCP server."
  echo "  --enable-adminserver            Enables the Admin server."
  echo "  --consumer-no-beg=<num>         Start range for consumers (if using range-based)."
  echo "  --consumer-no-end=<num>         End range for consumers (if using range-based)."
  echo "  --workers=<num>                 Number of task executors to run (if range is not used)."

@@ -21,12 +22,14 @@ function usage() {
  echo "  $0 --disable-webserver --consumer-no-beg=0 --consumer-no-end=5"
  echo "  $0 --disable-webserver --workers=2 --host-id=myhost123"
  echo "  $0 --enable-mcpserver"
  echo "  $0 --enable-adminserver"
  exit 1
}

ENABLE_WEBSERVER=1   # Default to enable web server
ENABLE_TASKEXECUTOR=1   # Default to enable task executor
ENABLE_MCP_SERVER=0
ENABLE_ADMIN_SERVER=0   # Default: admin server disabled
CONSUMER_NO_BEG=0
CONSUMER_NO_END=0
WORKERS=1

@@ -70,6 +73,10 @@ for arg in "$@"; do
      ENABLE_MCP_SERVER=1
      shift
      ;;
    --enable-adminserver)
      ENABLE_ADMIN_SERVER=1
      shift
      ;;
    --mcp-host=*)
      MCP_HOST="${arg#*=}"
      shift

@@ -185,6 +192,12 @@ if [[ "${ENABLE_WEBSERVER}" -eq 1 ]]; then
  done &
fi

if [[ "${ENABLE_ADMIN_SERVER}" -eq 1 ]]; then
  echo "Starting admin_server..."
  while true; do
    "$PY" admin/server/admin_server.py
  done &
fi

if [[ "${ENABLE_MCP_SERVER}" -eq 1 ]]; then
  start_mcp_server
(unnamed file)

@@ -99,8 +99,8 @@ RAGFlow utilizes MinIO as its object storage solution, leveraging its scalabilit
- `RAGFLOW-IMAGE`
  The Docker image edition. Available editions:

  - `infiniflow/ragflow:v0.20.5-slim` (default): The RAGFlow Docker image without embedding models.
  - `infiniflow/ragflow:v0.20.5`: The RAGFlow Docker image with embedding models including:
  - `infiniflow/ragflow:v0.21.0-slim` (default): The RAGFlow Docker image without embedding models.
  - `infiniflow/ragflow:v0.21.0`: The RAGFlow Docker image with embedding models including:
    - Built-in embedding models:
      - `BAAI/bge-large-zh-v1.5`
      - `maidalun1020/bce-embedding-base_v1`
(unnamed file)

@@ -77,7 +77,7 @@ After building the infiniflow/ragflow:nightly-slim image, you are ready to launc

1. Edit Docker Compose Configuration

   Open the `docker/.env` file. Find the `RAGFLOW_IMAGE` setting and change the image reference from `infiniflow/ragflow:v0.20.5-slim` to `infiniflow/ragflow:nightly-slim` to use the pre-built image.
   Open the `docker/.env` file. Find the `RAGFLOW_IMAGE` setting and change the image reference from `infiniflow/ragflow:v0.21.0-slim` to `infiniflow/ragflow:nightly-slim` to use the pre-built image.


2. Launch the Service
24
docs/faq.mdx
24
docs/faq.mdx
@@ -30,29 +30,19 @@ The "garbage in garbage out" status quo remains unchanged despite the fact that

Each RAGFlow release is available in two editions:

- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.20.5-slim`
- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.20.5`
- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.21.0-slim`
- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.21.0`

---

### Which embedding models can be deployed locally?

RAGFlow offers two Docker image editions, `v0.20.5-slim` and `v0.20.5`:
RAGFlow offers two Docker image editions, `v0.21.0-slim` and `v0.21.0`:

- `infiniflow/ragflow:v0.20.5-slim` (default): The RAGFlow Docker image without embedding models.
- `infiniflow/ragflow:v0.20.5`: The RAGFlow Docker image with embedding models including:
- Built-in embedding models:
- `BAAI/bge-large-zh-v1.5`
- `maidalun1020/bce-embedding-base_v1`
- Embedding models that will be downloaded once you select them in the RAGFlow UI:
- `BAAI/bge-base-en-v1.5`
- `BAAI/bge-large-en-v1.5`
- `BAAI/bge-small-en-v1.5`
- `BAAI/bge-small-zh-v1.5`
- `jinaai/jina-embeddings-v2-base-en`
- `jinaai/jina-embeddings-v2-small-en`
- `nomic-ai/nomic-embed-text-v1.5`
- `sentence-transformers/all-MiniLM-L6-v2`
- `infiniflow/ragflow:v0.21.0-slim` (default): The RAGFlow Docker image without embedding models.
- `infiniflow/ragflow:v0.21.0`: The RAGFlow Docker image with the following built-in embedding models:
- `BAAI/bge-large-zh-v1.5`
- `maidalun1020/bce-embedding-base_v1`

---

@@ -9,7 +9,7 @@ The component equipped with reasoning, tool usage, and multi-agent collaboration

---

An **Agent** component fine-tunes the LLM and sets its prompt. From v0.20.5 onwards, an **Agent** component is able to work independently and with the following capabilities:
An **Agent** component fine-tunes the LLM and sets its prompt. From v0.21.0 onwards, an **Agent** component is able to work independently and with the following capabilities:

- Autonomous reasoning with reflection and adjustment based on environmental feedback.
- Use of tools or subagents to complete tasks.
@@ -147,7 +147,7 @@ An **Agent** component relies on keys (variables) to specify its data inputs. It

#### Advanced usage

From v0.20.5 onwards, four framework-level prompt blocks are available in the **System prompt** field, enabling you to customize and *override* prompts at the framework level. Type `/` or click **(x)** to view them; they appear under the **Framework** entry in the dropdown menu.
From v0.21.0 onwards, four framework-level prompt blocks are available in the **System prompt** field, enabling you to customize and *override* prompts at the framework level. Type `/` or click **(x)** to view them; they appear under the **Framework** entry in the dropdown menu.

- `task_analysis` prompt block
- This block is responsible for analyzing tasks — either a user task or a task assigned by the lead Agent when the **Agent** component is acting as a Sub-Agent.

@@ -48,7 +48,7 @@ You start an AI conversation by creating an assistant.
- If no target language is selected, the system will search only in the language of your query, which may cause relevant information in other languages to be missed.
- **Variable** refers to the variables (keys) to be used in the system prompt. `{knowledge}` is a reserved variable. Click **Add** to add more variables for the system prompt.
- If you are uncertain about the logic behind **Variable**, leave it *as-is*.
- As of v0.20.5, if you add custom variables here, the only way you can pass in their values is to call:
- As of v0.21.0, if you add custom variables here, the only way you can pass in their values is to call:
- HTTP method [Converse with chat assistant](../../references/http_api_reference.md#converse-with-chat-assistant), or
- Python method [Converse with chat assistant](../../references/python_api_reference.md#converse-with-chat-assistant).

@@ -124,7 +124,7 @@ See [Run retrieval test](./run_retrieval_test.md) for details.

## Search for dataset

As of RAGFlow v0.20.5, the search feature is still in a rudimentary form, supporting only dataset search by name.
As of RAGFlow v0.21.0, the search feature is still in a rudimentary form, supporting only dataset search by name.



@@ -21,6 +21,10 @@ Ensure that your metadata is in JSON format; otherwise, your updates will not be



## Related APIs

[Retrieve chunks](../../references/http_api_reference.md#retrieve-chunks)

## Frequently asked questions

### Can I set metadata for multiple documents at once?

@@ -87,4 +87,4 @@ RAGFlow's file management allows you to download an uploaded file:



> As of RAGFlow v0.20.5, bulk download is not supported, nor can you download an entire folder.
> As of RAGFlow v0.21.0, bulk download is not supported, nor can you download an entire folder.

@@ -1,3 +1,9 @@
---
sidebar_position: 6
slug: /manage_users_and_services
---

# Admin CLI and Admin Service

@@ -8,31 +14,48 @@ The Admin CLI and Admin Service form a client-server architectural suite for RAG

## Starting the Admin Service

### Launching from source code

1. Before starting the Admin Service, make sure the RAGFlow system is already running.
2. Switch to the ragflow/ directory and run the service script:

```bash
source .venv/bin/activate
export PYTHONPATH=$(pwd)
python admin/admin_server.py
```
2. Launch from source code:

The service will start and listen for incoming connections from the CLI on the configured port. The default port is 9381.
```bash
python admin/server/admin_server.py
```

The service will start and listen for incoming connections from the CLI on the configured port.

### Using docker image

1. Before startup, configure the `docker-compose.yml` file to enable the admin server:

```yaml
command:
  - --enable-adminserver
```

2. Start the containers. The service will start and listen for incoming connections from the CLI on the configured port.
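
   A minimal sketch of this step, assuming the default `docker/` directory layout used elsewhere in these docs:

   ```bash
   cd ragflow/docker
   docker compose -f docker-compose.yml up -d
   ```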

## Using the Admin CLI

1. Ensure the Admin Service is running.
2. Launch the CLI client:

```bash
source .venv/bin/activate
export PYTHONPATH=$(pwd)
python admin/admin_client.py -h 0.0.0.0 -p 9381
```
2. Install ragflow-cli.

Enter the superuser's password to log in. The default password is `admin`.
```bash
pip install ragflow-cli
```

3. Launch the CLI client:

```bash
ragflow-cli -h 0.0.0.0 -p 9381
```

Enter the superuser's password to log in. The default password is `admin`.

@@ -50,7 +73,7 @@ Commands are case-insensitive and must be terminated with a semicolon (;).

`SHOW SERVICE <id>;`

- Shows detailed status information for the service identified by <id>.
- Shows detailed status information for the service identified by **id**.
- [Example](#example-show-service)

### User Management Commands
@@ -115,16 +138,16 @@ Commands are case-insensitive and must be terminated with a semicolon (;).
admin> list services;
command: list services;
Listing all services
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
| extra | host | id | name | port | service_type |
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
| {} | 0.0.0.0 | 0 | ragflow_0 | 9380 | ragflow_server |
| {'meta_type': 'mysql', 'password': 'infini_rag_flow', 'username': 'root'} | localhost | 1 | mysql | 5455 | meta_data |
| {'password': 'infini_rag_flow', 'store_type': 'minio', 'user': 'rag_flow'} | localhost | 2 | minio | 9000 | file_store |
| {'password': 'infini_rag_flow', 'retrieval_type': 'elasticsearch', 'username': 'elastic'} | localhost | 3 | elasticsearch | 1200 | retrieval |
| {'db_name': 'default_db', 'retrieval_type': 'infinity'} | localhost | 4 | infinity | 23817 | retrieval |
| {'database': 1, 'mq_type': 'redis', 'password': 'infini_rag_flow'} | localhost | 5 | redis | 6379 | message_queue |
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+---------+
| extra | host | id | name | port | service_type | status |
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+---------+
| {} | 0.0.0.0 | 0 | ragflow_0 | 9380 | ragflow_server | Timeout |
| {'meta_type': 'mysql', 'password': 'infini_rag_flow', 'username': 'root'} | localhost | 1 | mysql | 5455 | meta_data | Alive |
| {'password': 'infini_rag_flow', 'store_type': 'minio', 'user': 'rag_flow'} | localhost | 2 | minio | 9000 | file_store | Alive |
| {'password': 'infini_rag_flow', 'retrieval_type': 'elasticsearch', 'username': 'elastic'} | localhost | 3 | elasticsearch | 1200 | retrieval | Alive |
| {'db_name': 'default_db', 'retrieval_type': 'infinity'} | localhost | 4 | infinity | 23817 | retrieval | Timeout |
| {'database': 1, 'mq_type': 'redis', 'password': 'infini_rag_flow'} | localhost | 5 | redis | 6379 | message_queue | Alive |
+-------------------------------------------------------------------------------------------+-----------+----+---------------+-------+----------------+---------+

```

@@ -18,7 +18,7 @@ RAGFlow ships with a built-in [Langfuse](https://langfuse.com) integration so th
Langfuse stores traces, spans and prompt payloads in a purpose-built observability backend and offers filtering and visualisations on top.

:::info NOTE
• RAGFlow **≥ 0.20.5** (contains the Langfuse connector)
• RAGFlow **≥ 0.21.0** (contains the Langfuse connector)
• A Langfuse workspace (cloud or self-hosted) with a _Project Public Key_ and _Secret Key_
:::

@@ -66,10 +66,10 @@ To upgrade RAGFlow, you must upgrade **both** your code **and** your Docker imag
git clone https://github.com/infiniflow/ragflow.git
```

2. Switch to the latest, officially published release, e.g., `v0.20.5`:
2. Switch to the latest, officially published release, e.g., `v0.21.0`:

```bash
git checkout -f v0.20.5
git checkout -f v0.21.0
```

3. Update **ragflow/docker/.env**:
@@ -83,14 +83,14 @@ To upgrade RAGFlow, you must upgrade **both** your code **and** your Docker imag
<TabItem value="slim">

```bash
RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5-slim
RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0-slim
```

</TabItem>
<TabItem value="full">

```bash
RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5
RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0
```

</TabItem>
@@ -114,10 +114,10 @@ No, you do not need to. Upgrading RAGFlow in itself will *not* remove your uploa
1. From an environment with Internet access, pull the required Docker image.
2. Save the Docker image to a **.tar** file.
```bash
docker save -o ragflow.v0.20.5.tar infiniflow/ragflow:v0.20.5
docker save -o ragflow.v0.21.0.tar infiniflow/ragflow:v0.21.0
```
3. Copy the **.tar** file to the target server.
4. Load the **.tar** file into Docker:
```bash
docker load -i ragflow.v0.20.5.tar
docker load -i ragflow.v0.21.0.tar
```

@@ -44,7 +44,7 @@ This section provides instructions on setting up the RAGFlow server on Linux. If

`vm.max_map_count`. This value sets the maximum number of memory map areas a process may have. Its default value is 65530. While most applications require fewer than a thousand maps, reducing this value can result in abnormal behaviors, and the system will throw out-of-memory errors when a process reaches the limitation.

RAGFlow v0.20.5 uses Elasticsearch or [Infinity](https://github.com/infiniflow/infinity) for multiple recall. Setting the value of `vm.max_map_count` correctly is crucial to the proper functioning of the Elasticsearch component.
RAGFlow v0.21.0 uses Elasticsearch or [Infinity](https://github.com/infiniflow/infinity) for multiple recall. Setting the value of `vm.max_map_count` correctly is crucial to the proper functioning of the Elasticsearch component.

<Tabs
defaultValue="linux"
@@ -184,13 +184,13 @@ This section provides instructions on setting up the RAGFlow server on Linux. If
```bash
$ git clone https://github.com/infiniflow/ragflow.git
$ cd ragflow/docker
$ git checkout -f v0.20.5
$ git checkout -f v0.21.0
```

3. Use the pre-built Docker images and start up the server:

:::tip NOTE
The command below downloads the `v0.20.5-slim` edition of the RAGFlow Docker image. Refer to the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.20.5-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.20.5` for the full edition `v0.20.5`.
The command below downloads the `v0.21.0-slim` edition of the RAGFlow Docker image. Refer to the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.21.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.21.0` for the full edition `v0.21.0`.
:::

```bash
@@ -207,8 +207,8 @@ This section provides instructions on setting up the RAGFlow server on Linux. If

| RAGFlow image tag | Image size (GB) | Has embedding models and Python packages? | Stable? |
| ------------------- | --------------- | ----------------------------------------- | ------------------------ |
| `v0.20.5` | ≈9 | :heavy_check_mark: | Stable release |
| `v0.20.5-slim` | ≈2 | ❌ | Stable release |
| `v0.21.0` | ≈9 | :heavy_check_mark: | Stable release |
| `v0.21.0-slim` | ≈2 | ❌ | Stable release |
| `nightly` | ≈9 | :heavy_check_mark: | *Unstable* nightly build |
| `nightly-slim` | ≈2 | ❌ | *Unstable* nightly build |

@@ -217,7 +217,7 @@ This section provides instructions on setting up the RAGFlow server on Linux. If
```

:::danger IMPORTANT
The embedding models included in `v0.20.5` and `nightly` are:
The embedding models included in `v0.21.0` and `nightly` are:

- BAAI/bge-large-zh-v1.5
- maidalun1020/bce-embedding-base_v1

@@ -19,7 +19,7 @@ import TOCInline from '@theme/TOCInline';

### Cross-language search

Cross-language search (also known as cross-lingual retrieval) is a feature introduced in version 0.20.5. It enables users to submit queries in one language (for example, English) and retrieve relevant documents written in other languages such as Chinese or Spanish. This feature is enabled by the system’s default chat model, which translates queries to ensure accurate matching of semantic meaning across languages.
Cross-language search (also known as cross-lingual retrieval) is a feature introduced in version 0.21.0. It enables users to submit queries in one language (for example, English) and retrieve relevant documents written in other languages such as Chinese or Spanish. This feature is enabled by the system’s default chat model, which translates queries to ensure accurate matching of semantic meaning across languages.

By enabling cross-language search, users can effortlessly access a broader range of information regardless of language barriers, significantly enhancing the system’s usability and inclusiveness.


@@ -1823,7 +1823,21 @@ curl --request POST \
{
"question": "What is the advantage of ragflow?",
"dataset_ids": ["b2a62730759d11ef987d0242ac120004"],
"document_ids": ["77df9ef4759a11ef8bdd0242ac120004"]
"document_ids": ["77df9ef4759a11ef8bdd0242ac120004"],
"metadata_condition": {
    "conditions": [
        {
            "name": "author",
            "comparison_operator": "=",
            "value": "Toby"
        },
        {
            "name": "url",
            "comparison_operator": "not contains",
            "value": "amd"
        }
    ]
}
}'
```

@@ -1858,7 +1872,25 @@ curl --request POST \
- `"cross_languages"`: (*Body parameter*) `list[string]`
The languages into which the query should be translated, in order to achieve keyword retrieval in different languages.
- `"metadata_condition"`: (*Body parameter*), `object`
The metadata condition for filtering chunks.
The metadata condition used for filtering chunks:
- `"conditions"`: (*Body parameter*), `array`
A list of metadata filter conditions.
- `"name"`: `string` - The metadata field name to filter by, e.g., `"author"`, `"company"`, `"url"`. Ensure this metadata field exists before use. See [Set metadata](../guides/dataset/set_metadata.md) for details.
- `"comparison_operator"`: `string` - The comparison operator. Can be one of:
- `"contains"`
- `"not contains"`
- `"start with"`
- `"empty"`
- `"not empty"`
- `"="`
- `"≠"`
- `">"`
- `"<"`
- `"≥"`
- `"≤"`
- `"value"`: `string` - The value to compare.

#### Response

Success:

@@ -698,6 +698,58 @@ print("Async bulk parsing initiated.")

---

### Parse documents (with document status)

```python
DataSet.parse_documents(document_ids: list[str]) -> list[tuple[str, str, int, int]]
```

Parses documents in the current dataset, blocking until all parsing tasks complete.

This method encapsulates `async_parse_documents()`. It awaits the completion of all parsing tasks before returning detailed results, including the parsing status and statistics for each document. If a keyboard interruption occurs (e.g., `Ctrl+C`), all pending parsing tasks will be cancelled gracefully.

#### Parameters

##### document_ids: `list[str]`, *Required*

The IDs of the documents to parse.

#### Returns

A list of tuples with detailed parsing results:

```python
[
    (document_id: str, status: str, chunk_count: int, token_count: int),
    ...
]
```
- `status`: The final parsing state (e.g., `DONE`, `FAIL`, `CANCEL`).
- `chunk_count`: The number of content chunks created from the document.
- `token_count`: The total number of tokens processed.

---

#### Example

```python
rag_object = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://<YOUR_BASE_URL>:9380")
dataset = rag_object.create_dataset(name="dataset_name")
documents = dataset.list_documents(keywords="test")
ids = [doc.id for doc in documents]

try:
    finished = dataset.parse_documents(ids)
    for doc_id, status, chunk_count, token_count in finished:
        print(f"Document {doc_id} parsing finished with status: {status}, chunks: {chunk_count}, tokens: {token_count}")
except KeyboardInterrupt:
    print("\nParsing interrupted by user. All pending tasks have been cancelled.")
except Exception as e:
    print(f"Parsing failed: {e}")
```

---

### Stop parsing documents

```python

@@ -33,7 +33,7 @@ A complete list of models supported by RAGFlow, which will continue to expand.
| Jina | | :heavy_check_mark: | :heavy_check_mark: | | | |
| LeptonAI | :heavy_check_mark: | | | | | |
| LocalAI | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | |
| LM-Studio | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
| LM-Studio | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | |
| MiniMax | :heavy_check_mark: | | | | | |
| Mistral | :heavy_check_mark: | :heavy_check_mark: | | | | |
| ModelScope | :heavy_check_mark: | | | | | |

@@ -9,8 +9,8 @@ Key features, improvements and bug fixes in the latest releases.

:::info
Each RAGFlow release is available in two editions:
- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.20.5-slim`
- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.20.5`
- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.21.0-slim`
- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.21.0`
:::

:::danger IMPORTANT
@@ -22,6 +22,34 @@ The embedding models included in a full edition are:
These two embedding models are optimized specifically for English and Chinese, so performance may be compromised if you use them to embed documents in other languages.
:::

## v0.21.0

Released on October 15, 2025.

### New features

- Orchestratable ingestion pipeline: Supports customized data ingestion and cleansing workflows, enabling users to flexibly design their data flows or directly apply the official data flow templates on the canvas.
- GraphRAG & RAPTOR write process optimized: Replaces the automatic incremental build process with manual batch building, significantly reducing construction overhead.
- Long-context RAG: Automatically generates document-level table of contents (TOC) structures to mitigate context loss caused by inaccurate or excessive chunking, substantially improving retrieval quality. This feature is now available via a TOC extraction template.
- Video file parsing: Expands the system's multimodal data processing capabilities by supporting video file parsing.
- Admin CLI: Introduces a new command-line tool for system administration, allowing users to manage and monitor RAGFlow's service status via command line.

### Improvements

- Redesigns RAGFlow's Login and Registration pages.
- Upgrades RAGFlow's document engine Infinity to v0.6.0.

### Added models

- Tongyi Qwen 3 series
- Claude Sonnet 4.5
- Meituan LongCat-Flash-Thinking

### New agent templates

- Company Research Report Deep Dive Agent: Designed for financial institutions to help analysts quickly organize information, generate research reports, and make investment decisions.
- Orchestratable Ingestion Pipeline Template: Allows users to apply this template on the canvas to rapidly establish standardized data ingestion and cleansing processes.

## v0.20.5

Released on September 10, 2025.
@@ -580,7 +608,7 @@ Released on September 30, 2024.

### Compatibility changes

From this release onwards, RAGFlow offers slim editions of its Docker images to improve the experience for users with limited Internet access. A slim edition of RAGFlow's Docker image does not include built-in BGE/BCE embedding models and has a size of about 1GB; a full edition of RAGFlow is approximately 9GB and includes both built-in embedding models and embedding models that will be downloaded once you select them in the RAGFlow UI.
From this release onwards, RAGFlow offers slim editions of its Docker images to improve the experience for users with limited Internet access. A slim edition of RAGFlow's Docker image does not include built-in BGE/BCE embedding models and has a size of about 1GB; a full edition of RAGFlow is approximately 9GB and includes two built-in embedding models.

The default Docker image edition is `nightly-slim`. The following list clarifies the differences between various editions:


@@ -56,7 +56,7 @@ env:
ragflow:
  image:
    repository: infiniflow/ragflow
    tag: v0.20.5-slim
    tag: v0.21.0-slim
    pullPolicy: IfNotPresent
    pullSecrets: []
# Optional service configuration overrides
@@ -96,7 +96,7 @@ ragflow:
infinity:
  image:
    repository: infiniflow/infinity
    tag: v0.6.0-dev7
    tag: v0.6.0
    pullPolicy: IfNotPresent
    pullSecrets: []
  storage:

@@ -1,6 +1,6 @@
[project]
name = "ragflow"
version = "0.20.5"
version = "0.21.0"
description = "[RAGFlow](https://ragflow.io/) is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding. It offers a streamlined RAG workflow for businesses of any scale, combining LLM (Large Language Models) to provide truthful question-answering capabilities, backed by well-founded citations from various complex formatted data."
authors = [{ name = "Zhichang Yu", email = "yuzhichang@gmail.com" }]
license-files = ["LICENSE"]
@@ -46,7 +46,7 @@ dependencies = [
"html-text==0.6.2",
"httpx[socks]==0.27.2",
"huggingface-hub>=0.25.0,<0.26.0",
"infinity-sdk==0.6.0.dev7",
"infinity-sdk==0.6.0",
"infinity-emb>=0.0.66,<0.0.67",
"itsdangerous==2.1.2",
"json-repair==0.35.0",

@@ -166,7 +166,7 @@ class HierarchicalMerger(ProcessBase):
img = None
for i in path:
    txt += lines[i] + "\n"
    concat_img(img, id2image(section_images[i], partial(STORAGE_IMPL.get)))
    concat_img(img, id2image(section_images[i], partial(STORAGE_IMPL.get, tenant_id=self._canvas._tenant_id)))
cks.append(txt)
images.append(img)

@@ -180,7 +180,7 @@ class HierarchicalMerger(ProcessBase):
]
async with trio.open_nursery() as nursery:
    for d in cks:
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put), get_uuid())
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put, tenant_id=self._canvas._tenant_id), get_uuid())
self.set_output("chunks", cks)

self.callback(1, "Done.")

@@ -411,7 +411,7 @@ class Parser(ProcessBase):
dispositions = content_disposition.strip().split(";")
if dispositions[0].lower() == "attachment":
    filename = part.get_filename()
    payload = part.get_payload(decode=True)
    payload = part.get_payload(decode=True).decode(part.get_content_charset())
    attachments.append({
        "filename": filename,
        "payload": payload,
@@ -448,7 +448,7 @@ class Parser(ProcessBase):
for t in msg.attachments:
    attachments.append({
        "filename": t.name,
        "payload": t.data # binary
        "payload": t.data.decode("utf-8")
    })
email_content["attachments"] = attachments

@@ -512,4 +512,4 @@ class Parser(ProcessBase):
outs = self.output()
async with trio.open_nursery() as nursery:
    for d in outs.get("json", []):
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put), get_uuid())
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put, tenant_id=self._canvas._tenant_id), get_uuid())

@@ -87,7 +87,7 @@ class Splitter(ProcessBase):
sections, section_images = [], []
for o in from_upstream.json_result or []:
    sections.append((o.get("text", ""), o.get("position_tag", "")))
    section_images.append(id2image(o.get("img_id"), partial(STORAGE_IMPL.get)))
    section_images.append(id2image(o.get("img_id"), partial(STORAGE_IMPL.get, tenant_id=self._canvas._tenant_id)))

chunks, images = naive_merge_with_images(
    sections,
@@ -106,6 +106,6 @@ class Splitter(ProcessBase):
]
async with trio.open_nursery() as nursery:
    for d in cks:
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put), get_uuid())
        nursery.start_soon(image2id, d, partial(STORAGE_IMPL.put, tenant_id=self._canvas._tenant_id), get_uuid())
self.set_output("chunks", cks)
self.callback(1, "Done.")

@@ -234,8 +234,8 @@ class DeepInfraSeq2txt(Base):

self.client = OpenAI(api_key=key, base_url=base_url)
self.model_name = model_name


class CometAPISeq2txt(Base):
    _FACTORY_NAME = "CometAPI"

@@ -244,7 +244,8 @@ class CometAPISeq2txt(Base):
base_url = "https://api.cometapi.com/v1"
self.client = OpenAI(api_key=key, base_url=base_url)
self.model_name = model_name


class DeerAPISeq2txt(Base):
    _FACTORY_NAME = "DeerAPI"

@@ -253,3 +254,44 @@ class DeerAPISeq2txt(Base):
base_url = "https://api.deerapi.com/v1"
self.client = OpenAI(api_key=key, base_url=base_url)
self.model_name = model_name


class ZhipuSeq2txt(Base):
    _FACTORY_NAME = "ZHIPU-AI"

    def __init__(self, key, model_name="glm-asr", base_url="https://open.bigmodel.cn/api/paas/v4", **kwargs):
        if not base_url:
            base_url = "https://open.bigmodel.cn/api/paas/v4"
        self.base_url = base_url
        self.api_key = key
        self.model_name = model_name
        self.gen_conf = kwargs.get("gen_conf", {})
        self.stream = kwargs.get("stream", False)

    def transcription(self, audio_path):
        payload = {
            "model": self.model_name,
            "temperature": str(self.gen_conf.get("temperature", 0.75)) or "0.75",
            "stream": self.stream,
        }

        headers = {"Authorization": f"Bearer {self.api_key}"}
        with open(audio_path, "rb") as audio_file:
            files = {"file": audio_file}

            try:
                response = requests.post(
                    url=f"{self.base_url}/audio/transcriptions",
                    data=payload,
                    files=files,
                    headers=headers,
                )
                body = response.json()
                if response.status_code == 200:
                    full_content = body["text"]
                    return full_content, num_tokens_from_string(full_content)
                else:
                    error = body["error"]
                    return f"**ERROR**: code: {error['code']}, message: {error['message']}", 0
            except Exception as e:
                return "**ERROR**: " + str(e), 0

@@ -124,7 +124,7 @@ def kb_prompt(kbinfos, max_tokens, hash_id=False):

knowledges = []
for i, ck in enumerate(kbinfos["chunks"][:chunks_num]):
    cnt = "\nID: {}".format(i if not hash_id else hash_str2int(get_value(ck, "id", "chunk_id"), 100))
    cnt = "\nID: {}".format(i if not hash_id else hash_str2int(get_value(ck, "id", "chunk_id"), 500))
    cnt += draw_node("Title", get_value(ck, "docnm_kwd", "document_name"))
    cnt += draw_node("URL", ck['url']) if "url" in ck else ""
    for k, v in docs.get(get_value(ck, "doc_id", "document_id"), {}).items():
@@ -680,8 +680,7 @@ async def gen_toc_from_text(txt_info: dict, chat_mdl, callback=None):
    chat_mdl,
    gen_conf={"temperature": 0.0, "top_p": 0.9}
)
print(ans, "::::::::::::::::::::::::::::::::::::", flush=True)
txt_info["toc"] = ans if ans else []
txt_info["toc"] = ans if ans and not isinstance(ans, str) else []
if callback:
    callback(msg="")
except Exception as e:
@@ -728,15 +727,13 @@ async def run_toc_from_text(chunks, chat_mdl, callback=None):

for chunk in chunks_res:
    titles.extend(chunk.get("toc", []))

print(titles, ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")

# Filter out entries with title == -1
prune = len(titles) > 512
max_len = 12 if prune else 22
filtered = []
for x in titles:
    if not x.get("title") or x["title"] == "-1":
    if not isinstance(x, dict) or not x.get("title") or x["title"] == "-1":
        continue
    if len(rag_tokenizer.tokenize(x["title"]).split(" ")) > max_len:
        continue
@@ -745,12 +742,16 @@ async def run_toc_from_text(chunks, chat_mdl, callback=None):
    filtered.append(x)

logging.info(f"\n\nFiltered TOC sections:\n{filtered}")
if not filtered:
    return []

# Generate initial level (level/title)
raw_structure = [x.get("title", "") for x in filtered]

# Assign hierarchy levels using LLM
toc_with_levels = assign_toc_levels(raw_structure, chat_mdl, {"temperature": 0.0, "top_p": 0.9})
if not toc_with_levels:
    return []

# Merge structure and content (by index)
prune = len(toc_with_levels) > 512
@@ -779,7 +780,6 @@ def relevant_chunks_with_toc(query: str, toc:list[dict], chat_mdl, topn: int=6):
    chat_mdl,
    gen_conf={"temperature": 0.0, "top_p": 0.9}
)
print(ans, "::::::::::::::::::::::::::::::::::::", flush=True)
id2score = {}
for ti, sc in zip(toc, ans):
    if not isinstance(sc, dict) or sc.get("score", -1) < 1:

@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import concurrent
# from beartype import BeartypeConf
# from beartype.claw import beartype_all # <-- you didn't sign up for this
# beartype_all(conf=BeartypeConf(violation_type=UserWarning)) # <-- emit warnings from all code
@@ -317,7 +317,7 @@ async def build_chunks(task, progress_callback):
d["img_id"] = ""
docs.append(d)
return
await image2id(d, partial(STORAGE_IMPL.put), d["id"], task["kb_id"])
await image2id(d, partial(STORAGE_IMPL.put, tenant_id=task["tenant_id"]), d["id"], task["kb_id"])
docs.append(d)
except Exception:
logging.exception(
@@ -370,38 +370,6 @@ async def build_chunks(task, progress_callback):
nursery.start_soon(doc_question_proposal, chat_mdl, d, task["parser_config"]["auto_questions"])
progress_callback(msg="Question generation {} chunks completed in {:.2f}s".format(len(docs), timer() - st))

if task["parser_id"].lower() == "naive" and task["parser_config"].get("toc_extraction", False):
    progress_callback(msg="Start to generate table of content ...")
    chat_mdl = LLMBundle(task["tenant_id"], LLMType.CHAT, llm_name=task["llm_id"], lang=task["language"])
    docs = sorted(docs, key=lambda d:(
        d.get("page_num_int", 0)[0] if isinstance(d.get("page_num_int", 0), list) else d.get("page_num_int", 0),
        d.get("top_int", 0)[0] if isinstance(d.get("top_int", 0), list) else d.get("top_int", 0)
    ))
    toc: list[dict] = await run_toc_from_text([d["content_with_weight"] for d in docs], chat_mdl, progress_callback)
    logging.info("------------ T O C -------------\n"+json.dumps(toc, ensure_ascii=False, indent=' '))
    ii = 0
    while ii < len(toc):
        try:
            idx = int(toc[ii]["chunk_id"])
            del toc[ii]["chunk_id"]
            toc[ii]["ids"] = [docs[idx]["id"]]
            if ii == len(toc) -1:
                break
            for jj in range(idx+1, int(toc[ii+1]["chunk_id"])+1):
                toc[ii]["ids"].append(docs[jj]["id"])
        except Exception as e:
            logging.exception(e)
        ii += 1

    if toc:
        d = copy.deepcopy(docs[-1])
        d["content_with_weight"] = json.dumps(toc, ensure_ascii=False)
        d["toc_kwd"] = "toc"
        d["available_int"] = 0
        d["page_num_int"] = 100000000
        d["id"] = xxhash.xxh64((d["content_with_weight"] + str(d["doc_id"])).encode("utf-8", "surrogatepass")).hexdigest()
        docs.append(d)

if task["kb_parser_config"].get("tag_kb_ids", []):
    progress_callback(msg="Start to tag for every chunk ...")
    kb_ids = task["kb_parser_config"]["tag_kb_ids"]
@@ -451,6 +419,39 @@ async def build_chunks(task, progress_callback):
return docs


def build_TOC(task, docs, progress_callback):
    progress_callback(msg="Start to generate table of content ...")
    chat_mdl = LLMBundle(task["tenant_id"], LLMType.CHAT, llm_name=task["llm_id"], lang=task["language"])
    docs = sorted(docs, key=lambda d:(
        d.get("page_num_int", 0)[0] if isinstance(d.get("page_num_int", 0), list) else d.get("page_num_int", 0),
        d.get("top_int", 0)[0] if isinstance(d.get("top_int", 0), list) else d.get("top_int", 0)
    ))
    toc: list[dict] = trio.run(run_toc_from_text, [d["content_with_weight"] for d in docs], chat_mdl, progress_callback)
    logging.info("------------ T O C -------------\n"+json.dumps(toc, ensure_ascii=False, indent=' '))
    ii = 0
    while ii < len(toc):
        try:
            idx = int(toc[ii]["chunk_id"])
            del toc[ii]["chunk_id"]
            toc[ii]["ids"] = [docs[idx]["id"]]
            if ii == len(toc) -1:
                break
            for jj in range(idx+1, int(toc[ii+1]["chunk_id"])+1):
                toc[ii]["ids"].append(docs[jj]["id"])
        except Exception as e:
            logging.exception(e)
        ii += 1

    if toc:
        d = copy.deepcopy(docs[-1])
        d["content_with_weight"] = json.dumps(toc, ensure_ascii=False)
        d["toc_kwd"] = "toc"
        d["available_int"] = 0
        d["page_num_int"] = 100000000
        d["id"] = xxhash.xxh64((d["content_with_weight"] + str(d["doc_id"])).encode("utf-8", "surrogatepass")).hexdigest()
        return d


def init_kb(row, vector_size: int):
    idxnm = search.index_name(row["tenant_id"])
    return settings.docStoreConn.createIdx(idxnm, row.get("kb_id", ""), vector_size)
@@ -691,7 +692,7 @@ async def run_raptor_for_kb(row, kb_parser_config, chat_mdl, embd_mdl, vector_si
raptor_config["threshold"],
)
original_length = len(chunks)
chunks = await raptor(chunks, row["kb_parser_config"]["raptor"]["random_seed"], callback)
chunks = await raptor(chunks, kb_parser_config["raptor"]["random_seed"], callback)
doc = {
    "doc_id": fake_doc_id,
    "kb_id": [str(row["kb_id"])],
@@ -753,7 +754,7 @@ async def insert_es(task_id, task_tenant_id, task_dataset_id, chunks, progress_c
return True


@timeout(60*60*2, 1)
@timeout(60*60*3, 1)
async def do_handle_task(task):
    task_type = task.get("task_type", "")

@@ -773,6 +774,8 @@ async def do_handle_task(task):
task_document_name = task["name"]
task_parser_config = task["parser_config"]
task_start_ts = timer()
toc_thread = None
executor = concurrent.futures.ThreadPoolExecutor()

# prepare the progress callback function
progress_callback = partial(set_progress, task_id, task_from_page, task_to_page)
@@ -814,8 +817,22 @@ async def do_handle_task(task):

kb_parser_config = kb.parser_config
if not kb_parser_config.get("raptor", {}).get("use_raptor", False):
    progress_callback(prog=-1.0, msg="Internal error: Invalid RAPTOR configuration")
    return
kb_parser_config.update(
    {
        "raptor": {
            "use_raptor": True,
            "prompt": "Please summarize the following paragraphs. Be careful with the numbers, do not make things up. Paragraphs as following:\n {cluster_content}\nThe above is the content you need to summarize.",
            "max_token": 256,
            "threshold": 0.1,
            "max_cluster": 64,
            "random_seed": 0,
        },
    }
)
if not KnowledgebaseService.update_by_id(kb.id, {"parser_config":kb_parser_config}):
    progress_callback(prog=-1.0, msg="Internal error: Invalid RAPTOR configuration")
    return

# bind LLM for raptor
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
# run RAPTOR
@@ -838,8 +855,25 @@ async def do_handle_task(task):

kb_parser_config = kb.parser_config
if not kb_parser_config.get("graphrag", {}).get("use_graphrag", False):
    progress_callback(prog=-1.0, msg="Internal error: Invalid GraphRAG configuration")
    return
kb_parser_config.update(
    {
        "graphrag": {
            "use_graphrag": True,
            "entity_types": [
                "organization",
                "person",
                "geo",
                "event",
                "category",
            ],
            "method": "light",
        }
    }
)
if not KnowledgebaseService.update_by_id(kb.id, {"parser_config":kb_parser_config}):
    progress_callback(prog=-1.0, msg="Internal error: Invalid GraphRAG configuration")
    return


graphrag_conf = kb_parser_config.get("graphrag", {})
start_ts = timer()
@@ -874,8 +908,6 @@ async def do_handle_task(task):
if not chunks:
    progress_callback(1., msg=f"No chunk built from {task_document_name}")
    return
# TODO: exception handler
## set_progress(task["did"], -1, "ERROR: ")
progress_callback(msg="Generate {} chunks".format(len(chunks)))
start_ts = timer()
try:
@@ -889,6 +921,8 @@ async def do_handle_task(task):
progress_message = "Embedding chunks ({:.2f}s)".format(timer() - start_ts)
logging.info(progress_message)
progress_callback(msg=progress_message)
if task["parser_id"].lower() == "naive" and task["parser_config"].get("toc_extraction", False):
    toc_thread = executor.submit(build_TOC, task, chunks, progress_callback)

chunk_count = len(set([chunk["id"] for chunk in chunks]))
start_ts = timer()
@@ -903,8 +937,17 @@ async def do_handle_task(task):
DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, token_count, chunk_count, 0)

time_cost = timer() - start_ts
progress_callback(msg="Indexing done ({:.2f}s).".format(time_cost))
if toc_thread:
    d = toc_thread.result()
    if d:
        e = await insert_es(task_id, task_tenant_id, task_dataset_id, [d], progress_callback)
        if not e:
            return
        DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, 0, 1, 0)

task_time_cost = timer() - task_start_ts
progress_callback(prog=1.0, msg="Indexing done ({:.2f}s). Task done ({:.2f}s)".format(time_cost, task_time_cost))
progress_callback(prog=1.0, msg="Task done ({:.2f}s)".format(task_time_cost))
logging.info(
    "Chunk doc({}), page({}-{}), chunks({}), token({}), elapsed:{:.2f}".format(task_document_name, task_from_page,
                                                                               task_to_page, len(chunks),

@@ -60,7 +60,7 @@ class RAGFlowMinio:
)
return r

def put(self, bucket, fnm, binary):
def put(self, bucket, fnm, binary, tenant_id=None):
    for _ in range(3):
        try:
            if not self.conn.bucket_exists(bucket):
@@ -76,13 +76,13 @@ class RAGFlowMinio:
            self.__open__()
            time.sleep(1)

def rm(self, bucket, fnm):
def rm(self, bucket, fnm, tenant_id=None):
    try:
        self.conn.remove_object(bucket, fnm)
    except Exception:
        logging.exception(f"Fail to remove {bucket}/{fnm}:")

def get(self, bucket, filename):
def get(self, bucket, filename, tenant_id=None):
    for _ in range(1):
        try:
            r = self.conn.get_object(bucket, filename)
@@ -93,7 +93,7 @@ class RAGFlowMinio:
            time.sleep(1)
    return

def obj_exist(self, bucket, filename):
def obj_exist(self, bucket, filename, tenant_id=None):
    try:
        if not self.conn.bucket_exists(bucket):
            return False
@@ -121,7 +121,7 @@ class RAGFlowMinio:
        logging.exception(f"bucket_exist {bucket} got exception")
        return False

def get_presigned_url(self, bucket, fnm, expires):
def get_presigned_url(self, bucket, fnm, expires, tenant_id=None):
    for _ in range(10):
        try:
            return self.conn.get_presigned_url("GET", bucket, fnm, expires)

@@ -1,6 +1,6 @@
[project]
name = "ragflow-sdk"
version = "0.20.5"
version = "0.21.0"
description = "Python client sdk of [RAGFlow](https://github.com/infiniflow/ragflow). RAGFlow is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding."
authors = [{ name = "Zhichang Yu", email = "yuzhichang@gmail.com" }]
license = { text = "Apache License, Version 2.0" }

@@ -100,12 +100,51 @@ class DataSet(Base):
res = res.json()
if res.get("code") != 0:
    raise Exception(res["message"])


def _get_documents_status(self, document_ids):
    import time
    terminal_states = {"DONE", "FAIL", "CANCEL"}
    interval_sec = 1
    pending = set(document_ids)
    finished = []
    while pending:
        for doc_id in list(pending):
            def fetch_doc(doc_id: str) -> Document | None:
                try:
                    docs = self.list_documents(id=doc_id)
                    return docs[0] if docs else None
                except Exception:
                    return None
            doc = fetch_doc(doc_id)
            if doc is None:
                continue
            if isinstance(doc.run, str) and doc.run.upper() in terminal_states:
                finished.append((doc_id, doc.run, doc.chunk_count, doc.token_count))
                pending.discard(doc_id)
            elif float(doc.progress or 0.0) >= 1.0:
                finished.append((doc_id, "DONE", doc.chunk_count, doc.token_count))
                pending.discard(doc_id)
        if pending:
            time.sleep(interval_sec)
    return finished

def async_parse_documents(self, document_ids):
    res = self.post(f"/datasets/{self.id}/chunks", {"document_ids": document_ids})
    res = res.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))


def parse_documents(self, document_ids):
    try:
        self.async_parse_documents(document_ids)
        self._get_documents_status(document_ids)
    except KeyboardInterrupt:
        self.async_cancel_parse_documents(document_ids)

    return self._get_documents_status(document_ids)


def async_cancel_parse_documents(self, document_ids):
    res = self.rm(f"/datasets/{self.id}/chunks", {"document_ids": document_ids})

2
sdk/python/uv.lock
generated
@@ -342,7 +342,7 @@ wheels = [

[[package]]
name = "ragflow-sdk"
version = "0.20.5"
version = "0.21.0"
source = { virtual = "." }
dependencies = [
{ name = "beartype" },

108
uv.lock
generated
@@ -834,10 +834,10 @@ wheels = [
[[package]]
name = "cobble"
version = "0.1.4"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/54/7a/a507c709be2c96e1bb6102eb7b7f4026c5e5e223ef7d745a17d239e9d844/cobble-0.1.4.tar.gz", hash = "sha256:de38be1539992c8a06e569630717c485a5f91be2192c461ea2b220607dfa78aa" }
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/54/7a/a507c709be2c96e1bb6102eb7b7f4026c5e5e223ef7d745a17d239e9d844/cobble-0.1.4.tar.gz", hash = "sha256:de38be1539992c8a06e569630717c485a5f91be2192c461ea2b220607dfa78aa", size = 3805, upload-time = "2024-06-01T18:11:09.528Z" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/d5/e1/3714a2f371985215c219c2a70953d38e3eed81ef165aed061d21de0e998b/cobble-0.1.4-py3-none-any.whl", hash = "sha256:36c91b1655e599fd428e2b95fdd5f0da1ca2e9f1abb0bc871dec21a0e78a2b44" },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d5/e1/3714a2f371985215c219c2a70953d38e3eed81ef165aed061d21de0e998b/cobble-0.1.4-py3-none-any.whl", hash = "sha256:36c91b1655e599fd428e2b95fdd5f0da1ca2e9f1abb0bc871dec21a0e78a2b44", size = 3984, upload-time = "2024-06-01T18:11:07.911Z" },
]

[[package]]
@@ -873,10 +873,10 @@ wheels = [
|
||||
[[package]]
|
||||
name = "colorclass"
|
||||
version = "2.2.2"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d7/1a/31ff00a33569a3b59d65bbdc445c73e12f92ad28195b7ace299f68b9af70/colorclass-2.2.2.tar.gz", hash = "sha256:6d4fe287766166a98ca7bc6f6312daf04a0481b1eda43e7173484051c0ab4366" }
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/d7/1a/31ff00a33569a3b59d65bbdc445c73e12f92ad28195b7ace299f68b9af70/colorclass-2.2.2.tar.gz", hash = "sha256:6d4fe287766166a98ca7bc6f6312daf04a0481b1eda43e7173484051c0ab4366", size = 16709, upload-time = "2021-12-09T00:41:35.661Z" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/30/b6/daf3e2976932da4ed3579cff7a30a53d22ea9323ee4f0d8e43be60454897/colorclass-2.2.2-py2.py3-none-any.whl", hash = "sha256:6f10c273a0ef7a1150b1120b6095cbdd68e5cf36dfd5d0fc957a2500bbf99a55" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/30/b6/daf3e2976932da4ed3579cff7a30a53d22ea9323ee4f0d8e43be60454897/colorclass-2.2.2-py2.py3-none-any.whl", hash = "sha256:6f10c273a0ef7a1150b1120b6095cbdd68e5cf36dfd5d0fc957a2500bbf99a55", size = 18995, upload-time = "2021-12-09T00:41:34.653Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -894,10 +894,10 @@ wheels = [
|
||||
[[package]]
|
||||
name = "compressed-rtf"
|
||||
version = "1.0.7"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/b7/0c/929a4e8ef9d7143f54d77dadb5f370cc7b98534b1bd6e1124d0abe8efb24/compressed_rtf-1.0.7.tar.gz", hash = "sha256:7c30859334839f3cdc7d10796af5b434bb326b9df7cb5a65e95a8eacb2951b0e" }
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b7/0c/929a4e8ef9d7143f54d77dadb5f370cc7b98534b1bd6e1124d0abe8efb24/compressed_rtf-1.0.7.tar.gz", hash = "sha256:7c30859334839f3cdc7d10796af5b434bb326b9df7cb5a65e95a8eacb2951b0e", size = 8152, upload-time = "2025-03-24T22:39:32.062Z" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/07/1d/62f5bf92e12335eb63517f42671ed78512d48bbc69e02a942dd7b90f03f0/compressed_rtf-1.0.7-py3-none-any.whl", hash = "sha256:b7904921d78c67a0a4b7fff9fb361a00ae2b447b6edca010ce321cd98fa0fcc0" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/07/1d/62f5bf92e12335eb63517f42671ed78512d48bbc69e02a942dd7b90f03f0/compressed_rtf-1.0.7-py3-none-any.whl", hash = "sha256:b7904921d78c67a0a4b7fff9fb361a00ae2b447b6edca010ce321cd98fa0fcc0", size = 7968, upload-time = "2025-03-24T23:03:57.433Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1352,18 +1352,18 @@ wheels = [
|
||||
[[package]]
|
||||
name = "easygui"
|
||||
version = "0.98.3"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/cc/ad/e35f7a30272d322be09dc98592d2f55d27cc933a7fde8baccbbeb2bd9409/easygui-0.98.3.tar.gz", hash = "sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04" }
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/cc/ad/e35f7a30272d322be09dc98592d2f55d27cc933a7fde8baccbbeb2bd9409/easygui-0.98.3.tar.gz", hash = "sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04", size = 85583, upload-time = "2022-04-01T13:15:50.752Z" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/8e/a7/b276ff776533b423710a285c8168b52551cb2ab0855443131fdc7fd8c16f/easygui-0.98.3-py2.py3-none-any.whl", hash = "sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8e/a7/b276ff776533b423710a285c8168b52551cb2ab0855443131fdc7fd8c16f/easygui-0.98.3-py2.py3-none-any.whl", hash = "sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba", size = 92655, upload-time = "2022-04-01T13:15:49.568Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ebcdic"
|
||||
version = "1.1.1"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/0d/2f/633031205333bee5f9f93761af8268746aa75f38754823aabb8570eb245b/ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1" },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/0d/2f/633031205333bee5f9f93761af8268746aa75f38754823aabb8570eb245b/ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1", size = 128537, upload-time = "2019-08-09T00:54:35.544Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1482,7 +1482,7 @@ wheels = [
[[package]]
name = "extract-msg"
version = "0.41.5"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "beautifulsoup4" },
{ name = "chardet" },
@@ -1494,9 +1494,9 @@ dependencies = [
{ name = "rtfde" },
{ name = "tzlocal" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/ef/fa/67443d9b9f505c32cba96e34745223378b84cd4795c387310788cc8b6d7d/extract_msg-0.41.5.tar.gz", hash = "sha256:99d4fdc0c0912c836370bf9fbb6e77558bb978499c1b5fdd31634684e323885c" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ef/fa/67443d9b9f505c32cba96e34745223378b84cd4795c387310788cc8b6d7d/extract_msg-0.41.5.tar.gz", hash = "sha256:99d4fdc0c0912c836370bf9fbb6e77558bb978499c1b5fdd31634684e323885c", size = 181877, upload-time = "2023-06-11T17:19:42.931Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/be/e2/f0ed8df3907ad6e90e762d8e90adb4e25d12fea851a8371611fa14405782/extract_msg-0.41.5-py2.py3-none-any.whl", hash = "sha256:ad70dcdab3701b0fae554168c9642ad4ebef7f2ec283313c55e895a6518911e5" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/be/e2/f0ed8df3907ad6e90e762d8e90adb4e25d12fea851a8371611fa14405782/extract_msg-0.41.5-py2.py3-none-any.whl", hash = "sha256:ad70dcdab3701b0fae554168c9642ad4ebef7f2ec283313c55e895a6518911e5", size = 185222, upload-time = "2023-06-11T17:19:40.781Z" },
]

[[package]]
@@ -2642,13 +2642,13 @@ wheels = [
[[package]]
name = "imapclient"
version = "2.3.1"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "six" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/19/d8/a4a0337d5e39a0569d89793d5053d7535eefd9b8756df4e10dc114caf3c2/IMAPClient-2.3.1.zip", hash = "sha256:26ea995664fae3a88b878ebce2aff7402931697b86658b7882043ddb01b0e6ba" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/19/d8/a4a0337d5e39a0569d89793d5053d7535eefd9b8756df4e10dc114caf3c2/IMAPClient-2.3.1.zip", hash = "sha256:26ea995664fae3a88b878ebce2aff7402931697b86658b7882043ddb01b0e6ba" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/13/9c/b2890e73bc9eee53fe63218e3f3cb774a6beefdb7b5c47928a81cc3b3c13/IMAPClient-2.3.1-py2.py3-none-any.whl", hash = "sha256:057f28025d2987c63e065afb0e4370b0b850b539b0e1494cea0427e88130108c" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/13/9c/b2890e73bc9eee53fe63218e3f3cb774a6beefdb7b5c47928a81cc3b3c13/IMAPClient-2.3.1-py2.py3-none-any.whl", hash = "sha256:057f28025d2987c63e065afb0e4370b0b850b539b0e1494cea0427e88130108c" },
]

[[package]]
@@ -2679,7 +2679,7 @@ wheels = [

[[package]]
name = "infinity-sdk"
-version = "0.6.0.dev7"
+version = "0.6.0"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "numpy" },
@@ -2696,7 +2696,7 @@ dependencies = [
{ name = "thrift" },
]
wheels = [
-{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/28/ec/f44f451d588f0d1d729eb1fcf1c0006d9fdeb116a33017e94d181dbee851/infinity_sdk-0.6.0.dev7-py3-none-any.whl", hash = "sha256:be4f51b667154ea407c2964769f10ebc00e362d3788e70e6c79f96df4970a40c", size = 75304, upload-time = "2025-10-10T02:42:08.49Z" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f4/12/1ce243cbede6da5fc28e5462d90d96b13995446b3a90889287d31255b36e/infinity_sdk-0.6.0-py3-none-any.whl", hash = "sha256:e379853ffc44acba428572d633032e6c9bb842d1f08e9cad690916f52a8c6ba8", size = 75256, upload-time = "2025-10-14T12:05:13.918Z" },
]

[[package]]
@@ -2981,10 +2981,10 @@ wheels = [
[[package]]
name = "lark-parser"
version = "0.12.0"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/5a/ee/fd1192d7724419ddfe15b6f17d1c8742800d4de917c0adac3b6aaf22e921/lark-parser-0.12.0.tar.gz", hash = "sha256:15967db1f1214013dca65b1180745047b9be457d73da224fcda3d9dd4e96a138" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/5a/ee/fd1192d7724419ddfe15b6f17d1c8742800d4de917c0adac3b6aaf22e921/lark-parser-0.12.0.tar.gz", hash = "sha256:15967db1f1214013dca65b1180745047b9be457d73da224fcda3d9dd4e96a138", size = 235029, upload-time = "2021-08-30T09:14:44.484Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/76/00/90f05db333fe1aa6b6ffea83a35425b7d53ea95c8bba0b1597f226cf1d5f/lark_parser-0.12.0-py2.py3-none-any.whl", hash = "sha256:0eaf30cb5ba787fe404d73a7d6e61df97b21d5a63ac26c5008c78a494373c675" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/76/00/90f05db333fe1aa6b6ffea83a35425b7d53ea95c8bba0b1597f226cf1d5f/lark_parser-0.12.0-py2.py3-none-any.whl", hash = "sha256:0eaf30cb5ba787fe404d73a7d6e61df97b21d5a63ac26c5008c78a494373c675", size = 103498, upload-time = "2021-08-30T13:01:01.603Z" },
]

[[package]]
@@ -3157,13 +3157,13 @@ wheels = [
[[package]]
name = "mammoth"
version = "1.11.0"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "cobble" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/ed/3c/a58418d2af00f2da60d4a51e18cd0311307b72d48d2fffec36a97b4a5e44/mammoth-1.11.0.tar.gz", hash = "sha256:a0f59e442f34d5b6447f4b0999306cbf3e67aaabfa8cb516f878fb1456744637" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ed/3c/a58418d2af00f2da60d4a51e18cd0311307b72d48d2fffec36a97b4a5e44/mammoth-1.11.0.tar.gz", hash = "sha256:a0f59e442f34d5b6447f4b0999306cbf3e67aaabfa8cb516f878fb1456744637", size = 53142, upload-time = "2025-09-19T10:35:20.373Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/ca/54/2e39566a131b13f6d8d193f974cb6a34e81bb7cc2fa6f7e03de067b36588/mammoth-1.11.0-py2.py3-none-any.whl", hash = "sha256:c077ab0d450bd7c0c6ecd529a23bf7e0fa8190c929e28998308ff4eada3f063b" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ca/54/2e39566a131b13f6d8d193f974cb6a34e81bb7cc2fa6f7e03de067b36588/mammoth-1.11.0-py2.py3-none-any.whl", hash = "sha256:c077ab0d450bd7c0c6ecd529a23bf7e0fa8190c929e28998308ff4eada3f063b", size = 54752, upload-time = "2025-09-19T10:35:18.699Z" },
]

[[package]]
@@ -3199,14 +3199,14 @@ wheels = [
[[package]]
name = "markdownify"
version = "1.2.0"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "beautifulsoup4" },
{ name = "six" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/83/1b/6f2697b51eaca81f08852fd2734745af15718fea10222a1d40f8a239c4ea/markdownify-1.2.0.tar.gz", hash = "sha256:f6c367c54eb24ee953921804dfe6d6575c5e5b42c643955e7242034435de634c" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/83/1b/6f2697b51eaca81f08852fd2734745af15718fea10222a1d40f8a239c4ea/markdownify-1.2.0.tar.gz", hash = "sha256:f6c367c54eb24ee953921804dfe6d6575c5e5b42c643955e7242034435de634c", size = 18771, upload-time = "2025-08-09T17:44:15.302Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/6a/e2/7af643acb4cae0741dffffaa7f3f7c9e7ab4046724543ba1777c401d821c/markdownify-1.2.0-py3-none-any.whl", hash = "sha256:48e150a1c4993d4d50f282f725c0111bd9eb25645d41fa2f543708fd44161351" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6a/e2/7af643acb4cae0741dffffaa7f3f7c9e7ab4046724543ba1777c401d821c/markdownify-1.2.0-py3-none-any.whl", hash = "sha256:48e150a1c4993d4d50f282f725c0111bd9eb25645d41fa2f543708fd44161351", size = 15561, upload-time = "2025-08-09T17:44:14.074Z" },
]

[[package]]
@@ -3499,14 +3499,14 @@ wheels = [
[[package]]
name = "msoffcrypto-tool"
version = "5.4.2"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "olefile" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d2/b7/0fd6573157e0ec60c0c470e732ab3322fba4d2834fd24e1088d670522a01/msoffcrypto_tool-5.4.2.tar.gz", hash = "sha256:44b545adba0407564a0cc3d6dde6ca36b7c0fdf352b85bca51618fa1d4817370" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/d2/b7/0fd6573157e0ec60c0c470e732ab3322fba4d2834fd24e1088d670522a01/msoffcrypto_tool-5.4.2.tar.gz", hash = "sha256:44b545adba0407564a0cc3d6dde6ca36b7c0fdf352b85bca51618fa1d4817370", size = 41183, upload-time = "2024-08-08T15:50:28.462Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/03/54/7f6d3d9acad083dae8c22d9ab483b657359a1bf56fee1d7af88794677707/msoffcrypto_tool-5.4.2-py3-none-any.whl", hash = "sha256:274fe2181702d1e5a107ec1b68a4c9fea997a44972ae1cc9ae0cb4f6a50fef0e" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/03/54/7f6d3d9acad083dae8c22d9ab483b657359a1bf56fee1d7af88794677707/msoffcrypto_tool-5.4.2-py3-none-any.whl", hash = "sha256:274fe2181702d1e5a107ec1b68a4c9fea997a44972ae1cc9ae0cb4f6a50fef0e", size = 48713, upload-time = "2024-08-08T15:50:27.093Z" },
]

[[package]]
@@ -3861,13 +3861,13 @@ wheels = [
[[package]]
name = "olefile"
version = "0.46"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/34/81/e1ac43c6b45b4c5f8d9352396a14144bba52c8fec72a80f425f6a4d653ad/olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/34/81/e1ac43c6b45b4c5f8d9352396a14144bba52c8fec72a80f425f6a4d653ad/olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964" }

[[package]]
name = "oletools"
version = "0.60.2"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "colorclass" },
{ name = "easygui" },
@@ -3876,9 +3876,9 @@ dependencies = [
{ name = "pcodedmp" },
{ name = "pyparsing" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/5c/2f/037f40e44706d542b94a2312ccc33ee2701ebfc9a83b46b55263d49ce55a/oletools-0.60.2.zip", hash = "sha256:ad452099f4695ffd8855113f453348200d195ee9fa341a09e197d66ee7e0b2c3" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/5c/2f/037f40e44706d542b94a2312ccc33ee2701ebfc9a83b46b55263d49ce55a/oletools-0.60.2.zip", hash = "sha256:ad452099f4695ffd8855113f453348200d195ee9fa341a09e197d66ee7e0b2c3", size = 3433750, upload-time = "2024-07-02T14:50:38.242Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/ac/ff/05257b7183279b80ecec6333744de23f48f0faeeba46c93e6d13ce835515/oletools-0.60.2-py2.py3-none-any.whl", hash = "sha256:72ad8bd748fd0c4e7b5b4733af770d11543ebb2bf2697455f99f975fcd50cc96" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ac/ff/05257b7183279b80ecec6333744de23f48f0faeeba46c93e6d13ce835515/oletools-0.60.2-py2.py3-none-any.whl", hash = "sha256:72ad8bd748fd0c4e7b5b4733af770d11543ebb2bf2697455f99f975fcd50cc96", size = 989449, upload-time = "2024-07-02T14:50:29.122Z" },
]

[[package]]
@@ -4346,14 +4346,14 @@ wheels = [
[[package]]
name = "pcodedmp"
version = "1.2.6"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "oletools" },
{ name = "win-unicode-console", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/3d/20/6d461e29135f474408d0d7f95b2456a9ba245560768ee51b788af10f7429/pcodedmp-1.2.6.tar.gz", hash = "sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/3d/20/6d461e29135f474408d0d7f95b2456a9ba245560768ee51b788af10f7429/pcodedmp-1.2.6.tar.gz", hash = "sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955", size = 35549, upload-time = "2019-07-30T18:05:42.516Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/ba/72/b380fb5c89d89c3afafac8cf02a71a45f4f4a4f35531ca949a34683962d1/pcodedmp-1.2.6-py2.py3-none-any.whl", hash = "sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ba/72/b380fb5c89d89c3afafac8cf02a71a45f4f4a4f35531ca949a34683962d1/pcodedmp-1.2.6-py2.py3-none-any.whl", hash = "sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278", size = 30939, upload-time = "2019-07-30T18:05:40.483Z" },
]

[[package]]
@@ -5436,7 +5436,7 @@ wheels = [

[[package]]
name = "ragflow"
-version = "0.20.5"
+version = "0.21.0"
source = { virtual = "." }
dependencies = [
{ name = "akshare" },
@@ -5644,7 +5644,7 @@ requires-dist = [
{ name = "httpx", extras = ["socks"], specifier = "==0.27.2" },
{ name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
{ name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
-{ name = "infinity-sdk", specifier = "==0.6.0.dev7" },
+{ name = "infinity-sdk", specifier = "==0.6.0" },
{ name = "itsdangerous", specifier = "==2.1.2" },
{ name = "json-repair", specifier = "==0.35.0" },
{ name = "langfuse", specifier = ">=2.60.0" },
@@ -5809,8 +5809,8 @@ wheels = [
[[package]]
name = "red-black-tree-mod"
version = "1.20"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/34/12/944f61bc67a1e918953741c0b3b75a28f96d8060d08fd3614233309ced3b/red-black-tree-mod-1.20.tar.gz", hash = "sha256:2448e6fc9cbf1be204c753f352c6ee49aa8156dbf1faa57dfc26bd7705077e0a" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/34/12/944f61bc67a1e918953741c0b3b75a28f96d8060d08fd3614233309ced3b/red-black-tree-mod-1.20.tar.gz", hash = "sha256:2448e6fc9cbf1be204c753f352c6ee49aa8156dbf1faa57dfc26bd7705077e0a", size = 28589, upload-time = "2013-11-04T16:58:20.788Z" }

[[package]]
name = "referencing"
@@ -6068,14 +6068,14 @@ wheels = [
[[package]]
name = "rtfde"
version = "0.0.2"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "lark-parser" },
{ name = "oletools" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/81/ea/28f5ab6b46a072887c8c8fd8c8a1f7b54025fc4bb2e09024668ea6686044/RTFDE-0.0.2.tar.gz", hash = "sha256:b86b5d734950fe8745a5b89133f50554252dbd67c6d1b9265e23ee140e7ea8a2" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/81/ea/28f5ab6b46a072887c8c8fd8c8a1f7b54025fc4bb2e09024668ea6686044/RTFDE-0.0.2.tar.gz", hash = "sha256:b86b5d734950fe8745a5b89133f50554252dbd67c6d1b9265e23ee140e7ea8a2", size = 18891, upload-time = "2020-12-28T15:15:35.981Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/5d/3f/39ba5a72620c43656bc80cb1f7afe0d498df4a48947d75ea0ca0752ffbf4/RTFDE-0.0.2-py3-none-any.whl", hash = "sha256:18386e4f060cee12a2a8035b0acf0cc99689f5dff1bf347bab7e92351860a21d" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/5d/3f/39ba5a72620c43656bc80cb1f7afe0d498df4a48947d75ea0ca0752ffbf4/RTFDE-0.0.2-py3-none-any.whl", hash = "sha256:18386e4f060cee12a2a8035b0acf0cc99689f5dff1bf347bab7e92351860a21d", size = 34626, upload-time = "2020-12-28T15:15:35Z" },
]

[[package]]
@@ -7131,13 +7131,13 @@ wheels = [
[[package]]
name = "tzlocal"
version = "5.3.1"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" }
wheels = [
-{ url = "https://mirrors.aliyun.com/pypi/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d" },
+{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" },
]

[[package]]
@@ -7387,8 +7387,8 @@ sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/67/35/25e68fbc99e672
[[package]]
name = "win-unicode-console"
version = "0.5"
-source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
-sdist = { url = "https://mirrors.aliyun.com/pypi/packages/89/8d/7aad74930380c8972ab282304a2ff45f3d4927108bb6693cabcc9fc6a099/win_unicode_console-0.5.zip", hash = "sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e" }
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/89/8d/7aad74930380c8972ab282304a2ff45f3d4927108bb6693cabcc9fc6a099/win_unicode_console-0.5.zip", hash = "sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e", size = 31420, upload-time = "2016-06-25T19:48:54.05Z" }

[[package]]
name = "win32-setctime"
@@ -104,7 +104,7 @@ const RootProvider = ({ children }: React.PropsWithChildren) => {
<TooltipProvider>
<QueryClientProvider client={queryClient}>
<ThemeProvider
-defaultTheme={ThemeEnum.Light}
+defaultTheme={ThemeEnum.Dark}
storageKey="ragflow-ui-theme"
>
<Root>{children}</Root>
Binary file not shown. (Before: 8.0 KiB)
50 web/src/assets/logo-with-text.svg Normal file
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg width="1500px" height="500px" viewBox="0 0 1500 500" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<title>RAG- logos</title>
+<defs>
+<rect id="path-1" x="0" y="0" width="480.282452" height="480"></rect>
+<linearGradient x1="-19.6945332%" y1="78.7580689%" x2="78.6511106%" y2="-14.5268659%" id="linearGradient-3">
+<stop stop-color="#43CDE9" offset="0%"></stop>
+<stop stop-color="#4E40EC" offset="100%"></stop>
+</linearGradient>
+<linearGradient x1="-19.8760293%" y1="78.7580689%" x2="78.7257229%" y2="-14.5268659%" id="linearGradient-4">
+<stop stop-color="#43CDE9" offset="0%"></stop>
+<stop stop-color="#4E40EC" offset="100%"></stop>
+</linearGradient>
+<linearGradient x1="-20.3066254%" y1="78.7580689%" x2="78.902739%" y2="-14.5268659%" id="linearGradient-5">
+<stop stop-color="#43CDE9" offset="0%"></stop>
+<stop stop-color="#4E40EC" offset="100%"></stop>
+</linearGradient>
+</defs>
+<g id="logos" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+<g id="GitHub-bg" transform="translate(-67, -129)">
+<g id="RAG--logos" transform="translate(67, 129)">
+<rect id="矩形" fill-opacity="0" fill="#D8D8D8" x="0" y="0" width="1500" height="500"></rect>
+<g id="rag-logo" transform="translate(22, 10)">
+<mask id="mask-2" fill="white">
+<use xlink:href="#path-1"></use>
+</mask>
+<use id="矩形" fill-opacity="0" fill="#D8D8D8" xlink:href="#path-1"></use>
+<path d="M91.6742598,285.524755 C100.185587,294.225017 100.18547,308.330398 91.673908,317.029482 L91.2380792,317.474803 C82.7265175,326.175066 68.9266032,326.175066 60.4152408,317.474803 C51.9038397,308.774541 51.903971,294.670338 60.415534,285.970076 L60.8514331,285.524755 C69.3630183,276.824493 83.1628154,276.825671 91.6742598,285.524755 Z" id="路径" fill="#53F3FD" mask="url(#mask-2)"></path>
+<path d="M195.695835,291.612434 C204.281754,300.406974 204.256864,314.640147 195.638943,323.402589 L134.628661,385.444627 C126.011688,394.207069 112.065969,394.182105 103.48005,385.387566 C94.8941307,376.593026 94.9193764,362.359853 103.536349,353.597411 L164.546987,291.555373 C173.164315,282.792931 187.109915,282.817895 195.695835,291.612434 Z" id="路径" fill="#43CDE9" mask="url(#mask-2)"></path>
+<path d="M278.834919,398.04199 C285.530372,387.681543 299.098865,384.881873 309.142632,391.789235 L310.453967,392.690662 C320.496557,399.598024 323.210961,413.596372 316.515509,423.95801 C309.820057,434.318457 296.250387,437.118127 286.207797,430.210765 L284.896462,429.309338 C274.852695,422.401976 272.139467,408.403628 278.834919,398.04199 Z" id="路径" fill="#53F3FD" mask="url(#mask-2)"></path>
+<path d="M423.798774,283.537264 C432.404449,292.254797 432.404449,306.388179 423.798774,315.104532 L393.828923,345.46185 C385.223249,354.179383 371.269415,354.179383 362.663741,345.46185 C354.058067,336.745498 354.058067,322.612116 362.663741,313.894583 L392.633592,283.537264 C401.239266,274.820912 415.1931,274.820912 423.798774,283.537264 Z" id="路径" fill="#43CDE9" mask="url(#mask-2)"></path>
+<path d="M423.808132,170.562223 C432.401329,179.313039 432.401329,193.500381 423.808132,202.250012 L202.660892,427.436888 C194.067695,436.187704 180.135277,436.187704 171.54208,427.436888 C162.948764,418.687257 162.948764,404.499915 171.54208,395.749099 L392.689319,170.562223 C401.282517,161.812592 415.214935,161.812592 423.808132,170.562223 Z" id="路径" fill="url(#linearGradient-3)" mask="url(#mask-2)"></path>
+<path d="M382.786724,101.550556 C391.376297,110.284631 391.376297,124.445361 382.786724,133.179436 L255.670977,262.449089 C247.082587,271.183637 233.157963,271.183637 224.569573,262.449089 C215.981183,253.714541 215.981183,239.554757 224.569573,230.820209 L351.68532,101.550556 C360.27371,92.8164813 374.198334,92.8164813 382.786724,101.550556 Z" id="路径" fill="url(#linearGradient-4)" mask="url(#mask-2)"></path>
+<path d="M315.714683,58.5360848 C324.339423,67.2590474 324.348924,81.4113439 315.736061,90.1460086 L151.753781,256.442347 C143.14068,265.177603 129.166512,265.187059 120.541773,256.463624 C111.917034,247.740189 111.907651,233.588838 120.52087,224.853582 L284.501963,58.5572433 C293.116014,49.8225431 307.089944,49.8130277 315.714683,58.5360848 Z" id="路径" fill="url(#linearGradient-5)" mask="url(#mask-2)"></path>
+<path d="M152.674088,111.602489 C161.254247,120.384434 161.230323,134.598332 152.620437,143.350004 L88.574692,208.451766 C79.9649246,217.203675 66.0297171,217.179931 57.4494992,208.397155 C48.8693051,199.615567 48.8932882,185.401431 57.5030675,176.649522 L121.548836,111.547879 C130.158604,102.796089 144.093811,102.820545 152.674088,111.602489 Z" id="路径" fill="#43CDE9" mask="url(#mask-2)"></path>
+<path d="M192.112981,46 C204.270268,46 214.125927,56.0203121 214.125927,68.380962 L214.125927,70.6190025 C214.125927,82.9796524 204.270268,93 192.112981,93 C179.955694,93 170.100035,82.9796524 170.100035,70.6190025 L170.100035,68.380962 C170.100035,56.0203121 179.955694,46 192.112981,46 Z" id="路径" fill="#53F3FD" mask="url(#mask-2)"></path>
+</g>
+<g id="RAG-Flow" transform="translate(558, 174)" fill="#66686A" fill-rule="nonzero">
+<path d="M60.4847896,91.2109375 L29.4118282,91.2109375 L29.4118282,147.65625 L0,147.65625 L0,3.7109375 L70.647016,3.7109375 C80.7440998,3.90625 88.5123402,5.14322917 93.951737,7.421875 C99.3911338,9.70052083 103.999964,13.0533854 107.778228,17.4804688 C110.905067,21.1263021 113.380481,25.1627604 115.20447,29.5898438 C117.028459,34.0169271 117.940454,39.0625 117.940454,44.7265625 C117.940454,51.5625 116.214178,58.2845052 112.761627,64.8925781 C109.309076,71.500651 103.609109,76.171875 95.661727,78.90625 C102.30626,81.5755208 107.012804,85.3678385 109.781359,90.2832031 C112.549914,95.1985677 113.934192,102.701823 113.934192,112.792969 L113.934192,122.460938 C113.934192,129.036458 114.194762,133.496094 114.715901,135.839844 C115.497611,139.550781 117.3216,142.285156 120.187869,144.042969 L120.187869,147.65625 L87.0629201,147.65625 C86.1509254,144.466146 85.4995006,141.894531 85.1086458,139.941406 C84.3269361,135.904948 83.90351,131.770833 83.8383675,127.539063 L83.64294,114.160156 C83.5126551,104.980469 81.8352363,98.8606771 78.6106837,95.8007812 C75.3861311,92.7408854 69.3441664,91.2109375 60.4847896,91.2109375 Z M78.7572543,65.0390625 C84.7503622,62.3046875 87.7469161,56.9010417 87.7469161,48.828125 C87.7469161,40.1041667 84.8480759,34.2447917 79.0503954,31.25 C75.7932716,29.5572917 70.9075859,28.7109375 64.3933382,28.7109375 L29.4118282,28.7109375 L29.4118282,67.3828125 L63.5139148,67.3828125 C70.2887323,67.3828125 75.3698455,66.6015625 78.7572543,65.0390625 Z" id="形状"></path>
+<path d="M228.161525,118.066406 L175.102977,118.066406 L165.136178,147.65625 L133.672362,147.65625 L185.069776,3.7109375 L219.074149,3.7109375 L270.080708,147.65625 L237.444327,147.65625 L228.161525,118.066406 Z M219.758145,93.2617188 L201.778821,36.6210938 L183.213216,93.2617188 L219.758145,93.2617188 Z" id="形状"></path>
+<path d="M376.686371,144.140625 C368.738989,149.023437 358.967618,151.464844 347.372257,151.464844 C328.285511,151.464844 312.651317,144.856771 300.469674,131.640625 C287.766891,118.359375 281.415499,100.195313 281.415499,77.1484375 C281.415499,53.8411458 287.832033,35.15625 300.665101,21.09375 C313.498169,7.03125 330.467784,0 351.573947,0 C369.878983,0 384.584897,4.63867188 395.691689,13.9160156 C406.798481,23.1933594 413.166158,34.765625 414.79472,48.6328125 L385.187465,48.6328125 C382.907478,38.8020833 377.337796,31.9335937 368.478419,28.0273438 C363.527591,25.8789062 358.023052,24.8046875 351.964801,24.8046875 C340.369441,24.8046875 330.842353,29.1829427 323.38354,37.9394531 C315.924726,46.6959635 312.195319,59.8632813 312.195319,77.4414063 C312.195319,95.1497396 316.234153,107.682292 324.31182,115.039063 C332.389487,122.395833 341.574576,126.074219 351.867088,126.074219 C361.964172,126.074219 370.237266,123.160807 376.686371,117.333984 C383.135477,111.507161 387.109168,103.873698 388.607445,94.4335938 L355.287068,94.4335938 L355.287068,70.4101563 L415.283289,70.4101563 L415.283289,147.65625 L395.349691,147.65625 L392.320566,129.6875 C386.522885,136.523438 381.311487,141.341146 376.686371,144.140625 Z" id="路径"></path>
+<polygon id="路径" points="550.729209 29.1992188 478.518773 29.1992188 478.518773 62.3046875 541.739547 62.3046875 541.739547 87.3046875 478.518773 87.3046875 478.518773 147.65625 448.618377 147.65625 448.618377 3.90625 550.729209 3.90625"></polygon>
+<polygon id="路径" points="569.197101 3.7109375 597.04551 3.7109375 597.04551 147.65625 569.197101 147.65625"></polygon>
+<path d="M713.031689,54.6875 C722.021351,65.9505208 726.516182,79.2643229 726.516182,94.6289063 C726.516182,110.253906 722.021351,123.616536 713.031689,134.716797 C704.042028,145.817057 690.394679,151.367188 672.089643,151.367188 C653.784607,151.367188 640.137258,145.817057 631.147596,134.716797 C622.157935,123.616536 617.663104,110.253906 617.663104,94.6289063 C617.663104,79.2643229 622.157935,65.9505208 631.147596,54.6875 C640.137258,43.4244792 653.784607,37.7929688 672.089643,37.7929688 C690.394679,37.7929688 704.042028,43.4244792 713.031689,54.6875 Z M671.991929,61.328125 C663.84912,61.328125 657.579156,64.2089844 653.182039,69.9707031 C648.784922,75.7324219 646.586363,83.9518229 646.586363,94.6289062 C646.586363,105.30599 648.784922,113.541667 653.182039,119.335938 C657.579156,125.130208 663.84912,128.027344 671.991929,128.027344 C680.134739,128.027344 686.388417,125.130208 690.752962,119.335938 C695.117508,113.541667 697.299781,105.30599 697.299781,94.6289062 C697.299781,83.9518229 695.117508,75.7324219 690.752962,69.9707031 C686.388417,64.2089844 680.134739,61.328125 671.991929,61.328125 Z" id="形状"></path>
<polygon id="路径" points="827.259022 147.65625 810.549977 70.1171875 793.645504 147.65625 764.722245 147.65625 734.821848 41.2109375 764.722245 41.2109375 781.333576 117.578125 796.67463 41.2109375 824.913893 41.2109375 841.13437 117.871094 857.745701 41.2109375 886.766675 41.2109375 855.889141 147.65625"></polygon>
+</g>
+</g>
+</g>
+</g>
+</svg>
(After: 10 KiB)
@@ -1,13 +1,11 @@
-import { useIsDarkTheme } from '@/components/theme-provider';
import { Background } from '@xyflow/react';

export function AgentBackground() {
-const isDarkTheme = useIsDarkTheme();

return (
<Background
-color={isDarkTheme ? 'rgba(255,255,255,0.15)' : '#A8A9B3'}
-bgColor={isDarkTheme ? 'rgba(11, 11, 12, 1)' : 'rgba(0, 0, 0, 0.05)'}
+color="var(--text-primary)"
+bgColor="rgb(var(--bg-canvas))"
className="rounded-lg"
/>
);
}
@@ -108,6 +108,7 @@ export function DataFlowSelect(props: IProps) {
{...field}
placeholder={t('dataFlowPlaceholder')}
options={options}
+triggerClassName="!bg-bg-base"
/>
)}
{isMult && (
@@ -1,3 +1,4 @@
+import { cn } from '@/lib/utils';
import { forwardRef } from 'react';
import { useFormContext } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
@@ -36,6 +37,7 @@ export const DelimiterInput = forwardRef<HTMLInputElement, InputProps & IProps>(
maxLength={maxLength}
defaultValue={defaultValue}
ref={ref}
+className={cn('bg-bg-base', props.className)}
{...props}
></Input>
);
@@ -98,7 +98,7 @@ export function FileUploadDialog({

return (
<Dialog open onOpenChange={hideModal}>
-<DialogContent className="sm:max-w-[425px]">
+<DialogContent>
<DialogHeader>
<DialogTitle>{t('fileManager.uploadFile')}</DialogTitle>
</DialogHeader>
@@ -63,7 +63,7 @@ export function MetadataFilterConditions({
<Plus />
</Button>
</DropdownMenuTrigger>
-<DropdownMenuContent>
+<DropdownMenuContent className="max-h-[300px] !overflow-y-auto scrollbar-auto">
{Object.keys(metadata.data).map((key, idx) => {
return (
<DropdownMenuItem key={idx} onClick={add(key)}>
@@ -54,8 +54,11 @@ function MarkdownContent({
const { setDocumentIds, data: fileThumbnails } =
useFetchDocumentThumbnailsByIds();
const contentWithCursor = useMemo(() => {
-// let text = DOMPurify.sanitize(content);
-let text = content;
+let text = DOMPurify.sanitize(content, {
+ADD_TAGS: ['think', 'section'],
+ADD_ATTR: ['class'],
+});
+// let text = content;
if (text === '') {
text = t('chat.searching');
}
@@ -1,10 +1,14 @@
import { useIsDarkTheme } from '@/components/theme-provider';
+import { parseColorToRGB } from '@/utils/common-util';
import React from 'react';

interface SpotlightProps {
className?: string;
opcity?: number;
coverage?: number;
+X?: string;
+Y?: string;
+color?: string;
}
/**
*
@@ -16,9 +20,20 @@ const Spotlight: React.FC<SpotlightProps> = ({
className,
opcity = 0.5,
coverage = 60,
+X = '50%',
+Y = '190%',
+color,
}) => {
const isDark = useIsDarkTheme();
-const rgb = isDark ? '255, 255, 255' : '194, 221, 243';
+let realColor: [number, number, number] | undefined = undefined;
+if (color) {
+realColor = parseColorToRGB(color);
+}
+const rgb = realColor
+? realColor.join(',')
+: isDark
+? '255, 255, 255'
+: '194, 221, 243';
return (
<div
className={`absolute inset-0 opacity-80 ${className} rounded-lg`}
@@ -30,7 +45,7 @@ const Spotlight: React.FC<SpotlightProps> = ({
<div
className="absolute inset-0"
style={{
-background: `radial-gradient(circle at 50% 190%, rgba(${rgb},${opcity}) 0%, rgba(${rgb},0) ${coverage}%)`,
+background: `radial-gradient(circle at ${X} ${Y}, rgba(${rgb},${opcity}) 0%, rgba(${rgb},0) ${coverage}%)`,
pointerEvents: 'none',
}}
></div>
@@ -21,7 +21,7 @@ const ThemeProviderContext = createContext<ThemeProviderState>(initialState);

export function ThemeProvider({
children,
-defaultTheme = ThemeEnum.Light,
+defaultTheme = ThemeEnum.Dark,
storageKey = 'vite-ui-theme',
...props
}: ThemeProviderProps) {
@@ -31,7 +31,7 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
<input
type={type}
className={cn(
-'flex h-8 w-full rounded-md border border-input bg-bg-card px-2 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium file:text-foreground placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50',
+'flex h-8 w-full rounded-md border border-input bg-bg-base px-2 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium file:text-foreground placeholder:text-text-disabled focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50',
className,
)}
ref={ref}
@@ -65,7 +65,11 @@ const ExpandedInput = ({
{prefix}
</span>
<Input
-className={cn({ 'pr-8': !!suffix, 'pl-8': !!prefix }, className)}
+className={cn(
+{ 'pr-8': !!suffix, 'pl-8': !!prefix },
+'bg-bg-base',
+className,
+)}
{...props}
></Input>
<span
@@ -291,7 +291,7 @@ export const RAGFlowSelect = forwardRef<
onReset={handleReset}
allowClear={allowClear}
ref={ref}
-className={triggerClassName}
+className={cn(triggerClassName, 'bg-bg-base')}
>
<SelectValue placeholder={placeholder}>{label}</SelectValue>
</SelectTrigger>
@@ -8,7 +8,7 @@ const Table = React.forwardRef<
>(({ className, rootClassName, ...props }, ref) => (
<div
className={cn(
-'relative w-full overflow-auto rounded-2xl bg-bg-card scrollbar-none',
+'relative w-full overflow-auto rounded-2xl bg-bg-card scrollbar-auto',
rootClassName,
)}
>
@@ -20,7 +20,7 @@ const TooltipContent = React.forwardRef<
ref={ref}
sideOffset={sideOffset}
className={cn(
-'z-50 overflow-auto scrollbar-auto rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-w-[20vw]',
+'z-50 overflow-auto scrollbar-auto rounded-md whitespace-pre-wrap border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-w-[30vw]',
className,
)}
{...props}
@@ -41,9 +41,7 @@ export const FormTooltip = ({ tooltip }: { tooltip: React.ReactNode }) => {
>
<Info className="size-3 ml-2" />
</TooltipTrigger>
-<TooltipContent>
-<p>{tooltip}</p>
-</TooltipContent>
+<TooltipContent>{tooltip}</TooltipContent>
</Tooltip>
);
};
@@ -24,6 +24,12 @@ export const useNavigatePage = () => {
},
[navigate],
);
+const navigateToDatasetOverview = useCallback(
+(id: string) => () => {
+navigate(`${Routes.DatasetBase}${Routes.DataSetOverview}/${id}`);
+},
+[navigate],
+);

const navigateToDataFile = useCallback(
(id: string) => () => {
@@ -160,6 +166,7 @@ export const useNavigatePage = () => {
return {
navigateToDatasetList,
navigateToDataset,
+navigateToDatasetOverview,
navigateToHome,
navigateToProfile,
navigateToChatList,
@@ -46,6 +46,7 @@ export declare interface IFlow {
export interface IFlowTemplate {
avatar: string;
canvas_type: string;
+canvas_category?: string;
create_date: string;
create_time: number;
description: {
@@ -161,6 +161,7 @@ export function Header() {
<RAGFlowAvatar
name={nickname}
avatar={avatar}
+isPerson
className="size-8 cursor-pointer"
onClick={navigateToOldProfile}
></RAGFlowAvatar>
@@ -57,6 +57,8 @@ export default {
},
},
login: {
+loginTitle: 'Sign in to Your Account',
+signUpTitle: 'Create an Account',
login: 'Sign in',
signUp: 'Sign up',
loginDescription: 'We’re so excited to see you again!',
@@ -72,7 +74,8 @@
nicknamePlaceholder: 'Please input nickname',
register: 'Create an account',
continue: 'Continue',
-title: 'Start building your smart assistants.',
+title: 'A leading RAG engine for LLM context',
+start: "Let's get started",
description:
'Sign up for free to explore top RAG technology. Create knowledge bases and AIs to empower your business.',
review: 'from 500+ reviews',
@@ -114,7 +117,7 @@
generateRaptor:
'This will extract entities and relationships from all your documents in this dataset. The process may take a while to complete.',
generate: 'Generate',
-raptor: 'Raptor',
+raptor: 'RAPTOR',
processingType: 'Processing Type',
dataPipeline: 'Ingestion pipeline',
operations: 'Operations',
@@ -128,7 +131,7 @@
fileName: 'File Name',
datasetLogs: 'Dataset',
fileLogs: 'File',
-overview: 'Overview',
+overview: 'Logs',
success: 'Success',
failed: 'Failed',
completed: 'Completed',
@@ -270,7 +273,7 @@
reRankModelWaring: 'Re-rank model is very time consuming.',
},
knowledgeConfiguration: {
-tocExtraction: 'toc toggle',
+tocExtraction: 'TOC Enhance',
tocExtractionTip:
" For existing chunks, generate a hierarchical table of contents (one directory per file). During queries, when Directory Enhancement is activated, the system will use a large model to determine which directory items are relevant to the user's question, thereby identifying the relevant chunks.",
deleteGenerateModalContent: `
@@ -297,8 +300,8 @@
dataFlowPlaceholder: 'Please select a pipeline.',
buildItFromScratch: 'Build it from scratch',
dataFlow: 'Pipeline',
-parseType: 'Parse Type',
-manualSetup: 'Manual Setup',
+parseType: 'Ingestion pipeline',
+manualSetup: 'Choose pipeline',
builtIn: 'Built-in',
titleDescription:
'Update your knowledge base configuration here, particularly the chunking method.',
@@ -474,8 +477,9 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
useGraphRagTip:
'Construct a knowledge graph over file chunks of the current knowledge base to enhance multi-hop question-answering involving nested logic. See https://ragflow.io/docs/dev/construct_knowledge_graph for details.',
graphRagMethod: 'Method',
-graphRagMethodTip: `Light: (Default) Use prompts provided by github.com/HKUDS/LightRAG to extract entities and relationships. This option consumes fewer tokens, less memory, and fewer computational resources.</br>
-General: Use prompts provided by github.com/microsoft/graphrag to extract entities and relationships`,
+graphRagMethodTip: `
+Light: (Default) Use prompts provided by github.com/HKUDS/LightRAG to extract entities and relationships. This option consumes fewer tokens, less memory, and fewer computational resources.</br>
+General: Use prompts provided by github.com/microsoft/graphrag to extract entities and relationships`,
resolution: 'Entity resolution',
resolutionTip: `An entity deduplication switch. When enabled, the LLM will combine similar entities - e.g., '2025' and 'the year of 2025', or 'IT' and 'Information Technology' - to construct a more accurate graph`,
community: 'Community reports',
@@ -951,6 +955,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
marketing: 'Marketing',
consumerApp: 'Consumer App',
other: 'Other',
+ingestionPipeline: 'Ingestion Pipeline',
agents: 'Agents',
days: 'Days',
beginInput: 'Begin Input',
@@ -1669,6 +1674,7 @@ This delimiter is used to split the input text into several text pieces echo of
page: '{{page}} /Page',
},
dataflowParser: {
+result: 'Result',
parseSummary: 'Parse Summary',
parseSummaryTip: 'Parser:deepdoc',
rerunFromCurrentStep: 'Rerun From Current Step',
@@ -1703,17 +1709,17 @@ This delimiter is used to split the input text into several text pieces echo of
parser: 'Parser',
parserDescription:
'Extracts raw text and structure from files for downstream processing.',
-tokenizer: 'Tokenizer',
-tokenizerRequired: 'Please add the Tokenizer node first',
+tokenizer: 'Indexer',
+tokenizerRequired: 'Please add the Indexer node first',
tokenizerDescription:
'Transforms text into the required data structure (e.g., vector embeddings for Embedding Search) depending on the chosen search method.',
-splitter: 'Token Splitter',
+splitter: 'Token',
splitterDescription:
'Split text into chunks by token length with optional delimiters and overlap.',
hierarchicalMergerDescription:
'Split documents into sections by title hierarchy with regex rules for finer control.',
-hierarchicalMerger: 'Title Splitter',
-extractor: 'Context Generator',
+hierarchicalMerger: 'Title',
+extractor: 'Transformer',
extractorDescription:
'Use an LLM to extract structured insights from document chunks—such as summaries, classifications, etc.',
outputFormat: 'Output format',
@@ -1733,10 +1739,10 @@ This delimiter is used to split the input text into several text pieces echo of
addParser: 'Add Parser',
hierarchy: 'Hierarchy',
regularExpressions: 'Regular Expressions',
-overlappedPercent: 'Overlapped percent',
+overlappedPercent: 'Overlapped percent (%)',
searchMethod: 'Search method',
searchMethodTip: `Defines how the content can be searched — by full-text, embedding, or both.
-The Tokenizer will store the content in the corresponding data structures for the selected methods.`,
+The Indexer will store the content in the corresponding data structures for the selected methods.`,
begin: 'File',
parserMethod: 'Parsing method',
systemPrompt: 'System Prompt',
@@ -1745,11 +1751,11 @@ The Tokenizer will store the content in the corresponding data structures for th
exportJson: 'Export JSON',
viewResult: 'View result',
running: 'Running',
-summary: 'Augmented Context',
+summary: 'Summary',
keywords: 'Keywords',
questions: 'Questions',
metadata: 'Metadata',
-fieldName: 'Result Destination',
+fieldName: 'Result destination',
prompts: {
system: {
keywords: `Role
@@ -1814,12 +1820,19 @@ Important structured information may include: names, dates, locations, events, k
imageParseMethodOptions: {
ocr: 'OCR',
},
+note: 'Note',
+noteDescription: 'Note',
+notePlaceholder: 'Please enter a note',
},
datasetOverview: {
downloadTip: 'Files being downloaded from data sources. ',
-processingTip: 'Files being processed by data flows.',
+processingTip: 'Files being processed by Ingestion pipeline.',
totalFiles: 'Total Files',
downloading: 'Downloading',
+downloadSuccessTip: 'Total successful downloads',
+downloadFailedTip: 'Total failed downloads',
+processingSuccessTip: 'Total successfully processed files',
+processingFailedTip: 'Total failed processes',
processing: 'Processing',
},
},
@@ -49,6 +49,8 @@ export default {
promptPlaceholder: '请输入或使用 / 快速插入变量。',
},
login: {
+loginTitle: '登录账户',
+signUpTitle: '创建账户',
login: '登录',
signUp: '注册',
loginDescription: '很高兴再次见到您!',
@@ -64,7 +66,8 @@
nicknamePlaceholder: '请输入名称',
register: '创建账户',
continue: '继续',
-title: '开始构建您的智能助手',
+title: 'A leading RAG engine for LLM context',
+start: '立即开始',
description:
'免费注册以探索顶级 RAG 技术。 创建知识库和人工智能来增强您的业务',
review: '来自 500 多条评论',
@@ -116,7 +119,7 @@
fileName: '文件名',
datasetLogs: '数据集',
fileLogs: '文件',
-overview: '概览',
+overview: '日志',
success: '成功',
failed: '失败',
completed: '已完成',
@@ -255,7 +258,7 @@
theDocumentBeingParsedCannotBeDeleted: '正在解析的文档不能被删除',
},
knowledgeConfiguration: {
-tocExtraction: '目录提取',
+tocExtraction: '目录增强',
tocExtractionTip:
'对于已有的chunk生成层级结构的目录信息(每个文件一个目录)。在查询时,激活`目录增强`后,系统会用大模型去判断用户问题和哪些目录项相关,从而找到相关的chunk。',
deleteGenerateModalContent: `
@@ -265,25 +268,25 @@
<br/>
是否要继续?
`,
-extractRaptor: '从文档中提取Raptor',
+extractRaptor: '从文档中提取RAPTOR',
extractKnowledgeGraph: '从文档中提取知识图谱',
filterPlaceholder: '请输入',
fileFilterTip: '',
fileFilter: '正则匹配表达式',
setDefaultTip: '',
setDefault: '设置默认',
-eidtLinkDataPipeline: '编辑数据流',
+eidtLinkDataPipeline: '编辑pipeline',
linkPipelineSetTip: '管理与此数据集的数据管道链接',
default: '默认',
-dataPipeline: '数据流',
-linkDataPipeline: '关联数据流',
+dataPipeline: 'pipeline',
+linkDataPipeline: '关联pipeline',
enableAutoGenerate: '是否启用自动生成',
teamPlaceholder: '请选择团队',
-dataFlowPlaceholder: '请选择数据流',
+dataFlowPlaceholder: '请选择pipeline',
buildItFromScratch: '去Scratch构建',
-dataFlow: '数据流',
-parseType: '切片方法',
-manualSetup: '手动设置',
+dataFlow: 'pipeline',
+parseType: 'Ingestion pipeline',
+manualSetup: '选择pipeline',
builtIn: '内置',
titleDescription: '在这里更新您的知识库详细信息,尤其是切片方法。',
name: '知识库名称',
@@ -1585,6 +1588,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
page: '{{page}}条/页',
},
dataflowParser: {
+result: '结果',
parseSummary: '解析摘要',
parseSummaryTip: '解析器: deepdoc',
rerunFromCurrentStep: '从当前步骤重新运行',
@@ -1607,7 +1611,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
<p>要保留这些更改,请点击“重新运行”以重新运行当前阶段。</p> `,
changeStepModalConfirmText: '继续切换',
changeStepModalCancelText: '取消',
-unlinkPipelineModalTitle: '解绑数据流',
+unlinkPipelineModalTitle: '解绑pipeline',
unlinkPipelineModalContent: `
<p>一旦取消链接,该数据集将不再连接到当前数据管道。</p>
<p>正在解析的文件将继续解析,直到完成。</p>
@@ -1638,7 +1642,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
addParser: '增加解析器',
hierarchy: '层次结构',
regularExpressions: '正则表达式',
-overlappedPercent: '重叠百分比',
+overlappedPercent: '重叠百分比(%)',
searchMethod: '搜索方法',
searchMethodTip: `决定该数据集启用的搜索方式,可选择全文、向量,或两者兼有。
Tokenizer 会根据所选方式将内容存储为对应的数据结构。`,
@@ -1706,13 +1710,20 @@ Tokenizer 会根据所选方式将内容存储为对应的数据结构。`,
cancel: '取消',
filenameEmbeddingWeight: '文件名嵌入权重',
switchPromptMessage: '提示词将发生变化,请确认是否放弃已有提示词?',
+note: '注释',
+noteDescription: '注释',
+notePlaceholder: '请输入注释',
},
datasetOverview: {
downloadTip: '正在从数据源下载文件。',
-processingTip: '正在由数据流处理文件。',
+processingTip: '正在由pipeline处理文件。',
totalFiles: '文件总数',
downloading: '正在下载',
processing: '正在处理',
+downloadSuccessTip: '下载成功总数',
+downloadFailedTip: '下载失败总数',
+processingSuccessTip: '处理成功的文件总数',
+processingFailedTip: '处理失败的文件总数',
},
},
};
@@ -232,7 +232,7 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) {
]);

return (
-<div className={styles.canvasWrapper}>
+<div className={cn(styles.canvasWrapper, 'px-5 pb-5')}>
<svg
xmlns="http://www.w3.org/2000/svg"
style={{ position: 'absolute', top: 10, left: 0 }}
@@ -18,7 +18,7 @@ const InnerNodeHeader = ({
wrapperClassName,
}: IProps) => {
return (
-<section className={cn(wrapperClassName, 'pb-4')}>
+<section className={cn(wrapperClassName, 'pb-2')}>
<div className={cn(className, 'flex gap-2.5')}>
<OperatorIcon name={label as Operator}></OperatorIcon>
<span className="truncate text-center font-semibold text-sm">
@@ -7,7 +7,7 @@ export function NodeWrapper({ children, className, selected }: IProps) {
return (
<section
className={cn(
-'bg-text-title-invert p-2.5 rounded-sm w-[200px] text-xs group',
+'bg-text-title-invert p-2.5 rounded-md w-[200px] text-xs group',
{ 'border border-accent-primary': selected },
className,
)}
@@ -28,7 +28,18 @@ const NameFormSchema = z.object({
name: z.string(),
});

-function NoteNode({ data, id, selected }: NodeProps<INoteNode>) {
+type NoteNodeProps = NodeProps<INoteNode> & {
+useWatchNoteFormChange?: typeof useWatchFormChange;
+useWatchNoteNameFormChange?: typeof useWatchNameFormChange;
+};
+
+function NoteNode({
+data,
+id,
+selected,
+useWatchNoteFormChange,
+useWatchNoteNameFormChange,
+}: NoteNodeProps) {
const { t } = useTranslation();

const form = useForm<z.infer<typeof FormSchema>>({
@@ -41,19 +52,19 @@ function NoteNode({ data, id, selected }: NodeProps<INoteNode>) {
defaultValues: { name: data.name },
});

-useWatchFormChange(id, form);
+(useWatchNoteFormChange || useWatchFormChange)(id, form);

-useWatchNameFormChange(id, nameForm);
+(useWatchNoteNameFormChange || useWatchNameFormChange)(id, nameForm);

return (
<NodeWrapper
-className="p-0 w-full h-full flex flex-col"
+className="p-0 w-full h-full flex flex-col bg-bg-component border border-state-warning rounded-lg shadow-md pb-1"
selected={selected}
>
<NodeResizeControl minWidth={190} minHeight={128} style={controlStyle}>
<ResizeIcon />
</NodeResizeControl>
-<section className="p-2 flex gap-2 bg-background-note items-center note-drag-handle rounded-t">
+<section className="px-2 py-1 flex gap-2 items-center note-drag-handle rounded-t border-t-2 border-state-warning">
<NotebookPen className="size-4" />
<Form {...nameForm}>
<form className="flex-1">
@@ -67,7 +78,7 @@ function NoteNode({ data, id, selected }: NodeProps<INoteNode>) {
placeholder={t('flow.notePlaceholder')}
{...field}
type="text"
-className="bg-transparent border-none focus-visible:outline focus-visible:outline-text-sub-title"
+className="bg-transparent border-none focus-visible:outline focus-visible:outline-text-sub-title p-1"
/>
</FormControl>
<FormMessage />
@@ -78,7 +89,7 @@ function NoteNode({ data, id, selected }: NodeProps<INoteNode>) {
</Form>
</section>
<Form {...form}>
-<form className="flex-1 p-1">
+<form className="flex-1 px-1 min-h-1">
<FormField
control={form.control}
name="text"
@@ -87,7 +98,7 @@ function NoteNode({ data, id, selected }: NodeProps<INoteNode>) {
<FormControl>
<Textarea
placeholder={t('flow.notePlaceholder')}
-className="resize-none rounded-none p-1 h-full overflow-auto bg-transparent focus-visible:ring-0 border-none"
+className="resize-none rounded-none p-1 py-0 overflow-auto bg-transparent focus-visible:ring-0 border-none text-text-secondary focus-visible:ring-offset-0 !text-xs"
{...field}
/>
</FormControl>
@@ -6,7 +6,7 @@ export function ResizeIcon() {
height="14"
viewBox="0 0 24 24"
strokeWidth="2"
-stroke="rgba(76, 164, 231, 1)"
+stroke="var(--text-disabled)"
fill="none"
strokeLinecap="round"
strokeLinejoin="round"
@@ -200,12 +200,14 @@ export const useSendAgentMessage = ({
beginParams,
isShared,
refetch,
+isTaskMode: isTask,
}: {
url?: string;
addEventList?: (data: IEventList, messageId: string) => void;
beginParams?: any[];
isShared?: boolean;
refetch?: () => void;
+isTaskMode?: boolean;
}) => {
const { id: agentId } = useParams();
const { handleInputChange, value, setValue } = useHandleMessageInputChange();
@@ -217,7 +219,7 @@
return answerList[0]?.message_id;
}, [answerList]);

-const isTaskMode = useIsTaskMode();
+const isTaskMode = useIsTaskMode(isTask);

const { findReferenceByMessageId } = useFindMessageReference(answerList);
const prologue = useGetBeginNodePrologue();
@@ -230,6 +232,7 @@
addNewestOneQuestion,
addNewestOneAnswer,
removeAllMessages,
+removeAllMessagesExceptFirst,
scrollToBottom,
} = useSelectDerivedMessages();
const { addEventList: addEventListFun } = useContext(AgentChatLogContext);
@@ -321,8 +324,18 @@
stopOutputMessage();
resetAnswerList();
setSessionId(null);
-removeAllMessages();
-}, [resetAnswerList, removeAllMessages, stopOutputMessage]);
+if (isTaskMode) {
+removeAllMessages();
+} else {
+removeAllMessagesExceptFirst();
+}
+}, [
+stopOutputMessage,
+resetAnswerList,
+isTaskMode,
+removeAllMessages,
+removeAllMessagesExceptFirst,
+]);

const handlePressEnter = useCallback(() => {
if (trim(value) === '') return;
@@ -8,14 +8,27 @@ export const ExeSQLFormSchema = {
username: z.string().min(1),
host: z.string().min(1),
port: z.number(),
-password: z.string().min(1),
+password: z.string().optional().or(z.literal('')),
max_records: z.number(),
};

-export const FormSchema = z.object({
-sql: z.string().optional(),
-...ExeSQLFormSchema,
-});
+export const FormSchema = z
+.object({
+sql: z.string().optional(),
+...ExeSQLFormSchema,
+})
+.superRefine((v, ctx) => {
+if (
+v.db_type !== 'trino' &&
+!(v.password && v.password.trim().length > 0)
+) {
+ctx.addIssue({
+code: z.ZodIssueCode.custom,
+path: ['password'],
+message: 'String must contain at least 1 character(s)',
+});
+}
+});

export function useSubmitForm() {
const { testDbConnect, loading } = useTestDbConnect();
@@ -27,13 +27,16 @@ export function useSelectBeginNodeDataInputs() {
);
}

-export function useIsTaskMode() {
+export function useIsTaskMode(isTask?: boolean) {
const getNode = useGraphStore((state) => state.getNode);

return useMemo(() => {
+if (typeof isTask === 'boolean') {
+return isTask;
+}
const node = getNode(BeginId);
return node?.data?.form?.mode === AgentDialogueMode.Task;
-}, [getNode]);
+}, [getNode, isTask]);
}

export const useGetBeginNodeDataQuery = () => {
@@ -59,6 +59,7 @@ export const useSendNextSharedMessage = (
addEventList,
beginParams: params,
isShared: true,
+isTaskMode,
});

const ok = useCallback(
@@ -2139,6 +2139,7 @@ export const ExeSQLOptions = [
'mariadb',
'mssql',
'IBM DB2',
+'trino',
].map((x) => ({
label: upperFirst(x),
value: x,
Some files were not shown because too many files have changed in this diff.