Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)

Compare commits: 34 commits
| SHA1 |
|---|
| 552023ee4b |
| 6c9b8ec860 |
| f9e6ad86b7 |
| e604634d2a |
| 590b9dabab |
| c283ea57fd |
| 50ff16e7a4 |
| 453287b06b |
| e166f132b3 |
| 42f4d4dbc8 |
| 7cb8368e0f |
| 0d7cfce6e1 |
| 2d7c1368f0 |
| db4371c745 |
| e6cd799d8a |
| ab29b58316 |
| 3f037c9786 |
| 53b991aa0e |
| 9e80f39caa |
| bdc2b74e8f |
| 1fd92e6bee |
| 02fd381072 |
| b6f3a6a68a |
| ae70512f5d |
| d4a123d6dd |
| ce816edb5f |
| ac2643700b |
| 558b252c5a |
| 754a5e1cee |
| e3e7c7ddaa |
| 76b278af8e |
| 1c6320828c |
| d72468426e |
| 796f4032b8 |
.gitignore (vendored): 143 added lines

@@ -44,3 +44,146 @@ nltk_data/
 .lh/
 .venv
 docker/data
+
+#--------------------------------------------------#
+# The following was generated with gitignore.nvim: #
+#--------------------------------------------------#
+# Gitignore for the following technologies: Node
+
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+# Serverless Webpack directories
+.webpack/
+
+# SvelteKit build / generate output
+.svelte-kit
@@ -22,7 +22,7 @@
         <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -178,7 +178,7 @@ releases! 🌟
 > All Docker images are built for x86 platforms. We don't currently offer Docker images for ARM64.
 > If you are on an ARM64 platform, follow [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.
 
-> The command below downloads the `v0.18.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.18.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0` for the full edition `v0.18.0`.
+> The command below downloads the `v0.19.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.19.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0` for the full edition `v0.19.0`.
 
 ```bash
 $ cd ragflow/docker
@@ -191,8 +191,8 @@ releases! 🌟
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 |-------------------|-----------------|-----------------------|--------------------------|
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -22,7 +22,7 @@
         <img alt="Lencana Daring" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Rilis Terbaru">
@@ -173,7 +173,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
 > All Docker images are built for x86 platforms. We do not currently offer Docker images for ARM64.
 > If you are on an ARM64 platform, [please use this guide to build a Docker image compatible with your system](https://ragflow.io/docs/dev/build_docker_image).
 
-> The command below downloads the v0.18.0-slim edition of the RAGFlow Docker image. Refer to the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.18.0-slim, update the RAGFLOW_IMAGE variable in docker/.env before using docker compose to start the server. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0 for the full edition v0.18.0.
+> The command below downloads the v0.19.0-slim edition of the RAGFlow Docker image. Refer to the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than v0.19.0-slim, update the RAGFLOW_IMAGE variable in docker/.env before using docker compose to start the server. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0 for the full edition v0.19.0.
 
 ```bash
 $ cd ragflow/docker
@@ -186,8 +186,8 @@ $ docker compose -f docker-compose.yml up -d
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -22,7 +22,7 @@
         <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -152,7 +152,7 @@
 > All officially provided Docker images are currently built for the x86 architecture; Docker images for ARM64 are not provided.
 > If your operating system is on the ARM64 architecture, refer to [this document](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image yourself.
 
-> The following command downloads the v0.18.0-slim edition of the RAGFlow Docker image. See the table below for descriptions of the different RAGFlow editions. To download an edition other than v0.18.0-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly and start the server with docker compose. For example, to download the full edition v0.18.0, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0.
+> The following command downloads the v0.19.0-slim edition of the RAGFlow Docker image. See the table below for descriptions of the different RAGFlow editions. To download an edition other than v0.19.0-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly and start the server with docker compose. For example, to download the full edition v0.19.0, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0.
 
 ```bash
 $ cd ragflow/docker
@@ -165,8 +165,8 @@
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -22,7 +22,7 @@
         <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -152,7 +152,7 @@
 > All Docker images are built for the x86 platform. We do not currently provide Docker images for the ARM64 platform.
 > If you are on an ARM64 platform, [please use this guide to build a Docker image compatible with your system](https://ragflow.io/docs/dev/build_docker_image).
 
-> The command below downloads the v0.18.0-slim edition of the RAGFlow Docker image. See the following table for descriptions of the various RAGFlow editions. To download a RAGFlow edition other than v0.18.0-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0 to download the full v0.18.0 edition.
+> The command below downloads the v0.19.0-slim edition of the RAGFlow Docker image. See the following table for descriptions of the various RAGFlow editions. To download a RAGFlow edition other than v0.19.0-slim, update the RAGFLOW_IMAGE variable in the docker/.env file accordingly, then start the server with docker compose. For example, set RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0 to download the full v0.19.0 edition.
 
 ```bash
 $ cd ragflow/docker
@@ -165,8 +165,8 @@
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -22,7 +22,7 @@
         <img alt="Badge Estático" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Última Versão">
@@ -172,7 +172,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
 > All Docker images are built for x86 platforms. We do not currently offer Docker images for ARM64.
 > If you are on an ARM64 platform, please use [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.
 
-> The command below downloads the `v0.18.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than `v0.18.0-slim`, update the `RAGFLOW_IMAGE` variable as needed in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0` for the full edition `v0.18.0`.
+> The command below downloads the `v0.19.0-slim` edition of the RAGFlow Docker image. See the following table for descriptions of the different RAGFlow editions. To download a RAGFlow edition other than `v0.19.0-slim`, update the `RAGFLOW_IMAGE` variable as needed in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0` for the full edition `v0.19.0`.
 
 ```bash
 $ cd ragflow/docker
@@ -185,8 +185,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | --------------------- | ---------------------- | ------------------------------- | ------------------------ |
-| v0.18.0 | ~9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ~2 | ❌ | Stable release |
+| v0.19.0 | ~9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ~2 | ❌ | Stable release |
 | nightly | ~9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ~2 | ❌ | _Unstable_ nightly build |
@@ -21,7 +21,7 @@
         <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -151,7 +151,7 @@
 > All Docker images are built for the x86 platform. We do not currently offer Docker images for the ARM64 platform.
 > If you are on an ARM64 platform, use [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image suited to your system.
 
-> Running the command below automatically downloads the RAGFlow slim Docker image `v0.18.0-slim`. See the table below for descriptions of the different Docker releases. To download a Docker image other than `v0.18.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before running `docker compose` to start the service. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0` to download the full `v0.18.0` release of the RAGFlow image.
+> Running the command below automatically downloads the RAGFlow slim Docker image `v0.19.0-slim`. See the table below for descriptions of the different Docker releases. To download a Docker image other than `v0.19.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before running `docker compose` to start the service. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0` to download the full `v0.19.0` release of the RAGFlow image.
 
 ```bash
 $ cd ragflow/docker
@@ -164,8 +164,8 @@
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -22,7 +22,7 @@
         <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
     </a>
     <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
-        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.18.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.18.0">
+        <img src="https://img.shields.io/badge/docker_pull-ragflow:v0.19.0-brightgreen" alt="docker pull infiniflow/ragflow:v0.19.0">
     </a>
     <a href="https://github.com/infiniflow/ragflow/releases/latest">
         <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -152,7 +152,7 @@
 > Note that all officially provided Docker images are currently built for the x86 architecture; no ARM64-based Docker images are provided.
 > If your operating system is on the ARM64 architecture, refer to [this document](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image yourself.
 
-> Running the command below automatically downloads the RAGFlow slim Docker image `v0.18.0-slim`. See the table below for descriptions of the different Docker releases. To download a Docker image other than `v0.18.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before running `docker compose` to start the service. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0` to download the full `v0.18.0` release of the RAGFlow image.
+> Running the command below automatically downloads the RAGFlow slim Docker image `v0.19.0-slim`. See the table below for descriptions of the different Docker releases. To download a Docker image other than `v0.19.0-slim`, update the `RAGFLOW_IMAGE` variable in **docker/.env** before running `docker compose` to start the service. For example, set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0` to download the full `v0.19.0` release of the RAGFlow image.
 
 ```bash
 $ cd ragflow/docker
@@ -165,8 +165,8 @@
 
 | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
 | ----------------- | --------------- | --------------------- | ------------------------ |
-| v0.18.0 | ≈9 | :heavy_check_mark: | Stable release |
-| v0.18.0-slim | ≈2 | ❌ | Stable release |
+| v0.19.0 | ≈9 | :heavy_check_mark: | Stable release |
+| v0.19.0-slim | ≈2 | ❌ | Stable release |
 | nightly | ≈9 | :heavy_check_mark: | _Unstable_ nightly build |
 | nightly-slim | ≈2 | ❌ | _Unstable_ nightly build |
@@ -17,6 +17,7 @@ import logging
 from abc import ABC
 import pandas as pd
 import requests
+from bs4 import BeautifulSoup
 import re
 from agent.component.base import ComponentBase, ComponentParamBase
@@ -44,17 +45,28 @@ class Baidu(ComponentBase, ABC):
             return Baidu.be_output("")
 
         try:
-            url = 'http://www.baidu.com/s?wd=' + ans + '&rn=' + str(self._param.top_n)
+            url = 'https://www.baidu.com/s?wd=' + ans + '&rn=' + str(self._param.top_n)
             headers = {
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36'}
+                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
+                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+                'Connection': 'keep-alive',
+            }
             response = requests.get(url=url, headers=headers)
 
-            url_res = re.findall(r"'url': \\\"(.*?)\\\"}", response.text)
-            title_res = re.findall(r"'title': \\\"(.*?)\\\",\\n", response.text)
-            body_res = re.findall(r"\"contentText\":\"(.*?)\"", response.text)
-            baidu_res = [{"content": re.sub('<em>|</em>', '', '<a href="' + url + '">' + title + '</a> ' + body)} for
-                         url, title, body in zip(url_res, title_res, body_res)]
-            del body_res, url_res, title_res
+            # check if request success
+            if response.status_code == 200:
+                soup = BeautifulSoup(response.text, 'html.parser')
+                url_res = []
+                title_res = []
+                body_res = []
+                for item in soup.select('.result.c-container'):
+                    # extract title
+                    title_res.append(item.select_one('h3 a').get_text(strip=True))
+                    url_res.append(item.select_one('h3 a')['href'])
+                    body_res.append(item.select_one('.c-abstract').get_text(strip=True) if item.select_one('.c-abstract') else '')
+                baidu_res = [{"content": re.sub('<em>|</em>', '', '<a href="' + url + '">' + title + '</a> ' + body)} for
+                             url, title, body in zip(url_res, title_res, body_res)]
+                del body_res, url_res, title_res
         except Exception as e:
             return Baidu.be_output("**ERROR**: " + str(e))
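This hunk replaces brittle `re.findall` scraping of Baidu's inline JSON with DOM parsing guarded by a status-code check. For reference, a minimal standalone sketch of the same approach, assuming Baidu still renders results as `.result.c-container` blocks with an `h3 a` title link and a `.c-abstract` summary (the selectors come from the diff above; the markup can change at any time):

```python
import re

import requests
from bs4 import BeautifulSoup

def baidu_search(query: str, top_n: int = 5) -> list[dict]:
    """Hedged sketch of the DOM-based extraction used in the new code path."""
    url = f"https://www.baidu.com/s?wd={query}&rn={top_n}"
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
                      "(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    }
    response = requests.get(url, headers=headers, timeout=10)
    if response.status_code != 200:
        return []  # a regex over response.text would happily "match" an error page
    soup = BeautifulSoup(response.text, "html.parser")
    results = []
    for item in soup.select(".result.c-container"):
        link = item.select_one("h3 a")
        if link is None:
            continue  # skip ad blocks or layout variants without a title link
        abstract = item.select_one(".c-abstract")
        body = abstract.get_text(strip=True) if abstract else ""
        content = f'<a href="{link["href"]}">{link.get_text(strip=True)}</a> {body}'
        results.append({"content": re.sub("<em>|</em>", "", content)})
    return results
```

Unlike the regex version, this degrades gracefully: an error page or a markup change yields an empty list rather than garbage matches.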
@@ -79,15 +79,23 @@ class Code(ComponentBase, ABC):
     def _run(self, history, **kwargs):
         arguments = {}
         for input in self._param.arguments:
-            assert "@" in input["component_id"], "Each code argument should bind to a specific compontent"
-            component_id = input["component_id"].split("@")[0]
-            refered_component_key = input["component_id"].split("@")[1]
-            refered_component = self._canvas.get_component(component_id)["obj"]
+            if "@" in input["component_id"]:
+                component_id = input["component_id"].split("@")[0]
+                refered_component_key = input["component_id"].split("@")[1]
+                refered_component = self._canvas.get_component(component_id)["obj"]
 
-            for param in refered_component._param.query:
-                if param["key"] == refered_component_key:
-                    if "value" in param:
-                        arguments[input["name"]] = param["value"]
+                for param in refered_component._param.query:
+                    if param["key"] == refered_component_key:
+                        if "value" in param:
+                            arguments[input["name"]] = param["value"]
+            else:
+                cpn = self._canvas.get_component(input["component_id"])["obj"]
+                if cpn.component_name.lower() == "answer":
+                    arguments[input["name"]] = self._canvas.get_history(1)[0]["content"]
+                    continue
+                _, out = cpn.output(allow_partial=False)
+                if not out.empty:
+                    arguments[input["name"]] = "\n".join(out["content"])
 
         return self._execute_code(
             language=self._param.lang,
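The old code asserted that every argument was bound as `component_id@key`; the new branch also accepts a bare component id and falls back to that component's output (or the latest answer). A small illustration of the two binding forms the branch distinguishes; the ids below are made up:

```python
def split_binding(component_id: str) -> tuple[str, str | None]:
    """Sketch: 'cid@key' binds to a named query parameter of another
    component; a bare id binds to that component's whole output."""
    if "@" in component_id:
        cid, key = component_id.split("@", 1)
        return cid, key
    return component_id, None

assert split_binding("begin@user_question") == ("begin", "user_question")
assert split_binding("retrieval_0") == ("retrieval_0", None)
```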
@@ -105,6 +105,7 @@ class ExeSQL(Generate, ABC):
         sql_res = []
         for i in range(len(input_list)):
             single_sql = input_list[i]
+            single_sql = single_sql.replace('```','')
             while self._loop <= self._param.loop:
                 self._loop += 1
                 if not single_sql:
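The single added line strips triple-backtick fences from the model-generated SQL before execution: chat models routinely wrap SQL in Markdown code fences, and the fenced text is not valid SQL. A hedged sketch of the idea (the helper name is ours; the fence string is built indirectly only so this snippet renders cleanly):

```python
FENCE = chr(96) * 3  # three backticks, built indirectly to keep this block fence-safe

def strip_markdown_fences(sql: str) -> str:
    """Remove Markdown code fences an LLM may wrap around generated SQL."""
    return sql.replace(FENCE + "sql", "").replace(FENCE, "").strip()

assert strip_markdown_fences(f"{FENCE}sql\nSELECT 1;\n{FENCE}") == "SELECT 1;"
```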
@@ -16,6 +16,7 @@
 import logging
 
 from flask import request
 
 from api import settings
 from api.db import StatusEnum
 from api.db.services.dialog_service import DialogService
@@ -23,15 +24,14 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
 from api.db.services.llm_service import TenantLLMService
 from api.db.services.user_service import TenantService
 from api.utils import get_uuid
-from api.utils.api_utils import get_error_data_result, token_required, get_result, check_duplicate_ids
+from api.utils.api_utils import check_duplicate_ids, get_error_data_result, get_result, token_required
 
 
-@manager.route('/chats', methods=['POST'])  # noqa: F821
+@manager.route("/chats", methods=["POST"])  # noqa: F821
 @token_required
 def create(tenant_id):
     req = request.json
     ids = [i for i in req.get("dataset_ids", []) if i]
     for kb_id in ids:
         kbs = KnowledgebaseService.accessible(kb_id=kb_id, user_id=tenant_id)
         if not kbs:
@@ -40,34 +40,30 @@ def create(tenant_id):
         kb = kbs[0]
         if kb.chunk_num == 0:
             return get_error_data_result(f"The dataset {kb_id} doesn't own parsed file")
 
     kbs = KnowledgebaseService.get_by_ids(ids) if ids else []
     embd_ids = [TenantLLMService.split_model_name_and_factory(kb.embd_id)[0] for kb in kbs]  # remove vendor suffix for comparison
     embd_count = list(set(embd_ids))
     if len(embd_count) > 1:
-        return get_result(message='Datasets use different embedding models."',
-                          code=settings.RetCode.AUTHENTICATION_ERROR)
+        return get_result(message='Datasets use different embedding models."', code=settings.RetCode.AUTHENTICATION_ERROR)
     req["kb_ids"] = ids
     # llm
     llm = req.get("llm")
     if llm:
         if "model_name" in llm:
             req["llm_id"] = llm.pop("model_name")
-            if not TenantLLMService.query(tenant_id=tenant_id, llm_name=req["llm_id"], model_type="chat"):
-                return get_error_data_result(f"`model_name` {req.get('llm_id')} doesn't exist")
+        if req.get("llm_id") is not None:
+            llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(req["llm_id"])
+            if not TenantLLMService.query(tenant_id=tenant_id, llm_name=llm_name, llm_factory=llm_factory, model_type="chat"):
+                return get_error_data_result(f"`model_name` {req.get('llm_id')} doesn't exist")
         req["llm_setting"] = req.pop("llm")
     e, tenant = TenantService.get_by_id(tenant_id)
     if not e:
         return get_error_data_result(message="Tenant not found!")
     # prompt
     prompt = req.get("prompt")
-    key_mapping = {"parameters": "variables",
-                   "prologue": "opener",
-                   "quote": "show_quote",
-                   "system": "prompt",
-                   "rerank_id": "rerank_model",
-                   "vector_similarity_weight": "keywords_similarity_weight"}
-    key_list = ["similarity_threshold", "vector_similarity_weight", "top_n", "rerank_id","top_k"]
+    key_mapping = {"parameters": "variables", "prologue": "opener", "quote": "show_quote", "system": "prompt", "rerank_id": "rerank_model", "vector_similarity_weight": "keywords_similarity_weight"}
+    key_list = ["similarity_threshold", "vector_similarity_weight", "top_n", "rerank_id", "top_k"]
     if prompt:
         for new_key, old_key in key_mapping.items():
             if old_key in prompt:
@@ -85,9 +81,7 @@ def create(tenant_id):
     req["rerank_id"] = req.get("rerank_id", "")
     if req.get("rerank_id"):
         value_rerank_model = ["BAAI/bge-reranker-v2-m3", "maidalun1020/bce-reranker-base_v1"]
-        if req["rerank_id"] not in value_rerank_model and not TenantLLMService.query(tenant_id=tenant_id,
-                                                                                     llm_name=req.get("rerank_id"),
-                                                                                     model_type="rerank"):
+        if req["rerank_id"] not in value_rerank_model and not TenantLLMService.query(tenant_id=tenant_id, llm_name=req.get("rerank_id"), model_type="rerank"):
             return get_error_data_result(f"`rerank_model` {req.get('rerank_id')} doesn't exist")
     if not req.get("llm_id"):
         req["llm_id"] = tenant.llm_id
@@ -106,27 +100,24 @@ def create(tenant_id):
 {knowledge}
 The above is the knowledge base.""",
             "prologue": "Hi! I'm your assistant, what can I do for you?",
-            "parameters": [
-                {"key": "knowledge", "optional": False}
-            ],
+            "parameters": [{"key": "knowledge", "optional": False}],
             "empty_response": "Sorry! No relevant content was found in the knowledge base!",
             "quote": True,
             "tts": False,
-            "refine_multiturn": True
+            "refine_multiturn": True,
         }
         key_list_2 = ["system", "prologue", "parameters", "empty_response", "quote", "tts", "refine_multiturn"]
         if "prompt_config" not in req:
-            req['prompt_config'] = {}
+            req["prompt_config"] = {}
         for key in key_list_2:
-            temp = req['prompt_config'].get(key)
-            if (not temp and key == 'system') or (key not in req["prompt_config"]):
-                req['prompt_config'][key] = default_prompt[key]
-        for p in req['prompt_config']["parameters"]:
+            temp = req["prompt_config"].get(key)
+            if (not temp and key == "system") or (key not in req["prompt_config"]):
+                req["prompt_config"][key] = default_prompt[key]
+        for p in req["prompt_config"]["parameters"]:
             if p["optional"]:
                 continue
-            if req['prompt_config']["system"].find("{%s}" % p["key"]) < 0:
-                return get_error_data_result(
-                    message="Parameter '{}' is not used".format(p["key"]))
+            if req["prompt_config"]["system"].find("{%s}" % p["key"]) < 0:
+                return get_error_data_result(message="Parameter '{}' is not used".format(p["key"]))
     # save
     if not DialogService.save(**req):
         return get_error_data_result(message="Fail to new a chat!")
@@ -141,10 +132,7 @@ def create(tenant_id):
             renamed_dict[new_key] = value
     res["prompt"] = renamed_dict
     del res["prompt_config"]
-    new_dict = {"similarity_threshold": res["similarity_threshold"],
-                "keywords_similarity_weight": 1-res["vector_similarity_weight"],
-                "top_n": res["top_n"],
-                "rerank_model": res['rerank_id']}
+    new_dict = {"similarity_threshold": res["similarity_threshold"], "keywords_similarity_weight": 1 - res["vector_similarity_weight"], "top_n": res["top_n"], "rerank_model": res["rerank_id"]}
     res["prompt"].update(new_dict)
     for key in key_list:
         del res[key]
@@ -156,11 +144,11 @@ def create(tenant_id):
     return get_result(data=res)
 
 
-@manager.route('/chats/<chat_id>', methods=['PUT'])  # noqa: F821
+@manager.route("/chats/<chat_id>", methods=["PUT"])  # noqa: F821
 @token_required
 def update(tenant_id, chat_id):
     if not DialogService.query(tenant_id=tenant_id, id=chat_id, status=StatusEnum.VALID.value):
-        return get_error_data_result(message='You do not own the chat')
+        return get_error_data_result(message="You do not own the chat")
     req = request.json
     ids = req.get("dataset_ids")
     if "show_quotation" in req:
@@ -174,14 +162,12 @@ def update(tenant_id, chat_id):
         kb = kbs[0]
         if kb.chunk_num == 0:
             return get_error_data_result(f"The dataset {kb_id} doesn't own parsed file")
 
     kbs = KnowledgebaseService.get_by_ids(ids)
     embd_ids = [TenantLLMService.split_model_name_and_factory(kb.embd_id)[0] for kb in kbs]  # remove vendor suffix for comparison
     embd_count = list(set(embd_ids))
     if len(embd_count) != 1:
-        return get_result(
-            message='Datasets use different embedding models."',
-            code=settings.RetCode.AUTHENTICATION_ERROR)
+        return get_result(message='Datasets use different embedding models."', code=settings.RetCode.AUTHENTICATION_ERROR)
     req["kb_ids"] = ids
     llm = req.get("llm")
     if llm:
@@ -195,13 +181,8 @@ def update(tenant_id, chat_id):
         return get_error_data_result(message="Tenant not found!")
     # prompt
     prompt = req.get("prompt")
-    key_mapping = {"parameters": "variables",
-                   "prologue": "opener",
-                   "quote": "show_quote",
-                   "system": "prompt",
-                   "rerank_id": "rerank_model",
-                   "vector_similarity_weight": "keywords_similarity_weight"}
-    key_list = ["similarity_threshold", "vector_similarity_weight", "top_n", "rerank_id","top_k"]
+    key_mapping = {"parameters": "variables", "prologue": "opener", "quote": "show_quote", "system": "prompt", "rerank_id": "rerank_model", "vector_similarity_weight": "keywords_similarity_weight"}
+    key_list = ["similarity_threshold", "vector_similarity_weight", "top_n", "rerank_id", "top_k"]
     if prompt:
         for new_key, old_key in key_mapping.items():
             if old_key in prompt:
@@ -214,16 +195,12 @@ def update(tenant_id, chat_id):
     res = res.to_json()
     if req.get("rerank_id"):
         value_rerank_model = ["BAAI/bge-reranker-v2-m3", "maidalun1020/bce-reranker-base_v1"]
-        if req["rerank_id"] not in value_rerank_model and not TenantLLMService.query(tenant_id=tenant_id,
-                                                                                     llm_name=req.get("rerank_id"),
-                                                                                     model_type="rerank"):
+        if req["rerank_id"] not in value_rerank_model and not TenantLLMService.query(tenant_id=tenant_id, llm_name=req.get("rerank_id"), model_type="rerank"):
             return get_error_data_result(f"`rerank_model` {req.get('rerank_id')} doesn't exist")
     if "name" in req:
         if not req.get("name"):
             return get_error_data_result(message="`name` cannot be empty.")
-        if req["name"].lower() != res["name"].lower() \
-                and len(
-            DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)) > 0:
+        if req["name"].lower() != res["name"].lower() and len(DialogService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)) > 0:
             return get_error_data_result(message="Duplicated chat name in updating chat.")
     if "prompt_config" in req:
         res["prompt_config"].update(req["prompt_config"])
@@ -246,7 +223,7 @@ def update(tenant_id, chat_id):
     return get_result()
 
 
-@manager.route('/chats', methods=['DELETE'])  # noqa: F821
+@manager.route("/chats", methods=["DELETE"])  # noqa: F821
 @token_required
 def delete(tenant_id):
     errors = []
@@ -273,30 +250,23 @@ def delete(tenant_id):
             temp_dict = {"status": StatusEnum.INVALID.value}
             DialogService.update_by_id(id, temp_dict)
             success_count += 1
 
     if errors:
         if success_count > 0:
-            return get_result(
-                data={"success_count": success_count, "errors": errors},
-                message=f"Partially deleted {success_count} chats with {len(errors)} errors"
-            )
+            return get_result(data={"success_count": success_count, "errors": errors}, message=f"Partially deleted {success_count} chats with {len(errors)} errors")
         else:
             return get_error_data_result(message="; ".join(errors))
 
     if duplicate_messages:
         if success_count > 0:
-            return get_result(
-                message=f"Partially deleted {success_count} chats with {len(duplicate_messages)} errors",
-                data={"success_count": success_count, "errors": duplicate_messages}
-            )
+            return get_result(message=f"Partially deleted {success_count} chats with {len(duplicate_messages)} errors", data={"success_count": success_count, "errors": duplicate_messages})
         else:
            return get_error_data_result(message=";".join(duplicate_messages))
 
     return get_result()
 
 
-@manager.route('/chats', methods=['GET'])  # noqa: F821
+@manager.route("/chats", methods=["GET"])  # noqa: F821
 @token_required
 def list_chat(tenant_id):
     id = request.args.get("id")
@@ -316,13 +286,15 @@ def list_chat(tenant_id):
     if not chats:
         return get_result(data=[])
     list_assts = []
-    key_mapping = {"parameters": "variables",
-                   "prologue": "opener",
-                   "quote": "show_quote",
-                   "system": "prompt",
-                   "rerank_id": "rerank_model",
-                   "vector_similarity_weight": "keywords_similarity_weight",
-                   "do_refer": "show_quotation"}
+    key_mapping = {
+        "parameters": "variables",
+        "prologue": "opener",
+        "quote": "show_quote",
+        "system": "prompt",
+        "rerank_id": "rerank_model",
+        "vector_similarity_weight": "keywords_similarity_weight",
+        "do_refer": "show_quotation",
+    }
     key_list = ["similarity_threshold", "vector_similarity_weight", "top_n", "rerank_id"]
     for res in chats:
         renamed_dict = {}
@@ -331,10 +303,7 @@ def list_chat(tenant_id):
             renamed_dict[new_key] = value
         res["prompt"] = renamed_dict
         del res["prompt_config"]
-        new_dict = {"similarity_threshold": res["similarity_threshold"],
-                    "keywords_similarity_weight": 1-res["vector_similarity_weight"],
-                    "top_n": res["top_n"],
-                    "rerank_model": res['rerank_id']}
+        new_dict = {"similarity_threshold": res["similarity_threshold"], "keywords_similarity_weight": 1 - res["vector_similarity_weight"], "top_n": res["top_n"], "rerank_model": res["rerank_id"]}
        res["prompt"].update(new_dict)
        for key in key_list:
            del res[key]
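Several hunks above compare knowledge-base embedding models via `TenantLLMService.split_model_name_and_factory(kb.embd_id)[0]` rather than the raw `embd_id`. The point, per the inline comment, is that model ids can carry a `@factory` vendor suffix, so raw string comparison could flag identical models as different. A hedged sketch of that comparison; the real helper's signature and edge cases may differ:

```python
def split_model_name_and_factory(model_id: str) -> tuple[str, str | None]:
    """Sketch: split 'name@Factory' into (name, factory); no suffix -> (name, None)."""
    if "@" in model_id:
        name, factory = model_id.rsplit("@", 1)
        return name, factory
    return model_id, None

embd_ids = [
    split_model_name_and_factory(m)[0]
    for m in ("BAAI/bge-large-zh-v1.5@BAAI", "BAAI/bge-large-zh-v1.5")
]
assert len(set(embd_ids)) == 1  # same model despite the vendor suffix
```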
@@ -13,36 +13,37 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import logging
 import json
+import logging
 import re
 from datetime import datetime
 
-from flask import request, session, redirect
-from werkzeug.security import generate_password_hash, check_password_hash
-from flask_login import login_required, current_user, login_user, logout_user
+from flask import redirect, request, session
+from flask_login import current_user, login_required, login_user, logout_user
+from werkzeug.security import check_password_hash, generate_password_hash
 
+from api import settings
+from api.apps.auth import get_auth_client
+from api.db import FileType, UserTenantRole
 from api.db.db_models import TenantLLM
-from api.db.services.llm_service import TenantLLMService, LLMService
-from api.utils.api_utils import (
-    server_error_response,
-    validate_request,
-    get_data_error_result,
-)
+from api.db.services.file_service import FileService
+from api.db.services.llm_service import LLMService, TenantLLMService
+from api.db.services.user_service import TenantService, UserService, UserTenantService
 from api.utils import (
-    get_uuid,
-    get_format_time,
-    decrypt,
-    download_img,
     current_timestamp,
     datetime_format,
+    decrypt,
+    download_img,
+    get_format_time,
+    get_uuid,
 )
-from api.db import UserTenantRole, FileType
-from api import settings
-from api.db.services.user_service import UserService, TenantService, UserTenantService
-from api.db.services.file_service import FileService
-from api.utils.api_utils import get_json_result, construct_response
-from api.apps.auth import get_auth_client
+from api.utils.api_utils import (
+    construct_response,
+    get_data_error_result,
+    get_json_result,
+    server_error_response,
+    validate_request,
+)
 
 
 @manager.route("/login", methods=["POST", "GET"])  # noqa: F821
@@ -77,9 +78,7 @@ def login():
             type: object
     """
     if not request.json:
-        return get_json_result(
-            data=False, code=settings.RetCode.AUTHENTICATION_ERROR, message="Unauthorized!"
-        )
+        return get_json_result(data=False, code=settings.RetCode.AUTHENTICATION_ERROR, message="Unauthorized!")
 
     email = request.json.get("email", "")
     users = UserService.query(email=email)
@@ -94,9 +93,7 @@ def login():
     try:
         password = decrypt(password)
     except BaseException:
-        return get_json_result(
-            data=False, code=settings.RetCode.SERVER_ERROR, message="Fail to crypt password"
-        )
+        return get_json_result(data=False, code=settings.RetCode.SERVER_ERROR, message="Fail to crypt password")
 
     user = UserService.query_user(email, password)
     if user:
@@ -116,7 +113,7 @@ def login():
     )
 
 
 @manager.route("/login/channels", methods=["GET"])  # noqa: F821
 def get_login_channels():
     """
     Get all supported authentication channels.
@@ -124,22 +121,20 @@ def get_login_channels():
     try:
         channels = []
         for channel, config in settings.OAUTH_CONFIG.items():
-            channels.append({
-                "channel": channel,
-                "display_name": config.get("display_name", channel.title()),
-                "icon": config.get("icon", "sso"),
-            })
+            channels.append(
+                {
+                    "channel": channel,
+                    "display_name": config.get("display_name", channel.title()),
+                    "icon": config.get("icon", "sso"),
+                }
+            )
         return get_json_result(data=channels)
     except Exception as e:
         logging.exception(e)
-        return get_json_result(
-            data=[],
-            message=f"Load channels failure, error: {str(e)}",
-            code=settings.RetCode.EXCEPTION_ERROR
-        )
+        return get_json_result(data=[], message=f"Load channels failure, error: {str(e)}", code=settings.RetCode.EXCEPTION_ERROR)
 
 
 @manager.route("/login/<channel>", methods=["GET"])  # noqa: F821
 def oauth_login(channel):
     channel_config = settings.OAUTH_CONFIG.get(channel)
     if not channel_config:
@@ -152,7 +147,7 @@ def oauth_login(channel):
     return redirect(auth_url)
 
 
 @manager.route("/oauth/callback/<channel>", methods=["GET"])  # noqa: F821
 def oauth_callback(channel):
     """
     Handle the OAuth/OIDC callback for various channels dynamically.
@@ -190,7 +185,7 @@ def oauth_callback(channel):
     # Login or register
     users = UserService.query(email=user_info.email)
     user_id = get_uuid()
 
     if not users:
         try:
             try:
@@ -434,9 +429,7 @@ def user_info_from_feishu(access_token):
         "Content-Type": "application/json; charset=utf-8",
         "Authorization": f"Bearer {access_token}",
     }
-    res = requests.get(
-        "https://open.feishu.cn/open-apis/authen/v1/user_info", headers=headers
-    )
+    res = requests.get("https://open.feishu.cn/open-apis/authen/v1/user_info", headers=headers)
     user_info = res.json()["data"]
     user_info["email"] = None if user_info.get("email") == "" else user_info["email"]
     return user_info
@@ -446,17 +439,13 @@ def user_info_from_github(access_token):
     import requests
 
     headers = {"Accept": "application/json", "Authorization": f"token {access_token}"}
-    res = requests.get(
-        f"https://api.github.com/user?access_token={access_token}", headers=headers
-    )
+    res = requests.get(f"https://api.github.com/user?access_token={access_token}", headers=headers)
     user_info = res.json()
     email_info = requests.get(
         f"https://api.github.com/user/emails?access_token={access_token}",
         headers=headers,
     ).json()
-    user_info["email"] = next(
-        (email for email in email_info if email["primary"]), None
-    )["email"]
+    user_info["email"] = next((email for email in email_info if email["primary"]), None)["email"]
     return user_info
@@ -516,9 +505,7 @@ def setting_user():
     request_data = request.json
     if request_data.get("password"):
         new_password = request_data.get("new_password")
-        if not check_password_hash(
-            current_user.password, decrypt(request_data["password"])
-        ):
+        if not check_password_hash(current_user.password, decrypt(request_data["password"])):
             return get_json_result(
                 data=False,
                 code=settings.RetCode.AUTHENTICATION_ERROR,
@@ -549,9 +536,7 @@ def setting_user():
         return get_json_result(data=True)
     except Exception as e:
         logging.exception(e)
-        return get_json_result(
-            data=False, message="Update failure!", code=settings.RetCode.EXCEPTION_ERROR
-        )
+        return get_json_result(data=False, message="Update failure!", code=settings.RetCode.EXCEPTION_ERROR)
 
 
 @manager.route("/info", methods=["GET"])  # noqa: F821
@@ -643,9 +628,23 @@ def user_register(user_id, user):
                 "model_type": llm.model_type,
                 "api_key": settings.API_KEY,
                 "api_base": settings.LLM_BASE_URL,
-                "max_tokens": llm.max_tokens if llm.max_tokens else 8192
+                "max_tokens": llm.max_tokens if llm.max_tokens else 8192,
             }
         )
+    if settings.LIGHTEN != 1:
+        for buildin_embedding_model in settings.BUILTIN_EMBEDDING_MODELS:
+            mdlnm, fid = TenantLLMService.split_model_name_and_factory(buildin_embedding_model)
+            tenant_llm.append(
+                {
+                    "tenant_id": user_id,
+                    "llm_factory": fid,
+                    "llm_name": mdlnm,
+                    "model_type": "embedding",
+                    "api_key": "",
+                    "api_base": "",
+                    "max_tokens": 1024 if buildin_embedding_model == "BAAI/bge-large-zh-v1.5@BAAI" else 512,
+                }
            )
 
     if not UserService.save(**user):
         return
@@ -302,7 +302,7 @@ def chat(dialog, messages, stream=True, **kwargs):
     if "max_tokens" in gen_conf:
         gen_conf["max_tokens"] = min(gen_conf["max_tokens"], max_tokens - used_token_count)
 
-    def repair_bad_citation_formats(answer: str, kbinfos: dict, idx: dict):
+    def repair_bad_citation_formats(answer: str, kbinfos: dict, idx: set):
         max_index = len(kbinfos["chunks"])
 
         def safe_add(i):
@@ -327,8 +327,8 @@ def chat(dialog, messages, stream=True, **kwargs):
         find_and_replace(r"\$\[(\d+)\]\$")  # $[12]$
         find_and_replace(r"\$\$(\d+)\${2,}")  # $$12$$$$
         find_and_replace(r"\$(\d+)\$")  # $12$
-        find_and_replace(r"#(\d+)\$\$")  # #12$$
-        find_and_replace(r"##(\d+)\$")  # ##12$
+        find_and_replace(r"(#{2,})(\d+)(\${2,})", group_index=2)  # 2+ # and 2+ $
+        find_and_replace(r"(#{2,})(\d+)(#{1,})", group_index=2)  # 2+ # and 1+ #
         find_and_replace(r"##(\d+)#{2,}")  # ##12###
         find_and_replace(r"【(\d+)】")  # 【12】
         find_and_replace(r"ref\s*(\d+)", flags=re.IGNORECASE)  # ref12, ref 12, REF 12
@@ -623,4 +623,3 @@ def ask(question, kb_ids, tenant_id):
             answer = ans
-            yield {"answer": answer, "reference": {}}
         yield decorate_answer(answer)
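For context, these `find_and_replace` passes normalize malformed citation markers the LLM may emit (`$12$`, `##12$$`, `【12】`, `ref 12`, and so on) and drop indices that point past the retrieved chunks. A self-contained sketch of that normalization; the `[ID:n]` target format below is an assumption for illustration, not necessarily RAGFlow's canonical marker:

```python
import re

def repair_bad_citation_formats(answer: str, max_index: int) -> str:
    def fix(pattern: str, group_index: int = 1, flags: int = 0) -> None:
        nonlocal answer

        def repl(m: re.Match) -> str:
            i = int(m.group(group_index))
            # keep only citations that point at an existing chunk
            return f"[ID:{i}]" if 0 <= i < max_index else m.group(0)

        answer = re.sub(pattern, repl, answer, flags=flags)

    fix(r"\$(\d+)\$")                            # $12$
    fix(r"(#{2,})(\d+)(\${2,})", group_index=2)  # ##12$$ and longer runs
    fix(r"【(\d+)】")                             # 【12】
    fix(r"ref\s*(\d+)", flags=re.IGNORECASE)     # ref12 / REF 12
    return answer

assert repair_bad_citation_formats("see $3$ and REF 7", max_index=10) == "see [ID:3] and [ID:7]"
```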
@@ -81,7 +81,7 @@ def init_settings():
     DATABASE = decrypt_database_config(name=DATABASE_TYPE)
     LLM = get_base_config("user_default_llm", {})
     LLM_DEFAULT_MODELS = LLM.get("default_models", {})
-    LLM_FACTORY = LLM.get("factory", "Tongyi-Qianwen")
+    LLM_FACTORY = LLM.get("factory")
     LLM_BASE_URL = LLM.get("base_url")
     try:
         REGISTER_ENABLED = int(os.environ.get("REGISTER_ENABLED", "1"))
@@ -567,7 +567,7 @@
     {
         "name": "Youdao",
         "logo": "",
-        "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
+        "tags": "TEXT EMBEDDING",
         "status": "1",
         "llm": [
             {
@@ -755,7 +755,7 @@
     {
         "name": "BAAI",
         "logo": "",
-        "tags": "TEXT EMBEDDING, TEXT RE-RANK",
+        "tags": "TEXT EMBEDDING",
         "status": "1",
         "llm": [
             {
@@ -996,7 +996,7 @@
         "status": "1",
         "llm": [
             {
-                "llm_name": "gemini-2.5-flash-preview-04-17",
+                "llm_name": "gemini-2.5-flash-preview-05-20",
                 "tags": "LLM,CHAT,1024K,IMAGE2TEXT",
                 "max_tokens": 1048576,
                 "model_type": "image2text",
@@ -1023,7 +1023,7 @@
                 "model_type": "image2text"
             },
             {
-                "llm_name": "gemini-2.5-pro-exp-03-25",
+                "llm_name": "gemini-2.5-pro-preview-05-06",
                 "tags": "LLM,IMAGE2TEXT,1024K",
                 "max_tokens": 1048576,
                 "model_type": "image2text"
@@ -3133,6 +3133,20 @@
         "tags": "LLM",
         "status": "1",
         "llm": [
+            {
+                "llm_name": "claude-opus-4-20250514",
+                "tags": "LLM,IMAGE2TEXT,200k",
+                "max_tokens": 204800,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-sonnet-4-20250514",
+                "tags": "LLM,IMAGE2TEXT,200k",
+                "max_tokens": 204800,
+                "model_type": "image2text",
+                "is_tools": true
+            },
             {
                 "llm_name": "claude-3-7-sonnet-20250219",
                 "tags": "LLM,IMAGE2TEXT,200k",
@@ -3283,4 +3297,4 @@
         "llm": []
     }
 ]
-}
+}
@@ -53,14 +53,14 @@ def corpNorm(nm, add_region=True):
     nm = re.sub(r"&amp;", "&", nm)
     nm = re.sub(r"[\(\)()\+'\"\t \*\\【】-]+", " ", nm)
     nm = re.sub(
-        r"([—-]+.*| +co\..*|corp\..*| +inc\..*| +ltd.*)", "", nm, 10000, re.IGNORECASE
+        r"([—-]+.*| +co\..*|corp\..*| +inc\..*| +ltd.*)", "", nm, count=10000, flags=re.IGNORECASE
     )
     nm = re.sub(
         r"(计算机|技术|(技术|科技|网络)*有限公司|公司|有限|研发中心|中国|总部)$",
         "",
         nm,
-        10000,
-        re.IGNORECASE,
+        count=10000,
+        flags=re.IGNORECASE,
     )
     if not nm or (len(nm) < 5 and not regions.isName(nm[0:2])):
         return nm
@@ -51,7 +51,7 @@ PY = Pinyin()
 
 
 def rmHtmlTag(line):
-    return re.sub(r"<[a-z0-9.\"=';,:\+_/ -]+>", " ", line, 100000, re.IGNORECASE)
+    return re.sub(r"<[a-z0-9.\"=';,:\+_/ -]+>", " ", line, count=100000, flags=re.IGNORECASE)
 
 
 def highest_degree(dg):
@@ -507,7 +507,7 @@ def parse(cv):
         (r".*国有.*", "国企"),
         (r"[ ()\(\)人/·0-9-]+", ""),
         (r".*(元|规模|于|=|北京|上海|至今|中国|工资|州|shanghai|强|餐饮|融资|职).*", "")]:
-        cv["corporation_type"] = re.sub(p, r, cv["corporation_type"], 1000, re.IGNORECASE)
+        cv["corporation_type"] = re.sub(p, r, cv["corporation_type"], count=1000, flags=re.IGNORECASE)
     if len(cv["corporation_type"]) < 2:
         del cv["corporation_type"]
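These hunks are one mechanical change repeated: `count` and `flags` are now passed to `re.sub` as keyword arguments. Recent Python versions deprecate passing them positionally, and the positional form is easy to misread because the fourth positional argument is `count`, not `flags`. A minimal before/after:

```python
import re

s = "Foo Co. Ltd."
# Positional form: the 4th argument is count, the 5th is flags (deprecated in
# recent Python and a classic source of "my flags were silently a count" bugs).
old = re.sub(r" +co\..*", "", s, 10000, re.IGNORECASE)
# Keyword form: equivalent, unambiguous, future-proof.
new = re.sub(r" +co\..*", "", s, count=10000, flags=re.IGNORECASE)
assert old == new == "Foo"
```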
docker/.env: 10 changed lines

@@ -91,13 +91,13 @@ REDIS_PASSWORD=infini_rag_flow
 SVR_HTTP_PORT=9380
 
 # The RAGFlow Docker image to download.
-# Defaults to the v0.18.0-slim edition, which is the RAGFlow Docker image without embedding models.
-RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0-slim
+# Defaults to the v0.19.0-slim edition, which is the RAGFlow Docker image without embedding models.
+RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0-slim
 #
 # To download the RAGFlow Docker image with embedding models, uncomment the following line instead:
-# RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0
+# RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0
 #
-# The Docker image of the v0.18.0 edition includes built-in embedding models:
+# The Docker image of the v0.19.0 edition includes built-in embedding models:
 # - BAAI/bge-large-zh-v1.5
 # - maidalun1020/bce-embedding-base_v1
 #
@@ -169,6 +169,8 @@ REGISTER_ENABLED=1
 # SANDBOX_BASE_NODEJS_IMAGE=infiniflow/sandbox-base-nodejs:latest
 # SANDBOX_EXECUTOR_MANAGER_PORT=9385
 # SANDBOX_ENABLE_SECCOMP=false
+# SANDBOX_MAX_MEMORY=256m # b, k, m, g
+# SANDBOX_TIMEOUT=10s # s, m, 1m30s
 
 # Important: To enable sandbox, you must re-declare the compose profiles.
 # 1. Comment out the COMPOSE_PROFILES line above.
@@ -78,8 +78,8 @@ The [.env](./.env) file contains important environment variables for Docker.
 - `RAGFLOW-IMAGE`
   The Docker image edition. Available editions:
 
-  - `infiniflow/ragflow:v0.18.0-slim` (default): The RAGFlow Docker image without embedding models.
-  - `infiniflow/ragflow:v0.18.0`: The RAGFlow Docker image with embedding models including:
+  - `infiniflow/ragflow:v0.19.0-slim` (default): The RAGFlow Docker image without embedding models.
+  - `infiniflow/ragflow:v0.19.0`: The RAGFlow Docker image with embedding models including:
     - Built-in embedding models:
       - `BAAI/bge-large-zh-v1.5`
       - `maidalun1020/bce-embedding-base_v1`
@@ -124,6 +124,8 @@ services:
       - SANDBOX_BASE_PYTHON_IMAGE=${SANDBOX_BASE_PYTHON_IMAGE:-infiniflow/sandbox-base-python:latest}
       - SANDBOX_BASE_NODEJS_IMAGE=${SANDBOX_BASE_NODEJS_IMAGE:-infiniflow/sandbox-base-nodejs:latest}
      - SANDBOX_ENABLE_SECCOMP=${SANDBOX_ENABLE_SECCOMP:-false}
+      - SANDBOX_MAX_MEMORY=${SANDBOX_MAX_MEMORY:-256m}
+      - SANDBOX_TIMEOUT=${SANDBOX_TIMEOUT:-10s}
     healthcheck:
       test: ["CMD", "curl", "http://localhost:9385/healthz"]
       interval: 10s
@@ -99,8 +99,8 @@ RAGFlow utilizes MinIO as its object storage solution, leveraging its scalabilit
 - `RAGFLOW-IMAGE`
   The Docker image edition. Available editions:
 
-  - `infiniflow/ragflow:v0.18.0-slim` (default): The RAGFlow Docker image without embedding models.
-  - `infiniflow/ragflow:v0.18.0`: The RAGFlow Docker image with embedding models including:
+  - `infiniflow/ragflow:v0.19.0-slim` (default): The RAGFlow Docker image without embedding models.
+  - `infiniflow/ragflow:v0.19.0`: The RAGFlow Docker image with embedding models including:
     - Built-in embedding models:
       - `BAAI/bge-large-zh-v1.5`
      - `maidalun1020/bce-embedding-base_v1`
@@ -77,7 +77,7 @@ After building the infiniflow/ragflow:nightly-slim image, you are ready to launc
 
 1. Edit Docker Compose Configuration
 
-   Open the `docker/.env` file. Find the `RAGFLOW_IMAGE` setting and change the image reference from `infiniflow/ragflow:v0.18.0-slim` to `infiniflow/ragflow:nightly-slim` to use the pre-built image.
+   Open the `docker/.env` file. Find the `RAGFLOW_IMAGE` setting and change the image reference from `infiniflow/ragflow:v0.19.0-slim` to `infiniflow/ragflow:nightly-slim` to use the pre-built image.
 
 2. Launch the Service
 
@@ -23,7 +23,7 @@ Once a connection is established, an MCP server communicates with its client in
 ## Prerequisites
 
 1. Ensure RAGFlow is upgraded to v0.18.0 or later.
-2. Have your RAGFlow API key ready. See [Acquire a RAGFlow API key](./acquire_ragflow_api_key.md).
+2. Have your RAGFlow API key ready. See [Acquire a RAGFlow API key](../acquire_ragflow_api_key.md).
 
 :::tip INFO
 If you wish to try out our MCP server without upgrading RAGFlow, community contributor [yiminghub2024](https://github.com/yiminghub2024) 👏 shares their recommended steps [here](#launch-an-mcp-server-without-upgrading-ragflow).
 
@@ -11,7 +11,7 @@ Switch your doc engine from Elasticsearch to Infinity.
 
 RAGFlow uses Elasticsearch by default for storing full text and vectors. To switch to [Infinity](https://github.com/infiniflow/infinity/), follow these steps:
 
-:::danger WARNING
+:::caution WARNING
 Switching to Infinity on a Linux/arm64 machine is not yet officially supported.
 :::
 
@@ -21,7 +21,7 @@ Switching to Infinity on a Linux/arm64 machine is not yet officially supported.
 $ docker compose -f docker/docker-compose.yml down -v
 ```
 
-:::cautiion WARNING
+:::caution WARNING
 `-v` will delete the docker container volumes, and the existing data will be cleared.
 :::
docs/faq.mdx (10 lines changed)
@@ -30,17 +30,17 @@ The "garbage in garbage out" status quo remains unchanged despite the fact that

Each RAGFlow release is available in two editions:

-- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.18.0-slim`
-- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.18.0`
+- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.19.0-slim`
+- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.19.0`

---

### Which embedding models can be deployed locally?

-RAGFlow offers two Docker image editions, `v0.18.0-slim` and `v0.18.0`:
+RAGFlow offers two Docker image editions, `v0.19.0-slim` and `v0.19.0`:

-- `infiniflow/ragflow:v0.18.0-slim` (default): The RAGFlow Docker image without embedding models.
-- `infiniflow/ragflow:v0.18.0`: The RAGFlow Docker image with embedding models including:
+- `infiniflow/ragflow:v0.19.0-slim` (default): The RAGFlow Docker image without embedding models.
+- `infiniflow/ragflow:v0.19.0`: The RAGFlow Docker image with embedding models including:
  - Built-in embedding models:
    - `BAAI/bge-large-zh-v1.5`
    - `maidalun1020/bce-embedding-base_v1`
@@ -25,7 +25,7 @@ When debugging your chat assistant, you can use AI search as a reference to verify

## Frequently asked questions

-### key difference between an AI search and an AI chat?
+### Key difference between an AI search and an AI chat?

A chat is a multi-turn AI conversation where you can define your retrieval strategy (a weighted reranking score can be used to replace the weighted vector similarity in a hybrid search) and choose your chat model. In an AI chat, you can configure advanced RAG strategies, such as knowledge graphs, auto-keyword, and auto-question, for your specific case. Retrieved chunks are not displayed along with the answer.
@@ -30,7 +30,7 @@ In the **Variable** section, you add, remove, or update variables.
`{knowledge}` is the system's reserved variable, representing the chunks retrieved from the knowledge base(s) specified by **Knowledge bases** under the **Assistant settings** tab. If your chat assistant is associated with certain knowledge bases, you can keep it as is.

:::info NOTE
-It does not currently make a difference whether you set `{knowledge}` to optional or mandatory, but note that this design will be updated at a later point.
+It currently makes no difference whether `{knowledge}` is set as optional or mandatory, but please note this design will be updated in due course.
:::

From v0.17.0 onward, you can start an AI chat without specifying knowledge bases. In this case, we recommend removing the `{knowledge}` variable to prevent unnecessary reference and keeping the **Empty response** field empty to avoid errors.
@@ -42,9 +42,13 @@ You start an AI conversation by creating an assistant.
- **Rerank model** sets the reranker model to use. It is left empty by default.
  - If **Rerank model** is left empty, the hybrid score system uses keyword similarity and vector similarity, and the default weight assigned to the vector similarity component is 1-0.7=0.3.
  - If **Rerank model** is selected, the hybrid score system uses keyword similarity and reranker score, and the default weight assigned to the reranker score is 1-0.7=0.3.
+- **Cross-language search**: Optional
+  Select one or more target languages from the dropdown menu. The system’s default chat model will then translate your query into the selected target language(s). This translation ensures accurate semantic matching across languages, allowing you to retrieve relevant results regardless of language differences.
+  - When selecting target languages, please ensure that these languages are present in the knowledge base to guarantee an effective search.
+  - If no target language is selected, the system will search only in the language of your query, which may cause relevant information in other languages to be missed.
- **Variable** refers to the variables (keys) to be used in the system prompt. `{knowledge}` is a reserved variable. Click **Add** to add more variables for the system prompt.
  - If you are uncertain about the logic behind **Variable**, leave it *as-is*.
-  - As of v0.18.0, if you add custom variables here, the only way you can pass in their values is to call:
+  - As of v0.19.0, if you add custom variables here, the only way you can pass in their values is to call:
    - HTTP method [Converse with chat assistant](../../references/http_api_reference.md#converse-with-chat-assistant), or
    - Python method [Converse with chat assistant](../../references/python_api_reference.md#converse-with-chat-assistant).
@@ -16,4 +16,4 @@ Please note that some of your settings may consume a significant amount of time.

- On the configuration page of your knowledge base, switch off **Use RAPTOR to enhance retrieval**.
- Extracting knowledge graph (GraphRAG) is time-consuming.
- Disable **Auto-keyword** and **Auto-question** on the configuration page of your knowledge base, as both depend on the LLM.
-- **v0.17.0+:** If your document is plain text PDF and does not require GPU-intensive processes like OCR (Optical Character Recognition), TSR (Table Structure Recognition), or DLA (Document Layout Analysis), you can choose **Naive** over **DeepDoc** or other time-consuming large model options in the **Document parser** dropdown. This will substantially reduce document parsing time.
+- **v0.17.0+:** If all PDFs in your knowledge base are plain text and do not require GPU-intensive processes like OCR (Optical Character Recognition), TSR (Table Structure Recognition), or DLA (Document Layout Analysis), you can choose **Naive** over **DeepDoc** or other time-consuming large model options in the **Document parser** dropdown. This will substantially reduce document parsing time.
@@ -1,5 +1,5 @@
---
-sidebar_position: 0
+sidebar_position: -1
slug: /configure_knowledge_base
---
@@ -67,6 +67,10 @@ The following embedding models can be deployed locally:
- BAAI/bge-large-zh-v1.5
- maidalun1020/bce-embedding-base_v1

+:::danger IMPORTANT
+Please note these two embedding models support both English and Chinese. If your knowledge base contains other languages, the performance may be COMPROMISED.
+:::

### Upload file

- RAGFlow's **File Management** allows you to link a file to multiple knowledge bases, in which case each target knowledge base holds a reference to the file.
@@ -124,7 +128,7 @@ See [Run retrieval test](./run_retrieval_test.md) for details.

## Search for knowledge base

-As of RAGFlow v0.18.0, the search feature is still in a rudimentary form, supporting only knowledge base search by name.
+As of RAGFlow v0.19.0, the search feature is still in a rudimentary form, supporting only knowledge base search by name.

![search knowledge base](https://github.com/user-attachments/assets/836ae94c-2438-42be-879e-c7ad2a59693e)
@@ -47,7 +47,7 @@ The RAPTOR feature is disabled by default. To enable it, manually switch on the

### Prompt

-The following prompt will be applied recursively for cluster summarization, with `{cluster_content}` serving as an internal parameter. We recommend that you keep it as-is for now. The design will be updated at a later point.
+The following prompt will be applied recursively for cluster summarization, with `{cluster_content}` serving as an internal parameter. We recommend that you keep it as-is for now. The design will be updated in due course.

```
Please summarize the following paragraphs... Paragraphs as following:
@@ -60,6 +60,15 @@ The switch is disabled by default. When enabled, RAGFlow performs the following
Using a knowledge graph in a retrieval test will significantly increase the time to receive a response.
:::

+### Cross-language search
+
+To perform a cross-language search, select one or more target languages from the dropdown menu. The system’s default chat model will then translate your query entered in the Test text field into the selected target language(s). This translation ensures accurate semantic matching across languages, allowing you to retrieve relevant results regardless of language differences.
+
+:::tip NOTE
+- When selecting target languages, please ensure that these languages are present in the knowledge base to guarantee an effective search.
+- If no target language is selected, the system will search only in the language of your query, which may cause relevant information in other languages to be missed.
+:::

### Test text

This field is where you put in your testing query.
docs/guides/dataset/select_pdf_parser.md (new file, 53 lines)
@@ -0,0 +1,53 @@
---
sidebar_position: 2
slug: /select_pdf_parser
---

# Select PDF parser

Select a visual model for parsing your PDFs.

---

RAGFlow isn't one-size-fits-all. It is built for flexibility and supports deeper customization to accommodate more complex use cases. From v0.17.0 onwards, RAGFlow decouples DeepDoc-specific data extraction tasks from chunking methods **for PDF files**. This separation enables you to autonomously select a visual model for OCR (Optical Character Recognition), TSR (Table Structure Recognition), and DLR (Document Layout Recognition) tasks that balances speed and performance to suit your specific use cases. If your PDFs contain only plain text, you can opt to skip these tasks by selecting the **Naive** option to reduce the overall parsing time.

![pdf_parser](https://github.com/user-attachments/assets/faf04f48-85a2-4a9c-951a-0d3bde8bcc54)

## Prerequisites

- The PDF parser dropdown menu appears only when you select a chunking method compatible with PDFs, including:
  - **General**
  - **Manual**
  - **Paper**
  - **Book**
  - **Laws**
  - **Presentation**
  - **One**
- To use a third-party visual model for parsing PDFs, ensure you have set a default img2txt model under **Set default models** on the **Model providers** page.

## Procedure

1. On your knowledge base's **Configuration** page, select a chunking method, say **General**.

   _The **PDF parser** dropdown menu appears._

2. Select the option that works best with your scenario:

   - DeepDoc: (Default) The built-in visual model for OCR, TSR, and DLR tasks, which is time-consuming.
   - Naive: Skip OCR, TSR, and DLR tasks if *all* your PDFs are plain text.
   - A third-party visual model provided by a specific model provider.

:::caution WARNING
Third-party visual models are marked **Experimental**, because we have not fully tested these models for the aforementioned data extraction tasks.
:::

## Frequently asked questions

### When should I select DeepDoc or a third-party visual model as the PDF parser?

Use a visual model to extract data if your PDFs contain formatted or image-based text rather than plain text. DeepDoc is the default visual model but can be time-consuming. You can also choose a lightweight or high-performance img2txt model, depending on your needs and hardware capabilities.

### Can I select a visual model to parse my DOCX files?

No, you cannot. This dropdown menu is for PDFs only. To use this feature, convert your DOCX files to PDF first.
@@ -1,5 +1,5 @@
---
-sidebar_position: 1
+sidebar_position: 0
slug: /set_metada
---
@@ -19,4 +19,10 @@ For example, if you have a dataset of HTML files and want the LLM to cite the source
Ensure that your metadata is in JSON format; otherwise, your updates will not be applied.
:::

-![Image](https://github.com/user-attachments/assets/78cff668-3783-4ac9-a82f-3f1b6c8a9ba8)
+![Image](https://github.com/user-attachments/assets/ccbf38a7-ff1b-4c5c-8fbb-0a874c96c06f)
+
+## Frequently asked questions
+
+### Can I set metadata for multiple documents at once?
+
+No, RAGFlow does not support batch metadata setting. If you still consider this feature essential, please [raise an issue](https://github.com/infiniflow/ragflow/issues) explaining your use case and its importance.
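As a concrete illustration of the HTML-source scenario mentioned in this hunk, a minimal metadata object might look as follows (both keys are hypothetical; any valid JSON object works):

```json
{
  "url": "https://example.com/source-page.html",
  "author": "Jane Doe"
}
```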
@@ -87,4 +87,4 @@ RAGFlow's file management allows you to download an uploaded file:

![download_file](https://github.com/infiniflow/ragflow/assets/93570324/cf3b297f-785f-4e8c-9765-48bb80e34ba4)

-> As of RAGFlow v0.18.0, bulk download is not supported, nor can you download an entire folder.
+> As of RAGFlow v0.19.0, bulk download is not supported, nor can you download an entire folder.
@@ -49,6 +49,6 @@ After logging into RAGFlow, you can *only* configure API Key on the **Model providers**
5. Click **OK** to confirm your changes.

:::note
-To update an existing model API key at a later point:
+To update an existing model API key:
![update api key](https://github.com/infiniflow/ragflow/assets/93570324/0bfba679-33f7-4f8b-8373-859301587509)
:::
@@ -18,7 +18,7 @@ RAGFlow ships with a built-in [Langfuse](https://langfuse.com) integration so that
Langfuse stores traces, spans and prompt payloads in a purpose-built observability backend and offers filtering and visualisations on top.

:::info NOTE
-• RAGFlow **≥ 0.18.0** (contains the Langfuse connector)
+• RAGFlow **≥ 0.19.0** (contains the Langfuse connector)
• A Langfuse workspace (cloud or self-hosted) with a _Project Public Key_ and _Secret Key_
:::
@@ -66,16 +66,16 @@ To upgrade RAGFlow, you must upgrade **both** your code **and** your Docker image
   git clone https://github.com/infiniflow/ragflow.git
   ```

-2. Switch to the latest, officially published release, e.g., `v0.18.0`:
+2. Switch to the latest, officially published release, e.g., `v0.19.0`:

   ```bash
-   git checkout -f v0.18.0
+   git checkout -f v0.19.0
   ```

3. Update **ragflow/docker/.env** as follows:

   ```bash
-   RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0
+   RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0
   ```

4. Update the RAGFlow image and restart RAGFlow:
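The hunk cuts off before the command for step 4. For orientation only, the refresh typically amounts to pulling the new image and recreating the containers; the exact flags in the upstream guide may differ:

```bash
# Pull the image referenced by RAGFLOW_IMAGE, then recreate the stack
docker compose -f docker/docker-compose.yml pull
docker compose -f docker/docker-compose.yml up -d
```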
@@ -92,10 +92,10 @@ To upgrade RAGFlow, you must upgrade **both** your code **and** your Docker image
1. From an environment with Internet access, pull the required Docker image.
2. Save the Docker image to a **.tar** file.
   ```bash
-   docker save -o ragflow.v0.18.0.tar infiniflow/ragflow:v0.18.0
+   docker save -o ragflow.v0.19.0.tar infiniflow/ragflow:v0.19.0
   ```
3. Copy the **.tar** file to the target server.
4. Load the **.tar** file into Docker:
   ```bash
-   docker load -i ragflow.v0.18.0.tar
+   docker load -i ragflow.v0.19.0.tar
   ```
@@ -44,7 +44,7 @@ This section provides instructions on setting up the RAGFlow server on Linux. If

`vm.max_map_count`. This value sets the maximum number of memory map areas a process may have. Its default value is 65530. While most applications require fewer than a thousand maps, reducing this value can result in abnormal behaviors, and the system will throw out-of-memory errors when a process reaches the limit.

-RAGFlow v0.18.0 uses Elasticsearch or [Infinity](https://github.com/infiniflow/infinity) for multiple recall. Setting the value of `vm.max_map_count` correctly is crucial to the proper functioning of the Elasticsearch component.
+RAGFlow v0.19.0 uses Elasticsearch or [Infinity](https://github.com/infiniflow/infinity) for multiple recall. Setting the value of `vm.max_map_count` correctly is crucial to the proper functioning of the Elasticsearch component.

<Tabs
  defaultValue="linux"
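For reference, checking and raising the value on a Linux host looks like this (262144 is the value the quickstart recommends; add the setting to `/etc/sysctl.conf` to persist it across reboots):

```bash
sysctl vm.max_map_count                  # inspect the current value
sudo sysctl -w vm.max_map_count=262144   # raise it for the running kernel
```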
@@ -184,13 +184,13 @@ This section provides instructions on setting up the RAGFlow server on Linux. If
   ```bash
   $ git clone https://github.com/infiniflow/ragflow.git
   $ cd ragflow/docker
-   $ git checkout -f v0.18.0
+   $ git checkout -f v0.19.0
   ```

3. Use the pre-built Docker images and start up the server:

   :::tip NOTE
-   The command below downloads the `v0.18.0-slim` edition of the RAGFlow Docker image. Refer to the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.18.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.18.0` for the full edition `v0.18.0`.
+   The command below downloads the `v0.19.0-slim` edition of the RAGFlow Docker image. Refer to the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.19.0-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server. For example: set `RAGFLOW_IMAGE=infiniflow/ragflow:v0.19.0` for the full edition `v0.19.0`.
   :::

   ```bash
@@ -205,10 +205,10 @@ This section provides instructions on setting up the RAGFlow server on Linux. If
   <APITable>
   ```

-   | RAGFlow image tag | Image size (GB) | Has embedding models and Python packages? | Stable?                  |
+   | RAGFlow image tag | Image size (GB) | Has embedding models and Python packages? :collision: | Stable?                  |
   | ----------------- | --------------- | ----------------------------------------- | ------------------------ |
-   | `v0.18.0`         | ≈9              | :heavy_check_mark:                        | Stable release           |
-   | `v0.18.0-slim`    | ≈2              | ❌                                        | Stable release           |
+   | `v0.19.0`         | ≈9              | :heavy_check_mark:                        | Stable release           |
+   | `v0.19.0-slim`    | ≈2              | ❌                                        | Stable release           |
   | `nightly`         | ≈9              | :heavy_check_mark:                        | *Unstable* nightly build |
   | `nightly-slim`    | ≈2              | ❌                                        | *Unstable* nightly build |
@@ -216,6 +216,15 @@ This section provides instructions on setting up the RAGFlow server on Linux. If
   </APITable>
   ```

+   :::danger IMPORTANT
+   :collision: The embedding models included in `v0.19.0` and `nightly` are:
+
+   - BAAI/bge-large-zh-v1.5
+   - maidalun1020/bce-embedding-base_v1
+
+   Please note these two embedding models support both English and Chinese. If your knowledge base contains other languages, the performance may be COMPROMISED.
+   :::

4. Check the server status after having the server up and running:

   ```bash
@@ -258,8 +267,6 @@ To add and configure an LLM:

![add llm](https://github.com/infiniflow/ragflow/assets/93570324/07e43f63-367c-4c9c-8ed3-8a3a24703f4e)

-> Each RAGFlow account is able to use **text-embedding-v2** for free, an embedding model of Tongyi-Qianwen. This is why you can see Tongyi-Qianwen in the **Added models** list. And you may need to update your Tongyi-Qianwen API key at a later point.
-
2. Click on the desired LLM and update the API key accordingly (DeepSeek-V2 in this case):

![update api key](https://github.com/infiniflow/ragflow/assets/93570324/4e5e13ef-a98d-42e6-bcb1-0c6045fc1666)
docs/references/glossary.mdx (new file, 26 lines)
@@ -0,0 +1,26 @@
---
sidebar_position: 0
slug: /glossary
---

# Glossary

Definitions of key terms and basic concepts related to RAGFlow.

---

import TOCInline from '@theme/TOCInline';

<TOCInline toc={toc} />

---

## C

### Cross-language search

Cross-language search (also known as cross-lingual retrieval) is a feature introduced in version 0.19.0. It enables users to submit queries in one language (for example, English) and retrieve relevant documents written in other languages such as Chinese or Spanish. This feature is enabled by the system’s default chat model, which translates queries to ensure accurate matching of semantic meaning across languages.

By enabling cross-language search, users can effortlessly access a broader range of information regardless of language barriers, significantly enhancing the system’s usability and inclusiveness.

This feature is available in the retrieval test and chat assistant settings. See [Run retrieval test](../guides/dataset/run_retrieval_test.md) and [Start AI chat](../guides/chat/start_chat.md) for further details.
@@ -1,5 +1,5 @@
---
-sidebar_position: 1
+sidebar_position: 4
slug: /http_api_reference
---
@@ -1,5 +1,5 @@
---
-sidebar_position: 2
+sidebar_position: 5
slug: /python_api_reference
---
@@ -1,5 +1,5 @@
---
-sidebar_position: 0
+sidebar_position: 1
slug: /supported_models
---
@@ -9,10 +9,36 @@ Key features, improvements and bug fixes in the latest releases.

:::info
Each RAGFlow release is available in two editions:
-- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.18.0-slim`
-- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.18.0`
+- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.19.0-slim`
+- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.19.0`
:::

+:::danger IMPORTANT
+:collision: The embedding models included in a full edition are:
+
+- BAAI/bge-large-zh-v1.5
+- maidalun1020/bce-embedding-base_v1
+
+Please note these two embedding models support both English and Chinese. If your knowledge base contains other languages, the performance may be COMPROMISED.
+:::
+
+## v0.19.0
+
+Released on May 26, 2025.
+
+### New features
+
+- Cross-language search is supported in the Knowledge and Chat modules, enhancing search accuracy and user experience in multilingual environments, such as in Chinese-English knowledge bases.
+- Agent component: A new Code component supports Python and JavaScript scripts, enabling developers to handle more complex tasks like dynamic data processing.
+- Enhanced image display: Images in Chat and Search now render directly within responses, rather than as external references. Knowledge retrieval testing can retrieve images directly, instead of texts extracted from images.
+- Claude 4: Developers can now use the newly released, most advanced Claude model.
+
+> The following features are contributed by our community contributors:
+
+- Agent component: Enables tool calling within the Generate Component. Kudos to [notsyncing](https://github.com/notsyncing).
+- Markdown rendering: Image references in a markdown file can be displayed after chunking. Kudos to [Woody-Hu](https://github.com/Woody-Hu).
+- Vector database support: OpenSearch can now be used as RAGFlow's document engine. Kudos to [pyyuhao](https://github.com/pyyuhao).

## v0.18.0

Released on April 23, 2025.
@@ -117,7 +143,7 @@ Released on March 3, 2025.
- AI chat: Leverages Tavily-based web search to enhance contexts in agentic reasoning. To activate this, enter the correct Tavily API key under the **Assistant settings** tab of your chat assistant dialogue.
- AI chat: Supports starting a chat without specifying knowledge bases.
- AI chat: HTML files can also be previewed and referenced, in addition to PDF files.
-- Dataset: Adds a **PDF parser**, aka **Document parser**, dropdown menu to dataset configurations. This includes a DeepDoc model option, which is time-consuming, a much faster **naive** option (plain text), which skips DLA (Document Layout Analysis), OCR (Optical Character Recognition), and TSR (Table Structure Recognition) tasks, and several currently *experimental* large model options.
+- Dataset: Adds a **PDF parser**, aka **Document parser**, dropdown menu to dataset configurations. This includes a DeepDoc model option, which is time-consuming, a much faster **naive** option (plain text), which skips DLA (Document Layout Analysis), OCR (Optical Character Recognition), and TSR (Table Structure Recognition) tasks, and several currently *experimental* large model options. See [here](./guides/dataset/select_pdf_parser.md).
- Agent component: **(x)** or a forward slash `/` can be used to insert available keys (variables) in the system prompt field of the **Generate** or **Template** component.
- Object storage: Supports using Aliyun OSS (Object Storage Service) as a file storage option.
- Models: Updates the supported model list for Tongyi-Qianwen (Qwen), adding DeepSeek-specific models; adds ModelScope as a model provider.
@@ -27,13 +27,13 @@ env:
  REDIS_PASSWORD: infini_rag_flow_helm

  # The RAGFlow Docker image to download.
-  # Defaults to the v0.18.0-slim edition, which is the RAGFlow Docker image without embedding models.
-  RAGFLOW_IMAGE: infiniflow/ragflow:v0.18.0-slim
+  # Defaults to the v0.19.0-slim edition, which is the RAGFlow Docker image without embedding models.
+  RAGFLOW_IMAGE: infiniflow/ragflow:v0.19.0-slim
  #
  # To download the RAGFlow Docker image with embedding models, uncomment the following line instead:
-  # RAGFLOW_IMAGE: infiniflow/ragflow:v0.18.0
+  # RAGFLOW_IMAGE: infiniflow/ragflow:v0.19.0
  #
-  # The Docker image of the v0.18.0 edition includes:
+  # The Docker image of the v0.19.0 edition includes:
  # - Built-in embedding models:
  #   - BAAI/bge-large-zh-v1.5
  #   - BAAI/bge-reranker-v2-m3
@@ -1,6 +1,6 @@
[project]
name = "ragflow"
-version = "0.18.0"
+version = "0.19.0"
description = "[RAGFlow](https://ragflow.io/) is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding. It offers a streamlined RAG workflow for businesses of any scale, combining LLM (Large Language Models) to provide truthful question-answering capabilities, backed by well-founded citations from various complex formatted data."
authors = [{ name = "Zhichang Yu", email = "yuzhichang@gmail.com" }]
license-files = ["LICENSE"]
@@ -343,7 +343,7 @@ def remove_contents_table(sections, eng=False):
                     type("")) else sections[i][0]).strip()

        if not re.match(r"(contents|目录|目次|table of contents|致谢|acknowledge)$",
-                        re.sub(r"( | |\u3000)+", "", get(i).split("@@")[0], re.IGNORECASE)):
+                        re.sub(r"( | |\u3000)+", "", get(i).split("@@")[0], flags=re.IGNORECASE)):
            i += 1
            continue
        sections.pop(i)
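This is a genuine bug fix: the fourth positional parameter of `re.sub` is `count`, not `flags`, so the old call silently capped the number of substitutions instead of enabling case-insensitive matching. A minimal demonstration:

```python
import re

# re.IGNORECASE == 2, so passing it positionally sets count=2:
re.sub(r"a", "", "AAAA", re.IGNORECASE)        # -> "AAAA" (no case folding; at most 2 substitutions)
re.sub(r"a", "", "AAAA", flags=re.IGNORECASE)  # -> ""     (intended behavior)
```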
@@ -524,7 +524,7 @@ def naive_merge(sections, chunk_token_num=128, delimiter="\n。;!?"):
        if tnum < 8:
            pos = ""
        # Ensure that the length of the merged chunk does not exceed chunk_token_num
-        if tk_nums[-1] > chunk_token_num:
+        if cks[-1] == "" or tk_nums[-1] > chunk_token_num:

            if t.find(pos) < 0:
                t += pos
@@ -560,7 +560,7 @@ def naive_merge_with_images(texts, images, chunk_token_num=128, delimiter="\n。
        if tnum < 8:
            pos = ""
        # Ensure that the length of the merged chunk does not exceed chunk_token_num
-        if tk_nums[-1] > chunk_token_num:
+        if cks[-1] == "" or tk_nums[-1] > chunk_token_num:
            if t.find(pos) < 0:
                t += pos
            cks.append(t)
@@ -627,7 +627,7 @@ def naive_merge_docx(sections, chunk_token_num=128, delimiter="\n。;!?"):
        tnum = num_tokens_from_string(t)
        if tnum < 8:
            pos = ""
-        if tk_nums[-1] > chunk_token_num:
+        if cks[-1] == "" or tk_nums[-1] > chunk_token_num:
            if t.find(pos) < 0:
                t += pos
            cks.append(t)
@@ -71,7 +71,19 @@ class FulltextQueryer:
                txt = otxt
        return txt

+    @staticmethod
+    def add_space_between_eng_zh(txt):
+        # (ENG/ENG+NUM) + ZH
+        txt = re.sub(r'([A-Za-z]+[0-9]+)([\u4e00-\u9fa5]+)', r'\1 \2', txt)
+        # ENG + ZH
+        txt = re.sub(r'([A-Za-z])([\u4e00-\u9fa5]+)', r'\1 \2', txt)
+        # ZH + (ENG/ENG+NUM)
+        txt = re.sub(r'([\u4e00-\u9fa5]+)([A-Za-z]+[0-9]+)', r'\1 \2', txt)
+        txt = re.sub(r'([\u4e00-\u9fa5]+)([A-Za-z])', r'\1 \2', txt)
+        return txt
+
    def question(self, txt, tbl="qa", min_match: float = 0.6):
+        txt = FulltextQueryer.add_space_between_eng_zh(txt)
        txt = re.sub(
            r"[ :|\r\n\t,,。??/`!!&^%%()\[\]{}<>]+",
            " ",
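The effect of the new helper on a mixed-script query, per the four substitutions above (the input string is illustrative):

```python
# Spaces are inserted at Chinese/English boundaries so downstream
# tokenization sees clean word breaks:
FulltextQueryer.add_space_between_eng_zh("RAGFlow支持GPT4模型")
# -> "RAGFlow 支持 GPT4 模型"
```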
@@ -368,6 +368,10 @@ async def build_chunks(task, progress_callback):

    docs_to_tag = []
    for d in docs:
+        task_canceled = TaskService.do_cancel(task["id"])
+        if task_canceled:
+            progress_callback(-1, msg="Task has been canceled.")
+            return
        if settings.retrievaler.tag_content(tenant_id, kb_ids, d, all_tags, topn_tags=topn_tags, S=S) and len(d[TAG_FLD]) > 0:
            examples.append({"content": d["content_with_weight"], TAG_FLD: d[TAG_FLD]})
        else:
@@ -577,8 +581,22 @@ async def do_handle_task(task):
    start_ts = timer()
    doc_store_result = ""
    es_bulk_size = 4

+    async def delete_image(kb_id, chunk_id):
+        try:
+            async with minio_limiter:
+                STORAGE_IMPL.delete(kb_id, chunk_id)
+        except Exception:
+            logging.exception(
+                "Deleting image of chunk {}/{}/{} got exception".format(task["location"], task["name"], chunk_id))
+            raise
+
    for b in range(0, len(chunks), es_bulk_size):
        doc_store_result = await trio.to_thread.run_sync(lambda: settings.docStoreConn.insert(chunks[b:b + es_bulk_size], search.index_name(task_tenant_id), task_dataset_id))
+        task_canceled = TaskService.do_cancel(task_id)
+        if task_canceled:
+            progress_callback(-1, msg="Task has been canceled.")
+            return
        if b % 128 == 0:
            progress_callback(prog=0.8 + 0.1 * (b + 1) / len(chunks), msg="")
        if doc_store_result:
@@ -592,7 +610,11 @@ async def do_handle_task(task):
        except DoesNotExist:
            logging.warning(f"do_handle_task update_chunk_ids failed since task {task['id']} is unknown.")
            doc_store_result = await trio.to_thread.run_sync(lambda: settings.docStoreConn.delete({"id": chunk_ids}, search.index_name(task_tenant_id), task_dataset_id))
+            async with trio.open_nursery() as nursery:
+                for chunk_id in chunk_ids:
+                    nursery.start_soon(delete_image, task_dataset_id, chunk_id)
            return

    logging.info("Indexing doc({}), page({}-{}), chunks({}), elapsed: {:.2f}".format(task_document_name, task_from_page,
                                                                                     task_to_page, len(chunks),
                                                                                     timer() - start_ts))
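The cleanup relies on trio's structured concurrency: the nursery fans out one `delete_image` call per chunk and only exits once every child task has finished (or one of them has raised). A self-contained sketch of the same pattern:

```python
import trio

async def delete_one(chunk_id: str) -> None:
    await trio.sleep(0.01)  # stand-in for the storage delete call
    print(f"deleted {chunk_id}")

async def cleanup(chunk_ids: list[str]) -> None:
    async with trio.open_nursery() as nursery:  # blocks until all children finish
        for chunk_id in chunk_ids:
            nursery.start_soon(delete_one, chunk_id)

trio.run(cleanup, ["chunk-1", "chunk-2", "chunk-3"])
```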
sandbox/Makefile (new file, 114 lines)
@@ -0,0 +1,114 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Force using Bash to ensure the source command is available
SHELL := /bin/bash

# Environment variable definitions
VENV := .venv
PYTHON := $(VENV)/bin/python
UV := uv
ACTIVATE_SCRIPT := $(VENV)/bin/activate
SYS_PYTHON := python3
PYTHONPATH := $(shell pwd)

.PHONY: all setup ensure_env ensure_uv start stop restart build clean test logs

all: setup start

# 🌱 Initialize environment + install dependencies
setup: ensure_env ensure_uv
	@echo "📦 Installing dependencies with uv..."
	source $(ACTIVATE_SCRIPT) && \
	export PYTHONPATH=$(PYTHONPATH)
	@$(UV) pip install -r executor_manager/requirements.txt
	@echo "✅ Setup complete."

# 🔑 Ensure .env exists (copy from .env.example on first run)
ensure_env:
	@if [ ! -f ".env" ]; then \
		if [ -f ".env.example" ]; then \
			echo "📝 Creating .env from .env.example..."; \
			cp .env.example .env; \
		else \
			echo "⚠️ Warning: .env.example not found, creating empty .env"; \
			touch .env; \
		fi; \
	else \
		echo "✅ .env already exists."; \
	fi

# 🔧 Ensure uv is executable (install using system Python)
ensure_uv:
	@if ! command -v $(UV) >/dev/null 2>&1; then \
		echo "🛠️ Installing uv using system Python..."; \
		$(SYS_PYTHON) -m pip install -q --upgrade pip; \
		$(SYS_PYTHON) -m pip install -q uv || (echo "⚠️ uv install failed, check manually" && exit 1); \
	fi

# 🐳 Service control (using safer variable loading)
start:
	@echo "🚀 Starting services..."
	source $(ACTIVATE_SCRIPT) && \
	export PYTHONPATH=$(PYTHONPATH) && \
	[ -f .env ] && source .env || true && \
	bash scripts/start.sh

stop:
	@echo "🛑 Stopping services..."
	source $(ACTIVATE_SCRIPT) && \
	bash scripts/stop.sh

restart: stop start
	@echo "🔁 Restarting services..."

build:
	@echo "🔧 Building base sandbox images..."
	@if [ -f .env ]; then \
		source .env && \
		echo "🐍 Building base sandbox image for Python ($$SANDBOX_BASE_PYTHON_IMAGE)..." && \
		docker build -t "$$SANDBOX_BASE_PYTHON_IMAGE" ./sandbox_base_image/python && \
		echo "⬢ Building base sandbox image for Nodejs ($$SANDBOX_BASE_NODEJS_IMAGE)..." && \
		docker build -t "$$SANDBOX_BASE_NODEJS_IMAGE" ./sandbox_base_image/nodejs; \
	else \
		echo "⚠️ .env file not found, skipping build."; \
	fi

test:
	@echo "🧪 Running sandbox security tests..."
	source $(ACTIVATE_SCRIPT) && \
	export PYTHONPATH=$(PYTHONPATH) && \
	$(PYTHON) tests/sandbox_security_tests_full.py

logs:
	@echo "📋 Showing logs from api-server and executor-manager..."
	docker compose logs -f

# 🧹 Clean all containers and volumes
clean:
	@echo "🧹 Cleaning all containers and volumes..."
	@docker compose down -v || true
	@if [ -f .env ]; then \
		source .env && \
		for i in $$(seq 0 $$((SANDBOX_EXECUTOR_MANAGER_POOL_SIZE - 1))); do \
			echo "🧹 Deleting sandbox_python_$$i..." && \
			docker rm -f sandbox_python_$$i 2>/dev/null || true && \
			echo "🧹 Deleting sandbox_nodejs_$$i..." && \
			docker rm -f sandbox_nodejs_$$i 2>/dev/null || true; \
		done; \
	else \
		echo "⚠️ .env not found, skipping container cleanup"; \
	fi
sandbox/README.md (new file, 218 lines)
@@ -0,0 +1,218 @@
# RAGFlow Sandbox

A secure, pluggable code execution backend for RAGFlow and beyond.

## 🔧 Features

- ✅ **Seamless RAGFlow Integration** — Out-of-the-box compatibility with the `code` component.
- 🔐 **High Security** — Leverages [gVisor](https://gvisor.dev/) for syscall-level sandboxing.
- 🔧 **Customizable Sandboxing** — Easily modify `seccomp` settings as needed.
- 🧩 **Pluggable Runtime Support** — Easily extend to support any programming language.
- ⚙️ **Developer Friendly** — Get started with a single command using `Makefile`.

## 🏗 Architecture

<p align="center">
  <img src="asserts/code_executor_manager.svg" width="520" alt="Architecture Diagram">
</p>

## 🚀 Quick Start

### 📋 Prerequisites

#### Required

- Linux distro compatible with gVisor
- [gVisor](https://gvisor.dev/docs/user_guide/install/)
- Docker >= `24.0.0`
- Docker Compose >= `v2.26.1`, the same requirement as [RAGFlow](https://github.com/infiniflow/ragflow)
- [uv](https://docs.astral.sh/uv/) as package and project manager

#### Optional (Recommended)

- [GNU Make](https://www.gnu.org/software/make/) for simplified CLI management

---

### 🐳 Build Docker Base Images

We use isolated base images for secure containerized execution:

```bash
# Build base images manually
docker build -t sandbox-base-python:latest ./sandbox_base_image/python
docker build -t sandbox-base-nodejs:latest ./sandbox_base_image/nodejs

# OR use Makefile
make build
```

Then, build the executor manager image:

```bash
docker build -t sandbox-executor-manager:latest ./executor_manager
```

---

### 📦 Running with RAGFlow

1. Ensure gVisor is correctly installed.
2. Configure your `.env` in `docker/.env`:

   - Uncomment sandbox-related variables.
   - Enable the sandbox profile at the bottom.
3. Add the following line to `/etc/hosts` as recommended:

   ```text
   127.0.0.1 sandbox-executor-manager
   ```

4. Start the RAGFlow service.

---

### 🧭 Running Standalone

#### Manual Setup

1. Initialize the environment:

   ```bash
   cp .env.example .env
   ```

2. Launch:

   ```bash
   docker compose -f docker-compose.yml up
   ```

3. Test:

   ```bash
   source .venv/bin/activate
   export PYTHONPATH=$(pwd)
   uv pip install -r executor_manager/requirements.txt
   uv run tests/sandbox_security_tests_full.py
   ```

#### With Make

```bash
make  # setup + build + launch + test
```

---

### 📈 Monitoring

```bash
docker logs -f sandbox-executor-manager  # Manual
make logs                                # With Make
```

---

### 🧰 Makefile Toolbox

| Command           | Description                                      |
| ----------------- | ------------------------------------------------ |
| `make`            | Setup, build, launch and test all at once        |
| `make setup`      | Initialize environment and install uv            |
| `make ensure_env` | Auto-create `.env` if missing                    |
| `make ensure_uv`  | Install `uv` package manager if missing          |
| `make build`      | Build all Docker base images                     |
| `make start`      | Start services with safe env loading and testing |
| `make stop`       | Gracefully stop all services                     |
| `make restart`    | Shortcut for `stop` + `start`                    |
| `make test`       | Run full test suite                              |
| `make logs`       | Stream container logs                            |
| `make clean`      | Stop and remove orphan containers and volumes    |

---

## 🔐 Security

The RAGFlow sandbox is designed to balance security and usability, offering solid protection without compromising developer experience.

### ✅ gVisor Isolation

At its core, we use [gVisor](https://gvisor.dev/docs/architecture_guide/security/), a user-space kernel, to isolate code execution from the host system. gVisor intercepts and restricts syscalls, offering robust protection against container escapes and privilege escalations.

### 🔒 Optional seccomp Support (Advanced)

For users who need **zero-trust-level syscall control**, we support an additional `seccomp` profile. This feature restricts containers to only a predefined set of system calls, as specified in `executor_manager/seccomp-profile-default.json`.

> ⚠️ This feature is **disabled by default** to maintain compatibility and usability. Enabling it may cause compatibility issues with some dependencies.

#### To enable seccomp

1. Edit your `.env` file:

   ```dotenv
   SANDBOX_ENABLE_SECCOMP=true
   ```

2. Customize the allowed syscalls in:

   ```
   executor_manager/seccomp-profile-default.json
   ```

This profile is passed to the container with:

```bash
--security-opt seccomp=/app/seccomp-profile-default.json
```
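For orientation, the profile uses Docker's standard seccomp JSON shape: a default action plus an allowlist of permitted syscalls. A truncated, illustrative sketch (not the shipped profile):

```json
{
  "defaultAction": "SCMP_ACT_ERRNO",
  "syscalls": [
    {
      "names": ["read", "write", "exit_group", "futex"],
      "action": "SCMP_ACT_ALLOW"
    }
  ]
}
```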
### 🧠 Python Code AST Inspection

In addition to sandboxing, Python code is **statically analyzed via AST (Abstract Syntax Tree)** before execution. Potentially malicious code (e.g., file operations, subprocess calls, etc.) is rejected early, providing an extra layer of protection.

---

This security model strikes a balance between **robust isolation** and **developer usability**. While `seccomp` can be highly restrictive, our default setup aims to keep things usable for most developers — no obscure crashes or cryptic setup required.

## 📦 Add Extra Dependencies for Supported Languages

Currently, the following languages are officially supported:

| Language | Priority |
| -------- | -------- |
| Python   | High     |
| Node.js  | Medium   |

### 🐍 Python

To add Python dependencies, simply edit the following file:

```bash
sandbox_base_image/python/requirements.txt
```

Add any additional packages you need, one per line (just like a normal pip requirements file).

### 🟨 Node.js

To add Node.js dependencies:

1. Navigate to the Node.js base image directory:

   ```bash
   cd sandbox_base_image/nodejs
   ```

2. Use `npm` to install the desired packages. For example:

   ```bash
   npm install lodash
   ```

3. The dependencies will be saved to `package.json` and `package-lock.json`, and included in the Docker image when rebuilt.

---

## 🤝 Contribution

Contributions are welcome!
sandbox/asserts/code_executor_manager.svg (new file, 45 KiB; diff suppressed because one or more lines are too long)
sandbox/docker-compose.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
services:
  sandbox-executor-manager:
    container_name: sandbox-executor-manager
    build:
      context: .
      dockerfile: executor_manager/Dockerfile
    image: sandbox-executor-manager:latest
    runtime: runc
    privileged: true
    ports:
      - "${EXECUTOR_PORT:-9385}:9385"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - sandbox-network
    restart: always
    security_opt:
      - no-new-privileges:true
    environment:
      - SANDBOX_EXECUTOR_MANAGER_POOL_SIZE=${SANDBOX_EXECUTOR_MANAGER_POOL_SIZE:-5}
      - SANDBOX_BASE_PYTHON_IMAGE=${SANDBOX_BASE_PYTHON_IMAGE-"sandbox-base-python:latest"}
      - SANDBOX_BASE_NODEJS_IMAGE=${SANDBOX_BASE_NODEJS_IMAGE-"sandbox-base-nodejs:latest"}
      - SANDBOX_ENABLE_SECCOMP=${SANDBOX_ENABLE_SECCOMP:-false}
    healthcheck:
      test: ["CMD-SHELL", "curl --fail http://localhost:9385/healthz || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 5
networks:
  sandbox-network:
    driver: bridge
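One subtlety worth noting in the `environment` block above: `${VAR-default}` substitutes the default only when the variable is unset, not when it is empty, and Compose takes everything between `-` and `}` literally, so the quotation marks in `${SANDBOX_BASE_PYTHON_IMAGE-"sandbox-base-python:latest"}` become part of the fallback value. The `${VAR:-default}` form used in the RAGFlow `docker/.env` hunk earlier avoids both pitfalls.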
sandbox/executor_manager/Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
FROM python:3.11-slim-bookworm

RUN grep -rl 'deb.debian.org' /etc/apt/ | xargs sed -i 's|http[s]*://deb.debian.org|https://mirrors.tuna.tsinghua.edu.cn|g' && \
    apt-get update && \
    apt-get install -y curl gcc && \
    rm -rf /var/lib/apt/lists/*

RUN curl -fsSL https://mirrors.aliyun.com/docker-ce/linux/static/stable/x86_64/docker-24.0.7.tgz -o docker.tgz && \
    tar -xzf docker.tgz && \
    mv docker/docker /usr/bin/docker && \
    rm -rf docker docker.tgz

COPY --from=ghcr.io/astral-sh/uv:0.7.5 /uv /uvx /bin/
ENV UV_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple


WORKDIR /app
COPY executor_manager/ .

RUN uv pip install --system -r requirements.txt

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "9385"]
sandbox/executor_manager/api/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
sandbox/executor_manager/api/handlers.py (new file, 44 lines)
@@ -0,0 +1,44 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64

from core.logger import logger
from fastapi import Request
from models.enums import ResultStatus
from models.schemas import CodeExecutionRequest, CodeExecutionResult
from services.execution import execute_code
from services.limiter import limiter
from services.security import analyze_code_security


async def healthz_handler():
    return {"status": "ok"}


@limiter.limit("5/second")
async def run_code_handler(req: CodeExecutionRequest, request: Request):
    logger.info("🟢 Received /run request")

    code = base64.b64decode(req.code_b64).decode("utf-8")
    is_safe, issues = analyze_code_security(code, language=req.language)
    if not is_safe:
        issue_details = "\n".join([f"Line {lineno}: {issue}" for issue, lineno in issues])
        return CodeExecutionResult(status=ResultStatus.PROGRAM_RUNNER_ERROR, stdout="", stderr=issue_details, exit_code=-999, detail="Code is unsafe")

    try:
        return await execute_code(req)
    except Exception as e:
        return CodeExecutionResult(status=ResultStatus.PROGRAM_RUNNER_ERROR, stdout="", stderr=str(e), exit_code=-999, detail="unhandled_exception")
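Putting the routes together, a request to the executor can be sketched as follows (the `code_b64` and `language` fields come from `CodeExecutionRequest` as used above; any other payload details are assumptions):

```bash
# Base64-encode a snippet and submit it for sandboxed execution
CODE_B64=$(printf 'print("hello from the sandbox")' | base64)
curl -X POST http://localhost:9385/run \
  -H 'Content-Type: application/json' \
  -d "{\"code_b64\": \"$CODE_B64\", \"language\": \"python\"}"
```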
sandbox/executor_manager/api/routes.py (new file, 23 lines)
@@ -0,0 +1,23 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from fastapi import APIRouter

from api.handlers import healthz_handler, run_code_handler

router = APIRouter()

router.get("/healthz")(healthz_handler)
router.post("/run")(run_code_handler)
sandbox/executor_manager/core/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
sandbox/executor_manager/core/config.py (new file, 44 lines)
@@ -0,0 +1,44 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from contextlib import asynccontextmanager

from fastapi import FastAPI
from util import format_timeout_duration, parse_timeout_duration

from core.container import init_containers, teardown_containers
from core.logger import logger

TIMEOUT = 10


@asynccontextmanager
async def _lifespan(app: FastAPI):
    """Asynchronous lifecycle management"""
    size = int(os.getenv("SANDBOX_EXECUTOR_MANAGER_POOL_SIZE", 1))

    success_count, total_task_count = await init_containers(size)
    logger.info(f"\n📊 Container pool initialization complete: {success_count}/{total_task_count} available")

    yield

    await teardown_containers()


def init():
    TIMEOUT = parse_timeout_duration(os.getenv("SANDBOX_TIMEOUT"))
    logger.info(f"Global timeout: {format_timeout_duration(TIMEOUT)}")
    return _lifespan
sandbox/executor_manager/core/container.py (new file, 190 lines)
@ -0,0 +1,190 @@
|
||||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
#
import asyncio
import contextlib
import os
import time
from queue import Empty, Queue
from threading import Lock

from models.enums import SupportLanguage
from util import env_setting_enabled, is_valid_memory_limit
from utils.common import async_run_command

from core.logger import logger

_CONTAINER_QUEUES: dict[SupportLanguage, Queue] = {}
_CONTAINER_LOCK: Lock = Lock()


async def init_containers(size: int) -> tuple[int, int]:
    global _CONTAINER_QUEUES
    _CONTAINER_QUEUES = {SupportLanguage.PYTHON: Queue(), SupportLanguage.NODEJS: Queue()}

    with _CONTAINER_LOCK:
        while not _CONTAINER_QUEUES[SupportLanguage.PYTHON].empty():
            _CONTAINER_QUEUES[SupportLanguage.PYTHON].get_nowait()
        while not _CONTAINER_QUEUES[SupportLanguage.NODEJS].empty():
            _CONTAINER_QUEUES[SupportLanguage.NODEJS].get_nowait()

    create_tasks = []
    for i in range(size):
        name = f"sandbox_python_{i}"
        logger.info(f"🛠️ Creating Python container {i + 1}/{size}")
        create_tasks.append(_prepare_container(name, SupportLanguage.PYTHON))

        name = f"sandbox_nodejs_{i}"
        logger.info(f"🛠️ Creating Node.js container {i + 1}/{size}")
        create_tasks.append(_prepare_container(name, SupportLanguage.NODEJS))

    results = await asyncio.gather(*create_tasks, return_exceptions=True)
    success_count = sum(1 for r in results if r is True)
    total_task_count = len(create_tasks)
    return success_count, total_task_count


async def teardown_containers():
    with _CONTAINER_LOCK:
        while not _CONTAINER_QUEUES[SupportLanguage.PYTHON].empty():
            name = _CONTAINER_QUEUES[SupportLanguage.PYTHON].get_nowait()
            await async_run_command("docker", "rm", "-f", name, timeout=5)
        while not _CONTAINER_QUEUES[SupportLanguage.NODEJS].empty():
            name = _CONTAINER_QUEUES[SupportLanguage.NODEJS].get_nowait()
            await async_run_command("docker", "rm", "-f", name, timeout=5)


async def _prepare_container(name: str, language: SupportLanguage) -> bool:
    """Prepare a single container, removing any stale container of the same name first."""
    with contextlib.suppress(Exception):
        await async_run_command("docker", "rm", "-f", name, timeout=5)

    if await create_container(name, language):
        _CONTAINER_QUEUES[language].put(name)
        return True
    return False


async def create_container(name: str, language: SupportLanguage) -> bool:
    """Asynchronously create a container."""
    create_args = [
        "docker",
        "run",
        "-d",
        "--runtime=runsc",
        "--name",
        name,
        "--read-only",
        "--tmpfs",
        "/workspace:rw,exec,size=100M,uid=65534,gid=65534",
        "--tmpfs",
        "/tmp:rw,exec,size=50M",
        "--user",
        "nobody",
        "--workdir",
        "/workspace",
    ]
    if os.getenv("SANDBOX_MAX_MEMORY"):
        memory_limit = os.getenv("SANDBOX_MAX_MEMORY") or "256m"
        if is_valid_memory_limit(memory_limit):
            logger.info(f"SANDBOX_MAX_MEMORY: {os.getenv('SANDBOX_MAX_MEMORY')}")
        else:
            logger.info("Invalid SANDBOX_MAX_MEMORY, using default value: 256m")
            memory_limit = "256m"
        create_args.extend(["--memory", memory_limit])
    else:
        logger.info("Set default SANDBOX_MAX_MEMORY: 256m")
        create_args.extend(["--memory", "256m"])

    if env_setting_enabled("SANDBOX_ENABLE_SECCOMP", "false"):
        logger.info(f"SANDBOX_ENABLE_SECCOMP: {os.getenv('SANDBOX_ENABLE_SECCOMP')}")
        create_args.extend(["--security-opt", "seccomp=/app/seccomp-profile-default.json"])

    if language == SupportLanguage.PYTHON:
        create_args.append(os.getenv("SANDBOX_BASE_PYTHON_IMAGE", "sandbox-base-python:latest"))
    elif language == SupportLanguage.NODEJS:
        create_args.append(os.getenv("SANDBOX_BASE_NODEJS_IMAGE", "sandbox-base-nodejs:latest"))

    logger.info(f"Sandbox config:\n\t {create_args}")

    try:
        returncode, _, stderr = await async_run_command(*create_args, timeout=10)
        if returncode != 0:
            logger.error(f"❌ Container creation failed {name}: {stderr}")
            return False

        if language == SupportLanguage.NODEJS:
            copy_cmd = ["docker", "exec", name, "bash", "-c", "cp -a /app/node_modules /workspace/"]
            returncode, _, stderr = await async_run_command(*copy_cmd, timeout=10)
            if returncode != 0:
                logger.error(f"❌ Failed to prepare dependencies for {name}: {stderr}")
                return False

        return await container_is_running(name)
    except Exception as e:
        logger.error(f"❌ Container creation exception {name}: {str(e)}")
        return False


async def recreate_container(name: str, language: SupportLanguage) -> bool:
    """Asynchronously recreate a container."""
    logger.info(f"🛠️ Recreating container: {name}")
    try:
        await async_run_command("docker", "rm", "-f", name, timeout=5)

        return await create_container(name, language)
    except Exception as e:
        logger.error(f"❌ Container {name} recreation failed: {str(e)}")
        return False


async def release_container(name: str, language: SupportLanguage):
    """Asynchronously release a container back to the pool, recreating it if it died."""
    with _CONTAINER_LOCK:
        if await container_is_running(name):
            _CONTAINER_QUEUES[language].put(name)
            logger.info(f"🟢 Released container: {name} (remaining available: {_CONTAINER_QUEUES[language].qsize()})")
        else:
            logger.warning(f"⚠️ Container {name} has crashed, attempting to recreate...")
            if await recreate_container(name, language):
                _CONTAINER_QUEUES[language].put(name)
                logger.info(f"✅ Container {name} successfully recreated and returned to queue")


async def allocate_container_blocking(language: SupportLanguage, timeout=10) -> str:
    """Asynchronously allocate an available container, polling until the timeout expires."""
    start_time = time.time()
    while time.time() - start_time < timeout:
        try:
            name = _CONTAINER_QUEUES[language].get_nowait()

            with _CONTAINER_LOCK:
                if not await container_is_running(name) and not await recreate_container(name, language):
                    continue

            return name
        except Empty:
            await asyncio.sleep(0.1)

    return ""


async def container_is_running(name: str) -> bool:
    """Asynchronously check the container status."""
    try:
        returncode, stdout, _ = await async_run_command("docker", "inspect", "-f", "{{.State.Running}}", name, timeout=2)
        return returncode == 0 and stdout.strip() == "true"
    except Exception:
        return False
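
The pool above amounts to: warm N containers per language at startup, hand names out of a Queue, and recreate any container that died while a job held it. A minimal lifecycle sketch (the main() driver below is illustrative glue, not part of the module; only the four imported functions are real):

import asyncio

from core.container import allocate_container_blocking, init_containers, release_container, teardown_containers
from models.enums import SupportLanguage


async def main():
    ok, total = await init_containers(size=2)  # warms 2 Python + 2 Node.js containers
    print(f"warmed {ok}/{total} containers")

    name = await allocate_container_blocking(SupportLanguage.PYTHON, timeout=10)
    if name:  # an empty string means the pool stayed busy for the whole timeout
        try:
            pass  # `docker exec` work against `name` goes here
        finally:
            await release_container(name, SupportLanguage.PYTHON)

    await teardown_containers()


asyncio.run(main())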
19  sandbox/executor_manager/core/logger.py  Normal file
@@ -0,0 +1,19 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("sandbox")
25  sandbox/executor_manager/main.py  Normal file
@@ -0,0 +1,25 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from api.routes import router as api_router
from core.config import init
from fastapi import FastAPI
from services.limiter import limiter, rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded

app = FastAPI(lifespan=init())
app.include_router(api_router)
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, rate_limit_exceeded_handler)
15  sandbox/executor_manager/models/__init__.py  Normal file
@@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
47  sandbox/executor_manager/models/enums.py  Normal file
@@ -0,0 +1,47 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from enum import Enum


class SupportLanguage(str, Enum):
    PYTHON = "python"
    NODEJS = "nodejs"


class ResultStatus(str, Enum):
    SUCCESS = "success"
    PROGRAM_ERROR = "program_error"
    RESOURCE_LIMIT_EXCEEDED = "resource_limit_exceeded"
    UNAUTHORIZED_ACCESS = "unauthorized_access"
    RUNTIME_ERROR = "runtime_error"
    PROGRAM_RUNNER_ERROR = "program_runner_error"


class ResourceLimitType(str, Enum):
    TIME = "time"
    MEMORY = "memory"
    OUTPUT = "output"


class UnauthorizedAccessType(str, Enum):
    DISALLOWED_SYSCALL = "disallowed_syscall"
    FILE_ACCESS = "file_access"
    NETWORK_ACCESS = "network_access"


class RuntimeErrorType(str, Enum):
    SIGNALLED = "signalled"
    NONZERO_EXIT = "nonzero_exit"
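
Since every enum mixes in str, members compare equal to their raw values and serialize as plain strings in Pydantic models and JSON, which is why the execution code can pass a SupportLanguage member straight into a docker command line. A quick illustration of that assumed str/Enum-mixin behavior (not repo code):

from models.enums import ResultStatus, SupportLanguage

assert SupportLanguage.PYTHON == "python"
assert SupportLanguage("nodejs") is SupportLanguage.NODEJS
assert ResultStatus.SUCCESS.value == "success"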
53  sandbox/executor_manager/models/schemas.py  Normal file
@@ -0,0 +1,53 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
from typing import Optional

from pydantic import BaseModel, Field, field_validator

from models.enums import ResourceLimitType, ResultStatus, RuntimeErrorType, SupportLanguage, UnauthorizedAccessType


class CodeExecutionResult(BaseModel):
    status: ResultStatus
    stdout: str
    stderr: str
    exit_code: int
    detail: Optional[str] = None

    # Resource usage
    time_used_ms: Optional[float] = None
    memory_used_kb: Optional[float] = None

    # Error details
    resource_limit_type: Optional[ResourceLimitType] = None
    unauthorized_access_type: Optional[UnauthorizedAccessType] = None
    runtime_error_type: Optional[RuntimeErrorType] = None


class CodeExecutionRequest(BaseModel):
    code_b64: str = Field(..., description="Base64 encoded code string")
    language: SupportLanguage = Field(default=SupportLanguage.PYTHON, description="Programming language")
    arguments: Optional[dict] = Field(default={}, description="Arguments")

    @field_validator("code_b64")
    @classmethod
    def validate_base64(cls, v: str) -> str:
        try:
            base64.b64decode(v, validate=True)
            return v
        except Exception as e:
            raise ValueError(f"Invalid base64 encoding: {str(e)}")
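
A request therefore carries the program as base64 (rejected up front if it does not decode) plus keyword arguments that the runner forwards to main(**arguments). An illustrative client-side construction (not repo code):

import base64

from models.schemas import CodeExecutionRequest

code = b'def main(name="world"):\n    return f"hello, {name}"\n'
req = CodeExecutionRequest(
    code_b64=base64.b64encode(code).decode(),
    language="python",              # coerced to SupportLanguage.PYTHON
    arguments={"name": "sandbox"},  # forwarded as main(**arguments)
)
print(req.model_dump_json())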
3  sandbox/executor_manager/requirements.txt  Normal file
@@ -0,0 +1,3 @@
fastapi
uvicorn
slowapi
55  sandbox/executor_manager/seccomp-profile-default.json  Normal file
@@ -0,0 +1,55 @@
{
    "defaultAction": "SCMP_ACT_ERRNO",
    "archMap": [
        {
            "architecture": "SCMP_ARCH_X86_64",
            "subArchitectures": [
                "SCMP_ARCH_X86",
                "SCMP_ARCH_X32"
            ]
        }
    ],
    "syscalls": [
        {
            "names": [
                "read",
                "write",
                "exit",
                "sigreturn",
                "brk",
                "mmap",
                "munmap",
                "rt_sigaction",
                "rt_sigprocmask",
                "futex",
                "clone",
                "execve",
                "arch_prctl",
                "access",
                "openat",
                "close",
                "stat",
                "fstat",
                "lstat",
                "getpid",
                "gettid",
                "getuid",
                "getgid",
                "geteuid",
                "getegid",
                "clock_gettime",
                "nanosleep",
                "uname",
                "writev",
                "readlink",
                "getrandom",
                "statx",
                "faccessat2",
                "pread64",
                "pwrite64",
                "rt_sigreturn"
            ],
            "action": "SCMP_ACT_ALLOW"
        }
    ]
}
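
The profile is deny-by-default: defaultAction SCMP_ACT_ERRNO makes every syscall outside the single allowlist entry fail, and it only takes effect when SANDBOX_ENABLE_SECCOMP mounts it via --security-opt. A throwaway sanity check (not repo code; the path is assumed relative to the repo root):

import json

with open("sandbox/executor_manager/seccomp-profile-default.json") as f:
    profile = json.load(f)

assert profile["defaultAction"] == "SCMP_ACT_ERRNO"  # deny by default
allowed = set(profile["syscalls"][0]["names"])
print(f"{len(allowed)} syscalls allowed, e.g. {sorted(allowed)[:5]}")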
15  sandbox/executor_manager/services/__init__.py  Normal file
@@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
245  sandbox/executor_manager/services/execution.py  Normal file
@@ -0,0 +1,245 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import base64
import json
import os
import time
import uuid

from core.config import TIMEOUT
from core.container import allocate_container_blocking, release_container
from core.logger import logger
from models.enums import ResourceLimitType, ResultStatus, RuntimeErrorType, SupportLanguage, UnauthorizedAccessType
from models.schemas import CodeExecutionRequest, CodeExecutionResult
from utils.common import async_run_command


async def execute_code(req: CodeExecutionRequest):
    """Fully asynchronous execution logic"""
    language = req.language
    container = await allocate_container_blocking(language)
    if not container:
        return CodeExecutionResult(
            status=ResultStatus.PROGRAM_RUNNER_ERROR,
            stdout="",
            stderr="Container pool is busy",
            exit_code=-10,
            detail="no_available_container",
        )

    task_id = str(uuid.uuid4())
    workdir = f"/tmp/sandbox_{task_id}"
    os.makedirs(workdir, mode=0o700, exist_ok=True)

    try:
        if language == SupportLanguage.PYTHON:
            code_name = "main.py"
            # code
            code_path = os.path.join(workdir, code_name)
            with open(code_path, "wb") as f:
                f.write(base64.b64decode(req.code_b64))
            # runner
            runner_name = "runner.py"
            runner_path = os.path.join(workdir, runner_name)
            with open(runner_path, "w") as f:
                f.write("""import json
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
from main import main
if __name__ == "__main__":
    args = json.loads(sys.argv[1])
    result = main(**args)
    if result is not None:
        print(result)
""")

        elif language == SupportLanguage.NODEJS:
            code_name = "main.js"
            code_path = os.path.join(workdir, "main.js")
            with open(code_path, "wb") as f:
                f.write(base64.b64decode(req.code_b64))

            runner_name = "runner.js"
            runner_path = os.path.join(workdir, "runner.js")
            with open(runner_path, "w") as f:
                f.write("""
const fs = require('fs');
const path = require('path');

const args = JSON.parse(process.argv[2]);

const mainPath = path.join(__dirname, 'main.js');

if (fs.existsSync(mainPath)) {
    const { main } = require(mainPath);

    if (typeof args === 'object' && args !== null) {
        main(args).then(result => {
            if (result !== null) {
                console.log(result);
            }
        }).catch(err => {
            console.error('Error in main function:', err);
        });
    } else {
        console.error('Error: args is not a valid object:', args);
    }
} else {
    console.error('main.js not found in the current directory');
}
""")
        # dirs
        returncode, _, stderr = await async_run_command("docker", "exec", container, "mkdir", "-p", f"/workspace/{task_id}", timeout=5)
        if returncode != 0:
            raise RuntimeError(f"Directory creation failed: {stderr}")

        # archive
        tar_proc = await asyncio.create_subprocess_exec("tar", "czf", "-", "-C", workdir, code_name, runner_name, stdout=asyncio.subprocess.PIPE)
        tar_stdout, _ = await tar_proc.communicate()

        # unarchive
        docker_proc = await asyncio.create_subprocess_exec(
            "docker", "exec", "-i", container, "tar", "xzf", "-", "-C", f"/workspace/{task_id}", stdin=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await docker_proc.communicate(input=tar_stdout)

        if docker_proc.returncode != 0:
            raise RuntimeError(stderr.decode())

        # exec
        start_time = time.time()
        try:
            logger.info(f"Passed in args: {req.arguments}")
            args_json = json.dumps(req.arguments or {})
            run_args = [
                "docker",
                "exec",
                "--workdir",
                f"/workspace/{task_id}",
                container,
                "timeout",
                str(TIMEOUT),
                language,
            ]
            # interpreter flags
            if language == SupportLanguage.PYTHON:
                run_args.extend(["-I", "-B"])
            elif language == SupportLanguage.NODEJS:
                run_args.extend([])  # no extra interpreter flags for Node.js
            else:
                # was a no-op `assert True`; raise so an unsupported language fails loudly
                raise AssertionError("Will never reach here: unsupported language")
            run_args.extend([runner_name, args_json])

            returncode, stdout, stderr = await async_run_command(
                *run_args,
                timeout=TIMEOUT + 5,
            )

            time_used_ms = (time.time() - start_time) * 1000

            logger.info("----------------------------------------------")
            logger.info(f"Code: {str(base64.b64decode(req.code_b64))}")
            logger.info(f"{returncode=}")
            logger.info(f"{stdout=}")
            logger.info(f"{stderr=}")
            logger.info(f"{args_json=}")

            if returncode == 0:
                return CodeExecutionResult(
                    status=ResultStatus.SUCCESS,
                    stdout=str(stdout),
                    stderr=stderr,
                    exit_code=0,
                    time_used_ms=time_used_ms,
                )
            elif returncode == 124:
                return CodeExecutionResult(
                    status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                    stdout="",
                    stderr="Execution timeout",
                    exit_code=-124,
                    resource_limit_type=ResourceLimitType.TIME,
                    time_used_ms=time_used_ms,
                )
            elif returncode == 137:
                return CodeExecutionResult(
                    status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                    stdout="",
                    stderr="Memory limit exceeded (killed by OOM)",
                    exit_code=-137,
                    resource_limit_type=ResourceLimitType.MEMORY,
                    time_used_ms=time_used_ms,
                )
            return analyze_error_result(stderr, returncode)

        except asyncio.TimeoutError:
            await async_run_command("docker", "exec", container, "pkill", "-9", language)
            return CodeExecutionResult(
                status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                stdout="",
                stderr="Execution timeout",
                exit_code=-1,
                resource_limit_type=ResourceLimitType.TIME,
                time_used_ms=(time.time() - start_time) * 1000,
            )

    except Exception as e:
        logger.error(f"Execution exception: {str(e)}")
        return CodeExecutionResult(status=ResultStatus.PROGRAM_RUNNER_ERROR, stdout="", stderr=str(e), exit_code=-3, detail="internal_error")

    finally:
        # cleanup
        cleanup_tasks = [async_run_command("docker", "exec", container, "rm", "-rf", f"/workspace/{task_id}"), async_run_command("rm", "-rf", workdir)]
        await asyncio.gather(*cleanup_tasks, return_exceptions=True)
        await release_container(container, language)


def analyze_error_result(stderr: str, exit_code: int) -> CodeExecutionResult:
    """Analyze the error result and classify it"""
    if "Permission denied" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.UNAUTHORIZED_ACCESS,
            stdout="",
            stderr=stderr,
            exit_code=exit_code,
            unauthorized_access_type=UnauthorizedAccessType.FILE_ACCESS,
        )
    elif "Operation not permitted" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.UNAUTHORIZED_ACCESS,
            stdout="",
            stderr=stderr,
            exit_code=exit_code,
            unauthorized_access_type=UnauthorizedAccessType.DISALLOWED_SYSCALL,
        )
    elif "MemoryError" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
            stdout="",
            stderr=stderr,
            exit_code=exit_code,
            resource_limit_type=ResourceLimitType.MEMORY,
        )
    else:
        return CodeExecutionResult(
            status=ResultStatus.PROGRAM_ERROR,
            stdout="",
            stderr=stderr,
            exit_code=exit_code,
            runtime_error_type=RuntimeErrorType.NONZERO_EXIT,
        )
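
Two exit codes do the resource-limit classification: 124 is what GNU timeout returns when it has to kill the command, and 137 is 128+9, i.e. the process died on SIGKILL, which under a --memory limit usually means the OOM killer. From the outside, the whole flow reduces to one HTTP call; a hypothetical client against the /run endpoint the tests target (httpx is already in the project dependencies):

import base64

import httpx

code = b"def main():\n    return 6 * 7\n"
resp = httpx.post(
    "http://localhost:9385/run",
    json={"code_b64": base64.b64encode(code).decode(), "language": "python", "arguments": {}},
    timeout=30,
)
print(resp.json())  # e.g. {"status": "success", "stdout": "42\n", ...}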
38  sandbox/executor_manager/services/limiter.py  Normal file
@@ -0,0 +1,38 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from fastapi import Request
from fastapi.responses import JSONResponse
from models.enums import ResultStatus
from models.schemas import CodeExecutionResult
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address

limiter = Limiter(key_func=get_remote_address)


async def rate_limit_exceeded_handler(request: Request, exc: Exception) -> JSONResponse:
    if isinstance(exc, RateLimitExceeded):
        return JSONResponse(
            content=CodeExecutionResult(
                status=ResultStatus.PROGRAM_RUNNER_ERROR,
                stdout="",
                stderr="Too many requests, please try again later",
                exit_code=-429,
                detail="Too many requests, please try again later",
            ).model_dump(),
        )
    raise exc
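
slowapi hooks in at the route level: the decorated endpoint must accept a Request so the key_func can read the client address. A sketch of how a route would opt in (the "5/second" policy is illustrative, and api/routes.py holds the real wiring):

from fastapi import APIRouter, Request

from models.schemas import CodeExecutionRequest
from services.execution import execute_code
from services.limiter import limiter

router = APIRouter()


@router.post("/run")
@limiter.limit("5/second")  # illustrative policy, keyed on the client IP via get_remote_address
async def run_code(request: Request, req: CodeExecutionRequest):
    return await execute_code(req)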
173  sandbox/executor_manager/services/security.py  Normal file
@@ -0,0 +1,173 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import ast
from typing import List, Tuple

from core.logger import logger
from models.enums import SupportLanguage


class SecurePythonAnalyzer(ast.NodeVisitor):
    """
    An AST-based analyzer for detecting unsafe Python code patterns.
    """

    DANGEROUS_IMPORTS = {"os", "subprocess", "sys", "shutil", "socket", "ctypes", "pickle", "threading", "multiprocessing", "asyncio", "http.client", "ftplib", "telnetlib"}

    DANGEROUS_CALLS = {
        "eval",
        "exec",
        "open",
        "__import__",
        "compile",
        "input",
        "system",
        "popen",
        "remove",
        "rename",
        "rmdir",
        "chdir",
        "chmod",
        "chown",
        "getattr",
        "setattr",
        "globals",
        "locals",
        "shutil.rmtree",
        "subprocess.call",
        "subprocess.Popen",
        "ctypes",
        "pickle.load",
        "pickle.loads",
        "pickle.dump",
        "pickle.dumps",
    }

    def __init__(self):
        self.unsafe_items: List[Tuple[str, int]] = []

    def visit_Import(self, node: ast.Import):
        """Check for dangerous imports."""
        for alias in node.names:
            if alias.name.split(".")[0] in self.DANGEROUS_IMPORTS:
                self.unsafe_items.append((f"Import: {alias.name}", node.lineno))
        self.generic_visit(node)

    def visit_ImportFrom(self, node: ast.ImportFrom):
        """Check for dangerous imports from specific modules."""
        if node.module and node.module.split(".")[0] in self.DANGEROUS_IMPORTS:
            self.unsafe_items.append((f"From Import: {node.module}", node.lineno))
        self.generic_visit(node)

    def visit_Call(self, node: ast.Call):
        """Check for dangerous function calls."""
        if isinstance(node.func, ast.Name) and node.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append((f"Call: {node.func.id}", node.lineno))
        self.generic_visit(node)

    def visit_Attribute(self, node: ast.Attribute):
        """Check for dangerous attribute access."""
        if isinstance(node.value, ast.Name) and node.value.id in self.DANGEROUS_IMPORTS:
            self.unsafe_items.append((f"Attribute Access: {node.value.id}.{node.attr}", node.lineno))
        self.generic_visit(node)

    def visit_BinOp(self, node: ast.BinOp):
        """Check for possibly unsafe operations like concatenating strings into commands."""
        # This could be useful to detect `eval("os." + "system")`
        if isinstance(node.left, ast.Constant) and isinstance(node.right, ast.Constant):
            self.unsafe_items.append(("Possible unsafe string concatenation", node.lineno))
        self.generic_visit(node)

    def visit_FunctionDef(self, node: ast.FunctionDef):
        """Check for dangerous function definitions (e.g., a user-defined eval)."""
        if node.name in self.DANGEROUS_CALLS:
            self.unsafe_items.append((f"Function Definition: {node.name}", node.lineno))
        self.generic_visit(node)

    def visit_Assign(self, node: ast.Assign):
        """Check for assignments to variables that might shadow dangerous names."""
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id in self.DANGEROUS_CALLS:
                self.unsafe_items.append((f"Assignment to dangerous variable: {target.id}", node.lineno))
        self.generic_visit(node)

    def visit_Lambda(self, node: ast.Lambda):
        """Check for lambda functions with dangerous operations."""
        if isinstance(node.body, ast.Call) and isinstance(node.body.func, ast.Name) and node.body.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Lambda with dangerous function call", node.lineno))
        self.generic_visit(node)

    def visit_ListComp(self, node: ast.ListComp):
        """Check for list comprehensions with dangerous operations."""
        # First, visit the generators to check for any issues there
        for elem in node.generators:
            if isinstance(elem, ast.comprehension):
                self.generic_visit(elem)

        if isinstance(node.elt, ast.Call) and isinstance(node.elt.func, ast.Name) and node.elt.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("List comprehension with dangerous function call", node.lineno))
        self.generic_visit(node)

    def visit_DictComp(self, node: ast.DictComp):
        """Check for dictionary comprehensions with dangerous operations."""
        # Check for dangerous calls in both the key and value expressions of the dictionary comprehension
        if isinstance(node.key, ast.Call) and isinstance(node.key.func, ast.Name) and node.key.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Dict comprehension with dangerous function call in key", node.lineno))

        if isinstance(node.value, ast.Call) and isinstance(node.value.func, ast.Name) and node.value.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Dict comprehension with dangerous function call in value", node.lineno))

        # Visit other sub-nodes (e.g., the generators in the comprehension)
        self.generic_visit(node)

    def visit_SetComp(self, node: ast.SetComp):
        """Check for set comprehensions with dangerous operations."""
        for elt in node.generators:
            if isinstance(elt, ast.comprehension):
                self.generic_visit(elt)

        if isinstance(node.elt, ast.Call) and isinstance(node.elt.func, ast.Name) and node.elt.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Set comprehension with dangerous function call", node.lineno))

        self.generic_visit(node)

    def visit_Yield(self, node: ast.Yield):
        """Check for yield expressions that could produce unsafe values."""
        if isinstance(node.value, ast.Call) and isinstance(node.value.func, ast.Name) and node.value.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Yield with dangerous function call", node.lineno))
        self.generic_visit(node)


def analyze_code_security(code: str, language: SupportLanguage) -> Tuple[bool, List[Tuple[str, int]]]:
    """
    Analyze the provided code string and return whether it is safe and why.

    :param code: The source code to analyze.
    :param language: The programming language of the code.
    :return: (is_safe: bool, issues: List of (description, line number))
    """
    if language == SupportLanguage.PYTHON:
        try:
            tree = ast.parse(code)
            analyzer = SecurePythonAnalyzer()
            analyzer.visit(tree)
            return len(analyzer.unsafe_items) == 0, analyzer.unsafe_items
        except Exception as e:
            logger.error(f"[SafeCheck] Python parsing failed: {str(e)}")
            return False, [(f"Parsing Error: {str(e)}", -1)]
    else:
        logger.warning(f"[SafeCheck] Unsupported language for security analysis: {language} — defaulting to SAFE (manual review recommended)")
        return True, [(f"Unsupported language for security analysis: {language} — defaulted to SAFE, manual review recommended", -1)]
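
Expected behavior on a hostile snippet, based on the visitors above (a worked example; the commented output is what the analyzer should produce for this input):

from models.enums import SupportLanguage
from services.security import analyze_code_security

snippet = "import os\nos.system('ls')\n"
is_safe, issues = analyze_code_security(snippet, SupportLanguage.PYTHON)
print(is_safe)  # False
print(issues)   # [('Import: os', 1), ('Attribute Access: os.system', 2)]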
76  sandbox/executor_manager/util.py  Normal file
@@ -0,0 +1,76 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import re


def is_enabled(value: str) -> bool:
    return str(value).strip().lower() in {"1", "true", "yes", "on"}


def env_setting_enabled(env_key: str, default: str = "false") -> bool:
    value = os.getenv(env_key, default)
    return is_enabled(value)


def is_valid_memory_limit(mem: str | None) -> bool:
    """
    Return True if the input string is a valid Docker memory limit (e.g. '256m', '1g').
    Units allowed: b, k, m, g (case-insensitive).
    Disallows zero or negative values.
    """
    if not mem or not isinstance(mem, str):
        return False

    mem = mem.strip().lower()

    return re.fullmatch(r"[1-9]\d*(b|k|m|g)", mem) is not None


def parse_timeout_duration(timeout: str | None, default_seconds: int = 10) -> int:
    """
    Parses a string like '90s', '2m', '1m30s' into total seconds (int).
    Supports 's', 'm' (lower or upper case). Returns default if invalid.
    '1m30s' -> 90
    """
    if not timeout or not isinstance(timeout, str):
        return default_seconds

    timeout = timeout.strip().lower()

    pattern = r"^(?:(\d+)m)?(?:(\d+)s)?$"
    match = re.fullmatch(pattern, timeout)
    if not match:
        return default_seconds

    minutes = int(match.group(1)) if match.group(1) else 0
    seconds = int(match.group(2)) if match.group(2) else 0
    total = minutes * 60 + seconds

    return total if total > 0 else default_seconds


def format_timeout_duration(seconds: int) -> str:
    """
    Formats an integer number of seconds into a string like '1m30s'.
    90 -> '1m30s'
    """
    if seconds < 60:
        return f"{seconds}s"
    minutes, sec = divmod(seconds, 60)
    if sec == 0:
        return f"{minutes}m"
    return f"{minutes}m{sec}s"
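
Worked examples of the helpers above, following their docstrings:

from util import format_timeout_duration, is_valid_memory_limit, parse_timeout_duration

assert is_valid_memory_limit("256m") and is_valid_memory_limit("1g")
assert not is_valid_memory_limit("0m") and not is_valid_memory_limit("256mb")  # leading zero-free digits, single-letter unit
assert parse_timeout_duration("1m30s") == 90
assert parse_timeout_duration("bogus", default_seconds=10) == 10
assert format_timeout_duration(90) == "1m30s"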
15  sandbox/executor_manager/utils/__init__.py  Normal file
@@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
36  sandbox/executor_manager/utils/common.py  Normal file
@@ -0,0 +1,36 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
from typing import Tuple


async def async_run_command(*args, timeout: float = 5) -> Tuple[int, str, str]:
    """Safe asynchronous command execution tool"""
    proc = await asyncio.create_subprocess_exec(*args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)

    try:
        stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=timeout)
        if proc.returncode is None:
            raise RuntimeError("Process finished but returncode is None")
        return proc.returncode, stdout.decode(), stderr.decode()
    except asyncio.TimeoutError:
        proc.kill()
        await proc.wait()
        raise RuntimeError("Command timed out")
    except Exception as e:
        proc.kill()
        await proc.wait()
        raise e
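
Usage of the helper (it returns (returncode, stdout, stderr) and raises RuntimeError("Command timed out") once the deadline passes):

import asyncio

from utils.common import async_run_command


async def demo():
    code, out, _ = await async_run_command("echo", "hello", timeout=2)
    print(code, out.strip())  # 0 hello
    try:
        await async_run_command("sleep", "10", timeout=1)
    except RuntimeError as e:
        print(e)  # Command timed out


asyncio.run(demo())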
28  sandbox/pyproject.toml  Normal file
@@ -0,0 +1,28 @@
[project]
name = "gvisor-sandbox"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "fastapi>=0.115.12",
    "httpx>=0.28.1",
    "pydantic>=2.11.4",
    "requests>=2.32.3",
    "slowapi>=0.1.9",
    "uvicorn>=0.34.2",
]

[[tool.uv.index]]
url = "https://pypi.tuna.tsinghua.edu.cn/simple"

[dependency-groups]
dev = [
    "basedpyright>=1.29.1",
]

[tool.ruff]
line-length = 200

[tool.ruff.lint]
extend-select = ["C4", "SIM", "TCH"]
17  sandbox/sandbox_base_image/nodejs/Dockerfile  Normal file
@@ -0,0 +1,17 @@
FROM node:24-bookworm-slim

RUN npm config set registry https://registry.npmmirror.com

# RUN grep -rl 'deb.debian.org' /etc/apt/ | xargs sed -i 's|http[s]*://deb.debian.org|https://mirrors.ustc.edu.cn|g' && \
#     apt-get update && \
#     apt-get install -y curl gcc make

WORKDIR /app

# Destination must end in "/" when copying multiple sources
COPY package.json package-lock.json ./

RUN npm install

CMD ["sleep", "infinity"]
294  sandbox/sandbox_base_image/nodejs/package-lock.json  (generated)  Normal file
@@ -0,0 +1,294 @@
{
  "name": "nodejs",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "nodejs",
      "version": "1.0.0",
      "license": "ISC",
      "dependencies": {
        "axios": "^1.9.0"
      }
    },
    "node_modules/asynckit": {
      "version": "0.4.0",
      "resolved": "https://registry.npmmirror.com/asynckit/-/asynckit-0.4.0.tgz",
      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
      "license": "MIT"
    },
    "node_modules/axios": {
      "version": "1.9.0",
      "resolved": "https://registry.npmmirror.com/axios/-/axios-1.9.0.tgz",
      "integrity": "sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg==",
      "license": "MIT",
      "dependencies": {
        "follow-redirects": "^1.15.6",
        "form-data": "^4.0.0",
        "proxy-from-env": "^1.1.0"
      }
    },
    "node_modules/call-bind-apply-helpers": {
      "version": "1.0.2",
      "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0",
        "function-bind": "^1.1.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/combined-stream": {
      "version": "1.0.8",
      "resolved": "https://registry.npmmirror.com/combined-stream/-/combined-stream-1.0.8.tgz",
      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
      "license": "MIT",
      "dependencies": {
        "delayed-stream": "~1.0.0"
      },
      "engines": {
        "node": ">= 0.8"
      }
    },
    "node_modules/delayed-stream": {
      "version": "1.0.0",
      "resolved": "https://registry.npmmirror.com/delayed-stream/-/delayed-stream-1.0.0.tgz",
      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
      "license": "MIT",
      "engines": {
        "node": ">=0.4.0"
      }
    },
    "node_modules/dunder-proto": {
      "version": "1.0.1",
      "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz",
      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
      "license": "MIT",
      "dependencies": {
        "call-bind-apply-helpers": "^1.0.1",
        "es-errors": "^1.3.0",
        "gopd": "^1.2.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-define-property": {
      "version": "1.0.1",
      "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz",
      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-errors": {
      "version": "1.3.0",
      "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz",
      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-object-atoms": {
      "version": "1.1.1",
      "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-set-tostringtag": {
      "version": "2.1.0",
      "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0",
        "get-intrinsic": "^1.2.6",
        "has-tostringtag": "^1.0.2",
        "hasown": "^2.0.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/follow-redirects": {
      "version": "1.15.9",
      "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.9.tgz",
      "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
      "funding": [
        {
          "type": "individual",
          "url": "https://github.com/sponsors/RubenVerborgh"
        }
      ],
      "license": "MIT",
      "engines": {
        "node": ">=4.0"
      },
      "peerDependenciesMeta": {
        "debug": {
          "optional": true
        }
      }
    },
    "node_modules/form-data": {
      "version": "4.0.2",
      "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.2.tgz",
      "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
      "license": "MIT",
      "dependencies": {
        "asynckit": "^0.4.0",
        "combined-stream": "^1.0.8",
        "es-set-tostringtag": "^2.1.0",
        "mime-types": "^2.1.12"
      },
      "engines": {
        "node": ">= 6"
      }
    },
    "node_modules/function-bind": {
      "version": "1.1.2",
      "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz",
      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
      "license": "MIT",
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/get-intrinsic": {
      "version": "1.3.0",
      "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
      "license": "MIT",
      "dependencies": {
        "call-bind-apply-helpers": "^1.0.2",
        "es-define-property": "^1.0.1",
        "es-errors": "^1.3.0",
        "es-object-atoms": "^1.1.1",
        "function-bind": "^1.1.2",
        "get-proto": "^1.0.1",
        "gopd": "^1.2.0",
        "has-symbols": "^1.1.0",
        "hasown": "^2.0.2",
        "math-intrinsics": "^1.1.0"
      },
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/get-proto": {
      "version": "1.0.1",
      "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz",
      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
      "license": "MIT",
      "dependencies": {
        "dunder-proto": "^1.0.1",
        "es-object-atoms": "^1.0.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/gopd": {
      "version": "1.2.0",
      "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz",
      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/has-symbols": {
      "version": "1.1.0",
      "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz",
      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/has-tostringtag": {
      "version": "1.0.2",
      "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
      "license": "MIT",
      "dependencies": {
        "has-symbols": "^1.0.3"
      },
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/hasown": {
      "version": "2.0.2",
      "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz",
      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
      "license": "MIT",
      "dependencies": {
        "function-bind": "^1.1.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/math-intrinsics": {
      "version": "1.1.0",
      "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/mime-db": {
      "version": "1.52.0",
      "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz",
      "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
      "license": "MIT",
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/mime-types": {
      "version": "2.1.35",
      "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz",
      "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
      "license": "MIT",
      "dependencies": {
        "mime-db": "1.52.0"
      },
      "engines": {
        "node": ">= 0.6"
      }
    },
    "node_modules/proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmmirror.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
      "license": "MIT"
    }
  }
}
15  sandbox/sandbox_base_image/nodejs/package.json  Normal file
@@ -0,0 +1,15 @@
{
  "name": "nodejs",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "description": "",
  "dependencies": {
    "axios": "^1.9.0"
  }
}
15  sandbox/sandbox_base_image/python/Dockerfile  Normal file
@@ -0,0 +1,15 @@
FROM python:3.11-slim-bookworm

COPY --from=ghcr.io/astral-sh/uv:0.7.5 /uv /uvx /bin/
ENV UV_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple

COPY requirements.txt .

RUN grep -rl 'deb.debian.org' /etc/apt/ | xargs sed -i 's|http[s]*://deb.debian.org|https://mirrors.tuna.tsinghua.edu.cn|g' && \
    apt-get update && \
    apt-get install -y curl gcc && \
    uv pip install --system -r requirements.txt

WORKDIR /workspace

CMD ["sleep", "infinity"]
3  sandbox/sandbox_base_image/python/requirements.txt  Normal file
@@ -0,0 +1,3 @@
numpy
pandas
requests
21  sandbox/scripts/restart.sh  Executable file
@@ -0,0 +1,21 @@
#!/bin/bash
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

set -e

bash "$(dirname "$0")/stop.sh"
bash "$(dirname "$0")/start.sh"
72  sandbox/scripts/start.sh  Executable file
@@ -0,0 +1,72 @@
#!/bin/bash
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

set -e

BASE_DIR="$(cd "$(dirname "$0")/.." && pwd)"
cd "$BASE_DIR"

if [ -f .env ]; then
    source .env
    SANDBOX_EXECUTOR_MANAGER_PORT="${SANDBOX_EXECUTOR_MANAGER_PORT:-9385}" # Default to 9385 if not set in .env
    SANDBOX_EXECUTOR_MANAGER_POOL_SIZE="${SANDBOX_EXECUTOR_MANAGER_POOL_SIZE:-5}" # Default to 5 if not set in .env
    SANDBOX_BASE_PYTHON_IMAGE=${SANDBOX_BASE_PYTHON_IMAGE-"sandbox-base-python:latest"}
    SANDBOX_BASE_NODEJS_IMAGE=${SANDBOX_BASE_NODEJS_IMAGE-"sandbox-base-nodejs:latest"}
else
    echo "⚠️ .env not found, using default ports and pool size"
    SANDBOX_EXECUTOR_MANAGER_PORT=9385
    SANDBOX_EXECUTOR_MANAGER_POOL_SIZE=5
    SANDBOX_BASE_PYTHON_IMAGE=sandbox-base-python:latest
    SANDBOX_BASE_NODEJS_IMAGE=sandbox-base-nodejs:latest
fi

echo "📦 STEP 1: Build sandbox-base image ..."
if [ -f .env ]; then
    source .env &&
        echo "🐍 Building base sandbox image for Python ($SANDBOX_BASE_PYTHON_IMAGE)..." &&
        docker build -t "$SANDBOX_BASE_PYTHON_IMAGE" ./sandbox_base_image/python &&
        echo "⬢ Building base sandbox image for Nodejs ($SANDBOX_BASE_NODEJS_IMAGE)..." &&
        docker build -t "$SANDBOX_BASE_NODEJS_IMAGE" ./sandbox_base_image/nodejs
else
    echo "⚠️ .env file not found, skipping build."
fi

echo "🧹 STEP 2: Clean up old sandbox containers (sandbox_nodejs_0~$((SANDBOX_EXECUTOR_MANAGER_POOL_SIZE - 1)) and sandbox_python_0~$((SANDBOX_EXECUTOR_MANAGER_POOL_SIZE - 1))) ..."
for i in $(seq 0 $((SANDBOX_EXECUTOR_MANAGER_POOL_SIZE - 1))); do
    echo "🧹 Deleting sandbox_python_$i..."
    docker rm -f "sandbox_python_$i" >/dev/null 2>&1 || true

    echo "🧹 Deleting sandbox_nodejs_$i..."
    docker rm -f "sandbox_nodejs_$i" >/dev/null 2>&1 || true
done

echo "🔧 STEP 3: Build executor services ..."
docker compose build

echo "🚀 STEP 4: Start services ..."
docker compose up -d

echo "⏳ STEP 5a: Check if ports are open (basic connectivity) ..."
bash ./scripts/wait-for-it.sh "localhost" "$SANDBOX_EXECUTOR_MANAGER_PORT" -t 30

echo "⏳ STEP 5b: Check if the interfaces are healthy (/healthz) ..."
bash ./scripts/wait-for-it-http.sh "http://localhost:$SANDBOX_EXECUTOR_MANAGER_PORT/healthz" 30

echo "✅ STEP 6: Run security tests ..."
python3 ./tests/sandbox_security_tests_full.py

echo "🎉 Service is ready: http://localhost:$SANDBOX_EXECUTOR_MANAGER_PORT/docs"
40  sandbox/scripts/stop.sh  Executable file
@@ -0,0 +1,40 @@
#!/bin/bash
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

set -e

BASE_DIR="$(cd "$(dirname "$0")/.." && pwd)"
cd "$BASE_DIR"

echo "🛑 Stopping all services..."
docker compose down

echo "🧹 Deleting sandbox containers..."
if [ -f .env ]; then
    source .env
    for i in $(seq 0 $((SANDBOX_EXECUTOR_MANAGER_POOL_SIZE - 1))); do
        echo "🧹 Deleting sandbox_python_$i..."
        docker rm -f "sandbox_python_$i" >/dev/null 2>&1 || true

        echo "🧹 Deleting sandbox_nodejs_$i..."
        docker rm -f "sandbox_nodejs_$i" >/dev/null 2>&1 || true
    done
else
    echo "⚠️ .env not found, skipping container cleanup"
fi

echo "✅ Stopping and cleanup complete"
31
sandbox/scripts/wait-for-it-http.sh
Executable file
31
sandbox/scripts/wait-for-it-http.sh
Executable file
@ -0,0 +1,31 @@
#!/bin/bash
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

url=$1
timeout=${2:-15}
quiet=${3:-0}

for i in $(seq "$timeout"); do
    if curl -fs "$url" >/dev/null; then
        [[ "$quiet" -ne 1 ]] && echo "✔ $url is healthy after $i seconds"
        exit 0
    fi
    sleep 1
done

echo "✖ Timeout after $timeout seconds waiting for $url"
exit 1
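Usage sketch for the script above; the arguments are positional (URL, timeout in seconds, optional quiet flag), and port 9385 is an assumption taken from the test suite's default:

# wait up to 60 s for the health endpoint, suppressing the success message
bash ./scripts/wait-for-it-http.sh "http://localhost:9385/healthz" 60 1
echo "exit status: $?"  # 0 if healthy within the window, 1 on timeout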
50
sandbox/scripts/wait-for-it.sh
Executable file
@@ -0,0 +1,50 @@
#!/bin/bash
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

host=$1
port=$2
shift 2

timeout=15
quiet=0

while [[ $# -gt 0 ]]; do
    case "$1" in
        -t | --timeout)
            timeout="$2"
            shift 2
            ;;
        -q | --quiet)
            quiet=1
            shift
            ;;
        *)
            break
            ;;
    esac
done

for i in $(seq "$timeout"); do
    if nc -z "$host" "$port" >/dev/null 2>&1; then
        [[ "$quiet" -ne 1 ]] && echo "✔ $host:$port is available after $i seconds"
        exit 0
    fi
    sleep 1
done

echo "✖ Timeout after $timeout seconds waiting for $host:$port"
exit 1
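Usage sketch for the TCP variant above; host and port are positional, while -t/--timeout and -q/--quiet are parsed as flags (port 9385 is again assumed from the test suite's default):

# wait up to 20 s for the port, quietly; the exit status still reports the outcome
bash ./scripts/wait-for-it.sh localhost 9385 -t 20 -q && echo "port open"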
436
sandbox/tests/sandbox_security_tests_full.py
Normal file
@@ -0,0 +1,436 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
import os
import textwrap
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from enum import Enum
from typing import Dict, Optional

import requests
from pydantic import BaseModel

API_URL = os.getenv("SANDBOX_API_URL", "http://localhost:9385/run")
TIMEOUT = 15
MAX_WORKERS = 5


class ResultStatus(str, Enum):
    SUCCESS = "success"
    PROGRAM_ERROR = "program_error"
    RESOURCE_LIMIT_EXCEEDED = "resource_limit_exceeded"
    UNAUTHORIZED_ACCESS = "unauthorized_access"
    RUNTIME_ERROR = "runtime_error"
    PROGRAM_RUNNER_ERROR = "program_runner_error"


class ResourceLimitType(str, Enum):
    TIME = "time"
    MEMORY = "memory"
    OUTPUT = "output"


class UnauthorizedAccessType(str, Enum):
    DISALLOWED_SYSCALL = "disallowed_syscall"
    FILE_ACCESS = "file_access"
    NETWORK_ACCESS = "network_access"


class RuntimeErrorType(str, Enum):
    SIGNALLED = "signalled"
    NONZERO_EXIT = "nonzero_exit"


class ExecutionResult(BaseModel):
    status: ResultStatus
    stdout: str
    stderr: str
    exit_code: int
    detail: Optional[str] = None
    resource_limit_type: Optional[ResourceLimitType] = None
    unauthorized_access_type: Optional[UnauthorizedAccessType] = None
    runtime_error_type: Optional[RuntimeErrorType] = None


class TestResult(BaseModel):
    name: str
    passed: bool
    duration: float
    expected_failure: bool = False
    result: Optional[ExecutionResult] = None
    error: Optional[str] = None
    validation_error: Optional[str] = None


def encode_code(code: str) -> str:
    return base64.b64encode(code.encode("utf-8")).decode("utf-8")


def execute_single_test(name: str, code: str, language: str, arguments: dict, expect_fail: bool = False) -> TestResult:
    """Execute a single test case"""
    payload = {
        "code_b64": encode_code(textwrap.dedent(code)),
        "language": language,
        "arguments": arguments,
    }

    test_result = TestResult(name=name, passed=False, duration=0, expected_failure=expect_fail)

    really_processed = False
    try:
        while not really_processed:
            start_time = time.perf_counter()

            resp = requests.post(API_URL, json=payload, timeout=TIMEOUT)
            resp.raise_for_status()
            response_data = resp.json()
            if response_data["exit_code"] == -429:  # too many requests
                print(f"[{name}] Reached request limit, retrying...")
                time.sleep(0.5)
                continue
            really_processed = True

            print("-------------------")
            print(f"{name}:\n{response_data}")
            print("-------------------")

            test_result.duration = time.perf_counter() - start_time
            test_result.result = ExecutionResult(**response_data)

            # Validate test result expectations
            validate_test_result(name, expect_fail, test_result)

    except requests.exceptions.RequestException as e:
        test_result.duration = time.perf_counter() - start_time
        test_result.error = f"Request failed: {str(e)}"
        test_result.result = ExecutionResult(
            status=ResultStatus.PROGRAM_RUNNER_ERROR,
            stdout="",
            stderr=str(e),
            exit_code=-999,
            detail="request_failed",
        )

    return test_result


def validate_test_result(name: str, expect_fail: bool, test_result: TestResult):
    """Validate if the test result meets expectations"""
    if not test_result.result:
        test_result.passed = False
        test_result.validation_error = "No result returned"
        return

    test_result.passed = test_result.result.status == ResultStatus.SUCCESS
    # General validation logic
    if expect_fail:
        # Tests expected to fail should return a non-success status
        if test_result.passed:
            test_result.validation_error = "Expected failure but actually succeeded"
    else:
        # Tests expected to succeed should return a success status
        if not test_result.passed:
            test_result.validation_error = f"Unexpected failure (status={test_result.result.status})"


def get_test_cases() -> Dict[str, dict]:
    """Return test cases (code, whether expected to fail)"""
    return {
        "1 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "2 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "3 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "4 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "5 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "6 Infinite loop: Should be forcibly terminated": {
            "code": """
def main():
    while True:
        pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "7 Normal test: Python without dependencies": {
            "code": """
def main():
    return {"data": "hello, world"}
""",
            "should_fail": False,
            "arguments": {},
            "language": "python",
        },
        "8 Normal test: Python with pandas, should pass without any error": {
            "code": """
import pandas as pd

def main():
    data = {'Name': ['Alice', 'Bob', 'Charlie'],
            'Age': [25, 30, 35]}
    df = pd.DataFrame(data)
""",
            "should_fail": False,
            "arguments": {},
            "language": "python",
        },
        "9 Normal test: Nodejs without dependencies, should pass without any error": {
            "code": """
const https = require('https');

async function main(args) {
    return new Promise((resolve, reject) => {
        const req = https.get('https://example.com/', (res) => {
            let data = '';

            res.on('data', (chunk) => {
                data += chunk;
            });

            res.on('end', () => {
                clearTimeout(timeout);
                console.log('Body:', data);
                resolve(data);
            });
        });

        const timeout = setTimeout(() => {
            req.destroy(new Error('Request timeout after 10s'));
        }, 10000);

        req.on('error', (err) => {
            clearTimeout(timeout);
            console.error('Error:', err.message);
            reject(err);
        });
    });
}

module.exports = { main };
""",
            "should_fail": False,
            "arguments": {},
            "language": "nodejs",
        },
        "10 Normal test: Nodejs with axios, should pass without any error": {
            "code": """
const axios = require('axios');

async function main(args) {
    try {
        const response = await axios.get('https://example.com/', {
            timeout: 10000
        });
        console.log('Body:', response.data);
    } catch (error) {
        console.error('Error:', error.message);
    }
}

module.exports = { main };
""",
            "should_fail": False,
            "arguments": {},
            "language": "nodejs",
        },
        "11 Dangerous import: Should fail due to os module import": {
            "code": """
import os

def main():
    pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "12 Dangerous import from subprocess: Should fail due to subprocess import": {
            "code": """
from subprocess import Popen

def main():
    pass
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "13 Dangerous call: Should fail due to eval function call": {
            "code": """
def main():
    eval('os.system("echo hello")')
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "14 Dangerous attribute access: Should fail due to shutil.rmtree": {
            "code": """
import shutil

def main():
    shutil.rmtree('/some/path')
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "15 Dangerous binary operation: Should fail due to unsafe concatenation leading to eval": {
            "code": """
def main():
    dangerous_string = "os." + "system"
    eval(dangerous_string + '("echo hello")')
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "16 Dangerous function definition: Should fail due to user-defined eval function": {
            "code": """
def eval_function():
    eval('os.system("echo hello")')

def main():
    eval_function()
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
        "17 Memory exhaustion(256m): Should fail due to exceeding memory limit(try to allocate 300m)": {
            "code": """
def main():
    x = ['a' * 1024 * 1024 for _ in range(300)]  # 300 distinct 1 MB strings ≈ 300MB
""",
            "should_fail": True,
            "arguments": {},
            "language": "python",
        },
    }


def print_test_report(results: Dict[str, TestResult]):
    print("\n=== 🔍 Test Report ===")

    max_name_len = max(len(name) for name in results)

    for name, result in results.items():
        status = "✅" if result.passed else "❌"
        if result.expected_failure:
            status = "⚠️" if result.passed else "✓"  # Expected failure case

        print(f"{status} {name.ljust(max_name_len)} {result.duration:.2f}s")

        if result.error:
            print(f" REQUEST ERROR: {result.error}")
        if result.validation_error:
            print(f" VALIDATION ERROR: {result.validation_error}")

        if result.result and not result.passed:
            print(f" STATUS: {result.result.status}")
            if result.result.stderr:
                print(f" STDERR: {result.result.stderr[:200]}...")
            if result.result.detail:
                print(f" DETAIL: {result.result.detail}")

    passed = sum(1 for r in results.values() if ((not r.expected_failure and r.passed) or (r.expected_failure and not r.passed)))
    failed = len(results) - passed

    print("\n=== 📊 Statistics ===")
    print(f"✅ Passed: {passed}")
    print(f"❌ Failed: {failed}")
    print(f"📌 Total: {len(results)}")


def main():
    print(f"🔐 Starting sandbox security tests (API: {API_URL})")
    print(f"🚀 Concurrent threads: {MAX_WORKERS}")

    test_cases = get_test_cases()
    results = {}

    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        futures = {}
        for name, detail in test_cases.items():
            # ✅ Log when a task is submitted
            print(f"✅ Task submitted: {name}")
            time.sleep(0.4)
            future = executor.submit(execute_single_test, name, detail["code"], detail["language"], detail["arguments"], detail["should_fail"])
            futures[future] = name

        print("\n=== 🚦 Test Progress ===")
        for i, future in enumerate(as_completed(futures)):
            name = futures[future]
            print(f" {i + 1}/{len(test_cases)} completed: {name}")
            try:
                results[name] = future.result()
            except Exception as e:
                print(f"⚠️ Test {name} execution exception: {str(e)}")
                results[name] = TestResult(name=name, passed=False, duration=0, error=f"Execution exception: {str(e)}")

    print_test_report(results)

    if any(not r.passed and not r.expected_failure for r in results.values()):
        exit(1)


if __name__ == "__main__":
    main()
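The harness reads SANDBOX_API_URL from the environment, so it can target a non-default executor manager, and the payload it builds (code_b64, language, arguments) is easy to reproduce for a one-off probe. A minimal sketch, assuming the manager listens on localhost:9385:

# run the full suite against an explicit endpoint; the process exits non-zero
# when any test that was expected to succeed did not
SANDBOX_API_URL="http://localhost:9385/run" python3 ./tests/sandbox_security_tests_full.py

# one-off request with the same payload shape the harness sends
code='def main(): return {"data": "hello, world"}'
curl -s -X POST "http://localhost:9385/run" \
  -H "Content-Type: application/json" \
  -d "{\"code_b64\": \"$(printf '%s' "$code" | base64 | tr -d '\n')\", \"language\": \"python\", \"arguments\": {}}"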
539
sandbox/uv.lock
generated
Normal file
@@ -0,0 +1,539 @@
version = 1
revision = 1
requires-python = ">=3.10"

[[package]]
name = "gvisor-sandbox"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
    { name = "fastapi" },
    { name = "httpx" },
    { name = "pydantic" },
    { name = "requests" },
    { name = "slowapi" },
    { name = "uvicorn" },
]

[package.dev-dependencies]
dev = [
    { name = "basedpyright" },
]

[package.metadata]
requires-dist = [
    { name = "fastapi", specifier = ">=0.115.12" },
    { name = "httpx", specifier = ">=0.28.1" },
    { name = "pydantic", specifier = ">=2.11.4" },
    { name = "requests", specifier = ">=2.32.3" },
    { name = "slowapi", specifier = ">=0.1.9" },
    { name = "uvicorn", specifier = ">=0.34.2" },
]

[package.metadata.requires-dev]
dev = [{ name = "basedpyright", specifier = ">=1.29.1" }]

# Remaining generated entries (pinned sdist/wheel URLs and sha256 hashes,
# resolved against https://pypi.tuna.tsinghua.edu.cn/simple) for:
# annotated-types, anyio, basedpyright, certifi, charset-normalizer, click,
# colorama, deprecated, exceptiongroup, fastapi, h11, httpcore, httpx, idna,
# limits, nodejs-wheel-binaries, packaging, pydantic, pydantic-core, …
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 },
|
||||
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 },
|
||||
]

[[package]]
name = "requests"
version = "2.32.3"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
    { name = "certifi" },
    { name = "charset-normalizer" },
    { name = "idna" },
    { name = "urllib3" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
]

[[package]]
name = "slowapi"
version = "0.1.9"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
    { name = "limits" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a0/99/adfc7f94ca024736f061257d39118e1542bade7a52e86415a4c4ae92d8ff/slowapi-0.1.9.tar.gz", hash = "sha256:639192d0f1ca01b1c6d95bf6c71d794c3a9ee189855337b4821f7f457dddad77", size = 14028 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/2b/bb/f71c4b7d7e7eb3fc1e8c0458a8979b912f40b58002b9fbf37729b8cb464b/slowapi-0.1.9-py3-none-any.whl", hash = "sha256:cfad116cfb84ad9d763ee155c1e5c5cbf00b0d47399a769b227865f5df576e36", size = 14670 },
]

[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
]

[[package]]
name = "starlette"
version = "0.46.2"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
    { name = "anyio" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 },
]

[[package]]
name = "typing-extensions"
version = "4.13.2"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 },
]

[[package]]
name = "typing-inspection"
version = "0.4.0"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 },
]

[[package]]
name = "urllib3"
version = "2.4.0"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 },
]

[[package]]
name = "uvicorn"
version = "0.34.2"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
    { name = "click" },
    { name = "h11" },
    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 },
]

[[package]]
name = "wrapt"
version = "1.17.2"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 }
wheels = [
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/5a/d1/1daec934997e8b160040c78d7b31789f19b122110a75eca3d4e8da0049e1/wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984", size = 53307 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/1b/7b/13369d42651b809389c1a7153baa01d9700430576c81a2f5c5e460df0ed9/wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22", size = 38486 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/62/bf/e0105016f907c30b4bd9e377867c48c34dc9c6c0c104556c9c9126bd89ed/wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7", size = 38777 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/27/70/0f6e0679845cbf8b165e027d43402a55494779295c4b08414097b258ac87/wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c", size = 83314 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/0f/77/0576d841bf84af8579124a93d216f55d6f74374e4445264cb378a6ed33eb/wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72", size = 74947 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/90/ec/00759565518f268ed707dcc40f7eeec38637d46b098a1f5143bff488fe97/wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061", size = 82778 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f8/5a/7cffd26b1c607b0b0c8a9ca9d75757ad7620c9c0a9b4a25d3f8a1480fafc/wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2", size = 81716 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/7e/09/dccf68fa98e862df7e6a60a61d43d644b7d095a5fc36dbb591bbd4a1c7b2/wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c", size = 74548 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b7/8e/067021fa3c8814952c5e228d916963c1115b983e21393289de15128e867e/wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62", size = 81334 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/4b/0d/9d4b5219ae4393f718699ca1c05f5ebc0c40d076f7e65fd48f5f693294fb/wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563", size = 36427 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/72/6a/c5a83e8f61aec1e1aeef939807602fb880e5872371e95df2137142f5c58e/wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f", size = 38774 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750 },
    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 },
]

@@ -1,6 +1,6 @@
[project]
name = "ragflow-sdk"
version = "0.18.0"
version = "0.19.0"
description = "Python client sdk of [RAGFlow](https://github.com/infiniflow/ragflow). RAGFlow is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding."
authors = [{ name = "Zhichang Yu", email = "yuzhichang@gmail.com" }]
license = { text = "Apache License, Version 2.0" }

@@ -20,7 +20,9 @@ import pytest
import requests

HOST_ADDRESS = os.getenv("HOST_ADDRESS", "http://127.0.0.1:9380")

ZHIPU_AI_API_KEY = os.getenv("ZHIPU_AI_API_KEY", "ca148e43209c40109e2bc2f56281dd11.BltyA2N1B043B7Ra")
if ZHIPU_AI_API_KEY is None:
    pytest.exit("Error: Environment variable ZHIPU_AI_API_KEY must be set")

# def generate_random_email():
#     return 'user_' + ''.join(random.choices(string.ascii_lowercase + string.digits, k=8))+'@1.com'

@@ -87,3 +89,64 @@ def get_auth():
@pytest.fixture(scope="session")
def get_email():
    return EMAIL


def get_my_llms(auth, name):
    url = HOST_ADDRESS + "/v1/llm/my_llms"
    authorization = {"Authorization": auth}
    response = requests.get(url=url, headers=authorization)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))
    if name in res.get("data"):
        return True
    return False


def add_models(auth):
    url = HOST_ADDRESS + "/v1/llm/set_api_key"
    authorization = {"Authorization": auth}
    models_info = {
        "ZHIPU-AI": {"llm_factory": "ZHIPU-AI", "api_key": ZHIPU_AI_API_KEY},
    }

    for name, model_info in models_info.items():
        if not get_my_llms(auth, name):
            response = requests.post(url=url, headers=authorization, json=model_info)
            res = response.json()
            if res.get("code") != 0:
                pytest.exit(f"Critical error in add_models: {res.get('message')}")


def get_tenant_info(auth):
    url = HOST_ADDRESS + "/v1/user/tenant_info"
    authorization = {"Authorization": auth}
    response = requests.get(url=url, headers=authorization)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))
    return res["data"].get("tenant_id")


@pytest.fixture(scope="session", autouse=True)
def set_tenant_info(get_auth):
    auth = get_auth
    try:
        add_models(auth)
        tenant_id = get_tenant_info(auth)
    except Exception as e:
        pytest.exit(f"Error in set_tenant_info: {str(e)}")
    url = HOST_ADDRESS + "/v1/user/set_tenant_info"
    authorization = {"Authorization": get_auth}
    tenant_info = {
        "tenant_id": tenant_id,
        "llm_id": "glm-4-flash@ZHIPU-AI",
        "embd_id": "BAAI/bge-large-zh-v1.5@BAAI",
        "img2txt_id": "glm-4v@ZHIPU-AI",
        "asr_id": "",
        "tts_id": None,
    }
    response = requests.post(url=url, headers=authorization, json=tenant_info)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))

@@ -16,7 +16,6 @@
import os

import pytest
import requests
from common import (
    add_chunk,
    batch_create_datasets,
@@ -49,9 +48,6 @@ MARKER_EXPRESSIONS = {
    "p3": "p1 or p2 or p3",
}
HOST_ADDRESS = os.getenv("HOST_ADDRESS", "http://127.0.0.1:9380")
ZHIPU_AI_API_KEY = os.getenv("ZHIPU_AI_API_KEY", "ca148e43209c40109e2bc2f56281dd11.BltyA2N1B043B7Ra")
if ZHIPU_AI_API_KEY is None:
    pytest.exit("Error: Environment variable ZHIPU_AI_API_KEY must be set")


def pytest_addoption(parser: pytest.Parser) -> None:

@@ -85,67 +81,6 @@ def get_http_api_auth(get_api_key_fixture):
    return RAGFlowHttpApiAuth(get_api_key_fixture)


def get_my_llms(auth, name):
    url = HOST_ADDRESS + "/v1/llm/my_llms"
    authorization = {"Authorization": auth}
    response = requests.get(url=url, headers=authorization)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))
    if name in res.get("data"):
        return True
    return False


def add_models(auth):
    url = HOST_ADDRESS + "/v1/llm/set_api_key"
    authorization = {"Authorization": auth}
    models_info = {
        "ZHIPU-AI": {"llm_factory": "ZHIPU-AI", "api_key": ZHIPU_AI_API_KEY},
    }

    for name, model_info in models_info.items():
        if not get_my_llms(auth, name):
            response = requests.post(url=url, headers=authorization, json=model_info)
            res = response.json()
            if res.get("code") != 0:
                pytest.exit(f"Critical error in add_models: {res.get('message')}")


def get_tenant_info(auth):
    url = HOST_ADDRESS + "/v1/user/tenant_info"
    authorization = {"Authorization": auth}
    response = requests.get(url=url, headers=authorization)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))
    return res["data"].get("tenant_id")


@pytest.fixture(scope="session", autouse=True)
def set_tenant_info(get_auth):
    auth = get_auth
    try:
        add_models(auth)
        tenant_id = get_tenant_info(auth)
    except Exception as e:
        pytest.exit(f"Error in set_tenant_info: {str(e)}")
    url = HOST_ADDRESS + "/v1/user/set_tenant_info"
    authorization = {"Authorization": get_auth}
    tenant_info = {
        "tenant_id": tenant_id,
        "llm_id": "glm-4-flash@ZHIPU-AI",
        "embd_id": "BAAI/bge-large-zh-v1.5@BAAI",
        "img2txt_id": "glm-4v@ZHIPU-AI",
        "asr_id": "",
        "tts_id": None,
    }
    response = requests.post(url=url, headers=authorization, json=tenant_info)
    res = response.json()
    if res.get("code") != 0:
        raise Exception(res.get("message"))


@pytest.fixture(scope="function")
def clear_datasets(request, get_http_api_auth):
    def cleanup():

@@ -14,8 +14,9 @@
# limitations under the License.
#

from ragflow_sdk import RAGFlow
from common import HOST_ADDRESS
from ragflow_sdk import RAGFlow
from ragflow_sdk.modules.chat import Chat


def test_create_chat_with_name(get_api_key_fixture):
@@ -31,7 +32,18 @@ def test_create_chat_with_name(get_api_key_fixture):
    docs = kb.upload_documents(documents)
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    rag.create_chat("test_create_chat", dataset_ids=[kb.id])
    llm = Chat.LLM(
        rag,
        {
            "model_name": "glm-4-flash@ZHIPU-AI",
            "temperature": 0.1,
            "top_p": 0.3,
            "presence_penalty": 0.4,
            "frequency_penalty": 0.7,
            "max_tokens": 512,
        },
    )
    rag.create_chat("test_create_chat", dataset_ids=[kb.id], llm=llm)


def test_update_chat_with_name(get_api_key_fixture):
@@ -47,7 +59,18 @@ def test_update_chat_with_name(get_api_key_fixture):
    docs = kb.upload_documents(documents)
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    chat = rag.create_chat("test_update_chat", dataset_ids=[kb.id])
    llm = Chat.LLM(
        rag,
        {
            "model_name": "glm-4-flash@ZHIPU-AI",
            "temperature": 0.1,
            "top_p": 0.3,
            "presence_penalty": 0.4,
            "frequency_penalty": 0.7,
            "max_tokens": 512,
        },
    )
    chat = rag.create_chat("test_update_chat", dataset_ids=[kb.id], llm=llm)
    chat.update({"name": "new_chat"})


@@ -64,7 +87,18 @@ def test_delete_chats_with_success(get_api_key_fixture):
    docs = kb.upload_documents(documents)
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    chat = rag.create_chat("test_delete_chat", dataset_ids=[kb.id])
    llm = Chat.LLM(
        rag,
        {
            "model_name": "glm-4-flash@ZHIPU-AI",
            "temperature": 0.1,
            "top_p": 0.3,
            "presence_penalty": 0.4,
            "frequency_penalty": 0.7,
            "max_tokens": 512,
        },
    )
    chat = rag.create_chat("test_delete_chat", dataset_ids=[kb.id], llm=llm)
    rag.delete_chats(ids=[chat.id])


@@ -81,6 +115,17 @@ def test_list_chats_with_success(get_api_key_fixture):
    docs = kb.upload_documents(documents)
    for doc in docs:
        doc.add_chunk("This is a test to add chunk")
    rag.create_chat("test_list_1", dataset_ids=[kb.id])
    rag.create_chat("test_list_2", dataset_ids=[kb.id])
    llm = Chat.LLM(
        rag,
        {
            "model_name": "glm-4-flash@ZHIPU-AI",
            "temperature": 0.1,
            "top_p": 0.3,
            "presence_penalty": 0.4,
            "frequency_penalty": 0.7,
            "max_tokens": 512,
        },
    )
    rag.create_chat("test_list_1", dataset_ids=[kb.id], llm=llm)
    rag.create_chat("test_list_2", dataset_ids=[kb.id], llm=llm)
    rag.list_chats()

sdk/python/uv.lock (generated, 2 lines changed)

@@ -342,7 +342,7 @@ wheels = [

[[package]]
name = "ragflow-sdk"
version = "0.18.0"
version = "0.19.0"
source = { virtual = "." }
dependencies = [
    { name = "beartype" },

uv.lock (generated, 2 lines changed)

@@ -4813,7 +4813,7 @@ wheels = [

[[package]]
name = "ragflow"
version = "0.18.0"
version = "0.19.0"
source = { virtual = "." }
dependencies = [
    { name = "akshare" },

web/src/components/collapse.tsx (new file, 28 lines)

@@ -0,0 +1,28 @@
import {
  Collapsible,
  CollapsibleContent,
  CollapsibleTrigger,
} from '@/components/ui/collapsible';
import { ListCollapse } from 'lucide-react';
import { PropsWithChildren, ReactNode } from 'react';

type CollapseProps = {
  title?: ReactNode;
  rightContent?: ReactNode;
} & PropsWithChildren;

export function Collapse({ title, children, rightContent }: CollapseProps) {
  return (
    <Collapsible defaultOpen>
      <CollapsibleTrigger className="w-full">
        <section className="flex justify-between items-center pb-2">
          <div className="flex items-center gap-1">
            <ListCollapse className="size-4" /> {title}
          </div>
          <div>{rightContent}</div>
        </section>
      </CollapsibleTrigger>
      <CollapsibleContent>{children}</CollapsibleContent>
    </Collapsible>
  );
}
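
The new Collapse component wraps the Collapsible primitives with a title row and an optional right-aligned slot. A minimal usage sketch, assuming the import path matches the new file above; the SettingsSection component and its strings are hypothetical:

import { Collapse } from '@/components/collapse';

// Hypothetical consumer: a collapsible settings section, open by default.
export function SettingsSection() {
  return (
    <Collapse title="Model settings" rightContent={<span>3 fields</span>}>
      <p>Children render inside the CollapsibleContent area.</p>
    </Collapse>
  );
}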

@@ -19,7 +19,7 @@ import {
import { cn } from '@/lib/utils';
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext';
import { Variable } from 'lucide-react';
import { useCallback, useState } from 'react';
import { ReactNode, useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Tooltip, TooltipContent, TooltipTrigger } from '../ui/tooltip';
import theme from './theme';
@@ -45,6 +45,7 @@ const Nodes: Array<Klass<LexicalNode>> = [
type IProps = {
  value?: string;
  onChange?: (value?: string) => void;
  placeholder?: ReactNode;
};

function PromptContent() {
@@ -99,7 +100,7 @@
  );
}

export function PromptEditor({ value, onChange }: IProps) {
export function PromptEditor({ value, onChange, placeholder }: IProps) {
  const { t } = useTranslation();
  const initialConfig: InitialConfigType = {
    namespace: 'PromptEditor',
@@ -124,16 +125,25 @@ export function PromptEditor({ value, onChange }: IProps) {
  );

  return (
    <LexicalComposer initialConfig={initialConfig}>
      <RichTextPlugin
        contentEditable={<PromptContent></PromptContent>}
        placeholder={
          <div className="absolute top-2 left-2">{t('common.pleaseInput')}</div>
        }
        ErrorBoundary={LexicalErrorBoundary}
      />
      <VariablePickerMenuPlugin value={value}></VariablePickerMenuPlugin>
      <VariableOnChangePlugin onChange={onValueChange}></VariableOnChangePlugin>
    </LexicalComposer>
    <div className="relative">
      <LexicalComposer initialConfig={initialConfig}>
        <RichTextPlugin
          contentEditable={<PromptContent></PromptContent>}
          placeholder={
            <div
              className="absolute top-10 left-2 text-text-sub-title"
              data-xxx
            >
              {placeholder || t('common.pleaseInput')}
            </div>
          }
          ErrorBoundary={LexicalErrorBoundary}
        />
        <VariablePickerMenuPlugin value={value}></VariablePickerMenuPlugin>
        <VariableOnChangePlugin
          onChange={onValueChange}
        ></VariableOnChangePlugin>
      </LexicalComposer>
    </div>
  );
}
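
The diff above threads an optional placeholder prop through PromptEditor and wraps the composer in a relative container so the placeholder can be absolutely positioned. A usage sketch; the import path and the parent component are assumptions, while the prop names come from IProps above:

import { useState } from 'react';
import { PromptEditor } from '@/components/prompt-editor'; // path assumed

function PromptForm() {
  const [prompt, setPrompt] = useState<string | undefined>('');
  return (
    <PromptEditor
      value={prompt}
      onChange={setPrompt}
      placeholder="Describe what the agent should do"
    />
  );
}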

@@ -3,7 +3,7 @@ import { cva, type VariantProps } from 'class-variance-authority';
import * as React from 'react';

import { cn } from '@/lib/utils';
import { Loader2 } from 'lucide-react';
import { Loader2, Plus } from 'lucide-react';

const buttonVariants = cva(
  'inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0',
@@ -93,3 +93,18 @@ export const ButtonLoading = React.forwardRef<
ButtonLoading.displayName = 'ButtonLoading';

export { Button, buttonVariants };

export const BlockButton = React.forwardRef<HTMLButtonElement, ButtonProps>(
  ({ children, className, ...props }, ref) => {
    return (
      <Button
        variant={'outline'}
        ref={ref}
        className={cn('w-full border-dashed border-input-border', className)}
        {...props}
      >
        <Plus /> {children}
      </Button>
    );
  },
);
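
BlockButton composes Button into a full-width, dashed-outline "add" affordance with a leading Plus icon. A usage sketch; the onAdd handler and label are hypothetical, while the component and import path come from the diff above:

import { BlockButton } from '@/components/ui/button';

// Hypothetical row at the bottom of a list of items.
export function AddVariableRow({ onAdd }: { onAdd: () => void }) {
  return <BlockButton onClick={onAdd}>Add variable</BlockButton>;
}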

@@ -14,8 +14,7 @@ import {

import { Label } from '@/components/ui/label';
import { cn } from '@/lib/utils';
import { Info } from 'lucide-react';
import { Tooltip, TooltipContent, TooltipTrigger } from './tooltip';
import { FormTooltip } from './tooltip';

const Form = FormProvider;

@@ -104,16 +103,7 @@ const FormLabel = React.forwardRef<
      {...props}
    >
      {props.children}
      {tooltip && (
        <Tooltip>
          <TooltipTrigger>
            <Info className="size-3 ml-2" />
          </TooltipTrigger>
          <TooltipContent>
            <p>{tooltip}</p>
          </TooltipContent>
        </Tooltip>
      )}
      {tooltip && <FormTooltip tooltip={tooltip}></FormTooltip>}
    </Label>
  );
});

@@ -4,6 +4,7 @@ import * as TooltipPrimitive from '@radix-ui/react-tooltip';
import * as React from 'react';

import { cn } from '@/lib/utils';
import { Info } from 'lucide-react';

const TooltipProvider = TooltipPrimitive.Provider;

@@ -28,3 +29,16 @@ const TooltipContent = React.forwardRef<
TooltipContent.displayName = TooltipPrimitive.Content.displayName;

export { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger };

export const FormTooltip = ({ tooltip }: { tooltip: React.ReactNode }) => {
  return (
    <Tooltip>
      <TooltipTrigger>
        <Info className="size-3 ml-2" />
      </TooltipTrigger>
      <TooltipContent>
        <p>{tooltip}</p>
      </TooltipContent>
    </Tooltip>
  );
};
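
FormTooltip packages the Tooltip/TooltipTrigger/TooltipContent trio around the small Info icon, which is what FormLabel now delegates to instead of inlining that markup. A standalone usage sketch; the wrapper component and label text are made up:

import { FormTooltip } from '@/components/ui/tooltip';

export function TopPLabel() {
  return (
    <span className="flex items-center">
      Top P
      <FormTooltip tooltip="Nucleus sampling threshold" />
    </span>
  );
}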

@@ -1,22 +1,111 @@
import { IFlow } from '@/interfaces/database/flow';
import flowService from '@/services/flow-service';
import { useQuery } from '@tanstack/react-query';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { message } from 'antd';
import { useCallback } from 'react';
import {
  useGetPaginationWithRouter,
  useHandleSearchChange,
} from './logic-hooks';

export const enum AgentApiAction {
  FetchAgentList = 'fetchAgentList',
  UpdateAgentSetting = 'updateAgentSetting',
  DeleteAgent = 'deleteAgent',
}

export const useFetchAgentList = () => {
  const { data, isFetching: loading } = useQuery<IFlow[]>({
    queryKey: [AgentApiAction.FetchAgentList],
    initialData: [],
export const useFetchAgentListByPage = () => {
  const { searchString, handleInputChange } = useHandleSearchChange();
  const { pagination, setPagination } = useGetPaginationWithRouter();
  const debouncedSearchString = useDebounce(searchString, { wait: 500 });

  const { data, isFetching: loading } = useQuery<{
    kbs: IFlow[];
    total: number;
  }>({
    queryKey: [
      AgentApiAction.FetchAgentList,
      {
        debouncedSearchString,
        ...pagination,
      },
    ],
    initialData: { kbs: [], total: 0 },
    gcTime: 0,
    queryFn: async () => {
      const { data } = await flowService.listCanvas();
      const { data } = await flowService.listCanvasTeam({
        keywords: debouncedSearchString,
        page_size: pagination.pageSize,
        page: pagination.current,
      });

      return data?.data ?? [];
    },
  });

  return { data, loading };
  const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
    (e) => {
      // setPagination({ page: 1 }); // TODO: this triggers a duplicate request
      handleInputChange(e);
    },
    [handleInputChange],
  );

  return {
    data: data.kbs,
    loading,
    searchString,
    handleInputChange: onInputChange,
    pagination: { ...pagination, total: data?.total },
    setPagination,
  };
};

export const useUpdateAgentSetting = () => {
  const queryClient = useQueryClient();

  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [AgentApiAction.UpdateAgentSetting],
    mutationFn: async (params: any) => {
      const ret = await flowService.settingCanvas(params);
      if (ret?.data?.code === 0) {
        message.success('success');
        queryClient.invalidateQueries({
          queryKey: [AgentApiAction.FetchAgentList],
        });
      } else {
        message.error(ret?.data?.data);
      }
      return ret?.data?.code;
    },
  });

  return { data, loading, updateAgentSetting: mutateAsync };
};

export const useDeleteAgent = () => {
  const queryClient = useQueryClient();
  const {
    data,
    isPending: loading,
    mutateAsync,
  } = useMutation({
    mutationKey: [AgentApiAction.DeleteAgent],
    mutationFn: async (canvasIds: string[]) => {
      const { data } = await flowService.removeCanvas({ canvasIds });
      if (data.code === 0) {
        queryClient.invalidateQueries({
          queryKey: [AgentApiAction.FetchAgentList],
        });
      }
      return data?.data ?? [];
    },
  });

  return { data, loading, deleteAgent: mutateAsync };
};
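
A sketch of how a list page might wire these hooks together: useFetchAgentListByPage supplies the debounced search handler, pagination, and data, while deleteAgent invalidates the FetchAgentList query key so the list refetches. The AgentListPage component, the import path, the id/title fields on IFlow, and the setPagination payload (mirroring the commented-out call above) are all assumptions; the hook names and return shapes come from the diff:

import { useDeleteAgent, useFetchAgentListByPage } from '@/hooks/flow-hooks'; // path assumed

export function AgentListPage() {
  const { data, loading, handleInputChange, pagination, setPagination } =
    useFetchAgentListByPage();
  const { deleteAgent } = useDeleteAgent();

  return (
    <div>
      <input placeholder="Search agents" onChange={handleInputChange} />
      {loading
        ? 'Loading...'
        : data.map((agent) => (
            <div key={agent.id}>
              {agent.title}
              {/* Deleting invalidates FetchAgentList, so the query refetches. */}
              <button onClick={() => deleteAgent([agent.id])}>Delete</button>
            </div>
          ))}
      <button onClick={() => setPagination({ page: pagination.current + 1 })}>
        Next page
      </button>
    </div>
  );
}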