Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-01-04 03:25:30 +08:00)

Compare commits: ca40b56839 ... v0.22.0 (111 commits)
Commits in this range (111 total, SHA1 as listed):
a36a0fe71c, a81f6d1b24, 8406a5ea47, 20b6dafbd8, 33cc9cafa9, 6567ecf15a, 3a7322f5b2, 829e5f287b, 1e8efa2631, e7f7c09b0b,
8ae562504b, bacc9d3ab9, d226764ed0, 39120d49cf, 27211a9b34, e9de25c973, 09e971dcc8, 883df22aa2, 2bd7abadd3, 435479adb3,
2c727a4a9c, a15f522dc9, de53498b39, 72740eb5b9, c30ffb5716, 6dcff7db97, 9213568692, d81e4095de, 8ddeaca3d6, f441f8ffc2,
522c7b7ac6, 377c0fb4fa, 7dd9758056, 26cf5131c9, 93207f83ba, f77604db26, dd5b8e2e1a, 83ff8e8009, 7db6cb8ca3, ba6470a7a5,
df16a80f25, 29ea059f90, a191933f81, 6e1ebb2855, 68b952abb1, 0879b6af2c, 2b9145948f, 726473fd39, d207291217, bf382e5c4d,
4338e706c6, 86af330f06, d016a06fd5, 7423a5806e, b6cd282ccd, 82ca2e0378, 1cd54832b5, 660386d3b5, 4cdaa77545, 9fcc4946e2,
98e9d68c75, 8f34824aa4, 9a6808230a, c7bd0a755c, dd1c8c5779, 526ba3388f, cb95072ecf, f6aeebc608, 307f53dae8, fa98cc2bb9,
c58d95ed69, edbc396bc6, b137de1def, 2cb1046cbf, a880beb1f6, 34283d4db4, 5629fbd2ca, b7aa6d6c4f, 0b7b88592f, 42edecc98f,
af98763e27, 5a8fbc5a81, 0cd8024c34, 3bd1fefe1f, e18c408759, 23b81eae77, 66c01c7274, 4b8ce08050, ca30ef83bf, d469ae6d50,
f581a1c4e5, 15c75bbf15, adbb8319e0, f98b24c9bf, 87c9a054d3, cd6ed4b380, f29a3dd651, e658beee38, 17ea5c1dee, 4e76220e25,
24335485bf, 121c51661d, 02d10f8eda, dddf766470, 8584d4b642, b86e07088b, 1a9215bc6f, cf9611c96f, f126875ec6, 89410d2381,
96c015fb85
.github/workflows/release.yml (28 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
runs-on: [ "self-hosted", "ragflow-test" ]
steps:
- name: Ensure workspace ownership
run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
run: echo "chown -R ${USER} ${GITHUB_WORKSPACE}" && sudo chown -R ${USER} ${GITHUB_WORKSPACE}

# https://github.com/actions/checkout/blob/v3/README.md
- name: Check out code
@@ -31,37 +31,37 @@ jobs:

- name: Prepare release body
run: |
if [[ $GITHUB_EVENT_NAME == 'create' ]]; then
if [[ ${GITHUB_EVENT_NAME} == "create" ]]; then
RELEASE_TAG=${GITHUB_REF#refs/tags/}
if [[ $RELEASE_TAG == 'nightly' ]]; then
if [[ ${RELEASE_TAG} == "nightly" ]]; then
PRERELEASE=true
else
PRERELEASE=false
fi
echo "Workflow triggered by create tag: $RELEASE_TAG"
echo "Workflow triggered by create tag: ${RELEASE_TAG}"
else
RELEASE_TAG=nightly
PRERELEASE=true
echo "Workflow triggered by schedule"
fi
echo "RELEASE_TAG=$RELEASE_TAG" >> $GITHUB_ENV
echo "PRERELEASE=$PRERELEASE" >> $GITHUB_ENV
echo "RELEASE_TAG=${RELEASE_TAG}" >> ${GITHUB_ENV}
echo "PRERELEASE=${PRERELEASE}" >> ${GITHUB_ENV}
RELEASE_DATETIME=$(date --rfc-3339=seconds)
echo Release $RELEASE_TAG created from $GITHUB_SHA at $RELEASE_DATETIME > release_body.md
echo Release ${RELEASE_TAG} created from ${GITHUB_SHA} at ${RELEASE_DATETIME} > release_body.md

- name: Move the existing mutable tag
# https://github.com/softprops/action-gh-release/issues/171
run: |
git fetch --tags
if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
# Determine if a given tag exists and matches a specific Git commit.
# actions/checkout@v4 fetch-tags doesn't work when triggered by schedule
if [ "$(git rev-parse -q --verify "refs/tags/$RELEASE_TAG")" = "$GITHUB_SHA" ]; then
echo "mutable tag $RELEASE_TAG exists and matches $GITHUB_SHA"
if [ "$(git rev-parse -q --verify "refs/tags/${RELEASE_TAG}")" = "${GITHUB_SHA}" ]; then
echo "mutable tag ${RELEASE_TAG} exists and matches ${GITHUB_SHA}"
else
git tag -f $RELEASE_TAG $GITHUB_SHA
git push -f origin $RELEASE_TAG:refs/tags/$RELEASE_TAG
echo "created/moved mutable tag $RELEASE_TAG to $GITHUB_SHA"
git tag -f ${RELEASE_TAG} ${GITHUB_SHA}
git push -f origin ${RELEASE_TAG}:refs/tags/${RELEASE_TAG}
echo "created/moved mutable tag ${RELEASE_TAG} to ${GITHUB_SHA}"
fi
fi

@@ -87,7 +87,7 @@ jobs:

- name: Build and push image
run: |
echo ${{ secrets.DOCKERHUB_TOKEN }} | sudo docker login --username infiniflow --password-stdin
sudo docker login --username infiniflow --password-stdin <<< ${{ secrets.DOCKERHUB_TOKEN }}
sudo docker build --build-arg NEED_MIRROR=1 -t infiniflow/ragflow:${RELEASE_TAG} -f Dockerfile .
sudo docker tag infiniflow/ragflow:${RELEASE_TAG} infiniflow/ragflow:latest
sudo docker push infiniflow/ragflow:${RELEASE_TAG}
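The "Build and push image" step above switches from piping the Docker Hub token into `docker login` to feeding it via a here-string. A minimal sketch of the two forms, using a placeholder variable instead of the repository's `DOCKERHUB_TOKEN` secret:

```bash
# Hypothetical token variable for illustration only.
MY_TOKEN="example-token"

# Pipeline form: echo runs as a separate process in the pipeline.
echo "${MY_TOKEN}" | docker login --username someuser --password-stdin

# Here-string form: the shell feeds the value to stdin directly, no extra process.
docker login --username someuser --password-stdin <<< "${MY_TOKEN}"
```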
.github/workflows/tests.yml (43 changes, vendored)

@@ -9,8 +9,11 @@ on:
- 'docs/**'
- '*.md'
- '*.mdx'
pull_request:
types: [ labeled, synchronize, reopened ]
# The only difference between pull_request and pull_request_target is the context in which the workflow runs:
# — pull_request_target workflows use the workflow files from the default branch, and secrets are available.
# — pull_request workflows use the workflow files from the pull request branch, and secrets are unavailable.
pull_request_target:
types: [ synchronize, ready_for_review ]
paths-ignore:
- 'docs/**'
- '*.md'
@@ -28,7 +31,7 @@ jobs:
name: ragflow_tests
# https://docs.github.com/en/actions/using-jobs/using-conditions-to-control-job-execution
# https://github.com/orgs/community/discussions/26261
if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci') }}
if: ${{ github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'ci') }}
runs-on: [ "self-hosted", "ragflow-test" ]
steps:
# https://github.com/hmarr/debug-action
@@ -37,19 +40,20 @@ jobs:
- name: Ensure workspace ownership
run: |
echo "Workflow triggered by ${{ github.event_name }}"
echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
echo "chown -R ${USER} ${GITHUB_WORKSPACE}" && sudo chown -R ${USER} ${GITHUB_WORKSPACE}

# https://github.com/actions/checkout/issues/1781
- name: Check out code
uses: actions/checkout@v4
with:
ref: ${{ (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') && format('refs/pull/{0}/merge', github.event.pull_request.number) || github.sha }}
fetch-depth: 0
fetch-tags: true

- name: Check workflow duplication
if: ${{ !cancelled() && !failure() && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci')) }}
if: ${{ !cancelled() && !failure() }}
run: |
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "schedule" ]]; then
if [[ ${GITHUB_EVENT_NAME} != "pull_request_target" && ${GITHUB_EVENT_NAME} != "schedule" ]]; then
HEAD=$(git rev-parse HEAD)
# Find a PR that introduced a given commit
gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"
@@ -67,14 +71,14 @@ jobs:
gh run cancel ${GITHUB_RUN_ID}
while true; do
status=$(gh run view ${GITHUB_RUN_ID} --json status -q .status)
[ "$status" = "completed" ] && break
[ "${status}" = "completed" ] && break
sleep 5
done
exit 1
fi
fi
fi
else
elif [[ ${GITHUB_EVENT_NAME} == "pull_request_target" ]]; then
PR_NUMBER=${{ github.event.pull_request.number }}
PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
# Calculate the hash of the current workspace content
@@ -93,18 +97,18 @@ jobs:

- name: Build ragflow:nightly
run: |
RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-${HOME}}
RAGFLOW_IMAGE=infiniflow/ragflow:${GITHUB_RUN_ID}
echo "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> $GITHUB_ENV
echo "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> ${GITHUB_ENV}
sudo docker pull ubuntu:22.04
sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t ${RAGFLOW_IMAGE} .
if [[ "$GITHUB_EVENT_NAME" == "schedule" ]]; then
if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
echo "HTTP_API_TEST_LEVEL=${HTTP_API_TEST_LEVEL}" >> $GITHUB_ENV
echo "RAGFLOW_CONTAINER=${GITHUB_RUN_ID}-ragflow-cpu-1" >> $GITHUB_ENV
echo "HTTP_API_TEST_LEVEL=${HTTP_API_TEST_LEVEL}" >> ${GITHUB_ENV}
echo "RAGFLOW_CONTAINER=${GITHUB_RUN_ID}-ragflow-cpu-1" >> ${GITHUB_ENV}

- name: Start ragflow:nightly
run: |
@@ -154,7 +158,7 @@ jobs:
echo -e "COMPOSE_PROFILES=\${COMPOSE_PROFILES},tei-cpu" >> docker/.env
echo -e "TEI_MODEL=BAAI/bge-small-en-v1.5" >> docker/.env
echo -e "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> docker/.env
echo "HOST_ADDRESS=http://host.docker.internal:${SVR_HTTP_PORT}" >> $GITHUB_ENV
echo "HOST_ADDRESS=http://host.docker.internal:${SVR_HTTP_PORT}" >> ${GITHUB_ENV}

sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} up -d
uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python
@@ -189,7 +193,8 @@ jobs:
- name: Stop ragflow:nightly
if: always() # always run this step even if previous steps failed
run: |
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v || true
sudo docker ps -a --filter "label=com.docker.compose.project=${GITHUB_RUN_ID}" -q | xargs -r sudo docker rm -f

- name: Start ragflow:nightly
run: |
@@ -226,5 +231,9 @@ jobs:
- name: Stop ragflow:nightly
if: always() # always run this step even if previous steps failed
run: |
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
sudo docker rmi -f ${RAGFLOW_IMAGE:-NO_IMAGE} || true
# Sometimes `docker compose down` fail due to hang container, heavy load etc. Need to remove such containers to release resources(for example, listen ports).
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v || true
sudo docker ps -a --filter "label=com.docker.compose.project=${GITHUB_RUN_ID}" -q | xargs -r sudo docker rm -f
if [[ -n ${RAGFLOW_IMAGE} ]]; then
sudo docker rmi -f ${RAGFLOW_IMAGE}
fi
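The comments added to this workflow explain the `pull_request` vs `pull_request_target` trade-off. A minimal, hypothetical workflow (not part of the repository) showing the same pattern: run the workflow definition from the default branch, but only for PRs a maintainer has labeled `ci`, and check out the PR's merge ref explicitly:

```yaml
# Hypothetical minimal workflow for illustration only.
name: gated_pr_tests
on:
  pull_request_target:
    types: [ synchronize, ready_for_review ]

jobs:
  tests:
    # Run only when a maintainer has applied the 'ci' label to the PR.
    if: ${{ contains(github.event.pull_request.labels.*.name, 'ci') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Check out the PR's merge ref, not the default branch the workflow file came from.
          ref: ${{ format('refs/pull/{0}/merge', github.event.pull_request.number) }}
      - run: echo "secrets are available here; review PR code before labeling"
```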
README.md (31 changes)

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -61,8 +61,7 @@
- 🔎 [System Architecture](#-system-architecture)
- 🎬 [Get Started](#-get-started)
- 🔧 [Configurations](#-configurations)
- 🔧 [Build a docker image without embedding models](#-build-a-docker-image-without-embedding-models)
- 🔧 [Build a docker image including embedding models](#-build-a-docker-image-including-embedding-models)
- 🔧 [Build a Docker image](#-build-a-docker-image)
- 🔨 [Launch service from source for development](#-launch-service-from-source-for-development)
- 📚 [Documentation](#-documentation)
- 📜 [Roadmap](#-roadmap)
@@ -86,6 +85,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-11-12 Supports data synchronization from Confluence, AWS S3, Discord, Google Drive.
- 2025-10-23 Supports MinerU & Docling as document parsing methods.
- 2025-10-15 Supports orchestrable ingestion pipeline.
- 2025-08-08 Supports OpenAI's latest GPT-5 series models.
@@ -93,7 +93,6 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
- 2025-05-23 Adds a Python/JavaScript code executor component to Agent.
- 2025-05-05 Supports cross-language query.
- 2025-03-19 Supports using a multi-modal model to make sense of images within PDF or DOCX files.
- 2025-02-28 Combined with Internet search (Tavily), supports reasoning like Deep Research for any LLMs.
- 2024-12-18 Upgrades Document Layout Analysis model in DeepDoc.
- 2024-08-22 Support text to SQL statements through RAG.

@@ -189,25 +188,29 @@ releases! 🌟
> All Docker images are built for x86 platforms. We don't currently offer Docker images for ARM64.
> If you are on an ARM64 platform, follow [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.

> The command below downloads the `v0.21.1-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.21.1-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.
> The command below downloads the `v0.22.0` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.22.0`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases), e.g.: git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | -------------------------- |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |
> Note: Prior to `v0.22.0`, we provided both images with embedding models and slim images without embedding models. Details as follows:

> Note: Starting with `v0.22.0`, we ship only the slim edition and no longer append the **-slim** suffix to the image tag.
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> Starting with `v0.22.0`, we ship only the slim edition and no longer append the **-slim** suffix to the image tag.

4. Check the server status after having the server up and running:

@@ -288,7 +291,7 @@ RAGFlow uses Elasticsearch by default for storing full text and vectors. To swit
> [!WARNING]
> Switching to Infinity on a Linux/arm64 machine is not yet officially supported.

## 🔧 Build a Docker image without embedding models
## 🔧 Build a Docker image

This image is approximately 2 GB in size and relies on external LLM and embedding services.
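The note above says a different edition is selected by updating `RAGFLOW_IMAGE` in **docker/.env** before `docker compose up`. One way to do that from the shell, as a sketch; the tag is only an example and this assumes the default `.env` already defines a `RAGFLOW_IMAGE=` line:

```bash
cd ragflow/docker
# Pin the image the compose file will pull (example tag; adjust to the edition you want).
sed -i 's|^RAGFLOW_IMAGE=.*|RAGFLOW_IMAGE=infiniflow/ragflow:v0.22.0|' .env
grep RAGFLOW_IMAGE .env   # verify the change
docker compose -f docker-compose.yml up -d
```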
README_id.md (31 changes)

@@ -22,7 +22,7 @@
<img alt="Lencana Daring" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Rilis Terbaru">
@@ -61,8 +61,7 @@
- 🔎 [Arsitektur Sistem](#-arsitektur-sistem)
- 🎬 [Mulai](#-mulai)
- 🔧 [Konfigurasi](#-konfigurasi)
- 🔧 [Membangun Image Docker tanpa Model Embedding](#-membangun-image-docker-tanpa-model-embedding)
- 🔧 [Membangun Image Docker dengan Model Embedding](#-membangun-image-docker-dengan-model-embedding)
- 🔧 [Membangun Image Docker](#-membangun-docker-image)
- 🔨 [Meluncurkan aplikasi dari Sumber untuk Pengembangan](#-meluncurkan-aplikasi-dari-sumber-untuk-pengembangan)
- 📚 [Dokumentasi](#-dokumentasi)
- 📜 [Peta Jalan](#-peta-jalan)
@@ -86,6 +85,7 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Pembaruan Terbaru

- 2025-11-12 Mendukung sinkronisasi data dari Confluence, AWS S3, Discord, Google Drive.
- 2025-10-23 Mendukung MinerU & Docling sebagai metode penguraian dokumen.
- 2025-10-15 Dukungan untuk jalur data yang terorkestrasi.
- 2025-08-08 Mendukung model seri GPT-5 terbaru dari OpenAI.
@@ -93,7 +93,6 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
- 2025-05-23 Menambahkan komponen pelaksana kode Python/JS ke Agen.
- 2025-05-05 Mendukung kueri lintas bahasa.
- 2025-03-19 Mendukung penggunaan model multi-modal untuk memahami gambar di dalam file PDF atau DOCX.
- 2025-02-28 dikombinasikan dengan pencarian Internet (TAVILY), mendukung penelitian mendalam untuk LLM apa pun.
- 2024-12-18 Meningkatkan model Analisis Tata Letak Dokumen di DeepDoc.
- 2024-08-22 Dukungan untuk teks ke pernyataan SQL melalui RAG.

@@ -187,25 +186,29 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
> Semua gambar Docker dibangun untuk platform x86. Saat ini, kami tidak menawarkan gambar Docker untuk ARM64.
> Jika Anda menggunakan platform ARM64, [silakan gunakan panduan ini untuk membangun gambar Docker yang kompatibel dengan sistem Anda](https://ragflow.io/docs/dev/build_docker_image).

> Perintah di bawah ini mengunduh edisi v0.21.1 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.21.1, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.
> Perintah di bawah ini mengunduh edisi v0.22.0 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.22.0, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# Opsional: gunakan tag stabil (lihat releases: https://github.com/infiniflow/ragflow/releases), contoh: git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | -------------------------- |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |
> Catatan: Sebelum `v0.22.0`, kami menyediakan image dengan model embedding dan image slim tanpa model embedding. Detailnya sebagai berikut:

> Catatan: Mulai dari `v0.22.0`, kami hanya menyediakan edisi slim dan tidak lagi menambahkan akhiran **-slim** pada tag image.
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> Mulai dari `v0.22.0`, kami hanya menyediakan edisi slim dan tidak lagi menambahkan akhiran **-slim** pada tag image.

1. Periksa status server setelah server aktif dan berjalan:

@@ -260,7 +263,7 @@ Pembaruan konfigurasi ini memerlukan reboot semua kontainer agar efektif:
> $ docker compose -f docker-compose.yml up -d
> ```

## 🔧 Membangun Docker Image tanpa Model Embedding
## 🔧 Membangun Docker Image

Image ini berukuran sekitar 2 GB dan bergantung pada aplikasi LLM eksternal dan embedding.
README_ja.md (30 changes)

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -66,6 +66,7 @@

## 🔥 最新情報

- 2025-11-12 Confluence、AWS S3、Discord、Google Drive からのデータ同期をサポートします。
- 2025-10-23 ドキュメント解析方法として MinerU と Docling をサポートします。
- 2025-10-15 オーケストレーションされたデータパイプラインのサポート。
- 2025-08-08 OpenAI の最新 GPT-5 シリーズモデルをサポートします。
@@ -73,7 +74,6 @@
- 2025-05-23 エージェントに Python/JS コードエグゼキュータコンポーネントを追加しました。
- 2025-05-05 言語間クエリをサポートしました。
- 2025-03-19 PDFまたはDOCXファイル内の画像を理解するために、多モーダルモデルを使用することをサポートします。
- 2025-02-28 インターネット検索 (TAVILY) と組み合わせて、あらゆる LLM の詳細な調査をサポートします。
- 2024-12-18 DeepDoc のドキュメント レイアウト分析モデルをアップグレードします。
- 2024-08-22 RAG を介して SQL ステートメントへのテキストをサポートします。

@@ -166,28 +166,32 @@
> 現在、公式に提供されているすべての Docker イメージは x86 アーキテクチャ向けにビルドされており、ARM64 用の Docker イメージは提供されていません。
> ARM64 アーキテクチャのオペレーティングシステムを使用している場合は、[このドキュメント](https://ragflow.io/docs/dev/build_docker_image)を参照して Docker イメージを自分でビルドしてください。

> 以下のコマンドは、RAGFlow Docker イメージの v0.21.1 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.21.1 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。
> 以下のコマンドは、RAGFlow Docker イメージの v0.22.0 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.22.0 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# 任意: 安定版タグを利用 (一覧: https://github.com/infiniflow/ragflow/releases) 例: git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | -------------------------- |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |
> 注意:`v0.22.0` より前のバージョンでは、embedding モデルを含むイメージと、embedding モデルを含まない slim イメージの両方を提供していました。詳細は以下の通りです:

> 注意:`v0.22.0` 以降、当プロジェクトでは slim エディションのみを提供し、イメージタグに **-slim** サフィックスを付けなくなりました。
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

1. サーバーを立ち上げた後、サーバーの状態を確認する:
> `v0.22.0` 以降、当プロジェクトでは slim エディションのみを提供し、イメージタグに **-slim** サフィックスを付けなくなりました。

1. サーバーを立ち上げた後、サーバーの状態を確認する:

```bash
$ docker logs -f docker-ragflow-cpu-1
```
@@ -259,7 +263,7 @@ RAGFlow はデフォルトで Elasticsearch を使用して全文とベクトル
> Linux/arm64 マシンでの Infinity への切り替えは正式にサポートされていません。
>

## 🔧 ソースコードで Docker イメージを作成(埋め込みモデルなし)
## 🔧 ソースコードで Docker イメージを作成

この Docker イメージのサイズは約 1GB で、外部の大モデルと埋め込みサービスに依存しています。
README_ko.md (30 changes)

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -67,6 +67,7 @@

## 🔥 업데이트

- 2025-11-12 Confluence, AWS S3, Discord, Google Drive에서 데이터 동기화를 지원합니다.
- 2025-10-23 문서 파싱 방법으로 MinerU 및 Docling을 지원합니다.
- 2025-10-15 조정된 데이터 파이프라인 지원.
- 2025-08-08 OpenAI의 최신 GPT-5 시리즈 모델을 지원합니다.
@@ -74,7 +75,6 @@
- 2025-05-23 Agent에 Python/JS 코드 실행기 구성 요소를 추가합니다.
- 2025-05-05 언어 간 쿼리를 지원합니다.
- 2025-03-19 PDF 또는 DOCX 파일 내의 이미지를 이해하기 위해 다중 모드 모델을 사용하는 것을 지원합니다.
- 2025-02-28 인터넷 검색(TAVILY)과 결합되어 모든 LLM에 대한 심층 연구를 지원합니다.
- 2024-12-18 DeepDoc의 문서 레이아웃 분석 모델 업그레이드.
- 2024-08-22 RAG를 통해 SQL 문에 텍스트를 지원합니다.

@@ -168,25 +168,29 @@
> 모든 Docker 이미지는 x86 플랫폼을 위해 빌드되었습니다. 우리는 현재 ARM64 플랫폼을 위한 Docker 이미지를 제공하지 않습니다.
> ARM64 플랫폼을 사용 중이라면, [시스템과 호환되는 Docker 이미지를 빌드하려면 이 가이드를 사용해 주세요](https://ragflow.io/docs/dev/build_docker_image).

> 아래 명령어는 RAGFlow Docker 이미지의 v0.21.1 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.21.1과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.
> 아래 명령어는 RAGFlow Docker 이미지의 v0.22.0 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.22.0과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases), e.g.: git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```
```

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |
> 참고: `v0.22.0` 이전 버전에서는 embedding 모델이 포함된 이미지와 embedding 모델이 포함되지 않은 slim 이미지를 모두 제공했습니다. 자세한 내용은 다음과 같습니다:

> 참고: `v0.22.0`부터는 slim 에디션만 배포하며 이미지 태그에 **-slim** 접미사를 더 이상 붙이지 않습니다.
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> `v0.22.0`부터는 slim 에디션만 배포하며 이미지 태그에 **-slim** 접미사를 더 이상 붙이지 않습니다.

1. 서버가 시작된 후 서버 상태를 확인하세요:

@@ -253,7 +257,7 @@ RAGFlow 는 기본적으로 Elasticsearch 를 사용하여 전체 텍스트 및
> [!WARNING]
> Linux/arm64 시스템에서 Infinity로 전환하는 것은 공식적으로 지원되지 않습니다.

## 🔧 소스 코드로 Docker 이미지를 컴파일합니다(임베딩 모델 포함하지 않음)
## 🔧 소스 코드로 Docker 이미지를 컴파일합니다

이 Docker 이미지의 크기는 약 1GB이며, 외부 대형 모델과 임베딩 서비스에 의존합니다.
README (Portuguese)

@@ -22,7 +22,7 @@
<img alt="Badge Estático" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Última Versão">
@@ -86,6 +86,7 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Últimas Atualizações

- 12-11-2025 Suporta a sincronização de dados do Confluence, AWS S3, Discord e Google Drive.
- 23-10-2025 Suporta MinerU e Docling como métodos de análise de documentos.
- 15-10-2025 Suporte para pipelines de dados orquestrados.
- 08-08-2025 Suporta a mais recente série GPT-5 da OpenAI.
@@ -93,7 +94,6 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
- 23-05-2025 Adicione o componente executor de código Python/JS ao Agente.
- 05-05-2025 Suporte a consultas entre idiomas.
- 19-03-2025 Suporta o uso de um modelo multi-modal para entender imagens dentro de arquivos PDF ou DOCX.
- 28-02-2025 combinado com a pesquisa na Internet (T AVI LY), suporta pesquisas profundas para qualquer LLM.
- 18-12-2024 Atualiza o modelo de Análise de Layout de Documentos no DeepDoc.
- 22-08-2024 Suporta conversão de texto para comandos SQL via RAG.

@@ -186,25 +186,29 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
> Todas as imagens Docker são construídas para plataformas x86. Atualmente, não oferecemos imagens Docker para ARM64.
> Se você estiver usando uma plataforma ARM64, por favor, utilize [este guia](https://ragflow.io/docs/dev/build_docker_image) para construir uma imagem Docker compatível com o seu sistema.

> O comando abaixo baixa a edição`v0.21.1` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.21.1`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.
> O comando abaixo baixa a edição`v0.22.0` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.22.0`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# Opcional: use uma tag estável (veja releases: https://github.com/infiniflow/ragflow/releases), ex.: git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

| Tag da imagem RAGFlow | Tamanho da imagem (GB) | Possui modelos de incorporação? | Estável? |
| --------------------- | ---------------------- | --------------------------------- | ------------------------------ |
| v0.21.1 | ≈9 | ✔️ | Lançamento estável |
| v0.21.1-slim | ≈2 | ❌ | Lançamento estável |
| nightly | ≈2 | ❌ | Construção noturna instável |
> Nota: Antes da `v0.22.0`, fornecíamos imagens com modelos de embedding e imagens slim sem modelos de embedding. Detalhes a seguir:

> Observação: A partir da`v0.22.0`, distribuímos apenas a edição slim e não adicionamos mais o sufixo **-slim** às tags das imagens.
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> A partir da `v0.22.0`, distribuímos apenas a edição slim e não adicionamos mais o sufixo **-slim** às tags das imagens.

4. Verifique o status do servidor após tê-lo iniciado:

@@ -274,9 +278,9 @@ O RAGFlow usa o Elasticsearch por padrão para armazenar texto completo e vetore
```

> [!ATENÇÃO]
> A mudança para o Infinity em uma máquina Linux/arm64 ainda não é oficialmente suportada.
> A mudança para o Infinity em uma máquina Linux/arm64 ainda não é oficialmente suportada.

## 🔧 Criar uma imagem Docker sem modelos de incorporação
## 🔧 Criar uma imagem Docker

Esta imagem tem cerca de 2 GB de tamanho e depende de serviços externos de LLM e incorporação.
README (Traditional Chinese)

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -85,6 +85,7 @@

## 🔥 近期更新

- 2025-11-12 支援從 Confluence、AWS S3、Discord、Google Drive 進行資料同步。
- 2025-10-23 支援 MinerU 和 Docling 作為文件解析方法。
- 2025-10-15 支援可編排的資料管道。
- 2025-08-08 支援 OpenAI 最新的 GPT-5 系列模型。
@@ -92,7 +93,6 @@
- 2025-05-23 為 Agent 新增 Python/JS 程式碼執行器元件。
- 2025-05-05 支援跨語言查詢。
- 2025-03-19 PDF和DOCX中的圖支持用多模態大模型去解析得到描述.
- 2025-02-28 結合網路搜尋(Tavily),對於任意大模型實現類似 Deep Research 的推理功能.
- 2024-12-18 升級了 DeepDoc 的文檔佈局分析模型。
- 2024-08-22 支援用 RAG 技術實現從自然語言到 SQL 語句的轉換。

@@ -185,25 +185,29 @@
> 所有 Docker 映像檔都是為 x86 平台建置的。目前,我們不提供 ARM64 平台的 Docker 映像檔。
> 如果您使用的是 ARM64 平台,請使用 [這份指南](https://ragflow.io/docs/dev/build_docker_image) 來建置適合您系統的 Docker 映像檔。

> 執行以下指令會自動下載 RAGFlow slim Docker 映像 `v0.21.1`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.21.1` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。
> 執行以下指令會自動下載 RAGFlow Docker 映像 `v0.22.0`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.22.0` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# 可選:使用穩定版標籤(查看發佈:https://github.com/infiniflow/ragflow/releases),例:git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | -------------------------- |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |
> 注意:在 `v0.22.0` 之前的版本,我們會同時提供包含 embedding 模型的映像和不含 embedding 模型的 slim 映像。具體如下:

> 注意:自 `v0.22.0` 起,我們僅發佈 slim 版本,並且不再在映像標籤後附加 **-slim** 後綴。
| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |

> 從 `v0.22.0` 開始,我們只發佈 slim 版本,並且不再在映像標籤後附加 **-slim** 後綴。

> [!TIP]
> 如果你遇到 Docker 映像檔拉不下來的問題,可以在 **docker/.env** 檔案內根據變數 `RAGFLOW_IMAGE` 的註解提示選擇華為雲或阿里雲的對應映像。
@@ -285,7 +289,7 @@ RAGFlow 預設使用 Elasticsearch 儲存文字和向量資料. 如果要切換
> [!WARNING]
> Infinity 目前官方並未正式支援在 Linux/arm64 架構下的機器上運行.

## 🔧 原始碼編譯 Docker 映像(不含 embedding 模型)
## 🔧 原始碼編譯 Docker 映像

本 Docker 映像大小約 2 GB 左右並且依賴外部的大模型和 embedding 服務。
README_zh.md (20 changes)

@@ -22,7 +22,7 @@
<img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
</a>
<a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
<img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
</a>
<a href="https://github.com/infiniflow/ragflow/releases/latest">
<img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">
@@ -85,6 +85,7 @@

## 🔥 近期更新

- 2025-11-12 支持从 Confluence、AWS S3、Discord、Google Drive 进行数据同步。
- 2025-10-23 支持 MinerU 和 Docling 作为文档解析方法。
- 2025-10-15 支持可编排的数据管道。
- 2025-08-08 支持 OpenAI 最新的 GPT-5 系列模型。
@@ -92,7 +93,6 @@
- 2025-05-23 Agent 新增 Python/JS 代码执行器组件。
- 2025-05-05 支持跨语言查询。
- 2025-03-19 PDF 和 DOCX 中的图支持用多模态大模型去解析得到描述.
- 2025-02-28 结合互联网搜索(Tavily),对于任意大模型实现类似 Deep Research 的推理功能.
- 2024-12-18 升级了 DeepDoc 的文档布局分析模型。
- 2024-08-22 支持用 RAG 技术实现从自然语言到 SQL 语句的转换。

@@ -186,25 +186,29 @@
> 请注意,目前官方提供的所有 Docker 镜像均基于 x86 架构构建,并不提供基于 ARM64 的 Docker 镜像。
> 如果你的操作系统是 ARM64 架构,请参考[这篇文档](https://ragflow.io/docs/dev/build_docker_image)自行构建 Docker 镜像。

> 运行以下命令会自动下载 RAGFlow slim Docker 镜像 `v0.21.1`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.21.1` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。
> 运行以下命令会自动下载 RAGFlow Docker 镜像 `v0.22.0`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.22.0` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。

```bash
$ cd ragflow/docker
# Use CPU for embedding and DeepDoc tasks:

# 可选:使用稳定版本标签(查看发布:https://github.com/infiniflow/ragflow/releases),例如:git checkout v0.22.0

# Use CPU for DeepDoc tasks:
$ docker compose -f docker-compose.yml up -d

# To use GPU to accelerate embedding and DeepDoc tasks:
# To use GPU to accelerate DeepDoc tasks:
# sed -i '1i DEVICE=gpu' .env
# docker compose -f docker-compose.yml up -d
```

> 注意:在 `v0.22.0` 之前的版本,我们会同时提供包含 embedding 模型的镜像和不含 embedding 模型的 slim 镜像。具体如下:

| RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
| ----------------- | --------------- | --------------------- | ------------------------ |
| v0.21.1 | ≈9 | ✔️ | Stable release |
| v0.21.1-slim | ≈2 | ❌ | Stable release |
| nightly | ≈2 | ❌ | _Unstable_ nightly build |

> 注意:从 `v0.22.0` 开始,我们只发布 slim 版本,并且不再在镜像标签后附加 **-slim** 后缀。
> 从 `v0.22.0` 开始,我们只发布 slim 版本,并且不再在镜像标签后附加 **-slim** 后缀。

> [!TIP]
> 如果你遇到 Docker 镜像拉不下来的问题,可以在 **docker/.env** 文件内根据变量 `RAGFLOW_IMAGE` 的注释提示选择华为云或者阿里云的相应镜像。
@@ -284,7 +288,7 @@ RAGFlow 默认使用 Elasticsearch 存储文本和向量数据. 如果要切换
> [!WARNING]
> Infinity 目前官方并未正式支持在 Linux/arm64 架构下的机器上运行.

## 🔧 源码编译 Docker 镜像(不含 embedding 模型)
## 🔧 源码编译 Docker 镜像

本 Docker 镜像大小约 2 GB 左右并且依赖外部的大模型和 embedding 服务。
Admin Service / ragflow-cli documentation

@@ -48,7 +48,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
1. Ensure the Admin Service is running.
2. Install ragflow-cli.
```bash
pip install ragflow-cli==0.21.1
pip install ragflow-cli==0.22.0
```
3. Launch the CLI client:
```bash
Admin CLI client (Python)

@@ -23,6 +23,7 @@ from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
from typing import Dict, List, Any
from lark import Lark, Transformer, Tree
import requests
import getpass

GRAMMAR = r"""
start: command
@@ -51,6 +52,7 @@ sql_command: list_services
| revoke_permission
| alter_user_role
| show_user_permission
| show_version

// meta command definition
meta_command: "\\" meta_command_name [meta_args]
@@ -92,6 +94,7 @@ FOR: "FOR"i
RESOURCES: "RESOURCES"i
ON: "ON"i
SET: "SET"i
VERSION: "VERSION"i

list_services: LIST SERVICES ";"
show_service: SHOW SERVICE NUMBER ";"
@@ -120,6 +123,8 @@ revoke_permission: REVOKE action_list ON identifier FROM ROLE identifier ";"
alter_user_role: ALTER USER quoted_string SET ROLE identifier ";"
show_user_permission: SHOW USER PERMISSION quoted_string ";"

show_version: SHOW VERSION ";"

action_list: identifier ("," identifier)*

identifier: WORD
@@ -246,6 +251,9 @@ class AdminTransformer(Transformer):
user_name = items[3]
return {"type": "show_user_permission", "user_name": user_name}

def show_version(self, items):
return {"type": "show_version"}

def action_list(self, items):
return items

@@ -359,7 +367,7 @@ class AdminCLI(Cmd):
if single_command:
admin_passwd = arguments['password']
else:
admin_passwd = input(f"password for {self.admin_account}: ").strip()
admin_passwd = getpass.getpass(f"password for {self.admin_account}: ").strip()
try:
self.admin_password = encrypt(admin_passwd)
response = self.session.post(url, json={'email': self.admin_account, 'password': self.admin_password})
@@ -370,7 +378,7 @@ class AdminCLI(Cmd):
self.session.headers.update({
'Content-Type': 'application/json',
'Authorization': response.headers['Authorization'],
'User-Agent': 'RAGFlow-CLI/0.21.1'
'User-Agent': 'RAGFlow-CLI/0.22.0'
})
print("Authentication successful.")
return True
@@ -384,6 +392,21 @@ class AdminCLI(Cmd):
print(str(e))
print(f"Can't access {self.host}, port: {self.port}")

def _format_service_detail_table(self, data):
if not any([isinstance(v, list) for v in data.values()]):
# normal table
return data
# handle task_executor heartbeats map, for example {'name': [{'done': 2, 'now': timestamp1}, {'done': 3, 'now': timestamp2}]
task_executor_list = []
for k, v in data.items():
# display latest status
heartbeats = sorted(v, key=lambda x: x["now"], reverse=True)
task_executor_list.append({
"task_executor_name": k,
**heartbeats[0],
})
return task_executor_list

def _print_table_simple(self, data):
if not data:
print("No data to print")
@@ -555,6 +578,8 @@ class AdminCLI(Cmd):
self._alter_user_role(command_dict)
case 'show_user_permission':
self._show_user_permission(command_dict)
case 'show_version':
self._show_version(command_dict)
case 'meta':
self._handle_meta_command(command_dict)
case _:
@@ -585,7 +610,8 @@ class AdminCLI(Cmd):
if isinstance(res_data['message'], str):
print(res_data['message'])
else:
self._print_table_simple(res_data['message'])
data = self._format_service_detail_table(res_data['message'])
self._print_table_simple(data)
else:
print(f"Service {res_data['service_name']} is down, {res_data['message']}")
else:
@@ -622,7 +648,9 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
table_data.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get user {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@@ -695,7 +723,10 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
for t in table_data:
t.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get all datasets of {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@@ -707,7 +738,10 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
for t in table_data:
t.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get all agents of {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@@ -861,6 +895,16 @@ class AdminCLI(Cmd):
print(
f"Fail to show user: {user_name_str} permission, code: {res_json['code']}, message: {res_json['message']}")

def _show_version(self, command):
print("show_version")
url = f'http://{self.host}:{self.port}/api/v1/admin/version'
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
else:
print(f"Fail to show version, code: {res_json['code']}, message: {res_json['message']}")

def _handle_meta_command(self, command):
meta_command = command['command']
args = command.get('args', [])
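The `_format_service_detail_table` helper added above flattens a per-executor heartbeats map into one row per task executor, keeping only the newest heartbeat. A self-contained illustration of that transformation with fabricated data (not the CLI method itself):

```python
# Fabricated heartbeat data; mirrors the flattening logic shown above.
def latest_heartbeat_rows(data: dict) -> list[dict]:
    if not any(isinstance(v, list) for v in data.values()):
        return [data]  # plain status dict: nothing to flatten
    rows = []
    for name, beats in data.items():
        newest = max(beats, key=lambda b: b["now"])  # keep only the latest heartbeat
        rows.append({"task_executor_name": name, **newest})
    return rows


sample = {
    "executor-a": [{"done": 2, "now": 1700000000}, {"done": 3, "now": 1700000060}],
    "executor-b": [{"done": 5, "now": 1700000030}],
}
print(latest_heartbeat_rows(sample))
# [{'task_executor_name': 'executor-a', 'done': 3, 'now': 1700000060},
#  {'task_executor_name': 'executor-b', 'done': 5, 'now': 1700000030}]
```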
ragflow-cli pyproject.toml

@@ -1,6 +1,6 @@
[project]
name = "ragflow-cli"
version = "0.21.1"
version = "0.22.0"
description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring. "
authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
license = { text = "Apache License, Version 2.0" }
Admin Service server entrypoint (Python)

@@ -26,11 +26,12 @@ from routes import admin_bp
from common.log_utils import init_root_logger
from common.constants import SERVICE_CONF
from common.config_utils import show_configs
from api import settings
from common import settings
from config import load_configurations, SERVICE_CONFIGS
from auth import init_default_admin, setup_auth
from flask_session import Session
from flask_login import LoginManager
from common.versions import get_ragflow_version

stop_event = threading.Event()

@@ -52,6 +53,7 @@ if __name__ == '__main__':
os.environ.get("MAX_CONTENT_LENGTH", 1024 * 1024 * 1024)
)
Session(app)
logging.info(f'RAGFlow version: {get_ragflow_version()}')
show_configs()
login_manager = LoginManager()
login_manager.init_app(app)
@@ -67,7 +69,7 @@ if __name__ == '__main__':
port=9381,
application=app,
threaded=True,
use_reloader=True,
use_reloader=False,
use_debugger=True,
)
except Exception:
Admin Service auth module (Python)

@@ -23,15 +23,15 @@ from flask import request, jsonify
from flask_login import current_user, login_user
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer

from api import settings
from api.common.exceptions import AdminException, UserNotFoundError
from api.db.init_data import encode_to_base64
from api.common.base64 import encode_to_base64
from api.db.services import UserService
from common.constants import ActiveEnum, StatusEnum
from api.utils.crypt import decrypt
from common.misc_utils import get_uuid
from common.time_utils import current_timestamp, datetime_format, get_format_time
from common.connection_utils import construct_response
from common import settings


def setup_auth(login_manager):
Admin Service configuration (Python)

@@ -183,11 +183,13 @@ class RAGFlowServerConfig(BaseConfig):


class TaskExecutorConfig(BaseConfig):
message_queue_type: str

def to_dict(self) -> dict[str, Any]:
result = super().to_dict()
if 'extra' not in result:
result['extra'] = dict()
result['extra']['message_queue_type'] = self.message_queue_type
return result


@@ -299,6 +301,15 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
id_count += 1
case "admin":
pass
case "task_executor":
name: str = 'task_executor'
host: str = v.get('host', '')
port: int = v.get('port', 0)
message_queue_type: str = v.get('message_queue_type')
config = TaskExecutorConfig(id=id_count, name=name, host=host, port=port, message_queue_type=message_queue_type,
service_type="task_executor", detail_func_name="check_task_executor_alive")
configurations.append(config)
id_count += 1
case _:
logging.warning(f"Unknown configuration key: {k}")
continue
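`TaskExecutorConfig.to_dict` above folds `message_queue_type` under an `extra` key of the serialized config. A rough sketch with stand-in classes (the real `BaseConfig` lives in the admin service) showing the resulting dict shape:

```python
# Stand-in classes for illustration only; they are not the admin service's BaseConfig.
from dataclasses import dataclass
from typing import Any


@dataclass
class StubBaseConfig:
    id: int
    name: str
    host: str
    port: int

    def to_dict(self) -> dict[str, Any]:
        return {"id": self.id, "name": self.name, "host": self.host, "port": self.port}


@dataclass
class StubTaskExecutorConfig(StubBaseConfig):
    message_queue_type: str = "redis"

    def to_dict(self) -> dict[str, Any]:
        result = super().to_dict()
        # Extra, executor-specific settings are nested under 'extra'.
        result.setdefault("extra", {})["message_queue_type"] = self.message_queue_type
        return result


print(StubTaskExecutorConfig(id=3, name="task_executor", host="", port=0).to_dict())
# {'id': 3, 'name': 'task_executor', 'host': '', 'port': 0, 'extra': {'message_queue_type': 'redis'}}
```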
Admin Service API routes (Python)

@@ -24,6 +24,7 @@ from responses import success_response, error_response
from services import UserMgr, ServiceMgr, UserServiceMgr
from roles import RoleMgr
from api.common.exceptions import AdminException
from common.versions import get_ragflow_version

admin_bp = Blueprint('admin', __name__, url_prefix='/api/v1/admin')

@@ -369,3 +370,13 @@ def get_user_permission(user_name: str):
return success_response(res)
except Exception as e:
return error_response(str(e), 500)

@admin_bp.route('/version', methods=['GET'])
@login_required
@check_admin_auth
def show_version():
try:
res = {"version": get_ragflow_version()}
return success_response(res)
except Exception as e:
return error_response(str(e), 500)
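The new `/api/v1/admin/version` route pairs with the CLI's `_show_version` shown earlier. A rough sketch of calling it directly; host, port, and the token value are placeholders, and authentication is assumed to have been obtained through the admin login flow:

```python
# Hypothetical standalone call to the new version endpoint.
import requests

HOST, PORT = "127.0.0.1", 9381          # placeholder Admin Service address
TOKEN = "<authorization-token>"          # placeholder, returned by the admin login endpoint

resp = requests.get(
    f"http://{HOST}:{PORT}/api/v1/admin/version",
    headers={"Content-Type": "application/json", "Authorization": TOKEN},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())   # per the route above, 'data' carries {"version": "..."}
```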
Admin Service managers (Python)

@@ -52,6 +52,7 @@ class UserMgr:
result = []
for user in users:
result.append({
'avatar': user.avatar,
'email': user.email,
'language': user.language,
'last_login_time': user.last_login_time,
@@ -170,7 +171,8 @@ class UserServiceMgr:
return [{
'title': r['title'],
'permission': r['permission'],
'canvas_category': r['canvas_category'].split('_')[0]
'canvas_category': r['canvas_category'].split('_')[0],
'avatar': r['avatar']
} for r in res]


@@ -190,6 +192,10 @@ class ServiceMgr:
config_dict['status'] = 'timeout'
except Exception:
config_dict['status'] = 'timeout'
if not config_dict['host']:
config_dict['host'] = '-'
if not config_dict['port']:
config_dict['port'] = '-'
result.append(config_dict)
return result
113
agent/canvas.py
113
agent/canvas.py
@ -26,7 +26,9 @@ from typing import Any, Union, Tuple
|
||||
from agent.component import component_class
|
||||
from agent.component.base import ComponentBase
|
||||
from api.db.services.file_service import FileService
|
||||
from api.db.services.task_service import has_canceled
|
||||
from common.misc_utils import get_uuid, hash_str2int
|
||||
from common.exceptions import TaskCanceledException
|
||||
from rag.prompts.generator import chunks_format
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
|
||||
@ -126,6 +128,7 @@ class Graph:
|
||||
self.components[k]["obj"].reset()
|
||||
try:
|
||||
REDIS_CONN.delete(f"{self.task_id}-logs")
|
||||
REDIS_CONN.delete(f"{self.task_id}-cancel")
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
|
||||
@ -153,6 +156,33 @@ class Graph:
|
||||
def get_tenant_id(self):
|
||||
return self._tenant_id
|
||||
|
||||
def get_value_with_variable(self,value: str) -> Any:
|
||||
pat = re.compile(r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*")
|
||||
out_parts = []
|
||||
last = 0
|
||||
|
||||
for m in pat.finditer(value):
|
||||
out_parts.append(value[last:m.start()])
|
||||
key = m.group(1)
|
||||
v = self.get_variable_value(key)
|
||||
if v is None:
|
||||
rep = ""
|
||||
elif isinstance(v, partial):
|
||||
buf = []
|
||||
for chunk in v():
|
||||
buf.append(chunk)
|
||||
rep = "".join(buf)
|
||||
elif isinstance(v, str):
|
||||
rep = v
|
||||
else:
|
||||
rep = json.dumps(v, ensure_ascii=False)
|
||||
|
||||
out_parts.append(rep)
|
||||
last = m.end()
|
||||
|
||||
out_parts.append(value[last:])
|
||||
return("".join(out_parts))
|
||||
|
||||
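A rough usage sketch for the new get_value_with_variable helper (the canvas instance and component id below are made up). It expands {Component:Id@field}, {sys.*} and, with this change, {env.*} references inside a string by delegating each match to get_variable_value:

# Illustrative only -- not part of the diff.
template = "Plan so far: {Agent:Planner@content} (turn {sys.conversation_turns})"
rendered = canvas.get_value_with_variable(template)
# Unresolved references become "", streamed (partial) values are drained into a
# plain string, and non-string values are serialized with json.dumps.
print(rendered)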
def get_variable_value(self, exp: str) -> Any:
|
||||
exp = exp.strip("{").strip("}").strip(" ").strip("{").strip("}")
|
||||
if exp.find("@") < 0:
|
||||
@ -169,7 +199,7 @@ class Graph:
|
||||
if not rest:
|
||||
return root_val
|
||||
return self.get_variable_param_value(root_val, rest)
|
||||
|
||||
|
||||
def get_variable_param_value(self, obj: Any, path: str) -> Any:
|
||||
cur = obj
|
||||
if not path:
|
||||
@ -188,6 +218,17 @@ class Graph:
|
||||
cur = getattr(cur, key, None)
|
||||
return cur
|
||||
|
||||
def is_canceled(self) -> bool:
|
||||
return has_canceled(self.task_id)
|
||||
|
||||
def cancel_task(self) -> bool:
|
||||
try:
|
||||
REDIS_CONN.set(f"{self.task_id}-cancel", "x")
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
return False
|
||||
return True
|
||||
|
||||
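Taken together, cancel_task, is_canceled and has_canceled implement cooperative cancellation through a per-task Redis flag. A sketch of the round trip; the constructor arguments are illustrative, not the real signature:

from common.exceptions import TaskCanceledException

canvas = Canvas(dsl, tenant_id="tenant-1", task_id="task-123")  # hypothetical args
canvas.cancel_task()        # writes the Redis key "task-123-cancel"
if canvas.is_canceled():    # delegates to has_canceled(self.task_id)
    raise TaskCanceledException("Task task-123 has been canceled.")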
|
||||
class Canvas(Graph):
|
||||
|
||||
@ -212,7 +253,7 @@ class Canvas(Graph):
|
||||
"sys.conversation_turns": 0,
|
||||
"sys.files": []
|
||||
}
|
||||
|
||||
|
||||
self.retrieval = self.dsl["retrieval"]
|
||||
self.memory = self.dsl.get("memory", [])
|
||||
|
||||
@ -229,18 +270,19 @@ class Canvas(Graph):
|
||||
self.retrieval = []
|
||||
self.memory = []
|
||||
for k in self.globals.keys():
|
||||
if isinstance(self.globals[k], str):
|
||||
self.globals[k] = ""
|
||||
elif isinstance(self.globals[k], int):
|
||||
self.globals[k] = 0
|
||||
elif isinstance(self.globals[k], float):
|
||||
self.globals[k] = 0
|
||||
elif isinstance(self.globals[k], list):
|
||||
self.globals[k] = []
|
||||
elif isinstance(self.globals[k], dict):
|
||||
self.globals[k] = {}
|
||||
else:
|
||||
self.globals[k] = None
|
||||
if k.startswith("sys."):
|
||||
if isinstance(self.globals[k], str):
|
||||
self.globals[k] = ""
|
||||
elif isinstance(self.globals[k], int):
|
||||
self.globals[k] = 0
|
||||
elif isinstance(self.globals[k], float):
|
||||
self.globals[k] = 0
|
||||
elif isinstance(self.globals[k], list):
|
||||
self.globals[k] = []
|
||||
elif isinstance(self.globals[k], dict):
|
||||
self.globals[k] = {}
|
||||
else:
|
||||
self.globals[k] = None
|
||||
|
||||
def run(self, **kwargs):
|
||||
st = time.perf_counter()
|
||||
@ -250,6 +292,14 @@ class Canvas(Graph):
|
||||
for k, cpn in self.components.items():
|
||||
self.components[k]["obj"].reset(True)
|
||||
|
||||
if kwargs.get("webhook_payload"):
|
||||
for k, cpn in self.components.items():
|
||||
if self.components[k]["obj"].component_name.lower() == "webhook":
|
||||
for kk, vv in kwargs["webhook_payload"].items():
|
||||
self.components[k]["obj"].set_output(kk, vv)
|
||||
|
||||
self.components[k]["obj"].reset(True)
|
||||
|
||||
for k in kwargs.keys():
|
||||
if k in ["query", "user_id", "files"] and kwargs[k]:
|
||||
if k == "files":
|
||||
@ -275,18 +325,37 @@ class Canvas(Graph):
|
||||
self.path.append("begin")
|
||||
self.retrieval.append({"chunks": [], "doc_aggs": []})
|
||||
|
||||
if self.is_canceled():
|
||||
msg = f"Task {self.task_id} has been canceled before starting."
|
||||
logging.info(msg)
|
||||
raise TaskCanceledException(msg)
|
||||
|
||||
yield decorate("workflow_started", {"inputs": kwargs.get("inputs")})
|
||||
self.retrieval.append({"chunks": {}, "doc_aggs": {}})
|
||||
|
||||
def _run_batch(f, t):
|
||||
if self.is_canceled():
|
||||
msg = f"Task {self.task_id} has been canceled during batch execution."
|
||||
logging.info(msg)
|
||||
raise TaskCanceledException(msg)
|
||||
|
||||
with ThreadPoolExecutor(max_workers=5) as executor:
|
||||
thr = []
|
||||
for i in range(f, t):
|
||||
i = f
|
||||
while i < t:
|
||||
cpn = self.get_component_obj(self.path[i])
|
||||
if cpn.component_name.lower() in ["begin", "userfillup"]:
|
||||
thr.append(executor.submit(cpn.invoke, inputs=kwargs.get("inputs", {})))
|
||||
i += 1
|
||||
else:
|
||||
thr.append(executor.submit(cpn.invoke, **cpn.get_input()))
|
||||
for _, ele in cpn.get_input_elements().items():
|
||||
if isinstance(ele, dict) and ele.get("_cpn_id") and ele.get("_cpn_id") not in self.path[:i] and self.path[0].lower().find("userfillup") < 0:
|
||||
self.path.pop(i)
|
||||
t -= 1
|
||||
break
|
||||
else:
|
||||
thr.append(executor.submit(cpn.invoke, **cpn.get_input()))
|
||||
i += 1
|
||||
for t in thr:
|
||||
t.result()
|
||||
|
||||
@ -316,6 +385,7 @@ class Canvas(Graph):
|
||||
"thoughts": self.get_component_thoughts(self.path[i])
|
||||
})
|
||||
_run_batch(idx, to)
|
||||
to = len(self.path)
|
||||
# post processing of components invocation
|
||||
for i in range(idx, to):
|
||||
cpn = self.get_component(self.path[i])
|
||||
@ -410,9 +480,10 @@ class Canvas(Graph):
|
||||
for c in path:
|
||||
o = self.get_component_obj(c)
|
||||
if o.component_name.lower() == "userfillup":
|
||||
o.invoke()
|
||||
another_inputs.update(o.get_input_elements())
|
||||
if o.get_param("enable_tips"):
|
||||
tips = o.get_param("tips")
|
||||
tips = o.output("tips")
|
||||
self.path = path
|
||||
yield decorate("user_inputs", {"inputs": another_inputs, "tips": tips})
|
||||
return
|
||||
@ -426,6 +497,14 @@ class Canvas(Graph):
|
||||
"created_at": st,
|
||||
})
|
||||
self.history.append(("assistant", self.get_component_obj(self.path[-1]).output()))
|
||||
elif "Task has been canceled" in self.error:
|
||||
yield decorate("workflow_finished",
|
||||
{
|
||||
"inputs": kwargs.get("inputs"),
|
||||
"outputs": "Task has been canceled",
|
||||
"elapsed_time": time.perf_counter() - st,
|
||||
"created_at": st,
|
||||
})
|
||||
|
||||
def is_reff(self, exp: str) -> bool:
|
||||
exp = exp.strip("{").strip("}")
|
||||
|
||||
@ -13,7 +13,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import os
|
||||
import importlib
|
||||
import inspect
|
||||
@ -50,9 +49,10 @@ del _package_path, _import_submodules, _extract_classes_from_module
|
||||
|
||||
|
||||
def component_class(class_name):
|
||||
for mdl in ["agent.component", "agent.tools", "rag.flow"]:
|
||||
for module_name in ["agent.component", "agent.tools", "rag.flow"]:
|
||||
try:
|
||||
return getattr(importlib.import_module(mdl), class_name)
|
||||
return getattr(importlib.import_module(module_name), class_name)
|
||||
except Exception:
|
||||
# logging.warning(f"Can't import module: {module_name}, error: {e}")
|
||||
pass
|
||||
assert False, f"Can't import {class_name}"
|
||||
|
||||
@ -139,6 +139,9 @@ class Agent(LLM, ToolBase):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 20*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Agent processing"):
|
||||
return
|
||||
|
||||
if kwargs.get("user_prompt"):
|
||||
usr_pmt = ""
|
||||
if kwargs.get("reasoning"):
|
||||
@ -152,6 +155,8 @@ class Agent(LLM, ToolBase):
|
||||
self._param.prompts = [{"role": "user", "content": usr_pmt}]
|
||||
|
||||
if not self.tools:
|
||||
if self.check_if_canceled("Agent processing"):
|
||||
return
|
||||
return LLM._invoke(self, **kwargs)
|
||||
|
||||
prompt, msg, user_defined_prompt = self._prepare_prompt_variables()
|
||||
@ -171,6 +176,8 @@ class Agent(LLM, ToolBase):
|
||||
use_tools = []
|
||||
ans = ""
|
||||
for delta_ans, tk in self._react_with_tools_streamly(prompt, msg, use_tools, user_defined_prompt):
|
||||
if self.check_if_canceled("Agent processing"):
|
||||
return
|
||||
ans += delta_ans
|
||||
|
||||
if ans.find("**ERROR**") >= 0:
|
||||
@ -191,12 +198,16 @@ class Agent(LLM, ToolBase):
|
||||
answer_without_toolcall = ""
|
||||
use_tools = []
|
||||
for delta_ans,_ in self._react_with_tools_streamly(prompt, msg, use_tools, user_defined_prompt):
|
||||
if self.check_if_canceled("Agent streaming"):
|
||||
return
|
||||
|
||||
if delta_ans.find("**ERROR**") >= 0:
|
||||
if self.get_exception_default_value():
|
||||
self.set_output("content", self.get_exception_default_value())
|
||||
yield self.get_exception_default_value()
|
||||
else:
|
||||
self.set_output("_ERROR", delta_ans)
|
||||
return
|
||||
answer_without_toolcall += delta_ans
|
||||
yield delta_ans
|
||||
|
||||
@ -271,6 +282,8 @@ class Agent(LLM, ToolBase):
|
||||
st = timer()
|
||||
txt = ""
|
||||
for delta_ans in self._gen_citations(entire_txt):
|
||||
if self.check_if_canceled("Agent streaming"):
|
||||
return
|
||||
yield delta_ans, 0
|
||||
txt += delta_ans
|
||||
|
||||
@ -286,6 +299,8 @@ class Agent(LLM, ToolBase):
|
||||
task_desc = analyze_task(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
|
||||
self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
|
||||
for _ in range(self._param.max_rounds + 1):
|
||||
if self.check_if_canceled("Agent streaming"):
|
||||
return
|
||||
response, tk = next_step(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
|
||||
# self.callback("next_step", {}, str(response)[:256]+"...")
|
||||
token_count += tk
|
||||
@ -333,6 +348,8 @@ Instructions:
|
||||
6. Focus on delivering VALUE with the information already gathered
|
||||
Respond immediately with your final comprehensive answer.
|
||||
"""
|
||||
if self.check_if_canceled("Agent final instruction"):
|
||||
return
|
||||
append_user_content(hist, final_instruction)
|
||||
|
||||
for txt, tkcnt in complete():
|
||||
|
||||
@ -393,7 +393,7 @@ class ComponentParamBase(ABC):
|
||||
class ComponentBase(ABC):
|
||||
component_name: str
|
||||
thread_limiter = trio.CapacityLimiter(int(os.environ.get('MAX_CONCURRENT_CHATS', 10)))
|
||||
variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z:0-9_.-]+|sys\.[a-z_]+)\} *\}*"
|
||||
variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"
|
||||
|
||||
def __str__(self):
|
||||
"""
|
||||
@ -417,6 +417,20 @@ class ComponentBase(ABC):
|
||||
self._param = param
|
||||
self._param.check()
|
||||
|
||||
def is_canceled(self) -> bool:
|
||||
return self._canvas.is_canceled()
|
||||
|
||||
def check_if_canceled(self, message: str = "") -> bool:
|
||||
if self.is_canceled():
|
||||
task_id = getattr(self._canvas, 'task_id', 'unknown')
|
||||
log_message = f"Task {task_id} has been canceled"
|
||||
if message:
|
||||
log_message += f" during {message}"
|
||||
logging.info(log_message)
|
||||
self.set_output("_ERROR", "Task has been canceled")
|
||||
return True
|
||||
return False
|
||||
|
||||
def invoke(self, **kwargs) -> dict[str, Any]:
|
||||
self.set_output("_created_time", time.perf_counter())
|
||||
try:
|
||||
@ -514,6 +528,7 @@ class ComponentBase(ABC):
|
||||
def get_param(self, name):
|
||||
if hasattr(self._param, name):
|
||||
return getattr(self._param, name)
|
||||
return None
|
||||
|
||||
def debug(self, **kwargs):
|
||||
return self._invoke(**kwargs)
|
||||
@ -521,7 +536,7 @@ class ComponentBase(ABC):
|
||||
def get_parent(self) -> Union[object, None]:
|
||||
pid = self._canvas.get_component(self._id).get("parent_id")
|
||||
if not pid:
|
||||
return
|
||||
return None
|
||||
return self._canvas.get_component(pid)["obj"]
|
||||
|
||||
def get_upstream(self) -> List[str]:
|
||||
@ -546,7 +561,7 @@ class ComponentBase(ABC):
|
||||
|
||||
def exception_handler(self):
|
||||
if not self._param.exception_method:
|
||||
return
|
||||
return None
|
||||
return {
|
||||
"goto": self._param.exception_goto,
|
||||
"default_value": self._param.exception_default_value
|
||||
|
||||
@ -37,7 +37,13 @@ class Begin(UserFillUp):
|
||||
component_name = "Begin"
|
||||
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Begin processing"):
|
||||
return
|
||||
|
||||
for k, v in kwargs.get("inputs", {}).items():
|
||||
if self.check_if_canceled("Begin processing"):
|
||||
return
|
||||
|
||||
if isinstance(v, dict) and v.get("type", "").lower().find("file") >= 0:
|
||||
if v.get("optional") and v.get("value", None) is None:
|
||||
v = None
|
||||
|
||||
@ -98,6 +98,9 @@ class Categorize(LLM, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Categorize processing"):
|
||||
return
|
||||
|
||||
msg = self._canvas.get_history(self._param.message_history_window_size)
|
||||
if not msg:
|
||||
msg = [{"role": "user", "content": ""}]
|
||||
@ -114,10 +117,18 @@ class Categorize(LLM, ABC):
|
||||
---- Real Data ----
|
||||
{} →
|
||||
""".format(" | ".join(["{}: \"{}\"".format(c["role"].upper(), re.sub(r"\n", "", c["content"], flags=re.DOTALL)) for c in msg]))
|
||||
|
||||
if self.check_if_canceled("Categorize processing"):
|
||||
return
|
||||
|
||||
ans = chat_mdl.chat(self._param.sys_prompt, [{"role": "user", "content": user_prompt}], self._param.gen_conf())
|
||||
logging.info(f"input: {user_prompt}, answer: {str(ans)}")
|
||||
if ERROR_PREFIX in ans:
|
||||
raise Exception(ans)
|
||||
|
||||
if self.check_if_canceled("Categorize processing"):
|
||||
return
|
||||
|
||||
# Count the number of times each category appears in the answer.
|
||||
category_counts = {}
|
||||
for c in self._param.category_description.keys():
|
||||
|
||||
@ -10,7 +10,7 @@ class DataOperationsParam(ComponentParamBase):
|
||||
"""
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.inputs = []
|
||||
self.query = []
|
||||
self.operations = "literal_eval"
|
||||
self.select_keys = []
|
||||
self.filter_values=[]
|
||||
@ -35,18 +35,19 @@ class DataOperations(ComponentBase,ABC):
|
||||
def get_input_form(self) -> dict[str, dict]:
|
||||
return {
|
||||
k: {"name": o.get("name", ""), "type": "line"}
|
||||
for input_item in (self._param.inputs or [])
|
||||
for input_item in (self._param.query or [])
|
||||
for k, o in self.get_input_elements_from_text(input_item).items()
|
||||
}
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
self.input_objects=[]
|
||||
inputs = getattr(self._param, "inputs", None)
|
||||
inputs = getattr(self._param, "query", None)
|
||||
if not isinstance(inputs, (list, tuple)):
|
||||
inputs = [inputs]
|
||||
for input_ref in self._param.inputs:
|
||||
for input_ref in inputs:
|
||||
input_object=self._canvas.get_variable_value(input_ref)
|
||||
self.set_input_value(input_ref, input_object)
|
||||
if input_object is None:
|
||||
continue
|
||||
if isinstance(input_object,dict):
|
||||
@ -57,7 +58,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
continue
|
||||
if self._param.operations == "select_keys":
|
||||
self._select_keys()
|
||||
elif self._param.operations == "literal_eval":
|
||||
elif self._param.operations == "recursive_eval":
|
||||
self._literal_eval()
|
||||
elif self._param.operations == "combine":
|
||||
self._combine()
|
||||
@ -100,7 +101,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
|
||||
def _combine(self):
|
||||
result={}
|
||||
for obj in self.input_objects():
|
||||
for obj in self.input_objects:
|
||||
for key, value in obj.items():
|
||||
if key not in result:
|
||||
result[key] = value
|
||||
@ -123,6 +124,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
key = rule.get("key")
|
||||
op = (rule.get("operator") or "equals").lower()
|
||||
target = self.norm(rule.get("value"))
|
||||
target = self._canvas.get_value_with_variable(target) or target
|
||||
if key not in obj:
|
||||
return False
|
||||
val = obj.get(key, None)
|
||||
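With this change a filter_values rule may carry a variable reference in its target value, which the canvas expands before comparison. A hedged example; only the "key"/"operator"/"value" fields come from the code above, the rest is illustrative:

rule = {"key": "status", "operator": "equals", "value": "{env.expected_status}"}
target = canvas.get_value_with_variable(rule["value"]) or rule["value"]
kept = [obj for obj in input_objects if obj.get(rule["key"]) == target]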
@ -142,7 +144,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
def _filter_values(self):
|
||||
results=[]
|
||||
rules = (getattr(self._param, "filter_values", None) or [])
|
||||
for obj in self.input_objects():
|
||||
for obj in self.input_objects:
|
||||
if not rules:
|
||||
results.append(obj)
|
||||
continue
|
||||
@ -154,7 +156,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
def _append_or_update(self):
|
||||
results=[]
|
||||
updates = getattr(self._param, "updates", []) or []
|
||||
for obj in self.input_objects():
|
||||
for obj in self.input_objects:
|
||||
new_obj = dict(obj)
|
||||
for item in updates:
|
||||
if not isinstance(item, dict):
|
||||
@ -162,7 +164,7 @@ class DataOperations(ComponentBase,ABC):
|
||||
k = (item.get("key") or "").strip()
|
||||
if not k:
|
||||
continue
|
||||
new_obj[k] = item.get("value")
|
||||
new_obj[k] = self._canvas.get_value_with_variable(item.get("value")) or item.get("value")
|
||||
results.append(new_obj)
|
||||
self.set_output("result", results)
|
||||
|
||||
|
||||
@ -13,7 +13,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
from agent.component.base import ComponentBase, ComponentParamBase
|
||||
import json
|
||||
import re
|
||||
from functools import partial
|
||||
|
||||
from agent.component.base import ComponentParamBase, ComponentBase
|
||||
|
||||
|
||||
class UserFillUpParam(ComponentParamBase):
|
||||
@ -31,10 +35,35 @@ class UserFillUp(ComponentBase):
|
||||
component_name = "UserFillUp"
|
||||
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("UserFillUp processing"):
|
||||
return
|
||||
|
||||
if self._param.enable_tips:
|
||||
content = self._param.tips
|
||||
for k, v in self.get_input_elements_from_text(self._param.tips).items():
|
||||
v = v["value"]
|
||||
ans = ""
|
||||
if isinstance(v, partial):
|
||||
for t in v():
|
||||
ans += t
|
||||
elif isinstance(v, list):
|
||||
ans = ",".join([str(vv) for vv in v])
|
||||
elif not isinstance(v, str):
|
||||
try:
|
||||
ans = json.dumps(v, ensure_ascii=False)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
ans = v
|
||||
if not ans:
|
||||
ans = ""
|
||||
content = re.sub(r"\{%s\}"%k, ans, content)
|
||||
|
||||
self.set_output("tips", content)
|
||||
for k, v in kwargs.get("inputs", {}).items():
|
||||
if self.check_if_canceled("UserFillUp processing"):
|
||||
return
|
||||
self.set_output(k, v)
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return "Waiting for your input..."
|
||||
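When enable_tips is set, UserFillUp now renders variable references inside the tips text before pausing for user input. A sketch using the component id from the Interactive Agent template later in this diff:

param = UserFillUpParam()
param.enable_tips = True
param.tips = "Here is my search plan:\n{Agent:LargeFliesMelt@content}\nAre you okay with it?"
# After _invoke(), output("tips") holds the tips text with the agent's plan
# substituted in, and Canvas.run() surfaces it in the "user_inputs" event.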
|
||||
|
||||
|
||||
@ -56,6 +56,9 @@ class Invoke(ComponentBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Invoke processing"):
|
||||
return
|
||||
|
||||
args = {}
|
||||
for para in self._param.variables:
|
||||
if para.get("value"):
|
||||
@ -89,6 +92,9 @@ class Invoke(ComponentBase, ABC):
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries + 1):
|
||||
if self.check_if_canceled("Invoke processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
if method == "get":
|
||||
response = requests.get(url=url, params=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
|
||||
@ -121,6 +127,9 @@ class Invoke(ComponentBase, ABC):
|
||||
|
||||
return self.output("result")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("Invoke processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"Http request error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -16,6 +16,13 @@
|
||||
from abc import ABC
|
||||
from agent.component.base import ComponentBase, ComponentParamBase
|
||||
|
||||
"""
|
||||
class VariableModel(BaseModel):
|
||||
data_type: Annotated[Literal["string", "number", "Object", "Boolean", "Array<string>", "Array<number>", "Array<object>", "Array<boolean>"], Field(default="Array<string>")]
|
||||
input_mode: Annotated[Literal["constant", "variable"], Field(default="constant")]
|
||||
value: Annotated[Any, Field(default=None)]
|
||||
model_config = ConfigDict(extra="forbid")
|
||||
"""
|
||||
|
||||
class IterationParam(ComponentParamBase):
|
||||
"""
|
||||
@ -49,6 +56,9 @@ class Iteration(ComponentBase, ABC):
|
||||
return cid
|
||||
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Iteration processing"):
|
||||
return
|
||||
|
||||
arr = self._canvas.get_variable_value(self._param.items_ref)
|
||||
if not isinstance(arr, list):
|
||||
self.set_output("_ERROR", self._param.items_ref + " must be an array, but its type is "+str(type(arr)))
|
||||
|
||||
@ -33,6 +33,9 @@ class IterationItem(ComponentBase, ABC):
|
||||
self._idx = 0
|
||||
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("IterationItem processing"):
|
||||
return
|
||||
|
||||
parent = self.get_parent()
|
||||
arr = self._canvas.get_variable_value(parent._param.items_ref)
|
||||
if not isinstance(arr, list):
|
||||
@ -40,12 +43,17 @@ class IterationItem(ComponentBase, ABC):
|
||||
raise Exception(parent._param.items_ref + " must be an array, but its type is "+str(type(arr)))
|
||||
|
||||
if self._idx > 0:
|
||||
if self.check_if_canceled("IterationItem processing"):
|
||||
return
|
||||
self.output_collation()
|
||||
|
||||
if self._idx >= len(arr):
|
||||
self._idx = -1
|
||||
return
|
||||
|
||||
if self.check_if_canceled("IterationItem processing"):
|
||||
return
|
||||
|
||||
self.set_output("item", arr[self._idx])
|
||||
self.set_output("index", self._idx)
|
||||
|
||||
@ -80,4 +88,4 @@ class IterationItem(ComponentBase, ABC):
|
||||
return self._idx == -1
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return "Next turn..."
|
||||
return "Next turn..."
|
||||
|
||||
@ -207,6 +207,9 @@ class LLM(ComponentBase):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("LLM processing"):
|
||||
return
|
||||
|
||||
def clean_formated_answer(ans: str) -> str:
|
||||
ans = re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)
|
||||
ans = re.sub(r"^.*```json", "", ans, flags=re.DOTALL)
|
||||
@ -216,13 +219,16 @@ class LLM(ComponentBase):
|
||||
error: str = ""
|
||||
output_structure=None
|
||||
try:
|
||||
output_structure=self._param.outputs['structured']
|
||||
output_structure = self._param.outputs['structured']
|
||||
except Exception:
|
||||
pass
|
||||
if output_structure:
|
||||
schema=json.dumps(output_structure, ensure_ascii=False, indent=2)
|
||||
prompt += structured_output_prompt(schema)
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("LLM processing"):
|
||||
return
|
||||
|
||||
_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
|
||||
error = ""
|
||||
ans = self._generate(msg)
|
||||
@ -248,6 +254,9 @@ class LLM(ComponentBase):
|
||||
return
|
||||
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("LLM processing"):
|
||||
return
|
||||
|
||||
_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
|
||||
error = ""
|
||||
ans = self._generate(msg)
|
||||
@ -269,6 +278,9 @@ class LLM(ComponentBase):
|
||||
_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
|
||||
answer = ""
|
||||
for ans in self._generate_streamly(msg):
|
||||
if self.check_if_canceled("LLM streaming"):
|
||||
return
|
||||
|
||||
if ans.find("**ERROR**") >= 0:
|
||||
if self.get_exception_default_value():
|
||||
self.set_output("content", self.get_exception_default_value())
|
||||
@ -287,4 +299,4 @@ class LLM(ComponentBase):
|
||||
|
||||
def thoughts(self) -> str:
|
||||
_, msg,_ = self._prepare_prompt_variables()
|
||||
return "⌛Give me a moment—starting from: \n\n" + re.sub(r"(User's query:|[\\]+)", '', msg[-1]['content'], flags=re.DOTALL) + "\n\nI’ll figure out our best next move."
|
||||
return "⌛Give me a moment—starting from: \n\n" + re.sub(r"(User's query:|[\\]+)", '', msg[-1]['content'], flags=re.DOTALL) + "\n\nI’ll figure out our best next move."
|
||||
|
||||
@ -49,6 +49,9 @@ class MessageParam(ComponentParamBase):
|
||||
class Message(ComponentBase):
|
||||
component_name = "Message"
|
||||
|
||||
def get_input_elements(self) -> dict[str, Any]:
|
||||
return self.get_input_elements_from_text("".join(self._param.content))
|
||||
|
||||
def get_kwargs(self, script:str, kwargs:dict = {}, delimiter:str=None) -> tuple[str, dict[str, str | list | Any]]:
|
||||
for k,v in self.get_input_elements_from_text(script).items():
|
||||
if k in kwargs:
|
||||
@ -86,6 +89,9 @@ class Message(ComponentBase):
|
||||
all_content = ""
|
||||
cache = {}
|
||||
for r in re.finditer(self.variable_ref_patt, rand_cnt, flags=re.DOTALL):
|
||||
if self.check_if_canceled("Message streaming"):
|
||||
return
|
||||
|
||||
all_content += rand_cnt[s: r.start()]
|
||||
yield rand_cnt[s: r.start()]
|
||||
s = r.end()
|
||||
@ -96,26 +102,33 @@ class Message(ComponentBase):
|
||||
continue
|
||||
|
||||
v = self._canvas.get_variable_value(exp)
|
||||
if not v:
|
||||
if v is None:
|
||||
v = ""
|
||||
if isinstance(v, partial):
|
||||
cnt = ""
|
||||
for t in v():
|
||||
if self.check_if_canceled("Message streaming"):
|
||||
return
|
||||
|
||||
all_content += t
|
||||
cnt += t
|
||||
yield t
|
||||
|
||||
self.set_input_value(exp, cnt)
|
||||
continue
|
||||
elif not isinstance(v, str):
|
||||
try:
|
||||
v = json.dumps(v, ensure_ascii=False, indent=2)
|
||||
v = json.dumps(v, ensure_ascii=False)
|
||||
except Exception:
|
||||
v = str(v)
|
||||
yield v
|
||||
self.set_input_value(exp, v)
|
||||
all_content += v
|
||||
cache[exp] = v
|
||||
|
||||
if s < len(rand_cnt):
|
||||
if self.check_if_canceled("Message streaming"):
|
||||
return
|
||||
|
||||
all_content += rand_cnt[s: ]
|
||||
yield rand_cnt[s: ]
|
||||
|
||||
@ -129,6 +142,9 @@ class Message(ComponentBase):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Message processing"):
|
||||
return
|
||||
|
||||
rand_cnt = random.choice(self._param.content)
|
||||
if self._param.stream and not self._is_jinjia2(rand_cnt):
|
||||
self.set_output("content", partial(self._stream, rand_cnt))
|
||||
@ -141,6 +157,9 @@ class Message(ComponentBase):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if self.check_if_canceled("Message processing"):
|
||||
return
|
||||
|
||||
for n, v in kwargs.items():
|
||||
content = re.sub(n, v, content)
|
||||
|
||||
|
||||
@ -16,6 +16,8 @@
|
||||
import os
|
||||
import re
|
||||
from abc import ABC
|
||||
from typing import Any
|
||||
|
||||
from jinja2 import Template as Jinja2Template
|
||||
from agent.component.base import ComponentParamBase
|
||||
from common.connection_utils import timeout
|
||||
@ -43,6 +45,9 @@ class StringTransformParam(ComponentParamBase):
|
||||
class StringTransform(Message, ABC):
|
||||
component_name = "StringTransform"
|
||||
|
||||
def get_input_elements(self) -> dict[str, Any]:
|
||||
return self.get_input_elements_from_text(self._param.script)
|
||||
|
||||
def get_input_form(self) -> dict[str, dict]:
|
||||
if self._param.method == "split":
|
||||
return {
|
||||
@ -58,17 +63,24 @@ class StringTransform(Message, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("StringTransform processing"):
|
||||
return
|
||||
|
||||
if self._param.method == "split":
|
||||
self._split(kwargs.get("line"))
|
||||
else:
|
||||
self._merge(kwargs)
|
||||
|
||||
def _split(self, line:str|None = None):
|
||||
if self.check_if_canceled("StringTransform split processing"):
|
||||
return
|
||||
|
||||
var = self._canvas.get_variable_value(self._param.split_ref) if not line else line
|
||||
if not var:
|
||||
var = ""
|
||||
assert isinstance(var, str), "The input variable is not a string: {}".format(type(var))
|
||||
self.set_input_value(self._param.split_ref, var)
|
||||
|
||||
res = []
|
||||
for i,s in enumerate(re.split(r"(%s)"%("|".join([re.escape(d) for d in self._param.delimiters])), var, flags=re.DOTALL)):
|
||||
if i % 2 == 1:
|
||||
@ -77,6 +89,9 @@ class StringTransform(Message, ABC):
|
||||
self.set_output("result", res)
|
||||
|
||||
def _merge(self, kwargs:dict[str, str] = {}):
|
||||
if self.check_if_canceled("StringTransform merge processing"):
|
||||
return
|
||||
|
||||
script = self._param.script
|
||||
script, kwargs = self.get_kwargs(script, kwargs, self._param.delimiters[0])
|
||||
|
||||
|
||||
@ -63,9 +63,18 @@ class Switch(ComponentBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Switch processing"):
|
||||
return
|
||||
|
||||
for cond in self._param.conditions:
|
||||
if self.check_if_canceled("Switch processing"):
|
||||
return
|
||||
|
||||
res = []
|
||||
for item in cond["items"]:
|
||||
if self.check_if_canceled("Switch processing"):
|
||||
return
|
||||
|
||||
if not item["cpn_id"]:
|
||||
continue
|
||||
cpn_v = self._canvas.get_variable_value(item["cpn_id"])
|
||||
@ -128,4 +137,4 @@ class Switch(ComponentBase, ABC):
|
||||
raise ValueError('Not supported operator' + operator)
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return "I’m weighing a few options and will pick the next step shortly."
|
||||
return "I’m weighing a few options and will pick the next step shortly."
|
||||
|
||||
84
agent/component/varaiable_aggregator.py
Normal file
@ -0,0 +1,84 @@
|
||||
#
|
||||
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import Any
|
||||
import os
|
||||
|
||||
from common.connection_utils import timeout
|
||||
from agent.component.base import ComponentBase, ComponentParamBase
|
||||
|
||||
|
||||
class VariableAggregatorParam(ComponentParamBase):
|
||||
"""
|
||||
Parameters for VariableAggregator
|
||||
|
||||
- groups: list of dicts {"group_name": str, "variables": [variable selectors]}
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
# each group expects: {"group_name": str, "variables": List[str]}
|
||||
self.groups = []
|
||||
|
||||
def check(self):
|
||||
self.check_empty(self.groups, "[VariableAggregator] groups")
|
||||
for g in self.groups:
|
||||
if not g.get("group_name"):
|
||||
raise ValueError("[VariableAggregator] group_name can not be empty!")
|
||||
if not g.get("variables"):
|
||||
raise ValueError(
|
||||
f"[VariableAggregator] variables of group `{g.get('group_name')}` can not be empty"
|
||||
)
|
||||
if not isinstance(g.get("variables"), list):
|
||||
raise ValueError(
|
||||
f"[VariableAggregator] variables of group `{g.get('group_name')}` should be a list of strings"
|
||||
)
|
||||
|
||||
def get_input_form(self) -> dict[str, dict]:
|
||||
return {
|
||||
"variables": {
|
||||
"name": "Variables",
|
||||
"type": "list",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class VariableAggregator(ComponentBase):
|
||||
component_name = "VariableAggregator"
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
|
||||
def _invoke(self, **kwargs):
|
||||
# Group mode: for each group, pick the first available variable
|
||||
for group in self._param.groups:
|
||||
gname = group.get("group_name")
|
||||
|
||||
# record candidate selectors within this group
|
||||
self.set_input_value(f"{gname}.variables", list(group.get("variables", [])))
|
||||
for selector in group.get("variables", []):
|
||||
val = self._canvas.get_variable_value(selector['value'])
|
||||
if val:
|
||||
self.set_output(gname, val)
|
||||
break
|
||||
|
||||
@staticmethod
|
||||
def _to_object(value: Any) -> Any:
|
||||
# Try to convert value to serializable object if it has to_object()
|
||||
try:
|
||||
return value.to_object() # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
return value
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return "Aggregating variables from canvas and grouping as configured."
|
||||
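A hedged configuration example for the new VariableAggregator: each group exposes, under its group_name, the first listed selector that resolves to a truthy value. The component ids are made up:

params = VariableAggregatorParam()
params.groups = [
    {
        "group_name": "summary",
        "variables": [
            {"value": "Agent:Planner@content"},     # preferred source
            {"value": "Message:Fallback@content"},  # used only if the first is empty
        ],
    },
]
params.check()  # raises ValueError on a missing group_name or empty variables list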
38
agent/component/webhook.py
Normal file
@ -0,0 +1,38 @@
|
||||
#
|
||||
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
from agent.component.base import ComponentParamBase, ComponentBase
|
||||
|
||||
|
||||
class WebhookParam(ComponentParamBase):
|
||||
|
||||
"""
|
||||
Define the Webhook component parameters.
|
||||
"""
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def get_input_form(self) -> dict[str, dict]:
|
||||
return getattr(self, "inputs")
|
||||
|
||||
|
||||
class Webhook(ComponentBase):
|
||||
component_name = "Webhook"
|
||||
|
||||
def _invoke(self, **kwargs):
|
||||
pass
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return ""
|
||||
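How a webhook payload reaches the new Webhook component, based on the Canvas.run() hunk earlier in this diff; the payload itself and the downstream reference are illustrative:

payload = {"order_id": "A-42", "status": "paid"}
for event in canvas.run(webhook_payload=payload, inputs={}):
    pass  # run() copies each payload key onto the Webhook component's outputs,
          # so downstream nodes can reference {Webhook:<id>@order_id}.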
519
agent/templates/user_interaction.json
Normal file
@ -0,0 +1,519 @@
|
||||
{
|
||||
"id": 27,
|
||||
"title": {
|
||||
"en": "Interactive Agent",
|
||||
"zh": "可交互的 Agent"
|
||||
},
|
||||
"description": {
|
||||
"en": "During the Agent’s execution, users can actively intervene and interact with the Agent to adjust or guide its output, ensuring the final result aligns with their intentions.",
|
||||
"zh": "在 Agent 的运行过程中,用户可以随时介入,与 Agent 进行交互,以调整或引导生成结果,使最终输出更符合预期。"
|
||||
},
|
||||
"canvas_type": "Agent",
|
||||
"dsl": {
|
||||
"components": {
|
||||
"Agent:LargeFliesMelt": {
|
||||
"downstream": [
|
||||
"UserFillUp:GoldBroomsRelate"
|
||||
],
|
||||
"obj": {
|
||||
"component_name": "Agent",
|
||||
"params": {
|
||||
"cite": true,
|
||||
"delay_after_error": 1,
|
||||
"description": "",
|
||||
"exception_default_value": "",
|
||||
"exception_goto": [],
|
||||
"exception_method": "",
|
||||
"frequencyPenaltyEnabled": false,
|
||||
"frequency_penalty": 0.7,
|
||||
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||
"maxTokensEnabled": false,
|
||||
"max_retries": 3,
|
||||
"max_rounds": 1,
|
||||
"max_tokens": 256,
|
||||
"mcp": [],
|
||||
"message_history_window_size": 12,
|
||||
"outputs": {
|
||||
"content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"structured": {}
|
||||
},
|
||||
"presencePenaltyEnabled": false,
|
||||
"presence_penalty": 0.4,
|
||||
"prompts": [
|
||||
{
|
||||
"content": "User query:{sys.query}",
|
||||
"role": "user"
|
||||
}
|
||||
],
|
||||
"sys_prompt": "<role>\nYou are the Planning Agent in a multi-agent RAG workflow.\nYour sole job is to design a crisp, executable Search Plan for the next agent. Do not search or answer the user’s question.\n</role>\n<objectives>\nUnderstand the user’s task and decompose it into evidence-seeking steps.\nProduce high-quality queries and retrieval settings tailored to the task type (fact lookup, multi-hop reasoning, comparison, statistics, how-to, etc.).\nIdentify missing information that would materially change the plan (≤3 concise questions).\nOptimize for source trustworthiness, diversity, and recency; define stopping criteria to avoid over-searching.\nAnswer in 150 words.\n<objectives>",
|
||||
"temperature": 0.1,
|
||||
"temperatureEnabled": false,
|
||||
"tools": [],
|
||||
"topPEnabled": false,
|
||||
"top_p": 0.3,
|
||||
"user_prompt": "",
|
||||
"visual_files_var": ""
|
||||
}
|
||||
},
|
||||
"upstream": [
|
||||
"begin"
|
||||
]
|
||||
},
|
||||
"Agent:TangyWordsType": {
|
||||
"downstream": [
|
||||
"Message:FreshWallsStudy"
|
||||
],
|
||||
"obj": {
|
||||
"component_name": "Agent",
|
||||
"params": {
|
||||
"cite": true,
|
||||
"delay_after_error": 1,
|
||||
"description": "",
|
||||
"exception_default_value": "",
|
||||
"exception_goto": [],
|
||||
"exception_method": "",
|
||||
"frequencyPenaltyEnabled": false,
|
||||
"frequency_penalty": 0.7,
|
||||
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||
"maxTokensEnabled": false,
|
||||
"max_retries": 3,
|
||||
"max_rounds": 1,
|
||||
"max_tokens": 256,
|
||||
"mcp": [],
|
||||
"message_history_window_size": 12,
|
||||
"outputs": {
|
||||
"content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"structured": {}
|
||||
},
|
||||
"presencePenaltyEnabled": false,
|
||||
"presence_penalty": 0.4,
|
||||
"prompts": [
|
||||
{
|
||||
"content": "Search Plan: {Agent:LargeFliesMelt@content}\n\n\n\nAwait Response feedback:{UserFillUp:GoldBroomsRelate@instructions}\n",
|
||||
"role": "user"
|
||||
}
|
||||
],
|
||||
"sys_prompt": "<role>\nYou are the Search Agent.\nYour job is to execute the approved Search Plan, integrate the Await Response feedback, retrieve evidence, and produce a well-grounded answer.\n</role>\n<objectives>\nTranslate the plan + feedback into concrete searches.\nCollect diverse, trustworthy, and recent evidence meeting the plan’s evidence bar.\nSynthesize a concise answer; include citations next to claims they support.\nIf evidence is insufficient or conflicting, clearly state limitations and propose next steps.\n</objectives>\n <tools>\nRetrieval: You must use Retrieval to do the search.\n </tools>\n",
|
||||
"temperature": 0.1,
|
||||
"temperatureEnabled": false,
|
||||
"tools": [
|
||||
{
|
||||
"component_name": "Retrieval",
|
||||
"name": "Retrieval",
|
||||
"params": {
|
||||
"cross_languages": [],
|
||||
"description": "",
|
||||
"empty_response": "",
|
||||
"kb_ids": [],
|
||||
"keywords_similarity_weight": 0.7,
|
||||
"outputs": {
|
||||
"formalized_content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"json": {
|
||||
"type": "Array<Object>",
|
||||
"value": []
|
||||
}
|
||||
},
|
||||
"rerank_id": "",
|
||||
"similarity_threshold": 0.2,
|
||||
"toc_enhance": false,
|
||||
"top_k": 1024,
|
||||
"top_n": 8,
|
||||
"use_kg": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"topPEnabled": false,
|
||||
"top_p": 0.3,
|
||||
"user_prompt": "",
|
||||
"visual_files_var": ""
|
||||
}
|
||||
},
|
||||
"upstream": [
|
||||
"UserFillUp:GoldBroomsRelate"
|
||||
]
|
||||
},
|
||||
"Message:FreshWallsStudy": {
|
||||
"downstream": [],
|
||||
"obj": {
|
||||
"component_name": "Message",
|
||||
"params": {
|
||||
"content": [
|
||||
"{Agent:TangyWordsType@content}"
|
||||
]
|
||||
}
|
||||
},
|
||||
"upstream": [
|
||||
"Agent:TangyWordsType"
|
||||
]
|
||||
},
|
||||
"UserFillUp:GoldBroomsRelate": {
|
||||
"downstream": [
|
||||
"Agent:TangyWordsType"
|
||||
],
|
||||
"obj": {
|
||||
"component_name": "UserFillUp",
|
||||
"params": {
|
||||
"enable_tips": true,
|
||||
"inputs": {
|
||||
"instructions": {
|
||||
"name": "instructions",
|
||||
"optional": false,
|
||||
"options": [],
|
||||
"type": "paragraph"
|
||||
}
|
||||
},
|
||||
"outputs": {
|
||||
"instructions": {
|
||||
"name": "instructions",
|
||||
"optional": false,
|
||||
"options": [],
|
||||
"type": "paragraph"
|
||||
}
|
||||
},
|
||||
"tips": "Here is my search plan:\n{Agent:LargeFliesMelt@content}\nAre you okay with it?"
|
||||
}
|
||||
},
|
||||
"upstream": [
|
||||
"Agent:LargeFliesMelt"
|
||||
]
|
||||
},
|
||||
"begin": {
|
||||
"downstream": [
|
||||
"Agent:LargeFliesMelt"
|
||||
],
|
||||
"obj": {
|
||||
"component_name": "Begin",
|
||||
"params": {}
|
||||
},
|
||||
"upstream": []
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"sys.conversation_turns": 0,
|
||||
"sys.files": [],
|
||||
"sys.query": "",
|
||||
"sys.user_id": ""
|
||||
},
|
||||
"graph": {
|
||||
"edges": [
|
||||
{
|
||||
"data": {
|
||||
"isHovered": false
|
||||
},
|
||||
"id": "xy-edge__beginstart-Agent:LargeFliesMeltend",
|
||||
"source": "begin",
|
||||
"sourceHandle": "start",
|
||||
"target": "Agent:LargeFliesMelt",
|
||||
"targetHandle": "end"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"isHovered": false
|
||||
},
|
||||
"id": "xy-edge__Agent:LargeFliesMeltstart-UserFillUp:GoldBroomsRelateend",
|
||||
"source": "Agent:LargeFliesMelt",
|
||||
"sourceHandle": "start",
|
||||
"target": "UserFillUp:GoldBroomsRelate",
|
||||
"targetHandle": "end"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"isHovered": false
|
||||
},
|
||||
"id": "xy-edge__UserFillUp:GoldBroomsRelatestart-Agent:TangyWordsTypeend",
|
||||
"source": "UserFillUp:GoldBroomsRelate",
|
||||
"sourceHandle": "start",
|
||||
"target": "Agent:TangyWordsType",
|
||||
"targetHandle": "end"
|
||||
},
|
||||
{
|
||||
"id": "xy-edge__Agent:TangyWordsTypetool-Tool:NastyBatsGoend",
|
||||
"source": "Agent:TangyWordsType",
|
||||
"sourceHandle": "tool",
|
||||
"target": "Tool:NastyBatsGo",
|
||||
"targetHandle": "end"
|
||||
},
|
||||
{
|
||||
"id": "xy-edge__Agent:TangyWordsTypestart-Message:FreshWallsStudyend",
|
||||
"source": "Agent:TangyWordsType",
|
||||
"sourceHandle": "start",
|
||||
"target": "Message:FreshWallsStudy",
|
||||
"targetHandle": "end"
|
||||
}
|
||||
],
|
||||
"nodes": [
|
||||
{
|
||||
"data": {
|
||||
"label": "Begin",
|
||||
"name": "begin"
|
||||
},
|
||||
"dragging": false,
|
||||
"id": "begin",
|
||||
"measured": {
|
||||
"height": 50,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 154.9008789064451,
|
||||
"y": 119.51001744285344
|
||||
},
|
||||
"selected": false,
|
||||
"sourcePosition": "left",
|
||||
"targetPosition": "right",
|
||||
"type": "beginNode"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"form": {
|
||||
"cite": true,
|
||||
"delay_after_error": 1,
|
||||
"description": "",
|
||||
"exception_default_value": "",
|
||||
"exception_goto": [],
|
||||
"exception_method": "",
|
||||
"frequencyPenaltyEnabled": false,
|
||||
"frequency_penalty": 0.7,
|
||||
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||
"maxTokensEnabled": false,
|
||||
"max_retries": 3,
|
||||
"max_rounds": 1,
|
||||
"max_tokens": 256,
|
||||
"mcp": [],
|
||||
"message_history_window_size": 12,
|
||||
"outputs": {
|
||||
"content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"structured": {}
|
||||
},
|
||||
"presencePenaltyEnabled": false,
|
||||
"presence_penalty": 0.4,
|
||||
"prompts": [
|
||||
{
|
||||
"content": "User query:{sys.query}",
|
||||
"role": "user"
|
||||
}
|
||||
],
|
||||
"sys_prompt": "<role>\nYou are the Planning Agent in a multi-agent RAG workflow.\nYour sole job is to design a crisp, executable Search Plan for the next agent. Do not search or answer the user’s question.\n</role>\n<objectives>\nUnderstand the user’s task and decompose it into evidence-seeking steps.\nProduce high-quality queries and retrieval settings tailored to the task type (fact lookup, multi-hop reasoning, comparison, statistics, how-to, etc.).\nIdentify missing information that would materially change the plan (≤3 concise questions).\nOptimize for source trustworthiness, diversity, and recency; define stopping criteria to avoid over-searching.\nAnswer in 150 words.\n<objectives>",
|
||||
"temperature": 0.1,
|
||||
"temperatureEnabled": false,
|
||||
"tools": [],
|
||||
"topPEnabled": false,
|
||||
"top_p": 0.3,
|
||||
"user_prompt": "",
|
||||
"visual_files_var": ""
|
||||
},
|
||||
"label": "Agent",
|
||||
"name": "Planning Agent"
|
||||
},
|
||||
"dragging": false,
|
||||
"id": "Agent:LargeFliesMelt",
|
||||
"measured": {
|
||||
"height": 90,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 443.96309330796714,
|
||||
"y": 104.61370811205677
|
||||
},
|
||||
"selected": false,
|
||||
"sourcePosition": "right",
|
||||
"targetPosition": "left",
|
||||
"type": "agentNode"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"form": {
|
||||
"enable_tips": true,
|
||||
"inputs": {
|
||||
"instructions": {
|
||||
"name": "instructions",
|
||||
"optional": false,
|
||||
"options": [],
|
||||
"type": "paragraph"
|
||||
}
|
||||
},
|
||||
"outputs": {
|
||||
"instructions": {
|
||||
"name": "instructions",
|
||||
"optional": false,
|
||||
"options": [],
|
||||
"type": "paragraph"
|
||||
}
|
||||
},
|
||||
"tips": "Here is my search plan:\n{Agent:LargeFliesMelt@content}\nAre you okay with it?"
|
||||
},
|
||||
"label": "UserFillUp",
|
||||
"name": "Await Response"
|
||||
},
|
||||
"dragging": false,
|
||||
"id": "UserFillUp:GoldBroomsRelate",
|
||||
"measured": {
|
||||
"height": 50,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 683.3409492927474,
|
||||
"y": 116.76274137645598
|
||||
},
|
||||
"selected": false,
|
||||
"sourcePosition": "right",
|
||||
"targetPosition": "left",
|
||||
"type": "ragNode"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"form": {
|
||||
"cite": true,
|
||||
"delay_after_error": 1,
|
||||
"description": "",
|
||||
"exception_default_value": "",
|
||||
"exception_goto": [],
|
||||
"exception_method": "",
|
||||
"frequencyPenaltyEnabled": false,
|
||||
"frequency_penalty": 0.7,
|
||||
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||
"maxTokensEnabled": false,
|
||||
"max_retries": 3,
|
||||
"max_rounds": 1,
|
||||
"max_tokens": 256,
|
||||
"mcp": [],
|
||||
"message_history_window_size": 12,
|
||||
"outputs": {
|
||||
"content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"structured": {}
|
||||
},
|
||||
"presencePenaltyEnabled": false,
|
||||
"presence_penalty": 0.4,
|
||||
"prompts": [
|
||||
{
|
||||
"content": "Search Plan: {Agent:LargeFliesMelt@content}\n\n\n\nAwait Response feedback:{UserFillUp:GoldBroomsRelate@instructions}\n",
|
||||
"role": "user"
|
||||
}
|
||||
],
|
||||
"sys_prompt": "<role>\nYou are the Search Agent.\nYour job is to execute the approved Search Plan, integrate the Await Response feedback, retrieve evidence, and produce a well-grounded answer.\n</role>\n<objectives>\nTranslate the plan + feedback into concrete searches.\nCollect diverse, trustworthy, and recent evidence meeting the plan’s evidence bar.\nSynthesize a concise answer; include citations next to claims they support.\nIf evidence is insufficient or conflicting, clearly state limitations and propose next steps.\n</objectives>\n <tools>\nRetrieval: You must use Retrieval to do the search.\n </tools>\n",
|
||||
"temperature": 0.1,
|
||||
"temperatureEnabled": false,
|
||||
"tools": [
|
||||
{
|
||||
"component_name": "Retrieval",
|
||||
"name": "Retrieval",
|
||||
"params": {
|
||||
"cross_languages": [],
|
||||
"description": "",
|
||||
"empty_response": "",
|
||||
"kb_ids": [],
|
||||
"keywords_similarity_weight": 0.7,
|
||||
"outputs": {
|
||||
"formalized_content": {
|
||||
"type": "string",
|
||||
"value": ""
|
||||
},
|
||||
"json": {
|
||||
"type": "Array<Object>",
|
||||
"value": []
|
||||
}
|
||||
},
|
||||
"rerank_id": "",
|
||||
"similarity_threshold": 0.2,
|
||||
"toc_enhance": false,
|
||||
"top_k": 1024,
|
||||
"top_n": 8,
|
||||
"use_kg": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"topPEnabled": false,
|
||||
"top_p": 0.3,
|
||||
"user_prompt": "",
|
||||
"visual_files_var": ""
|
||||
},
|
||||
"label": "Agent",
|
||||
"name": "Search Agent"
|
||||
},
|
||||
"dragging": false,
|
||||
"id": "Agent:TangyWordsType",
|
||||
"measured": {
|
||||
"height": 90,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 944.6411255659472,
|
||||
"y": 99.84499066368488
|
||||
},
|
||||
"selected": true,
|
||||
"sourcePosition": "right",
|
||||
"targetPosition": "left",
|
||||
"type": "agentNode"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"form": {
|
||||
"description": "This is an agent for a specific task.",
|
||||
"user_prompt": "This is the order you need to send to the agent."
|
||||
},
|
||||
"label": "Tool",
|
||||
"name": "flow.tool_0"
|
||||
},
|
||||
"id": "Tool:NastyBatsGo",
|
||||
"measured": {
|
||||
"height": 50,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 862.6411255659472,
|
||||
"y": 239.84499066368488
|
||||
},
|
||||
"sourcePosition": "right",
|
||||
"targetPosition": "left",
|
||||
"type": "toolNode"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"form": {
|
||||
"content": [
|
||||
"{Agent:TangyWordsType@content}"
|
||||
]
|
||||
},
|
||||
"label": "Message",
|
||||
"name": "Message"
|
||||
},
|
||||
"dragging": false,
|
||||
"id": "Message:FreshWallsStudy",
|
||||
"measured": {
|
||||
"height": 50,
|
||||
"width": 200
|
||||
},
|
||||
"position": {
|
||||
"x": 1216.7057997987163,
|
||||
"y": 120.48541298149814
|
||||
},
|
||||
"selected": false,
|
||||
"sourcePosition": "right",
|
||||
"targetPosition": "left",
|
||||
"type": "messageNode"
|
||||
}
|
||||
]
|
||||
},
|
||||
"history": [],
|
||||
"messages": [],
|
||||
"path": [],
|
||||
"retrieval": [],
|
||||
"variables": {}
|
||||
},
|
||||
"avatar":
|
||||
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAACXBIWXMAABYlAAAWJQFJUiTwAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA1FSURBVHgBzVppcFRVFv7e672TTjrprGTrkI1tgFjiAI4TmgEGcEFgVFAHxr2mLFe0cKxyBOeHljVjwVDqzDhVTqksOlOiuCBrCwk7hCAkLAGy70kn3eklvb03571e8rrTCQGi5em6ecu9fe85557znXNuh0EUHTGb5/Ast4TnmXsB3oifBTFVxEsVy7PrZ5lM9RE9oZvTZrPeAe51Gvh85ABGMuonJh6Ra9Mzw2KDm2PXm0ymPoS6ReZ5n5n6p4f45QOsj0ihEXxwdGg9Pta3Ax2R98ErH+wIzMFjFFTlhcwkCMEKT4qE+Nc5np8e5Aa8MAf94UNtmEl5DPYFrpHPER8+xn3U3KNkXqDpKtFaSGi1Wm188MH7655+6inoExLR0toW0N211R+8jJ198YHtiFyDH5EHE8Oy7Iccx/1BeJ4yZRI2bXgHSfokyGQsLH19YDgax4RXGJz450A8v1HGMMw62soMEgRxcVqcrDyG9X95E339fSidPh05Odnot9vB0Qe84CB8wMQglSe8HWPI3HAmJVmLYTOYJ55YxVedOYcTxyux59svsfqpJ5GWkorGhhZYLL0oKSnC2pfX4DemOaIg9n473B63ZCFGOu2YU4QIgikIJiF5yaxcuZTPzzfC42WQnZWJLdu2IiXFgG+/3oOs7Ey0t3fC7/cjMyMdU6dOwpyyMswtmwO9PhmdnYE+5hrcS9Eq8MzSHRfuvT7Dj5qbiKeGY4fKsWL1ShQWFKG1uQVz55Thh5pqVJ87D6fLDY/HA7/PD87PiYxMnToRLzz7LI0zoamphQTxYewpKIzUB6OUJcIoOTEOVpjhcroRH69FQ2MLHANOZKZn4PcPP4DikgKoVWrI5CzkcjmUSiUuXriKRx77I2bMuh0tba1IStKL84TXDTU26hp9z0QzJYVVbhBuJdDL8Vy4yeRK+TrBNHp7e9HR0QWXawDNTa3o7ulB2R2zER+nQXZ2FhRKBVwOl7BloO+IVwGpBtxebPv0v0hNN+CO2beHHT4cD6I9Pvo+iobElmtYk6xkQuE6nvOjpaWNmHeDJxPp7bVCp4vHvv0HsXfvQVSeOgOtRoOFi+aJu+C0O6FVa2AwJEOhkMPhcODEiVNwuJ0w5ubRLsbB7/UPMsvH5HTYxvNBwSUAIT5yQcG4wTlk3+/5bt2cX5chIzMdbW3tOH/+EtRaNex2B1RkKkuXLkJCog41NZdw8MBhFBUVICU1WTQDwZwMKcloJJN74snHMeByoq7uCr1XECBkEzB4I3ge6p4xkp3hiOFj+rdcYFpg5vaZs3H3nXehh0znyPFjWLv2NcyeNQPbtn0hDlRr1GKc2L1rP5QqJeQyMiMZRH8QqKujDa+9+gruWf4A4siPlLQzeXlG+Dl/hAjR2B7KnURQEnyDC4kyXPoS5dN7dn7FR0umJ4d8aNUjBKEdePSxh8iMDuDixcsiCglbK5PJ4PMFUEe4F6B01zfbMeD1iTx8vPUTMi0lXn7uJXT1dIr+Eph/EFViAMqoKGBag89sZFIVuMaTplsJWZYtvwv7yQdqai5CQWYhoMyECUWYv8BE2s0RJ8gk03v/3Q24UHsJ27/YjnaKDa4BD1INBpQfrgDlWoFZRRPgcdNBW4hlLBNGM3mEdMGrmzDfSNt/4OBhdLR1Yv26tXj77U0iEqUYUvDvf34AjUpOaUclssaNQ2cXoZdzAC++/Cds3vwZLl+oxtZPt2INPVefOYH6hsbALtwkDabtg8jG8pK0OURtbR34dMtHaKhrImlZvPXW35FsSMKKFfeivOIwsnKMWLDwbrjdbtEHOB8HFfnF8YoD+Md7m7Bv3y7s2rtfXOjkqUqMFYnMR5tQsCdCQwK+t5MQ+3d/g4cfXiHCZHeXBZ98/D/R5gX0sfRZsGTZAzDNW4SG5kbk5uSSL3DIzc1GaekMkNyQ0Z+PN29FYmICxooEPkNN5Hv3zh0jhgq9PhFXrtSh+nw1Kg4dIah04Oix02JfQUEepk2bgs8//0bcocWLfotXXlqD7u5uzF1wp7hIcnIyKr7fjSZKTyDV5HXkO9EkrfjksQZI62Brnw2pqQbMz5yH5fcug4/zUtCqxIcffSRmsI2NrSKjlp5ebNnyGWG/B2++8QYW3TlfTAhZGQOej8EAw0SYbcTaGLk6kxZR8miGY4wWGRAY6+7pFu1PSLH/88G/yAcG8NcNG3H85CkxSgrBT4gPDqcDE4sn4GjKCbBCsGAki7KDO8AybIBRugqLMFFMhuPDCALJI9CBGfxidKkYfi+Mp0mFGCEkUy888wy0Wq3YJyf/sFOa0W/rR093DzLS0tDXZw3WHkPnE9li5IhzdMCu0EGupnn8/sgxLEISBJTMISKSsVKnCDEtvYY+ovLIKQVGQk1waCF56yDs7+zsotjRDlu/DR6fF4sXL0RN9QWwFD/ik1JJwdwQ7XEspSKudqSqFUhsqcWAtUccH4tC/Ilrh/hlmbB84RcjtZgTSwQMPQtayhqXiTPvvIDHJyXjtWVlUCUkQ6ZQBHZDXIvMR6akOmMALtoxTUom9K2XaPesYl9wsmsSG2aQYTDih4ndQvl89LOb0o72wlkonTYZ9y25BwffWYOexstQJ+jhJ8377b1Ic9ngam4A53GC9zqgpCrPd7YcnFwZqB4l64aVHNXYmBoMDkbMgmMUJNqqH6rEFChvW0wMWjFv0RKom37AuX3bkcnbUKCOQ3LqOFgb69Gu1GGgs5HiiwYTbp0JS8UOMBpdxLrDRXI2WruICmrRGh5VEz9kIj4PtIZMqGdQ1HZZEZ+ZhxmFuVD73WT/bhx5/8+wZk1AvHEqrAYjCdoHH9UUxRMnoefIDggFHitjR9QVK9VyQIBYzNxAHhPEeI6EkJM2vfm3QSnzwpA/FZaOeuzdvAn9GUYUlC2Gj2zfkJoPRfpkNFdW0DgWeVlGKJqr4SKQkFNmOxwHbDSTTFjxTFAuaQ9z7Y8EIUK+RdgIXq1DUsFM2Pva0dfWDGu/C8ZxBhiaTyGXbF/ltsBSfRI+dQKhmB8KbTwUcYlgG2vQ095CZaxy0KwxaOLscEyEtyWYx4ey4ZhloFQ/XLD5o5pgDl46V+q4igOU6PVyGtxy10ooEtPQfKYc+z/cBBfrRM7EKZR3dYs7x/IK6JJT4K2vRq8QTxgpfwgaaizn4MN/pFsyfAsWKRFNFlCPCNRC3k7PjVZKCpvrULZ8BV7927vobmxG7dG9OHK4HDYqW6/W1cI3wCAtdyql8c0U/Z3wUuGUWzwFF8w7oIzTiZYptQ2WCUdoVnKVoFL4lgn3D+5UYKx45YVdCupEsmNhUyRUUhrywGWUoGhCKdpbWmGpO42LZ8/BR/EgnmqNxb97FCqFH9UHd8JG6YjT1U9w7IOP6pNJhUYc3f0VtCSElBt5KDeJFEKKnbwElQLh
X0oMI70O52rBBM3nRELRL9HQeh5x8KGy3IzjVbWwWfvxwc7dsFssuHz2NBo7W1Ciywdnt4KOQ+ChkxC1SoWmc+XwmBZRYaUWq0Oe80H26KpV60TNhTQW0fiAVoNFBBPSMs9E+ED08/CNxgm2HZcCy5XjkGt16Dp/Fms3vUfiyGFtv4KK/ftQWFxEhwEMvFQwyTihWFKIJ4Ie+u6V0+XkUjzcDhvcNJ9ckkxgSN6LoNfzsYA08t3ooTYA1UrjrXD+cAC/mj+X0CYJ/cT8oe++ppQiBf2OfjE5tFD2qzekIk9GdXpGDtRUSMnpN4wByptSUhOhdLVK6gFecu4SfY3Nxo0TbX18qhHujHqkjy9G84WT6GptwZUOC2qqziM7Lx16jRI54/MJwTzgKYdiBSgWYopSQ0edJXB6/NhyslYCo+F8Q5KDYGiUHVTiTYkAn9MGfcks9NFRpnAUqc/MIgY5LF1xH9Ioe1247H5MnjQNxZNvAUMptttuQyYdlrXRaYmccqnaLhvSEjWRFdlozEDK+I0KIVZiAsRSkaSdOBfeumPobWkUS9RcOqZJjtchJbsEjt42eEhQpS5ZFBiETHVX68jnPHR040YGHW1GFjQxXED67mbq2IhpJTvJu21QjZ8BFaXRPnq2kP3rE3RouUBVHsGzlQ4PEhLi4CB/aCLo9dLhmRDFs+k3jEt0GC3sQD01Y2jCoatJb6MGXEOe0QnMUPCyI+OWBdDk/AK1x3ejraFeLJYG6DTE7XIhjiC0z9IFJ9UNOq2AOz4UZ2bD6hyokj22enU+zTETN0LMyO16MliOzEmpVsH4i1lIzy2kn0+14CjAUSYHi80uFj8cOfL4wiJkpKfT4QKHLB37HVNuNs9hWc6MkVU5IiL9GCSccMsVKjrOl5HZuKnWtqKj4TI6as+AcffTkaUKfl6WL9rEoQP7NpAenhvNxEOO92LGjx+BGCGfUkKh0ZIv21Bz9vTGJ59+8XlRALPZrFexvLAL0zEGNNLJ80h910FVmoQkU2lpaeBfDYT/OXBzjIm0uhFjQMwN9o2KOH6jhngVmI853xGz2QgZ1lH2MI3yIXFHeP4nNP7YVE9cfMlw7BezTKbvpR3/Bx465XnKBextAAAAAElFTkSuQmCC"
|
||||
}
|
||||
@ -16,7 +16,7 @@
|
||||
import argparse
|
||||
import os
|
||||
from agent.canvas import Canvas
|
||||
from api import settings
|
||||
from common import settings
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
@ -63,12 +63,18 @@ class ArXiv(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("ArXiv processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("ArXiv processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
sort_choices = {"relevance": arxiv.SortCriterion.Relevance,
|
||||
"lastUpdatedDate": arxiv.SortCriterion.LastUpdatedDate,
|
||||
@ -79,12 +85,20 @@ class ArXiv(ToolBase, ABC):
|
||||
max_results=self._param.top_n,
|
||||
sort_by=sort_choices[self._param.sort_by]
|
||||
)
|
||||
self._retrieve_chunks(list(arxiv_client.results(search)),
|
||||
results = list(arxiv_client.results(search))
|
||||
|
||||
if self.check_if_canceled("ArXiv processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(results,
|
||||
get_title=lambda r: r.title,
|
||||
get_url=lambda r: r.pdf_url,
|
||||
get_content=lambda r: r.summary)
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("ArXiv processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"ArXiv error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -125,6 +125,9 @@ class ToolBase(ComponentBase):
|
||||
return self._param.get_meta()
|
||||
|
||||
def invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Tool processing"):
|
||||
return
|
||||
|
||||
self.set_output("_created_time", time.perf_counter())
|
||||
try:
|
||||
res = self._invoke(**kwargs)
|
||||
@ -170,4 +173,4 @@ class ToolBase(ComponentBase):
|
||||
self.set_output("formalized_content", "\n".join(kb_prompt({"chunks": chunks, "doc_aggs": aggs}, 200000, True)))
|
||||
|
||||
def thoughts(self) -> str:
|
||||
return self._canvas.get_component_name(self._id) + " is running..."
|
||||
return self._canvas.get_component_name(self._id) + " is running..."
|
||||
|
||||
@@ -21,8 +21,8 @@ from strenum import StrEnum
from typing import Optional
from pydantic import BaseModel, Field, field_validator
from agent.tools.base import ToolParamBase, ToolBase, ToolMeta
from api import settings
from common.connection_utils import timeout
from common import settings

class Language(StrEnum):
@@ -131,10 +131,14 @@ class CodeExec(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("CodeExec processing"):
return

lang = kwargs.get("lang", self._param.lang)
script = kwargs.get("script", self._param.script)
arguments = {}
for k, v in self._param.arguments.items():

if kwargs.get(k):
arguments[k] = kwargs[k]
continue
@@ -149,15 +153,28 @@ class CodeExec(ToolBase, ABC):
def _execute_code(self, language: str, code: str, arguments: dict):
import requests

if self.check_if_canceled("CodeExec execution"):
return

try:
code_b64 = self._encode_code(code)
code_req = CodeExecutionRequest(code_b64=code_b64, language=language, arguments=arguments).model_dump()
except Exception as e:
if self.check_if_canceled("CodeExec execution"):
return

self.set_output("_ERROR", "construct code request error: " + str(e))

try:
if self.check_if_canceled("CodeExec execution"):
return "Task has been canceled"

resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
logging.info(f"http://{settings.SANDBOX_HOST}:9385/run, code_req: {code_req}, resp.status_code {resp.status_code}:")

if self.check_if_canceled("CodeExec execution"):
return "Task has been canceled"

if resp.status_code != 200:
resp.raise_for_status()
body = resp.json()
@@ -173,16 +190,25 @@ class CodeExec(ToolBase, ABC):
logging.info(f"http://{settings.SANDBOX_HOST}:9385/run -> {rt}")
if isinstance(rt, tuple):
for i, (k, o) in enumerate(self._param.outputs.items()):
if self.check_if_canceled("CodeExec execution"):
return

if k.find("_") == 0:
continue
o["value"] = rt[i]
elif isinstance(rt, dict):
for i, (k, o) in enumerate(self._param.outputs.items()):
if self.check_if_canceled("CodeExec execution"):
return

if k not in rt or k.find("_") == 0:
continue
o["value"] = rt[k]
else:
for i, (k, o) in enumerate(self._param.outputs.items()):
if self.check_if_canceled("CodeExec execution"):
return

if k.find("_") == 0:
continue
o["value"] = rt
@@ -190,6 +216,9 @@ class CodeExec(ToolBase, ABC):
self.set_output("_ERROR", "There is no response from sandbox")

except Exception as e:
if self.check_if_canceled("CodeExec execution"):
return

self.set_output("_ERROR", "Exception executing code: " + str(e))

return self.output()
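The CodeExec hunks above base64-encode the user script, wrap it in a `CodeExecutionRequest`, and POST it to the sandbox's `/run` endpoint, then map the returned values onto the component's declared outputs. Below is a hedged, standalone sketch of that request; it builds the payload as a plain dict rather than the pydantic model, and the sandbox host and default port are assumptions taken from the diff text.

```python
import base64
import os
from typing import Optional

import requests

SANDBOX_HOST = os.environ.get("SANDBOX_HOST", "localhost")  # assumption: normally read from settings

def run_in_sandbox(script: str, lang: str = "python", arguments: Optional[dict] = None) -> dict:
    """POST a base64-encoded script to the sandbox /run endpoint, as the CodeExec hunk does."""
    code_req = {
        "code_b64": base64.b64encode(script.encode("utf-8")).decode("ascii"),
        "language": lang,
        "arguments": arguments or {},
    }
    resp = requests.post(
        url=f"http://{SANDBOX_HOST}:9385/run",
        json=code_req,
        timeout=int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10 * 60)),
    )
    resp.raise_for_status()
    # the component then copies returned values onto its declared outputs
    return resp.json()
```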
@@ -29,7 +29,7 @@ class CrawlerParam(ToolParamBase):
super().__init__()
self.proxy = None
self.extract_type = "markdown"

def check(self):
self.check_valid_value(self.extract_type, "Type of content from the crawler", ['html', 'markdown', 'content'])

@@ -47,18 +47,24 @@ class Crawler(ToolBase, ABC):
result = asyncio.run(self.get_web(ans))

return Crawler.be_output(result)

except Exception as e:
return Crawler.be_output(f"An unexpected error occurred: {str(e)}")

async def get_web(self, url):
if self.check_if_canceled("Crawler async operation"):
return

proxy = self._param.proxy if self._param.proxy else None
async with AsyncWebCrawler(verbose=True, proxy=proxy) as crawler:
result = await crawler.arun(
url=url,
bypass_cache=True
)

if self.check_if_canceled("Crawler async operation"):
return

if self._param.extract_type == 'html':
return result.cleaned_html
elif self._param.extract_type == 'markdown':
@@ -46,11 +46,16 @@ class DeepL(ComponentBase, ABC):
component_name = "DeepL"

def _run(self, history, **kwargs):
if self.check_if_canceled("DeepL processing"):
return
ans = self.get_input()
ans = " - ".join(ans["content"]) if "content" in ans else ""
if not ans:
return DeepL.be_output("")

if self.check_if_canceled("DeepL processing"):
return

try:
translator = deepl.Translator(self._param.auth_key)
result = translator.translate_text(ans, source_lang=self._param.source_lang,
@@ -58,4 +63,6 @@ class DeepL(ComponentBase, ABC):

return DeepL.be_output(result.text)
except Exception as e:
if self.check_if_canceled("DeepL processing"):
return
DeepL.be_output("**Error**:" + str(e))
@@ -75,17 +75,30 @@ class DuckDuckGo(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("DuckDuckGo processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("DuckDuckGo processing"):
return

try:
if kwargs.get("topic", "general") == "general":
with DDGS() as ddgs:
if self.check_if_canceled("DuckDuckGo processing"):
return

# {'title': '', 'href': '', 'body': ''}
duck_res = ddgs.text(kwargs["query"], max_results=self._param.top_n)

if self.check_if_canceled("DuckDuckGo processing"):
return

self._retrieve_chunks(duck_res,
get_title=lambda r: r["title"],
get_url=lambda r: r.get("href", r.get("url")),
@@ -94,8 +107,15 @@ class DuckDuckGo(ToolBase, ABC):
return self.output("formalized_content")
else:
with DDGS() as ddgs:
if self.check_if_canceled("DuckDuckGo processing"):
return

# {'date': '', 'title': '', 'body': '', 'url': '', 'image': '', 'source': ''}
duck_res = ddgs.news(kwargs["query"], max_results=self._param.top_n)

if self.check_if_canceled("DuckDuckGo processing"):
return

self._retrieve_chunks(duck_res,
get_title=lambda r: r["title"],
get_url=lambda r: r.get("href", r.get("url")),
@@ -103,6 +123,9 @@ class DuckDuckGo(ToolBase, ABC):
self.set_output("json", duck_res)
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("DuckDuckGo processing"):
return

last_e = e
logging.exception(f"DuckDuckGo error: {e}")
time.sleep(self._param.delay_after_error)
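Each tool hands `_retrieve_chunks` a set of accessor lambdas (`get_title`, `get_url`, `get_content`) so that DuckDuckGo dicts, arxiv result objects, GitHub API items, and so on can all be flattened into one chunk shape. The real `_retrieve_chunks` in ToolBase also scores and formats the chunks; the sketch below only illustrates the adapter idea, with names chosen for illustration.

```python
from typing import Any, Callable, Iterable

def normalize_results(
    results: Iterable[Any],
    get_title: Callable[[Any], str],
    get_url: Callable[[Any], str],
    get_content: Callable[[Any], str],
) -> list[dict]:
    """Flatten heterogeneous search results into one chunk shape via accessor callables."""
    chunks = []
    for r in results:
        chunks.append({
            "title": get_title(r),
            "url": get_url(r),
            "content": get_content(r),
        })
    return chunks

# e.g. DuckDuckGo-style dicts vs. arxiv result objects:
# normalize_results(duck_res, lambda r: r["title"], lambda r: r.get("href", r.get("url")), lambda r: r["body"])
# normalize_results(arxiv_res, lambda r: r.title, lambda r: r.pdf_url, lambda r: r.summary)
```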
@@ -101,19 +101,27 @@ class Email(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Email processing"):
return

if not kwargs.get("to_email"):
self.set_output("success", False)
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("Email processing"):
return

try:
# Parse JSON string passed from upstream
email_data = kwargs

# Validate required fields
if "to_email" not in email_data:
return Email.be_output("Missing required field: to_email")
self.set_output("_ERROR", "Missing required field: to_email")
self.set_output("success", False)
return False

# Create email object
msg = MIMEMultipart('alternative')
@@ -133,6 +141,9 @@ class Email(ToolBase, ABC):
# Connect to SMTP server and send
logging.info(f"Connecting to SMTP server {self._param.smtp_server}:{self._param.smtp_port}")

if self.check_if_canceled("Email processing"):
return

context = smtplib.ssl.create_default_context()
with smtplib.SMTP(self._param.smtp_server, self._param.smtp_port) as server:
server.ehlo()
@@ -149,6 +160,10 @@ class Email(ToolBase, ABC):

# Send email
logging.info(f"Sending email to recipients: {recipients}")

if self.check_if_canceled("Email processing"):
return

try:
server.send_message(msg, self._param.email, recipients)
success = True
@ -81,6 +81,8 @@ class ExeSQL(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
return
|
||||
|
||||
def convert_decimals(obj):
|
||||
from decimal import Decimal
|
||||
@ -96,6 +98,9 @@ class ExeSQL(ToolBase, ABC):
|
||||
if not sql:
|
||||
raise Exception("SQL for `ExeSQL` MUST not be empty.")
|
||||
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
return
|
||||
|
||||
vars = self.get_input_elements_from_text(sql)
|
||||
args = {}
|
||||
for k, o in vars.items():
|
||||
@ -108,6 +113,9 @@ class ExeSQL(ToolBase, ABC):
|
||||
self.set_input_value(k, args[k])
|
||||
sql = self.string_format(sql, args)
|
||||
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
return
|
||||
|
||||
sqls = sql.split(";")
|
||||
if self._param.db_type in ["mysql", "mariadb"]:
|
||||
db = pymysql.connect(db=self._param.database, user=self._param.username, host=self._param.host,
|
||||
@ -181,6 +189,10 @@ class ExeSQL(ToolBase, ABC):
|
||||
sql_res = []
|
||||
formalized_content = []
|
||||
for single_sql in sqls:
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
ibm_db.close(conn)
|
||||
return
|
||||
|
||||
single_sql = single_sql.replace("```", "").strip()
|
||||
if not single_sql:
|
||||
continue
|
||||
@ -190,6 +202,9 @@ class ExeSQL(ToolBase, ABC):
|
||||
rows = []
|
||||
row = ibm_db.fetch_assoc(stmt)
|
||||
while row and len(rows) < self._param.max_records:
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
ibm_db.close(conn)
|
||||
return
|
||||
rows.append(row)
|
||||
row = ibm_db.fetch_assoc(stmt)
|
||||
|
||||
@ -220,6 +235,11 @@ class ExeSQL(ToolBase, ABC):
|
||||
sql_res = []
|
||||
formalized_content = []
|
||||
for single_sql in sqls:
|
||||
if self.check_if_canceled("ExeSQL processing"):
|
||||
cursor.close()
|
||||
db.close()
|
||||
return
|
||||
|
||||
single_sql = single_sql.replace('```','')
|
||||
if not single_sql:
|
||||
continue
|
||||
@ -244,6 +264,9 @@ class ExeSQL(ToolBase, ABC):
|
||||
sql_res.append(convert_decimals(single_res.to_dict(orient='records')))
|
||||
formalized_content.append(single_res.to_markdown(index=False, floatfmt=".6f"))
|
||||
|
||||
cursor.close()
|
||||
db.close()
|
||||
|
||||
self.set_output("json", sql_res)
|
||||
self.set_output("formalized_content", "\n\n".join(formalized_content))
|
||||
return self.output("formalized_content")
|
||||
|
||||
@ -59,17 +59,27 @@ class GitHub(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("GitHub processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("GitHub processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
url = 'https://api.github.com/search/repositories?q=' + kwargs["query"] + '&sort=stars&order=desc&per_page=' + str(
|
||||
self._param.top_n)
|
||||
headers = {"Content-Type": "application/vnd.github+json", "X-GitHub-Api-Version": '2022-11-28'}
|
||||
response = requests.get(url=url, headers=headers).json()
|
||||
|
||||
if self.check_if_canceled("GitHub processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(response['items'],
|
||||
get_title=lambda r: r["name"],
|
||||
get_url=lambda r: r["html_url"],
|
||||
@ -77,6 +87,9 @@ class GitHub(ToolBase, ABC):
|
||||
self.set_output("json", response['items'])
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("GitHub processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"GitHub error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -118,6 +118,9 @@ class Google(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Google processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("q"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
@ -132,8 +135,15 @@ class Google(ToolBase, ABC):
|
||||
}
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("Google processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
search = GoogleSearch(params).get_dict()
|
||||
|
||||
if self.check_if_canceled("Google processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(search["organic_results"],
|
||||
get_title=lambda r: r["title"],
|
||||
get_url=lambda r: r["link"],
|
||||
@ -142,6 +152,9 @@ class Google(ToolBase, ABC):
|
||||
self.set_output("json", search["organic_results"])
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("Google processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"Google error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -65,15 +65,25 @@ class GoogleScholar(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("GoogleScholar processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("GoogleScholar processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
scholar_client = scholarly.search_pubs(kwargs["query"], patents=self._param.patents, year_low=self._param.year_low,
|
||||
year_high=self._param.year_high, sort_by=self._param.sort_by)
|
||||
|
||||
if self.check_if_canceled("GoogleScholar processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(scholar_client,
|
||||
get_title=lambda r: r['bib']['title'],
|
||||
get_url=lambda r: r["pub_url"],
|
||||
@ -82,6 +92,9 @@ class GoogleScholar(ToolBase, ABC):
|
||||
self.set_output("json", list(scholar_client))
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("GoogleScholar processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"GoogleScholar error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -50,6 +50,9 @@ class Jin10(ComponentBase, ABC):
|
||||
component_name = "Jin10"
|
||||
|
||||
def _run(self, history, **kwargs):
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
|
||||
ans = self.get_input()
|
||||
ans = " - ".join(ans["content"]) if "content" in ans else ""
|
||||
if not ans:
|
||||
@ -58,6 +61,9 @@ class Jin10(ComponentBase, ABC):
|
||||
jin10_res = []
|
||||
headers = {'secret-key': self._param.secret_key}
|
||||
try:
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
|
||||
if self._param.type == "flash":
|
||||
params = {
|
||||
'category': self._param.flash_type,
|
||||
@ -69,6 +75,8 @@ class Jin10(ComponentBase, ABC):
|
||||
headers=headers, data=json.dumps(params))
|
||||
response = response.json()
|
||||
for i in response['data']:
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
jin10_res.append({"content": i['data']['content']})
|
||||
if self._param.type == "calendar":
|
||||
params = {
|
||||
@ -79,6 +87,8 @@ class Jin10(ComponentBase, ABC):
|
||||
headers=headers, data=json.dumps(params))
|
||||
|
||||
response = response.json()
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
jin10_res.append({"content": pd.DataFrame(response['data']).to_markdown()})
|
||||
if self._param.type == "symbols":
|
||||
params = {
|
||||
@ -90,8 +100,12 @@ class Jin10(ComponentBase, ABC):
|
||||
url='https://open-data-api.jin10.com/data-api/' + self._param.symbols_datatype + '?type=' + self._param.symbols_type,
|
||||
headers=headers, data=json.dumps(params))
|
||||
response = response.json()
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
if self._param.symbols_datatype == "symbols":
|
||||
for i in response['data']:
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
i['Commodity Code'] = i['c']
|
||||
i['Stock Exchange'] = i['e']
|
||||
i['Commodity Name'] = i['n']
|
||||
@ -99,6 +113,8 @@ class Jin10(ComponentBase, ABC):
|
||||
del i['c'], i['e'], i['n'], i['t']
|
||||
if self._param.symbols_datatype == "quotes":
|
||||
for i in response['data']:
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
i['Selling Price'] = i['a']
|
||||
i['Buying Price'] = i['b']
|
||||
i['Commodity Code'] = i['c']
|
||||
@ -120,8 +136,12 @@ class Jin10(ComponentBase, ABC):
|
||||
url='https://open-data-api.jin10.com/data-api/news',
|
||||
headers=headers, data=json.dumps(params))
|
||||
response = response.json()
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
jin10_res.append({"content": pd.DataFrame(response['data']).to_markdown()})
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("Jin10 processing"):
|
||||
return
|
||||
return Jin10.be_output("**ERROR**: " + str(e))
|
||||
|
||||
if not jin10_res:
|
||||
|
||||
@ -71,23 +71,40 @@ class PubMed(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("PubMed processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("PubMed processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
Entrez.email = self._param.email
|
||||
pubmedids = Entrez.read(Entrez.esearch(db='pubmed', retmax=self._param.top_n, term=kwargs["query"]))['IdList']
|
||||
|
||||
if self.check_if_canceled("PubMed processing"):
|
||||
return
|
||||
|
||||
pubmedcnt = ET.fromstring(re.sub(r'<(/?)b>|<(/?)i>', '', Entrez.efetch(db='pubmed', id=",".join(pubmedids),
|
||||
retmode="xml").read().decode("utf-8")))
|
||||
|
||||
if self.check_if_canceled("PubMed processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(pubmedcnt.findall("PubmedArticle"),
|
||||
get_title=lambda child: child.find("MedlineCitation").find("Article").find("ArticleTitle").text,
|
||||
get_url=lambda child: "https://pubmed.ncbi.nlm.nih.gov/" + child.find("MedlineCitation").find("PMID").text,
|
||||
get_content=lambda child: self._format_pubmed_content(child),)
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("PubMed processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"PubMed error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -58,12 +58,18 @@ class QWeather(ComponentBase, ABC):
|
||||
component_name = "QWeather"
|
||||
|
||||
def _run(self, history, **kwargs):
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
|
||||
ans = self.get_input()
|
||||
ans = "".join(ans["content"]) if "content" in ans else ""
|
||||
if not ans:
|
||||
return QWeather.be_output("")
|
||||
|
||||
try:
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
|
||||
response = requests.get(
|
||||
url="https://geoapi.qweather.com/v2/city/lookup?location=" + ans + "&key=" + self._param.web_apikey).json()
|
||||
if response["code"] == "200":
|
||||
@ -71,16 +77,23 @@ class QWeather(ComponentBase, ABC):
|
||||
else:
|
||||
return QWeather.be_output("**Error**" + self._param.error_code[response["code"]])
|
||||
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
|
||||
base_url = "https://api.qweather.com/v7/" if self._param.user_type == 'paid' else "https://devapi.qweather.com/v7/"
|
||||
|
||||
if self._param.type == "weather":
|
||||
url = base_url + "weather/" + self._param.time_period + "?location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
|
||||
response = requests.get(url=url).json()
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
if response["code"] == "200":
|
||||
if self._param.time_period == "now":
|
||||
return QWeather.be_output(str(response["now"]))
|
||||
else:
|
||||
qweather_res = [{"content": str(i) + "\n"} for i in response["daily"]]
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
if not qweather_res:
|
||||
return QWeather.be_output("")
|
||||
|
||||
@ -92,6 +105,8 @@ class QWeather(ComponentBase, ABC):
|
||||
elif self._param.type == "indices":
|
||||
url = base_url + "indices/1d?type=0&location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
|
||||
response = requests.get(url=url).json()
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
if response["code"] == "200":
|
||||
indices_res = response["daily"][0]["date"] + "\n" + "\n".join(
|
||||
[i["name"] + ": " + i["category"] + ", " + i["text"] for i in response["daily"]])
|
||||
@ -103,9 +118,13 @@ class QWeather(ComponentBase, ABC):
|
||||
elif self._param.type == "airquality":
|
||||
url = base_url + "air/now?location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
|
||||
response = requests.get(url=url).json()
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
if response["code"] == "200":
|
||||
return QWeather.be_output(str(response["now"]))
|
||||
else:
|
||||
return QWeather.be_output("**Error**" + self._param.error_code[response["code"]])
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("Qweather processing"):
|
||||
return
|
||||
return QWeather.be_output("**Error**" + str(e))
|
||||
|
||||
@ -24,7 +24,7 @@ from api.db.services.document_service import DocumentService
|
||||
from api.db.services.dialog_service import meta_filter
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.llm_service import LLMBundle
|
||||
from api import settings
|
||||
from common import settings
|
||||
from common.connection_utils import timeout
|
||||
from rag.app.tag import label_question
|
||||
from rag.prompts.generator import cross_languages, kb_prompt, gen_meta_filter
|
||||
@ -82,8 +82,12 @@ class Retrieval(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Retrieval processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", self._param.empty_response)
|
||||
return
|
||||
|
||||
kb_ids: list[str] = []
|
||||
for id in self._param.kb_ids:
|
||||
@ -122,7 +126,7 @@ class Retrieval(ToolBase, ABC):
|
||||
vars = self.get_input_elements_from_text(kwargs["query"])
|
||||
vars = {k:o["value"] for k,o in vars.items()}
|
||||
query = self.string_format(kwargs["query"], vars)
|
||||
|
||||
|
||||
doc_ids=[]
|
||||
if self._param.meta_data_filter!={}:
|
||||
metas = DocumentService.get_meta_by_kbs(kb_ids)
|
||||
@ -135,7 +139,7 @@ class Retrieval(ToolBase, ABC):
|
||||
elif self._param.meta_data_filter.get("method") == "manual":
|
||||
filters=self._param.meta_data_filter["manual"]
|
||||
for flt in filters:
|
||||
pat = re.compile(r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z:0-9_.-]+|sys\.[a-z_]+)\} *\}*")
|
||||
pat = re.compile(self.variable_ref_patt)
|
||||
s = flt["value"]
|
||||
out_parts = []
|
||||
last = 0
|
||||
@ -184,9 +188,14 @@ class Retrieval(ToolBase, ABC):
|
||||
rerank_mdl=rerank_mdl,
|
||||
rank_feature=label_question(query, kbs),
|
||||
)
|
||||
if self.check_if_canceled("Retrieval processing"):
|
||||
return
|
||||
|
||||
if self._param.toc_enhance:
|
||||
chat_mdl = LLMBundle(self._canvas._tenant_id, LLMType.CHAT)
|
||||
cks = settings.retriever.retrieval_by_toc(query, kbinfos["chunks"], [kb.tenant_id for kb in kbs], chat_mdl, self._param.top_n)
|
||||
if self.check_if_canceled("Retrieval processing"):
|
||||
return
|
||||
if cks:
|
||||
kbinfos["chunks"] = cks
|
||||
if self._param.use_kg:
|
||||
@ -195,6 +204,8 @@ class Retrieval(ToolBase, ABC):
|
||||
kb_ids,
|
||||
embd_mdl,
|
||||
LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT))
|
||||
if self.check_if_canceled("Retrieval processing"):
|
||||
return
|
||||
if ck["content_with_weight"]:
|
||||
kbinfos["chunks"].insert(0, ck)
|
||||
else:
|
||||
@ -202,6 +213,8 @@ class Retrieval(ToolBase, ABC):
|
||||
|
||||
if self._param.use_kg and kbs:
|
||||
ck = settings.kg_retriever.retrieval(query, [kb.tenant_id for kb in kbs], filtered_kb_ids, embd_mdl, LLMBundle(kbs[0].tenant_id, LLMType.CHAT))
|
||||
if self.check_if_canceled("Retrieval processing"):
|
||||
return
|
||||
if ck["content_with_weight"]:
|
||||
ck["content"] = ck["content_with_weight"]
|
||||
del ck["content_with_weight"]
|
||||
|
||||
@ -79,6 +79,9 @@ class SearXNG(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
# Gracefully handle try-run without inputs
|
||||
query = kwargs.get("query")
|
||||
if not query or not isinstance(query, str) or not query.strip():
|
||||
@ -93,6 +96,9 @@ class SearXNG(ToolBase, ABC):
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
search_params = {
|
||||
'q': query,
|
||||
@ -110,6 +116,9 @@ class SearXNG(ToolBase, ABC):
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
data = response.json()
|
||||
|
||||
if not data or not isinstance(data, dict):
|
||||
@ -121,6 +130,9 @@ class SearXNG(ToolBase, ABC):
|
||||
|
||||
results = results[:self._param.top_n]
|
||||
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(results,
|
||||
get_title=lambda r: r.get("title", ""),
|
||||
get_url=lambda r: r.get("url", ""),
|
||||
@ -130,10 +142,16 @@ class SearXNG(ToolBase, ABC):
|
||||
return self.output("formalized_content")
|
||||
|
||||
except requests.RequestException as e:
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
last_e = f"Network error: {e}"
|
||||
logging.exception(f"SearXNG network error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("SearXNG processing"):
|
||||
return
|
||||
|
||||
last_e = str(e)
|
||||
logging.exception(f"SearXNG error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -103,6 +103,9 @@ class TavilySearch(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("TavilySearch processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
@ -113,10 +116,16 @@ class TavilySearch(ToolBase, ABC):
|
||||
if fld not in kwargs:
|
||||
kwargs[fld] = getattr(self._param, fld)
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("TavilySearch processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
kwargs["include_images"] = False
|
||||
kwargs["include_raw_content"] = False
|
||||
res = self.tavily_client.search(**kwargs)
|
||||
if self.check_if_canceled("TavilySearch processing"):
|
||||
return
|
||||
|
||||
self._retrieve_chunks(res["results"],
|
||||
get_title=lambda r: r["title"],
|
||||
get_url=lambda r: r["url"],
|
||||
@ -125,6 +134,9 @@ class TavilySearch(ToolBase, ABC):
|
||||
self.set_output("json", res["results"])
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("TavilySearch processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"Tavily error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
@ -201,6 +213,9 @@ class TavilyExtract(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("TavilyExtract processing"):
|
||||
return
|
||||
|
||||
self.tavily_client = TavilyClient(api_key=self._param.api_key)
|
||||
last_e = None
|
||||
for fld in ["urls", "extract_depth", "format"]:
|
||||
@ -209,12 +224,21 @@ class TavilyExtract(ToolBase, ABC):
|
||||
if kwargs.get("urls") and isinstance(kwargs["urls"], str):
|
||||
kwargs["urls"] = kwargs["urls"].split(",")
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("TavilyExtract processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
kwargs["include_images"] = False
|
||||
res = self.tavily_client.extract(**kwargs)
|
||||
if self.check_if_canceled("TavilyExtract processing"):
|
||||
return
|
||||
|
||||
self.set_output("json", res["results"])
|
||||
return self.output("json")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("TavilyExtract processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"Tavily error: {e}")
|
||||
if last_e:
|
||||
|
||||
@ -43,12 +43,18 @@ class TuShare(ComponentBase, ABC):
|
||||
component_name = "TuShare"
|
||||
|
||||
def _run(self, history, **kwargs):
|
||||
if self.check_if_canceled("TuShare processing"):
|
||||
return
|
||||
|
||||
ans = self.get_input()
|
||||
ans = ",".join(ans["content"]) if "content" in ans else ""
|
||||
if not ans:
|
||||
return TuShare.be_output("")
|
||||
|
||||
try:
|
||||
if self.check_if_canceled("TuShare processing"):
|
||||
return
|
||||
|
||||
tus_res = []
|
||||
params = {
|
||||
"api_name": "news",
|
||||
@ -58,12 +64,18 @@ class TuShare(ComponentBase, ABC):
|
||||
}
|
||||
response = requests.post(url="http://api.tushare.pro", data=json.dumps(params).encode('utf-8'))
|
||||
response = response.json()
|
||||
if self.check_if_canceled("TuShare processing"):
|
||||
return
|
||||
if response['code'] != 0:
|
||||
return TuShare.be_output(response['msg'])
|
||||
df = pd.DataFrame(response['data']['items'])
|
||||
df.columns = response['data']['fields']
|
||||
if self.check_if_canceled("TuShare processing"):
|
||||
return
|
||||
tus_res.append({"content": (df[df['content'].str.contains(self._param.keyword, case=False)]).to_markdown()})
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("TuShare processing"):
|
||||
return
|
||||
return TuShare.be_output("**ERROR**: " + str(e))
|
||||
|
||||
if not tus_res:
|
||||
|
||||
@ -70,19 +70,31 @@ class WenCai(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("WenCai processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("report", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("WenCai processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
wencai_res = []
|
||||
res = pywencai.get(query=kwargs["query"], query_type=self._param.query_type, perpage=self._param.top_n)
|
||||
if self.check_if_canceled("WenCai processing"):
|
||||
return
|
||||
|
||||
if isinstance(res, pd.DataFrame):
|
||||
wencai_res.append(res.to_markdown())
|
||||
elif isinstance(res, dict):
|
||||
for item in res.items():
|
||||
if self.check_if_canceled("WenCai processing"):
|
||||
return
|
||||
|
||||
if isinstance(item[1], list):
|
||||
wencai_res.append(item[0] + "\n" + pd.DataFrame(item[1]).to_markdown())
|
||||
elif isinstance(item[1], str):
|
||||
@ -100,6 +112,9 @@ class WenCai(ToolBase, ABC):
|
||||
self.set_output("report", "\n\n".join(wencai_res))
|
||||
return self.output("report")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("WenCai processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"WenCai error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -66,17 +66,26 @@ class Wikipedia(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("Wikipedia processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("query"):
|
||||
self.set_output("formalized_content", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("Wikipedia processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
wikipedia.set_lang(self._param.language)
|
||||
wiki_engine = wikipedia
|
||||
pages = []
|
||||
for p in wiki_engine.search(kwargs["query"], results=self._param.top_n):
|
||||
if self.check_if_canceled("Wikipedia processing"):
|
||||
return
|
||||
|
||||
try:
|
||||
pages.append(wikipedia.page(p))
|
||||
except Exception:
|
||||
@ -87,6 +96,9 @@ class Wikipedia(ToolBase, ABC):
|
||||
get_content=lambda r: r.summary)
|
||||
return self.output("formalized_content")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("Wikipedia processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"Wikipedia error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@ -74,15 +74,24 @@ class YahooFinance(ToolBase, ABC):
|
||||
|
||||
@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
|
||||
def _invoke(self, **kwargs):
|
||||
if self.check_if_canceled("YahooFinance processing"):
|
||||
return
|
||||
|
||||
if not kwargs.get("stock_code"):
|
||||
self.set_output("report", "")
|
||||
return ""
|
||||
|
||||
last_e = ""
|
||||
for _ in range(self._param.max_retries+1):
|
||||
if self.check_if_canceled("YahooFinance processing"):
|
||||
return
|
||||
|
||||
yohoo_res = []
|
||||
try:
|
||||
msft = yf.Ticker(kwargs["stock_code"])
|
||||
if self.check_if_canceled("YahooFinance processing"):
|
||||
return
|
||||
|
||||
if self._param.info:
|
||||
yohoo_res.append("# Information:\n" + pd.Series(msft.info).to_markdown() + "\n")
|
||||
if self._param.history:
|
||||
@ -100,6 +109,9 @@ class YahooFinance(ToolBase, ABC):
|
||||
self.set_output("report", "\n\n".join(yohoo_res))
|
||||
return self.output("report")
|
||||
except Exception as e:
|
||||
if self.check_if_canceled("YahooFinance processing"):
|
||||
return
|
||||
|
||||
last_e = e
|
||||
logging.exception(f"YahooFinance error: {e}")
|
||||
time.sleep(self._param.delay_after_error)
|
||||
|
||||
@@ -33,7 +33,7 @@ from api.utils import commands
from flask_mail import Mail
from flask_session import Session
from flask_login import LoginManager
from api import settings
from common import settings
from api.utils.api_utils import server_error_response
from api.constants import API_VERSION
@ -32,7 +32,6 @@ from api.db.services.file_service import FileService
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.task_service import queue_tasks, TaskService
|
||||
from api.db.services.user_service import UserTenantService
|
||||
from api import settings
|
||||
from common.misc_utils import get_uuid
|
||||
from common.constants import RetCode, VALID_TASK_STATUS, LLMType, ParserType, FileSource
|
||||
from api.utils.api_utils import server_error_response, get_data_error_result, get_json_result, validate_request, \
|
||||
@ -41,13 +40,13 @@ from api.utils.api_utils import server_error_response, get_data_error_result, ge
|
||||
from api.utils.file_utils import filename_type, thumbnail
|
||||
from rag.app.tag import label_question
|
||||
from rag.prompts.generator import keyword_extraction
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from common.time_utils import current_timestamp, datetime_format
|
||||
|
||||
from api.db.services.canvas_service import UserCanvasService
|
||||
from agent.canvas import Canvas
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route('/new_token', methods=['POST']) # noqa: F821
|
||||
@ -428,10 +427,10 @@ def upload():
|
||||
message="This type of file has not been supported yet!")
|
||||
|
||||
location = filename
|
||||
while STORAGE_IMPL.obj_exist(kb_id, location):
|
||||
while settings.STORAGE_IMPL.obj_exist(kb_id, location):
|
||||
location += "_"
|
||||
blob = request.files['file'].read()
|
||||
STORAGE_IMPL.put(kb_id, location, blob)
|
||||
settings.STORAGE_IMPL.put(kb_id, location, blob)
|
||||
doc = {
|
||||
"id": get_uuid(),
|
||||
"kb_id": kb.id,
|
||||
@ -467,10 +466,7 @@ def upload():
|
||||
if "run" in form_data.keys():
|
||||
if request.form.get("run").strip() == "1":
|
||||
try:
|
||||
info = {"run": 1, "progress": 0}
|
||||
info["progress_msg"] = ""
|
||||
info["chunk_num"] = 0
|
||||
info["token_num"] = 0
|
||||
info = {"run": 1, "progress": 0, "progress_msg": "", "chunk_num": 0, "token_num": 0}
|
||||
DocumentService.update_by_id(doc["id"], info)
|
||||
# if str(req["run"]) == TaskStatus.CANCEL.value:
|
||||
tenant_id = DocumentService.get_tenant_id(doc["id"])
|
||||
@ -699,7 +695,7 @@ def document_rm():
|
||||
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
||||
File2DocumentService.delete_by_document_id(doc_id)
|
||||
|
||||
STORAGE_IMPL.rm(b, n)
|
||||
settings.STORAGE_IMPL.rm(b, n)
|
||||
except Exception as e:
|
||||
errors += str(e)
|
||||
|
||||
@ -727,8 +723,7 @@ def completion_faq():
|
||||
if "quote" not in req:
|
||||
req["quote"] = True
|
||||
|
||||
msg = []
|
||||
msg.append({"role": "user", "content": req["word"]})
|
||||
msg = [{"role": "user", "content": req["word"]}]
|
||||
if not msg[-1].get("id"):
|
||||
msg[-1]["id"] = get_uuid()
|
||||
message_id = msg[-1]["id"]
|
||||
@ -792,7 +787,7 @@ def completion_faq():
|
||||
if ans["reference"]["chunks"][chunk_idx]["img_id"]:
|
||||
try:
|
||||
bkt, nm = ans["reference"]["chunks"][chunk_idx]["img_id"].split("-")
|
||||
response = STORAGE_IMPL.get(bkt, nm)
|
||||
response = settings.STORAGE_IMPL.get(bkt, nm)
|
||||
data_type_picture["url"] = base64.b64encode(response).decode('utf-8')
|
||||
data.append(data_type_picture)
|
||||
break
|
||||
@ -837,7 +832,7 @@ def completion_faq():
|
||||
if ans["reference"]["chunks"][chunk_idx]["img_id"]:
|
||||
try:
|
||||
bkt, nm = ans["reference"]["chunks"][chunk_idx]["img_id"].split("-")
|
||||
response = STORAGE_IMPL.get(bkt, nm)
|
||||
response = settings.STORAGE_IMPL.get(bkt, nm)
|
||||
data_type_picture["url"] = base64.b64encode(response).decode('utf-8')
|
||||
data.append(data_type_picture)
|
||||
break
|
||||
|
||||
@@ -34,7 +34,7 @@ class GithubOAuthClient(OAuthClient):

def fetch_user_info(self, access_token, **kwargs):
"""
Fetch github user info.
Fetch GitHub user info.
"""
user_info = {}
try:
@@ -43,7 +43,8 @@ class OIDCClient(OAuthClient):
self.jwks_uri = config['jwks_uri']

def _load_oidc_metadata(self, issuer):
@staticmethod
def _load_oidc_metadata(issuer):
"""
Load OIDC metadata from `/.well-known/openid-configuration`.
"""
@ -25,7 +25,6 @@ from flask import request, Response
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from agent.component import LLM
|
||||
from api import settings
|
||||
from api.db import CanvasCategory, FileType
|
||||
from api.db.services.canvas_service import CanvasTemplateService, UserCanvasService, API4ConversationService
|
||||
from api.db.services.document_service import DocumentService
|
||||
@ -46,6 +45,7 @@ from api.utils.file_utils import filename_type, read_potential_broken_pdf
|
||||
from rag.flow.pipeline import Pipeline
|
||||
from rag.nlp import search
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route('/templates', methods=['GET']) # noqa: F821
|
||||
@ -156,7 +156,7 @@ def run():
|
||||
return get_json_result(data={"message_id": task_id})
|
||||
|
||||
try:
|
||||
canvas = Canvas(cvs.dsl, current_user.id, req["id"])
|
||||
canvas = Canvas(cvs.dsl, current_user.id)
|
||||
except Exception as e:
|
||||
return server_error_response(e)
|
||||
|
||||
@ -168,8 +168,10 @@ def run():
|
||||
|
||||
cvs.dsl = json.loads(str(canvas))
|
||||
UserCanvasService.update_by_id(req["id"], cvs.to_dict())
|
||||
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
canvas.cancel_task()
|
||||
yield "data:" + json.dumps({"code": 500, "message": str(e), "data": False}, ensure_ascii=False) + "\n\n"
|
||||
|
||||
resp = Response(sse(), mimetype="text/event-stream")
|
||||
@ -177,6 +179,7 @@ def run():
|
||||
resp.headers.add_header("Connection", "keep-alive")
|
||||
resp.headers.add_header("X-Accel-Buffering", "no")
|
||||
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
|
||||
resp.call_on_close(lambda: canvas.cancel_task())
|
||||
return resp
|
||||
|
||||
|
||||
@ -410,27 +413,27 @@ def test_db_connect():
|
||||
ibm_db.close(conn)
|
||||
return get_json_result(data="Database Connection Successful!")
|
||||
elif req["db_type"] == 'trino':
|
||||
def _parse_catalog_schema(db: str):
|
||||
if not db:
|
||||
def _parse_catalog_schema(db_name: str):
|
||||
if not db_name:
|
||||
return None, None
|
||||
if "." in db:
|
||||
c, s = db.split(".", 1)
|
||||
elif "/" in db:
|
||||
c, s = db.split("/", 1)
|
||||
if "." in db_name:
|
||||
catalog_name, schema_name = db_name.split(".", 1)
|
||||
elif "/" in db_name:
|
||||
catalog_name, schema_name = db_name.split("/", 1)
|
||||
else:
|
||||
c, s = db, "default"
|
||||
return c, s
|
||||
catalog_name, schema_name = db_name, "default"
|
||||
return catalog_name, schema_name
|
||||
try:
|
||||
import trino
|
||||
import os
|
||||
from trino.auth import BasicAuthentication
|
||||
except Exception:
|
||||
return server_error_response("Missing dependency 'trino'. Please install: pip install trino")
|
||||
except Exception as e:
|
||||
return server_error_response(f"Missing dependency 'trino'. Please install: pip install trino, detail: {e}")
|
||||
|
||||
catalog, schema = _parse_catalog_schema(req["database"])
|
||||
if not catalog:
|
||||
return server_error_response("For Trino, 'database' must be 'catalog.schema' or at least 'catalog'.")
|
||||
|
||||
|
||||
http_scheme = "https" if os.environ.get("TRINO_USE_TLS", "0") == "1" else "http"
|
||||
|
||||
auth = None
|
||||
@ -479,7 +482,6 @@ def getlistversion(canvas_id):
|
||||
@login_required
|
||||
def getversion( version_id):
|
||||
try:
|
||||
|
||||
e, version = UserCanvasVersionService.get_by_id(version_id)
|
||||
if version:
|
||||
return get_json_result(data=version.to_dict())
|
||||
@ -546,11 +548,11 @@ def trace():
|
||||
cvs_id = request.args.get("canvas_id")
|
||||
msg_id = request.args.get("message_id")
|
||||
try:
|
||||
bin = REDIS_CONN.get(f"{cvs_id}-{msg_id}-logs")
|
||||
if not bin:
|
||||
binary = REDIS_CONN.get(f"{cvs_id}-{msg_id}-logs")
|
||||
if not binary:
|
||||
return get_json_result(data={})
|
||||
|
||||
return get_json_result(data=json.loads(bin.encode("utf-8")))
|
||||
return get_json_result(data=json.loads(binary.encode("utf-8")))
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
|
||||
@ -604,4 +606,4 @@ def download():
|
||||
id = request.args.get("id")
|
||||
created_by = request.args.get("created_by")
|
||||
blob = FileService.get_blob(created_by, id)
|
||||
return flask.make_response(blob)
|
||||
return flask.make_response(blob)
|
||||
|
||||
@ -21,7 +21,6 @@ import xxhash
|
||||
from flask import request
|
||||
from flask_login import current_user, login_required
|
||||
|
||||
from api import settings
|
||||
from api.db.services.dialog_service import meta_filter
|
||||
from api.db.services.document_service import DocumentService
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
@ -33,9 +32,9 @@ from rag.app.qa import beAdoc, rmPrefix
|
||||
from rag.app.tag import label_question
|
||||
from rag.nlp import rag_tokenizer, search
|
||||
from rag.prompts.generator import gen_meta_filter, cross_languages, keyword_extraction
|
||||
from rag.settings import PAGERANK_FLD
|
||||
from common.string_utils import remove_redundant_spaces
|
||||
from common.constants import RetCode, LLMType, ParserType
|
||||
from common.constants import RetCode, LLMType, ParserType, PAGERANK_FLD
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route('/list', methods=['POST']) # noqa: F821
|
||||
@ -200,7 +199,6 @@ def switch():
|
||||
@login_required
|
||||
@validate_request("chunk_ids", "doc_id")
|
||||
def rm():
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
req = request.json
|
||||
try:
|
||||
e, doc = DocumentService.get_by_id(req["doc_id"])
|
||||
@ -214,8 +212,8 @@ def rm():
|
||||
chunk_number = len(deleted_chunk_ids)
|
||||
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
|
||||
for cid in deleted_chunk_ids:
|
||||
if STORAGE_IMPL.obj_exist(doc.kb_id, cid):
|
||||
STORAGE_IMPL.rm(doc.kb_id, cid)
|
||||
if settings.STORAGE_IMPL.obj_exist(doc.kb_id, cid):
|
||||
settings.STORAGE_IMPL.rm(doc.kb_id, cid)
|
||||
return get_json_result(data=True)
|
||||
except Exception as e:
|
||||
return server_error_response(e)
|
||||
|
||||
@ -13,16 +13,26 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from html import escape
|
||||
from typing import Any
|
||||
|
||||
from flask import request
|
||||
from flask_login import login_required, current_user
|
||||
from flask import make_response, request
|
||||
from flask_login import current_user, login_required
|
||||
from google_auth_oauthlib.flow import Flow
|
||||
|
||||
from api.db import InputType
|
||||
from api.db.services.connector_service import ConnectorService, Connector2KbService, SyncLogsService
|
||||
from api.utils.api_utils import get_json_result, validate_request, get_data_error_result
|
||||
from common.misc_utils import get_uuid
|
||||
from api.db.services.connector_service import ConnectorService, SyncLogsService
|
||||
from api.utils.api_utils import get_data_error_result, get_json_result, validate_request
|
||||
from common.constants import RetCode, TaskStatus
|
||||
from common.data_source.config import GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI, DocumentSource
|
||||
from common.data_source.google_util.constant import GOOGLE_DRIVE_WEB_OAUTH_POPUP_TEMPLATE, GOOGLE_SCOPES
|
||||
from common.misc_utils import get_uuid
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
|
||||
|
||||
@manager.route("/set", methods=["POST"]) # noqa: F821
|
||||
@login_required
|
||||
@ -42,12 +52,12 @@ def set_connector():
|
||||
"config": req["config"],
|
||||
"refresh_freq": int(req.get("refresh_freq", 30)),
|
||||
"prune_freq": int(req.get("prune_freq", 720)),
|
||||
"timeout_secs": int(req.get("timeout_secs", 60*29)),
|
||||
"status": TaskStatus.SCHEDULE
|
||||
"timeout_secs": int(req.get("timeout_secs", 60 * 29)),
|
||||
"status": TaskStatus.SCHEDULE,
|
||||
}
|
||||
conn["status"] = TaskStatus.SCHEDULE
|
||||
ConnectorService.save(**conn)
|
||||
|
||||
ConnectorService.save(**conn)
|
||||
time.sleep(1)
|
||||
e, conn = ConnectorService.get_by_id(req["id"])
|
||||
|
||||
@ -73,7 +83,8 @@ def get_connector(connector_id):
|
||||
@login_required
|
||||
def list_logs(connector_id):
|
||||
req = request.args.to_dict(flat=True)
|
||||
return get_json_result(data=SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15))))
|
||||
arr, total = SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15)))
|
||||
return get_json_result(data={"total": total, "logs": arr})
|
||||
|
||||
|
||||
@manager.route("/<connector_id>/resume", methods=["PUT"]) # noqa: F821
|
||||
@ -87,14 +98,14 @@ def resume(connector_id):
|
||||
return get_json_result(data=True)
|
||||
|
||||
|
||||
@manager.route("/<connector_id>/link", methods=["POST"]) # noqa: F821
|
||||
@validate_request("kb_ids")
|
||||
@manager.route("/<connector_id>/rebuild", methods=["PUT"]) # noqa: F821
|
||||
@login_required
|
||||
def link_kb(connector_id):
|
||||
@validate_request("kb_id")
|
||||
def rebuild(connector_id):
|
||||
req = request.json
|
||||
errors = Connector2KbService.link_kb(connector_id, req["kb_ids"], current_user.id)
|
||||
if errors:
|
||||
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
|
||||
err = ConnectorService.rebuild(req["kb_id"], connector_id, current_user.id)
|
||||
if err:
|
||||
return get_json_result(data=False, message=err, code=RetCode.SERVER_ERROR)
|
||||
return get_json_result(data=True)
|
||||
|
||||
|
||||
@ -103,4 +114,182 @@ def link_kb(connector_id):
|
||||
def rm_connector(connector_id):
|
||||
ConnectorService.resume(connector_id, TaskStatus.CANCEL)
|
||||
ConnectorService.delete_by_id(connector_id)
|
||||
return get_json_result(data=True)
|
||||
return get_json_result(data=True)
|
||||
|
||||
|
||||
GOOGLE_WEB_FLOW_STATE_PREFIX = "google_drive_web_flow_state"
|
||||
GOOGLE_WEB_FLOW_RESULT_PREFIX = "google_drive_web_flow_result"
|
||||
WEB_FLOW_TTL_SECS = 15 * 60
|
||||
|
||||
|
||||
def _web_state_cache_key(flow_id: str) -> str:
|
||||
return f"{GOOGLE_WEB_FLOW_STATE_PREFIX}:{flow_id}"
|
||||
|
||||
|
||||
def _web_result_cache_key(flow_id: str) -> str:
|
||||
return f"{GOOGLE_WEB_FLOW_RESULT_PREFIX}:{flow_id}"
|
||||
|
||||
|
||||
def _load_credentials(payload: str | dict[str, Any]) -> dict[str, Any]:
|
||||
if isinstance(payload, dict):
|
||||
return payload
|
||||
try:
|
||||
return json.loads(payload)
|
||||
except json.JSONDecodeError as exc: # pragma: no cover - defensive
|
||||
raise ValueError("Invalid Google credentials JSON.") from exc
|
||||
|
||||
|
||||
def _get_web_client_config(credentials: dict[str, Any]) -> dict[str, Any]:
|
||||
web_section = credentials.get("web")
|
||||
if not isinstance(web_section, dict):
|
||||
raise ValueError("Google OAuth JSON must include a 'web' client configuration to use browser-based authorization.")
|
||||
return {"web": web_section}
|
||||
|
||||
|
||||
def _render_web_oauth_popup(flow_id: str, success: bool, message: str):
|
||||
status = "success" if success else "error"
|
||||
auto_close = "window.close();" if success else ""
|
||||
escaped_message = escape(message)
|
||||
payload_json = json.dumps(
|
||||
{
|
||||
"type": "ragflow-google-drive-oauth",
|
||||
"status": status,
|
||||
"flowId": flow_id or "",
|
||||
"message": message,
|
||||
}
|
||||
)
|
||||
html = GOOGLE_DRIVE_WEB_OAUTH_POPUP_TEMPLATE.format(
|
||||
heading="Authorization complete" if success else "Authorization failed",
|
||||
message=escaped_message,
|
||||
payload_json=payload_json,
|
||||
auto_close=auto_close,
|
||||
)
|
||||
response = make_response(html, 200)
|
||||
response.headers["Content-Type"] = "text/html; charset=utf-8"
|
||||
return response
|
||||
|
||||
|
||||
@manager.route("/google-drive/oauth/web/start", methods=["POST"]) # noqa: F821
|
||||
@login_required
|
||||
@validate_request("credentials")
|
||||
def start_google_drive_web_oauth():
|
||||
if not GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI:
|
||||
return get_json_result(
|
||||
code=RetCode.SERVER_ERROR,
|
||||
message="Google Drive OAuth redirect URI is not configured on the server.",
|
||||
)
|
||||
|
||||
req = request.json or {}
|
||||
raw_credentials = req.get("credentials", "")
|
||||
try:
|
||||
credentials = _load_credentials(raw_credentials)
|
||||
except ValueError as exc:
|
||||
return get_json_result(code=RetCode.ARGUMENT_ERROR, message=str(exc))
|
||||
|
||||
if credentials.get("refresh_token"):
|
||||
return get_json_result(
|
||||
code=RetCode.ARGUMENT_ERROR,
|
||||
message="Uploaded credentials already include a refresh token.",
|
||||
)
|
||||
|
||||
try:
|
||||
client_config = _get_web_client_config(credentials)
|
||||
except ValueError as exc:
|
||||
return get_json_result(code=RetCode.ARGUMENT_ERROR, message=str(exc))
|
||||
|
||||
flow_id = str(uuid.uuid4())
|
||||
try:
|
||||
flow = Flow.from_client_config(client_config, scopes=GOOGLE_SCOPES[DocumentSource.GOOGLE_DRIVE])
|
||||
flow.redirect_uri = GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI
|
||||
authorization_url, _ = flow.authorization_url(
|
||||
access_type="offline",
|
||||
include_granted_scopes="true",
|
||||
prompt="consent",
|
||||
state=flow_id,
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive
|
||||
logging.exception("Failed to create Google OAuth flow: %s", exc)
|
||||
return get_json_result(
|
||||
code=RetCode.SERVER_ERROR,
|
||||
message="Failed to initialize Google OAuth flow. Please verify the uploaded client configuration.",
        )

    cache_payload = {
        "user_id": current_user.id,
        "client_config": client_config,
        "created_at": int(time.time()),
    }
    REDIS_CONN.set_obj(_web_state_cache_key(flow_id), cache_payload, WEB_FLOW_TTL_SECS)

    return get_json_result(
        data={
            "flow_id": flow_id,
            "authorization_url": authorization_url,
            "expires_in": WEB_FLOW_TTL_SECS,
        }
    )


@manager.route("/google-drive/oauth/web/callback", methods=["GET"])  # noqa: F821
def google_drive_web_oauth_callback():
    state_id = request.args.get("state")
    error = request.args.get("error")
    error_description = request.args.get("error_description") or error

    if not state_id:
        return _render_web_oauth_popup("", False, "Missing OAuth state parameter.")

    state_cache = REDIS_CONN.get(_web_state_cache_key(state_id))
    if not state_cache:
        return _render_web_oauth_popup(state_id, False, "Authorization session expired. Please restart from the main window.")

    state_obj = json.loads(state_cache)
    client_config = state_obj.get("client_config")
    if not client_config:
        REDIS_CONN.delete(_web_state_cache_key(state_id))
        return _render_web_oauth_popup(state_id, False, "Authorization session was invalid. Please retry.")

    if error:
        REDIS_CONN.delete(_web_state_cache_key(state_id))
        return _render_web_oauth_popup(state_id, False, error_description or "Authorization was cancelled.")

    code = request.args.get("code")
    if not code:
        return _render_web_oauth_popup(state_id, False, "Missing authorization code from Google.")

    try:
        flow = Flow.from_client_config(client_config, scopes=GOOGLE_SCOPES[DocumentSource.GOOGLE_DRIVE])
        flow.redirect_uri = GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI
        flow.fetch_token(code=code)
    except Exception as exc:  # pragma: no cover - defensive
        logging.exception("Failed to exchange Google OAuth code: %s", exc)
        REDIS_CONN.delete(_web_state_cache_key(state_id))
        return _render_web_oauth_popup(state_id, False, "Failed to exchange tokens with Google. Please retry.")

    creds_json = flow.credentials.to_json()
    result_payload = {
        "user_id": state_obj.get("user_id"),
        "credentials": creds_json,
    }
    REDIS_CONN.set_obj(_web_result_cache_key(state_id), result_payload, WEB_FLOW_TTL_SECS)
    REDIS_CONN.delete(_web_state_cache_key(state_id))

    return _render_web_oauth_popup(state_id, True, "Authorization completed successfully.")


@manager.route("/google-drive/oauth/web/result", methods=["POST"])  # noqa: F821
@login_required
@validate_request("flow_id")
def poll_google_drive_web_result():
    req = request.json or {}
    flow_id = req.get("flow_id")
    cache_raw = REDIS_CONN.get(_web_result_cache_key(flow_id))
    if not cache_raw:
        return get_json_result(code=RetCode.RUNNING, message="Authorization is still pending.")

    result = json.loads(cache_raw)
    if result.get("user_id") != current_user.id:
        return get_json_result(code=RetCode.PERMISSION_ERROR, message="You are not allowed to access this authorization result.")

    REDIS_CONN.delete(_web_result_cache_key(flow_id))
    return get_json_result(data={"credentials": result.get("credentials")})

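For reference, a minimal client-side sketch of how this popup flow could be driven end to end, assuming the start endpoint above returns "flow_id", "authorization_url" and "expires_in" inside "data" (as shown) and that responses are wrapped by get_json_result as {"code": ..., "data": ...}. The base URL, the start route and the start request body are placeholders, not part of this change.

# Hypothetical client-side polling loop for the web OAuth popup flow above.
# BASE_URL, START_ROUTE and the session cookie handling are assumptions.
import time
import webbrowser

import requests

BASE_URL = "http://localhost:9380/v1/connector"              # assumed mount point
START_ROUTE = f"{BASE_URL}/google-drive/oauth/web"           # assumed start endpoint
RESULT_ROUTE = f"{BASE_URL}/google-drive/oauth/web/result"   # route shown above


def authorize_google_drive(session: requests.Session, client_config: dict) -> str:
    # 1. Ask the server to build the authorization URL (handler shown above).
    start = session.post(START_ROUTE, json={"client_config": client_config}).json()["data"]
    webbrowser.open(start["authorization_url"])  # user completes consent in the popup

    # 2. Poll the result endpoint until the callback has stored the credentials
    #    or the Redis entry expires (expires_in == WEB_FLOW_TTL_SECS).
    deadline = time.time() + start["expires_in"]
    while time.time() < deadline:
        rsp = session.post(RESULT_ROUTE, json={"flow_id": start["flow_id"]}).json()
        if rsp.get("data"):                      # credentials are returned exactly once
            return rsp["data"]["credentials"]
        time.sleep(2)                            # RetCode.RUNNING: still pending, retry
    raise TimeoutError("Google Drive authorization was not completed in time.")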
@@ -23,7 +23,6 @@ import flask
from flask import request
from flask_login import current_user, login_required

from api import settings
from api.common.check_team_permission import check_kb_team_permission
from api.constants import FILE_NAME_LEN_LIMIT, IMG_BASE64_PREFIX
from api.db import VALID_FILE_TYPES, FileType
@@ -48,7 +47,7 @@ from common.constants import RetCode, VALID_TASK_STATUS, ParserType, TaskStatus
from api.utils.web_utils import CONTENT_TYPE_MAP, html2pdf, is_valid_url
from deepdoc.parser.html_parser import RAGFlowHtmlParser
from rag.nlp import search, rag_tokenizer
from rag.utils.storage_factory import STORAGE_IMPL
from common import settings


@manager.route("/upload", methods=["POST"])  # noqa: F821
@@ -119,9 +118,9 @@ def web_crawl():
        raise RuntimeError("This type of file has not been supported yet!")

    location = filename
    while STORAGE_IMPL.obj_exist(kb_id, location):
    while settings.STORAGE_IMPL.obj_exist(kb_id, location):
        location += "_"
    STORAGE_IMPL.put(kb_id, location, blob)
    settings.STORAGE_IMPL.put(kb_id, location, blob)
    doc = {
        "id": get_uuid(),
        "kb_id": kb.id,
@@ -261,6 +260,8 @@ def list_docs():
        for doc_item in docs:
            if doc_item["thumbnail"] and not doc_item["thumbnail"].startswith(IMG_BASE64_PREFIX):
                doc_item["thumbnail"] = f"/v1/document/image/{kb_id}-{doc_item['thumbnail']}"
            if doc_item.get("source_type"):
                doc_item["source_type"] = doc_item["source_type"].split("/")[0]

        return get_json_result(data={"total": tol, "docs": docs})
    except Exception as e:
@@ -501,7 +502,7 @@ def get(doc_id):
            return get_data_error_result(message="Document not found!")

        b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
        response = flask.make_response(STORAGE_IMPL.get(b, n))
        response = flask.make_response(settings.STORAGE_IMPL.get(b, n))

        ext = re.search(r"\.([^.]+)$", doc.name.lower())
        ext = ext.group(1) if ext else None
@@ -577,7 +578,7 @@ def get_image(image_id):
        if len(arr) != 2:
            return get_data_error_result(message="Image not found.")
        bkt, nm = image_id.split("-")
        response = flask.make_response(STORAGE_IMPL.get(bkt, nm))
        response = flask.make_response(settings.STORAGE_IMPL.get(bkt, nm))
        response.headers.set("Content-Type", "image/JPEG")
        return response
    except Exception as e:

@ -34,7 +34,7 @@ from api.db.services.file_service import FileService
|
||||
from api.utils.api_utils import get_json_result
|
||||
from api.utils.file_utils import filename_type
|
||||
from api.utils.web_utils import CONTENT_TYPE_MAP
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route('/upload', methods=['POST']) # noqa: F821
|
||||
@ -95,14 +95,14 @@ def upload():
|
||||
# file type
|
||||
filetype = filename_type(file_obj_names[file_len - 1])
|
||||
location = file_obj_names[file_len - 1]
|
||||
while STORAGE_IMPL.obj_exist(last_folder.id, location):
|
||||
while settings.STORAGE_IMPL.obj_exist(last_folder.id, location):
|
||||
location += "_"
|
||||
blob = file_obj.read()
|
||||
filename = duplicate_name(
|
||||
FileService.query,
|
||||
name=file_obj_names[file_len - 1],
|
||||
parent_id=last_folder.id)
|
||||
STORAGE_IMPL.put(last_folder.id, location, blob)
|
||||
settings.STORAGE_IMPL.put(last_folder.id, location, blob)
|
||||
file = {
|
||||
"id": get_uuid(),
|
||||
"parent_id": last_folder.id,
|
||||
@ -245,7 +245,7 @@ def rm():
|
||||
def _delete_single_file(file):
|
||||
try:
|
||||
if file.location:
|
||||
STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
settings.STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
except Exception:
|
||||
logging.exception(f"Fail to remove object: {file.parent_id}/{file.location}")
|
||||
|
||||
@ -346,10 +346,10 @@ def get(file_id):
|
||||
if not check_file_team_permission(file, current_user.id):
|
||||
return get_json_result(data=False, message='No authorization.', code=RetCode.AUTHENTICATION_ERROR)
|
||||
|
||||
blob = STORAGE_IMPL.get(file.parent_id, file.location)
|
||||
blob = settings.STORAGE_IMPL.get(file.parent_id, file.location)
|
||||
if not blob:
|
||||
b, n = File2DocumentService.get_storage_address(file_id=file_id)
|
||||
blob = STORAGE_IMPL.get(b, n)
|
||||
blob = settings.STORAGE_IMPL.get(b, n)
|
||||
|
||||
response = flask.make_response(blob)
|
||||
ext = re.search(r"\.([^.]+)$", file.name.lower())
|
||||
@ -428,11 +428,11 @@ def move():
|
||||
filename = source_file_entry.name
|
||||
|
||||
new_location = filename
|
||||
while STORAGE_IMPL.obj_exist(dest_folder.id, new_location):
|
||||
while settings.STORAGE_IMPL.obj_exist(dest_folder.id, new_location):
|
||||
new_location += "_"
|
||||
|
||||
try:
|
||||
STORAGE_IMPL.move(old_parent_id, old_location, dest_folder.id, new_location)
|
||||
settings.STORAGE_IMPL.move(old_parent_id, old_location, dest_folder.id, new_location)
|
||||
except Exception as storage_err:
|
||||
raise RuntimeError(f"Move file failed at storage layer: {str(storage_err)}")
|
||||
|
||||
|
||||
@ -35,15 +35,12 @@ from api.db import VALID_FILE_TYPES
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.db_models import File
|
||||
from api.utils.api_utils import get_json_result
|
||||
from api import settings
|
||||
from rag.nlp import search
|
||||
from api.constants import DATASET_NAME_LIMIT
|
||||
from rag.settings import PAGERANK_FLD
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from rag.utils.doc_store_conn import OrderByExpr
|
||||
from common.constants import RetCode, PipelineTaskType, StatusEnum, VALID_TASK_STATUS, FileSource, LLMType
|
||||
|
||||
from rag.utils.doc_store_conn import OrderByExpr
|
||||
from common.constants import RetCode, PipelineTaskType, StatusEnum, VALID_TASK_STATUS, FileSource, LLMType, PAGERANK_FLD
|
||||
from common import settings
|
||||
|
||||
@manager.route('/create', methods=['post']) # noqa: F821
|
||||
@login_required
|
||||
@ -55,7 +52,7 @@ def create():
|
||||
tenant_id = current_user.id,
|
||||
parser_id = req.pop("parser_id", None),
|
||||
**req
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
if not KnowledgebaseService.save(**req):
|
||||
@ -105,6 +102,10 @@ def update():
|
||||
message="Duplicated knowledgebase name.")
|
||||
|
||||
del req["kb_id"]
|
||||
connectors = []
|
||||
if "connectors" in req:
|
||||
connectors = req["connectors"]
|
||||
del req["connectors"]
|
||||
if not KnowledgebaseService.update_by_id(kb.id, req):
|
||||
return get_data_error_result()
|
||||
|
||||
@ -121,8 +122,12 @@ def update():
|
||||
if not e:
|
||||
return get_data_error_result(
|
||||
message="Database error (Knowledgebase rename)!")
|
||||
errors = Connector2KbService.link_connectors(kb.id, [conn for conn in connectors], current_user.id)
if errors:
    logging.error("Link KB errors: %s", errors)
kb = kb.to_dict()
|
||||
kb.update(req)
|
||||
kb["connectors"] = connectors
|
||||
|
||||
return get_json_result(data=kb)
|
||||
except Exception as e:
|
||||
@ -228,8 +233,8 @@ def rm():
|
||||
for kb in kbs:
|
||||
settings.docStoreConn.delete({"kb_id": kb.id}, search.index_name(kb.tenant_id), kb.id)
|
||||
settings.docStoreConn.deleteIdx(search.index_name(kb.tenant_id), kb.id)
|
||||
if hasattr(STORAGE_IMPL, 'remove_bucket'):
|
||||
STORAGE_IMPL.remove_bucket(kb.id)
|
||||
if hasattr(settings.STORAGE_IMPL, 'remove_bucket'):
|
||||
settings.STORAGE_IMPL.remove_bucket(kb.id)
|
||||
return get_json_result(data=True)
|
||||
except Exception as e:
|
||||
return server_error_response(e)
|
||||
@ -566,7 +571,7 @@ def trace_graphrag():
|
||||
|
||||
ok, task = TaskService.get_by_id(task_id)
|
||||
if not ok:
|
||||
return get_error_data_result(message="GraphRAG Task Not Found or Error Occurred")
|
||||
return get_json_result(data={})
|
||||
|
||||
return get_json_result(data=task.to_dict())
|
||||
|
||||
@ -775,14 +780,14 @@ def check_embedding():
|
||||
|
||||
def _to_1d(x):
|
||||
a = np.asarray(x, dtype=np.float32)
|
||||
return a.reshape(-1)
|
||||
return a.reshape(-1)
|
||||
|
||||
def _cos_sim(a, b, eps=1e-12):
|
||||
a = _to_1d(a)
|
||||
b = _to_1d(b)
|
||||
na = np.linalg.norm(a)
|
||||
nb = np.linalg.norm(b)
|
||||
if na < eps or nb < eps:
|
||||
if na < eps or nb < eps:
|
||||
return 0.0
|
||||
return float(np.dot(a, b) / (na * nb))
|
||||
|
||||
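For context, _to_1d and _cos_sim are defined as nested helpers inside check_embedding, so they are not importable on their own; the sanity check below copies them to module level purely for illustration and assumes only numpy.

import numpy as np


def _to_1d(x):
    # Flatten any array-like input to a 1-D float32 vector, as in the helper above.
    return np.asarray(x, dtype=np.float32).reshape(-1)


def _cos_sim(a, b, eps=1e-12):
    a, b = _to_1d(a), _to_1d(b)
    na, nb = np.linalg.norm(a), np.linalg.norm(b)
    if na < eps or nb < eps:
        return 0.0
    return float(np.dot(a, b) / (na * nb))


assert abs(_cos_sim([1, 0], [0, 1])) < 1e-6              # orthogonal vectors score 0
assert abs(_cos_sim([1, 2, 3], [2, 4, 6]) - 1.0) < 1e-6  # parallel vectors score 1
assert _cos_sim([0, 0, 0], [1, 2, 3]) == 0.0             # zero vector hits the eps guard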
@ -820,7 +825,7 @@ def check_embedding():
|
||||
indexNames=index_nm, knowledgebaseIds=[kb_id]
|
||||
)
|
||||
ids = docStoreConn.getChunkIds(res1)
|
||||
if not ids:
|
||||
if not ids:
|
||||
continue
|
||||
|
||||
cid = ids[0]
|
||||
@ -864,7 +869,7 @@ def check_embedding():
|
||||
continue
|
||||
|
||||
try:
|
||||
qv, _ = emb_mdl.encode_queries(txt)
|
||||
qv, _ = emb_mdl.encode_queries(txt)
|
||||
sim = _cos_sim(qv, ck["vector"])
|
||||
except Exception:
|
||||
return get_error_data_result(message="embedding failure")
|
||||
@ -893,13 +898,3 @@ def check_embedding():
|
||||
return get_json_result(code=RetCode.NOT_EFFECTIVE, message="failed", data={"summary": summary, "results": results})
|
||||
|
||||
|
||||
@manager.route("/<kb_id>/link", methods=["POST"]) # noqa: F821
|
||||
@validate_request("connector_ids")
|
||||
@login_required
|
||||
def link_connector(kb_id):
|
||||
req = request.json
|
||||
errors = Connector2KbService.link_connectors(kb_id, req["connector_ids"], current_user.id)
|
||||
if errors:
|
||||
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
|
||||
return get_json_result(data=True)
|
||||
|
||||
|
||||
@ -24,7 +24,7 @@ from api.utils.api_utils import server_error_response, get_data_error_result, va
|
||||
from common.constants import StatusEnum, LLMType
|
||||
from api.db.db_models import TenantLLM
|
||||
from api.utils.api_utils import get_json_result, get_allowed_llm_factories
|
||||
from common.base64_image import test_image
|
||||
from rag.utils.base64_image import test_image
|
||||
from rag.llm import EmbeddingModel, ChatModel, RerankModel, CvModel, TTSModel
|
||||
|
||||
|
||||
@ -33,7 +33,7 @@ from rag.llm import EmbeddingModel, ChatModel, RerankModel, CvModel, TTSModel
|
||||
def factories():
|
||||
try:
|
||||
fac = get_allowed_llm_factories()
|
||||
fac = [f.to_dict() for f in fac if f.name not in ["Youdao", "FastEmbed", "BAAI"]]
|
||||
fac = [f.to_dict() for f in fac if f.name not in ["Youdao", "FastEmbed", "BAAI", "Builtin"]]
|
||||
llms = LLMService.get_all()
|
||||
mdl_types = {}
|
||||
for m in llms:
|
||||
@ -128,7 +128,7 @@ def add_llm():
|
||||
api_key = req.get("api_key", "x")
|
||||
llm_name = req.get("llm_name")
|
||||
|
||||
if factory not in get_allowed_llm_factories():
|
||||
if factory not in [f.name for f in get_allowed_llm_factories()]:
|
||||
return get_data_error_result(message=f"LLM factory {factory} is not allowed")
|
||||
|
||||
def apikey_json(keys):
|
||||
@ -348,7 +348,7 @@ def list_app():
|
||||
facts = set([o.to_dict()["llm_factory"] for o in objs if o.api_key and o.status == StatusEnum.VALID.value])
|
||||
status = {(o.llm_name + "@" + o.llm_factory) for o in objs if o.status == StatusEnum.VALID.value}
|
||||
llms = LLMService.get_all()
|
||||
llms = [m.to_dict() for m in llms if m.status == StatusEnum.VALID.value and m.fid not in weighted and (m.llm_name + "@" + m.fid) in status]
|
||||
llms = [m.to_dict() for m in llms if m.status == StatusEnum.VALID.value and m.fid not in weighted and (m.fid == 'Builtin' or (m.llm_name + "@" + m.fid) in status)]
|
||||
for m in llms:
|
||||
m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in self_deployed
|
||||
if "tei-" in os.getenv("COMPOSE_PROFILES", "") and m["model_type"] == LLMType.EMBEDDING and m["fid"] == "Builtin" and m["llm_name"] == os.getenv("TEI_MODEL", ""):
|
||||
@ -358,7 +358,7 @@ def list_app():
|
||||
for o in objs:
|
||||
if o.llm_name + "@" + o.llm_factory in llm_set:
|
||||
continue
|
||||
llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True})
|
||||
llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True, "status": StatusEnum.VALID.value})
|
||||
|
||||
res = {}
|
||||
for m in llms:
|
||||
|
||||
@ -15,15 +15,19 @@
|
||||
#
|
||||
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, cast
|
||||
|
||||
from agent.canvas import Canvas
|
||||
from api.db import CanvasCategory
|
||||
from api.db.services.canvas_service import UserCanvasService
|
||||
from api.db.services.user_canvas_version import UserCanvasVersionService
|
||||
from common.constants import RetCode
|
||||
from common.misc_utils import get_uuid
|
||||
from api.utils.api_utils import get_data_error_result, get_error_data_result, get_json_result, token_required
|
||||
from api.utils.api_utils import get_result
|
||||
from flask import request
|
||||
from flask import request, Response
|
||||
|
||||
|
||||
@manager.route('/agents', methods=['GET']) # noqa: F821
|
||||
@ -127,3 +131,49 @@ def delete_agent(tenant_id: str, agent_id: str):
|
||||
|
||||
UserCanvasService.delete_by_id(agent_id)
|
||||
return get_json_result(data=True)


@manager.route('/webhook/<agent_id>', methods=['POST'])  # noqa: F821
@token_required
def webhook(tenant_id: str, agent_id: str):
    req = request.json
    if not UserCanvasService.accessible(req["id"], tenant_id):
        return get_json_result(
            data=False, message='Only owner of canvas authorized for this operation.',
            code=RetCode.OPERATING_ERROR)

    e, cvs = UserCanvasService.get_by_id(req["id"])
    if not e:
        return get_data_error_result(message="canvas not found.")

    if not isinstance(cvs.dsl, str):
        cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)

    if cvs.canvas_category == CanvasCategory.DataFlow:
        return get_data_error_result(message="Dataflow can not be triggered by webhook.")

    try:
        canvas = Canvas(cvs.dsl, tenant_id, agent_id)
    except Exception as e:
        return get_json_result(
            data=False, message=str(e),
            code=RetCode.EXCEPTION_ERROR)

    def sse():
        nonlocal canvas
        try:
            for ans in canvas.run(query=req.get("query", ""), files=req.get("files", []), user_id=req.get("user_id", tenant_id), webhook_payload=req):
                yield "data:" + json.dumps(ans, ensure_ascii=False) + "\n\n"

            cvs.dsl = json.loads(str(canvas))
            UserCanvasService.update_by_id(req["id"], cvs.to_dict())
        except Exception as e:
            logging.exception(e)
            yield "data:" + json.dumps({"code": 500, "message": str(e), "data": False}, ensure_ascii=False) + "\n\n"

    resp = Response(sse(), mimetype="text/event-stream")
    resp.headers.add_header("Cache-control", "no-cache")
    resp.headers.add_header("Connection", "keep-alive")
    resp.headers.add_header("X-Accel-Buffering", "no")
    resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
    return resp
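A hypothetical caller for this webhook, for illustration only: the URL prefix and the bearer-token header are assumptions, while the /webhook/<agent_id> route, the canvas id carried in the JSON body ("id"), and the "data:<json>\n\n" SSE framing come from the handler above.

# Hypothetical webhook caller; URL prefix and auth scheme are placeholders.
import json

import requests

AGENT_ID = "<agent_id>"                                    # placeholder
URL = f"http://localhost:9380/api/v1/webhook/{AGENT_ID}"   # assumed mount point
HEADERS = {"Authorization": "Bearer <api_key>"}            # assumed token_required scheme

payload = {"id": AGENT_ID, "query": "hello", "files": [], "user_id": "demo"}

with requests.post(URL, json=payload, headers=HEADERS, stream=True) as rsp:
    for raw in rsp.iter_lines(decode_unicode=True):
        if not raw or not raw.startswith("data:"):
            continue                                       # skip blank keep-alive lines
        event = json.loads(raw[len("data:"):])
        if isinstance(event, dict) and event.get("code") == 500:
            raise RuntimeError(event.get("message"))       # error frame emitted by sse()
        print(event)                                       # one canvas.run() answer per frame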
@ -20,7 +20,6 @@ import os
|
||||
import json
|
||||
from flask import request
|
||||
from peewee import OperationalError
|
||||
from api import settings
|
||||
from api.db.db_models import File
|
||||
from api.db.services.document_service import DocumentService
|
||||
from api.db.services.file2document_service import File2DocumentService
|
||||
@ -48,7 +47,8 @@ from api.utils.validation_utils import (
|
||||
validate_and_parse_request_args,
|
||||
)
|
||||
from rag.nlp import search
|
||||
from rag.settings import PAGERANK_FLD
|
||||
from common.constants import PAGERANK_FLD
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route("/datasets", methods=["POST"]) # noqa: F821
|
||||
|
||||
@ -20,11 +20,11 @@ from flask import request, jsonify
|
||||
from api.db.services.document_service import DocumentService
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.llm_service import LLMBundle
|
||||
from api import settings
|
||||
from api.utils.api_utils import validate_request, build_error_result, apikey_required
|
||||
from rag.app.tag import label_question
|
||||
from api.db.services.dialog_service import meta_filter, convert_conditions
|
||||
from common.constants import RetCode, LLMType
|
||||
from common import settings
|
||||
|
||||
@manager.route('/dify/retrieval', methods=['POST']) # noqa: F821
|
||||
@apikey_required
|
||||
|
||||
@ -24,7 +24,6 @@ from flask import request, send_file
|
||||
from peewee import OperationalError
|
||||
from pydantic import BaseModel, Field, validator
|
||||
|
||||
from api import settings
|
||||
from api.constants import FILE_NAME_LEN_LIMIT
|
||||
from api.db import FileType
|
||||
from api.db.db_models import File, Task
|
||||
@ -41,9 +40,9 @@ from rag.app.qa import beAdoc, rmPrefix
|
||||
from rag.app.tag import label_question
|
||||
from rag.nlp import rag_tokenizer, search
|
||||
from rag.prompts.generator import cross_languages, keyword_extraction
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from common.string_utils import remove_redundant_spaces
|
||||
from common.constants import RetCode, LLMType, ParserType, TaskStatus, FileSource
|
||||
from common import settings
|
||||
|
||||
MAXIMUM_OF_UPLOADING_FILES = 256
|
||||
|
||||
@ -401,7 +400,7 @@ def download(tenant_id, dataset_id, document_id):
|
||||
return get_error_data_result(message=f"The dataset not own the document {document_id}.")
|
||||
# The process of downloading
|
||||
doc_id, doc_location = File2DocumentService.get_storage_address(doc_id=document_id) # minio address
|
||||
file_stream = STORAGE_IMPL.get(doc_id, doc_location)
|
||||
file_stream = settings.STORAGE_IMPL.get(doc_id, doc_location)
|
||||
if not file_stream:
|
||||
return construct_json_result(message="This file is empty.", code=RetCode.DATA_ERROR)
|
||||
file = BytesIO(file_stream)
|
||||
@ -671,7 +670,7 @@ def delete(tenant_id, dataset_id):
|
||||
)
|
||||
File2DocumentService.delete_by_document_id(doc_id)
|
||||
|
||||
STORAGE_IMPL.rm(b, n)
|
||||
settings.STORAGE_IMPL.rm(b, n)
|
||||
success_count += 1
|
||||
except Exception as e:
|
||||
errors += str(e)
|
||||
|
||||
@ -32,7 +32,7 @@ from api.db.services import duplicate_name
|
||||
from api.db.services.file_service import FileService
|
||||
from api.utils.api_utils import get_json_result
|
||||
from api.utils.file_utils import filename_type
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route('/file/upload', methods=['POST']) # noqa: F821
|
||||
@ -126,7 +126,7 @@ def upload(tenant_id):
|
||||
|
||||
filetype = filename_type(file_obj_names[file_len - 1])
|
||||
location = file_obj_names[file_len - 1]
|
||||
while STORAGE_IMPL.obj_exist(last_folder.id, location):
|
||||
while settings.STORAGE_IMPL.obj_exist(last_folder.id, location):
|
||||
location += "_"
|
||||
blob = file_obj.read()
|
||||
filename = duplicate_name(FileService.query, name=file_obj_names[file_len - 1], parent_id=last_folder.id)
|
||||
@ -142,7 +142,7 @@ def upload(tenant_id):
|
||||
"size": len(blob),
|
||||
}
|
||||
file = FileService.insert(file)
|
||||
STORAGE_IMPL.put(last_folder.id, location, blob)
|
||||
settings.STORAGE_IMPL.put(last_folder.id, location, blob)
|
||||
file_res.append(file.to_json())
|
||||
return get_json_result(data=file_res)
|
||||
except Exception as e:
|
||||
@ -497,10 +497,10 @@ def rm(tenant_id):
|
||||
e, file = FileService.get_by_id(inner_file_id)
|
||||
if not e:
|
||||
return get_json_result(message="File not found!", code=404)
|
||||
STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
settings.STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
FileService.delete_folder_by_pf_id(tenant_id, file_id)
|
||||
else:
|
||||
STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
settings.STORAGE_IMPL.rm(file.parent_id, file.location)
|
||||
if not FileService.delete(file):
|
||||
return get_json_result(message="Database error (File removal)!", code=500)
|
||||
|
||||
@ -614,10 +614,10 @@ def get(tenant_id, file_id):
|
||||
if not e:
|
||||
return get_json_result(message="Document not found!", code=404)
|
||||
|
||||
blob = STORAGE_IMPL.get(file.parent_id, file.location)
|
||||
blob = settings.STORAGE_IMPL.get(file.parent_id, file.location)
|
||||
if not blob:
|
||||
b, n = File2DocumentService.get_storage_address(file_id=file_id)
|
||||
blob = STORAGE_IMPL.get(b, n)
|
||||
blob = settings.STORAGE_IMPL.get(b, n)
|
||||
|
||||
response = flask.make_response(blob)
|
||||
ext = re.search(r"\.([^.]+)$", file.name)
|
||||
|
||||
@ -21,7 +21,6 @@ import tiktoken
|
||||
from flask import Response, jsonify, request
|
||||
|
||||
from agent.canvas import Canvas
|
||||
from api import settings
|
||||
from api.db.db_models import APIToken
|
||||
from api.db.services.api_service import API4ConversationService
|
||||
from api.db.services.canvas_service import UserCanvasService, completion_openai
|
||||
@ -41,6 +40,7 @@ from rag.app.tag import label_question
|
||||
from rag.prompts.template import load_prompt
|
||||
from rag.prompts.generator import cross_languages, gen_meta_filter, keyword_extraction, chunks_format
|
||||
from common.constants import RetCode, LLMType, StatusEnum
|
||||
from common import settings
|
||||
|
||||
@manager.route("/chats/<chat_id>/sessions", methods=["POST"]) # noqa: F821
|
||||
@token_required
|
||||
|
||||
@ -23,21 +23,20 @@ from api.db.db_models import APIToken
|
||||
from api.db.services.api_service import APITokenService
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.user_service import UserTenantService
|
||||
from api import settings
|
||||
from api.utils.api_utils import (
|
||||
get_json_result,
|
||||
get_data_error_result,
|
||||
server_error_response,
|
||||
generate_confirmation_token,
|
||||
)
|
||||
from api.versions import get_ragflow_version
|
||||
from common.versions import get_ragflow_version
|
||||
from common.time_utils import current_timestamp, datetime_format
|
||||
from rag.utils.storage_factory import STORAGE_IMPL, STORAGE_IMPL_TYPE
|
||||
from timeit import default_timer as timer
|
||||
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
from flask import jsonify
|
||||
from api.utils.health_utils import run_health_checks
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route("/version", methods=["GET"]) # noqa: F821
|
||||
@ -112,15 +111,15 @@ def status():
|
||||
|
||||
st = timer()
|
||||
try:
|
||||
STORAGE_IMPL.health()
|
||||
settings.STORAGE_IMPL.health()
|
||||
res["storage"] = {
|
||||
"storage": STORAGE_IMPL_TYPE.lower(),
|
||||
"storage": settings.STORAGE_IMPL_TYPE.lower(),
|
||||
"status": "green",
|
||||
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||
}
|
||||
except Exception as e:
|
||||
res["storage"] = {
|
||||
"storage": STORAGE_IMPL_TYPE.lower(),
|
||||
"storage": settings.STORAGE_IMPL_TYPE.lower(),
|
||||
"status": "red",
|
||||
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||
"error": str(e),
|
||||
|
||||
@ -17,7 +17,6 @@
|
||||
from flask import request
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from api import settings
|
||||
from api.apps import smtp_mail_server
|
||||
from api.db import UserTenantRole
|
||||
from api.db.db_models import UserTenant
|
||||
@ -28,6 +27,7 @@ from common.misc_utils import get_uuid
|
||||
from common.time_utils import delta_seconds
|
||||
from api.utils.api_utils import get_json_result, validate_request, server_error_response, get_data_error_result
|
||||
from api.utils.web_utils import send_invite_email
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route("/<tenant_id>/user/list", methods=["GET"]) # noqa: F821
|
||||
|
||||
@ -26,7 +26,6 @@ from flask import redirect, request, session, make_response
|
||||
from flask_login import current_user, login_required, login_user, logout_user
|
||||
from werkzeug.security import check_password_hash, generate_password_hash
|
||||
|
||||
from api import settings
|
||||
from api.apps.auth import get_auth_client
|
||||
from api.db import FileType, UserTenantRole
|
||||
from api.db.db_models import TenantLLM
|
||||
@ -58,6 +57,7 @@ from api.utils.web_utils import (
|
||||
hash_code,
|
||||
captcha_key,
|
||||
)
|
||||
from common import settings
|
||||
|
||||
|
||||
@manager.route("/login", methods=["POST", "GET"]) # noqa: F821
|
||||
|
||||
@ -70,4 +70,7 @@ class PipelineTaskType(StrEnum):
|
||||
VALID_PIPELINE_TASK_TYPES = {PipelineTaskType.PARSE, PipelineTaskType.DOWNLOAD, PipelineTaskType.RAPTOR, PipelineTaskType.GRAPH_RAG, PipelineTaskType.MINDMAP}
|
||||
|
||||
|
||||
PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES = {PipelineTaskType.RAPTOR.lower(), PipelineTaskType.GRAPH_RAG.lower(), PipelineTaskType.MINDMAP.lower()}
|
||||
|
||||
|
||||
KNOWLEDGEBASE_FOLDER_NAME=".knowledgebase"
|
||||
|
||||
@ -31,7 +31,7 @@ from peewee import InterfaceError, OperationalError, BigIntegerField, BooleanFie
|
||||
from playhouse.migrate import MySQLMigrator, PostgresqlMigrator, migrate
|
||||
from playhouse.pool import PooledMySQLDatabase, PooledPostgresqlDatabase
|
||||
|
||||
from api import settings, utils
|
||||
from api import utils
|
||||
from api.db import SerializedType
|
||||
from api.utils.json_encode import json_dumps, json_loads
|
||||
from api.utils.configs import deserialize_b64, serialize_b64
|
||||
@ -39,6 +39,7 @@ from api.utils.configs import deserialize_b64, serialize_b64
|
||||
from common.time_utils import current_timestamp, timestamp_to_date, date_string_to_timestamp
|
||||
from common.decorator import singleton
|
||||
from common.constants import ParserType
|
||||
from common import settings
|
||||
|
||||
|
||||
CONTINUOUS_FIELD_TYPE = {IntegerField, FloatField, DateTimeField}
|
||||
@ -668,6 +669,7 @@ class LLMFactories(DataBaseModel):
|
||||
name = CharField(max_length=128, null=False, help_text="LLM factory name", primary_key=True)
|
||||
logo = TextField(null=True, help_text="llm logo base64")
|
||||
tags = CharField(max_length=255, null=False, help_text="LLM, Text Embedding, Image2Text, ASR", index=True)
|
||||
rank = IntegerField(default=0, index=False)
|
||||
status = CharField(max_length=1, null=True, help_text="is it validate(0: wasted, 1: validate)", default="1", index=True)
|
||||
|
||||
def __str__(self):
|
||||
@ -1063,6 +1065,7 @@ class Connector2Kb(DataBaseModel):
|
||||
id = CharField(max_length=32, primary_key=True)
|
||||
connector_id = CharField(max_length=32, null=False, index=True)
|
||||
kb_id = CharField(max_length=32, null=False, index=True)
|
||||
auto_parse = CharField(max_length=1, null=False, default="1", index=False)
|
||||
|
||||
class Meta:
|
||||
db_table = "connector2kb"
|
||||
@ -1281,4 +1284,12 @@ def migrate_db():
|
||||
migrate(migrator.add_column("tenant_llm", "status", CharField(max_length=1, null=False, help_text="is it validate(0: wasted, 1: validate)", default="1", index=True)))
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
migrate(migrator.add_column("connector2kb", "auto_parse", CharField(max_length=1, null=False, default="1", index=False)))
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
migrate(migrator.add_column("llm_factories", "rank", IntegerField(default=0, index=False)))
|
||||
except Exception:
|
||||
pass
|
||||
logging.disable(logging.NOTSET)
|
||||
|
||||
@ -29,9 +29,9 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.tenant_llm_service import LLMFactoriesService, TenantLLMService
|
||||
from api.db.services.llm_service import LLMService, LLMBundle, get_init_tenant_llm
|
||||
from api.db.services.user_service import TenantService, UserTenantService
|
||||
from api import settings
|
||||
from common.constants import LLMType
|
||||
from common.file_utils import get_project_base_directory
|
||||
from common import settings
|
||||
from api.common.base64 import encode_to_base64
|
||||
|
||||
|
||||
@ -89,13 +89,7 @@ def init_superuser():
|
||||
|
||||
|
||||
def init_llm_factory():
|
||||
try:
|
||||
LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
|
||||
LLMService.filter_delete([(LLM.fid == "cohere")])
|
||||
LLMFactoriesService.filter_delete([LLMFactories.name == "cohere"])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
LLMFactoriesService.filter_delete([1 == 1])
|
||||
factory_llm_infos = settings.FACTORY_LLM_INFOS
|
||||
for factory_llm_info in factory_llm_infos:
|
||||
info = deepcopy(factory_llm_info)
|
||||
|
||||
@ -16,7 +16,6 @@
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from api import settings
|
||||
from api.utils.api_utils import group_by
|
||||
from api.db import FileType, UserTenantRole
|
||||
from api.db.services.api_service import APITokenService, API4ConversationService
|
||||
@ -35,9 +34,9 @@ from api.db.services.task_service import TaskService
|
||||
from api.db.services.tenant_llm_service import TenantLLMService
|
||||
from api.db.services.user_canvas_version import UserCanvasVersionService
|
||||
from api.db.services.user_service import TenantService, UserService, UserTenantService
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from rag.nlp import search
|
||||
from common.constants import ActiveEnum
|
||||
from common import settings
|
||||
|
||||
def create_new_user(user_info: dict) -> dict:
|
||||
"""
|
||||
@ -158,8 +157,8 @@ def delete_user_data(user_id: str) -> dict:
|
||||
if kb_ids:
|
||||
# step1.1.1 delete files in storage, remove bucket
|
||||
for kb_id in kb_ids:
|
||||
if STORAGE_IMPL.bucket_exists(kb_id):
|
||||
STORAGE_IMPL.remove_bucket(kb_id)
|
||||
if settings.STORAGE_IMPL.bucket_exists(kb_id):
|
||||
settings.STORAGE_IMPL.remove_bucket(kb_id)
|
||||
done_msg += f"- Removed {len(kb_ids)} dataset's buckets.\n"
|
||||
# step1.1.2 delete file and document info in db
|
||||
doc_ids = DocumentService.get_all_doc_ids_by_kb_ids(kb_ids)
|
||||
@ -218,7 +217,7 @@ def delete_user_data(user_id: str) -> dict:
|
||||
if created_files:
|
||||
# step2.1.1.1 delete file in storage
|
||||
for f in created_files:
|
||||
STORAGE_IMPL.rm(f.parent_id, f.location)
|
||||
settings.STORAGE_IMPL.rm(f.parent_id, f.location)
|
||||
done_msg += f"- Deleted {len(created_files)} uploaded file.\n"
|
||||
# step2.1.1.2 delete file record
|
||||
file_delete_res = FileService.delete_by_ids([f.id for f in created_files])
|
||||
|
||||
@ -13,7 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
from api.versions import get_ragflow_version
|
||||
from common.versions import get_ragflow_version
|
||||
from .reload_config_base import ReloadConfigBase
|
||||
|
||||
|
||||
|
||||
@ -67,6 +67,7 @@ class UserCanvasService(CommonService):
|
||||
# will get all permitted agents, be cautious
|
||||
fields = [
|
||||
cls.model.id,
|
||||
cls.model.avatar,
|
||||
cls.model.title,
|
||||
cls.model.permission,
|
||||
cls.model.canvas_type,
|
||||
|
||||
@ -90,7 +90,7 @@ class CommonService:
|
||||
else:
|
||||
query_records = cls.model.select()
|
||||
if reverse is not None:
|
||||
if not order_by or not hasattr(cls, order_by):
|
||||
if not order_by or not hasattr(cls.model, order_by):
|
||||
order_by = "create_time"
|
||||
if reverse is True:
|
||||
query_records = query_records.order_by(cls.model.getter_by(order_by).desc())
|
||||
|
||||
@ -15,6 +15,7 @@
|
||||
#
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Tuple, List
|
||||
|
||||
from anthropic import BaseModel
|
||||
from peewee import SQL, fn
|
||||
@ -39,17 +40,20 @@ class ConnectorService(CommonService):
|
||||
if not task:
|
||||
if status == TaskStatus.SCHEDULE:
|
||||
SyncLogsService.schedule(connector_id, c2k.kb_id)
|
||||
ConnectorService.update_by_id(connector_id, {"status": status})
|
||||
return
|
||||
|
||||
if task.status == TaskStatus.DONE:
|
||||
if status == TaskStatus.SCHEDULE:
|
||||
SyncLogsService.schedule(connector_id, c2k.kb_id, task.poll_range_end, total_docs_indexed=task.total_docs_indexed)
|
||||
ConnectorService.update_by_id(connector_id, {"status": status})
|
||||
return
|
||||
|
||||
task = task.to_dict()
|
||||
task["status"] = status
|
||||
SyncLogsService.update_by_id(task["id"], task)
|
||||
ConnectorService.update_by_id(connector_id, {"status": status})
|
||||
|
||||
|
||||
@classmethod
|
||||
def list(cls, tenant_id):
|
||||
fields = [
|
||||
@ -62,26 +66,42 @@ class ConnectorService(CommonService):
|
||||
cls.model.tenant_id == tenant_id
|
||||
).dicts())
|
||||
|
||||
@classmethod
|
||||
def rebuild(cls, kb_id:str, connector_id: str, tenant_id:str):
|
||||
e, conn = cls.get_by_id(connector_id)
|
||||
if not e:
|
||||
return
|
||||
SyncLogsService.filter_delete([SyncLogs.connector_id==connector_id, SyncLogs.kb_id==kb_id])
|
||||
docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}", kb_id=kb_id)
|
||||
err = FileService.delete_docs([d.id for d in docs], tenant_id)
|
||||
SyncLogsService.schedule(connector_id, kb_id, reindex=True)
|
||||
return err
|
||||
|
||||
|
||||
class SyncLogsService(CommonService):
|
||||
model = SyncLogs
|
||||
|
||||
@classmethod
|
||||
def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15):
|
||||
def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15) -> Tuple[List[dict], int]:
|
||||
fields = [
|
||||
cls.model.id,
|
||||
cls.model.connector_id,
|
||||
cls.model.kb_id,
|
||||
cls.model.update_date,
|
||||
cls.model.poll_range_start,
|
||||
cls.model.poll_range_end,
|
||||
cls.model.new_docs_indexed,
|
||||
cls.model.total_docs_indexed,
|
||||
cls.model.error_msg,
|
||||
cls.model.full_exception_trace,
|
||||
cls.model.error_count,
|
||||
Connector.name,
|
||||
Connector.source,
|
||||
Connector.tenant_id,
|
||||
Connector.timeout_secs,
|
||||
Knowledgebase.name.alias("kb_name"),
|
||||
Knowledgebase.avatar.alias("kb_avatar"),
|
||||
Connector2Kb.auto_parse,
|
||||
cls.model.from_beginning.alias("reindex"),
|
||||
cls.model.status
|
||||
]
|
||||
@ -105,27 +125,39 @@ class SyncLogsService(CommonService):
|
||||
)
|
||||
|
||||
query = query.distinct().order_by(cls.model.update_time.desc())
|
||||
totbal = query.count()
|
||||
if page_number:
|
||||
query = query.paginate(page_number, items_per_page)
|
||||
|
||||
return list(query.dicts())
|
||||
return list(query.dicts()), totbal
|
||||
|
||||
@classmethod
|
||||
def start(cls, id):
|
||||
def start(cls, id, connector_id):
|
||||
cls.update_by_id(id, {"status": TaskStatus.RUNNING, "time_started": datetime.now().strftime('%Y-%m-%d %H:%M:%S') })
|
||||
ConnectorService.update_by_id(connector_id, {"status": TaskStatus.RUNNING})
|
||||
|
||||
@classmethod
|
||||
def done(cls, id):
|
||||
def done(cls, id, connector_id):
|
||||
cls.update_by_id(id, {"status": TaskStatus.DONE})
|
||||
ConnectorService.update_by_id(connector_id, {"status": TaskStatus.DONE})
|
||||
|
||||
@classmethod
|
||||
def schedule(cls, connector_id, kb_id, poll_range_start=None, reindex=False, total_docs_indexed=0):
|
||||
try:
|
||||
if cls.model.select().where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).count() > 100:
|
||||
rm_ids = [m.id for m in cls.model.select(cls.model.id).where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).order_by(cls.model.update_time.asc()).limit(70)]
|
||||
deleted = cls.model.delete().where(cls.model.id.in_(rm_ids)).execute()
|
||||
logging.info(f"[SyncLogService] Cleaned {deleted} old logs.")
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
|
||||
try:
|
||||
e = cls.query(kb_id=kb_id, connector_id=connector_id, status=TaskStatus.SCHEDULE)
|
||||
if e:
|
||||
logging.warning(f"{kb_id}--{connector_id} has already had a scheduling sync task which is abnormal.")
|
||||
return None
|
||||
reindex = "1" if reindex else "0"
|
||||
ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDULE})
|
||||
return cls.save(**{
|
||||
"id": get_uuid(),
|
||||
"kb_id": kb_id, "status": TaskStatus.SCHEDULE, "connector_id": connector_id,
|
||||
@ -142,6 +174,7 @@ class SyncLogsService(CommonService):
|
||||
full_exception_trace=cls.model.full_exception_trace + str(e)
|
||||
) \
|
||||
.where(cls.model.id == task.id).execute()
|
||||
ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDULE})
|
||||
|
||||
@classmethod
|
||||
def increase_docs(cls, id, min_update, max_update, doc_num, err_msg="", error_count=0):
|
||||
@ -157,7 +190,7 @@ class SyncLogsService(CommonService):
|
||||
.where(cls.model.id == id).execute()
|
||||
|
||||
@classmethod
|
||||
def duplicate_and_parse(cls, kb, docs, tenant_id, src):
|
||||
def duplicate_and_parse(cls, kb, docs, tenant_id, src, auto_parse=True):
|
||||
if not docs:
|
||||
return None
|
||||
|
||||
@ -169,15 +202,17 @@ class SyncLogsService(CommonService):
|
||||
return self.blob
|
||||
|
||||
errs = []
|
||||
files = [FileObj(filename=d["semantic_identifier"]+f".{d['extension']}", blob=d["blob"]) for d in docs]
|
||||
files = [FileObj(filename=d["semantic_identifier"]+(f"{d['extension']}" if d["semantic_identifier"][::-1].find(d['extension'][::-1])<0 else ""), blob=d["blob"]) for d in docs]
|
||||
doc_ids = []
|
||||
err, doc_blob_pairs = FileService.upload_document(kb, files, tenant_id, src)
|
||||
errs.extend(err)
|
||||
if not err:
|
||||
kb_table_num_map = {}
|
||||
for doc, _ in doc_blob_pairs:
|
||||
DocumentService.run(tenant_id, doc, kb_table_num_map)
|
||||
doc_ids.append(doc["id"])
|
||||
|
||||
kb_table_num_map = {}
|
||||
for doc, _ in doc_blob_pairs:
|
||||
doc_ids.append(doc["id"])
|
||||
if not auto_parse or auto_parse == "0":
|
||||
continue
|
||||
DocumentService.run(tenant_id, doc, kb_table_num_map)
|
||||
|
||||
return errs, doc_ids
|
||||
|
||||
@ -193,43 +228,21 @@ class Connector2KbService(CommonService):
|
||||
model = Connector2Kb
|
||||
|
||||
@classmethod
|
||||
def link_kb(cls, conn_id:str, kb_ids: list[str], tenant_id:str):
|
||||
arr = cls.query(connector_id=conn_id)
|
||||
old_kb_ids = [a.kb_id for a in arr]
|
||||
for kb_id in kb_ids:
|
||||
if kb_id in old_kb_ids:
|
||||
continue
|
||||
cls.save(**{
|
||||
"id": get_uuid(),
|
||||
"connector_id": conn_id,
|
||||
"kb_id": kb_id
|
||||
})
|
||||
SyncLogsService.schedule(conn_id, kb_id, reindex=True)
|
||||
|
||||
errs = []
|
||||
e, conn = ConnectorService.get_by_id(conn_id)
|
||||
for kb_id in old_kb_ids:
|
||||
if kb_id in kb_ids:
|
||||
continue
|
||||
cls.filter_delete([cls.model.kb_id==kb_id, cls.model.connector_id==conn_id])
|
||||
SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status==TaskStatus.SCHEDULE], {"status": TaskStatus.CANCEL})
|
||||
docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
|
||||
err = FileService.delete_docs([d.id for d in docs], tenant_id)
|
||||
if err:
|
||||
errs.append(err)
|
||||
return "\n".join(errs)
|
||||
|
||||
@classmethod
|
||||
def link_connectors(cls, kb_id:str, connector_ids: list[str], tenant_id:str):
|
||||
def link_connectors(cls, kb_id:str, connectors: list[dict], tenant_id:str):
|
||||
arr = cls.query(kb_id=kb_id)
|
||||
old_conn_ids = [a.connector_id for a in arr]
|
||||
for conn_id in connector_ids:
|
||||
connector_ids = []
|
||||
for conn in connectors:
|
||||
conn_id = conn["id"]
|
||||
connector_ids.append(conn_id)
|
||||
if conn_id in old_conn_ids:
|
||||
cls.filter_update([cls.model.connector_id==conn_id, cls.model.kb_id==kb_id], {"auto_parse": conn.get("auto_parse", "1")})
|
||||
continue
|
||||
cls.save(**{
|
||||
"id": get_uuid(),
|
||||
"connector_id": conn_id,
|
||||
"kb_id": kb_id
|
||||
"kb_id": kb_id,
|
||||
"auto_parse": conn.get("auto_parse", "1")
|
||||
})
|
||||
SyncLogsService.schedule(conn_id, kb_id, reindex=True)
|
||||
|
||||
@ -239,11 +252,15 @@ class Connector2KbService(CommonService):
|
||||
continue
|
||||
cls.filter_delete([cls.model.kb_id==kb_id, cls.model.connector_id==conn_id])
|
||||
e, conn = ConnectorService.get_by_id(conn_id)
|
||||
SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status==TaskStatus.SCHEDULE], {"status": TaskStatus.CANCEL})
|
||||
docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
|
||||
err = FileService.delete_docs([d.id for d in docs], tenant_id)
|
||||
if err:
|
||||
errs.append(err)
|
||||
if not e:
|
||||
continue
|
||||
#SyncLogsService.filter_delete([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id])
|
||||
# Do not delete docs while unlinking.
|
||||
SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status.in_([TaskStatus.SCHEDULE, TaskStatus.RUNNING])], {"status": TaskStatus.CANCEL})
|
||||
#docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
|
||||
#err = FileService.delete_docs([d.id for d in docs], tenant_id)
|
||||
#if err:
|
||||
# errs.append(err)
|
||||
return "\n".join(errs)
|
||||
|
||||
@classmethod
|
||||
@ -252,6 +269,7 @@ class Connector2KbService(CommonService):
|
||||
Connector.id,
|
||||
Connector.source,
|
||||
Connector.name,
|
||||
cls.model.auto_parse,
|
||||
Connector.status
|
||||
]
|
||||
return list(cls.model.select(*fields)\
|
||||
@ -261,3 +279,5 @@ class Connector2KbService(CommonService):
|
||||
).dicts()
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
@ -25,7 +25,6 @@ import trio
|
||||
from langfuse import Langfuse
|
||||
from peewee import fn
|
||||
from agentic_reasoning import DeepResearcher
|
||||
from api import settings
|
||||
from common.constants import LLMType, ParserType, StatusEnum
|
||||
from api.db.db_models import DB, Dialog
|
||||
from api.db.services.common_service import CommonService
|
||||
@ -44,6 +43,7 @@ from rag.prompts.generator import chunks_format, citation_prompt, cross_language
|
||||
from common.token_utils import num_tokens_from_string
|
||||
from rag.utils.tavily_conn import Tavily
|
||||
from common.string_utils import remove_redundant_spaces
|
||||
from common import settings
|
||||
|
||||
|
||||
class DialogService(CommonService):
|
||||
@ -293,12 +293,13 @@ def meta_filter(metas: dict, filters: list[dict]):
|
||||
def filter_out(v2docs, operator, value):
|
||||
ids = []
|
||||
for input, docids in v2docs.items():
|
||||
try:
|
||||
input = float(input)
|
||||
value = float(value)
|
||||
except Exception:
|
||||
input = str(input)
|
||||
value = str(value)
|
||||
if operator in ["=", "≠", ">", "<", "≥", "≤"]:
|
||||
try:
|
||||
input = float(input)
|
||||
value = float(value)
|
||||
except Exception:
|
||||
input = str(input)
|
||||
value = str(value)
|
||||
|
||||
for conds in [
|
||||
(operator == "contains", str(value).lower() in str(input).lower()),
|
||||
@ -618,7 +619,12 @@ def chat(dialog, messages, stream=True, **kwargs):
|
||||
|
||||
|
||||
def use_sql(question, field_map, tenant_id, chat_mdl, quota=True, kb_ids=None):
|
||||
sys_prompt = "You are a Database Administrator. You need to check the fields of the following tables based on the user's list of questions and write the SQL corresponding to the last question."
|
||||
sys_prompt = """
|
||||
You are a Database Administrator. You need to check the fields of the following tables based on the user's list of questions and write the SQL corresponding to the last question.
|
||||
Ensure that:
|
||||
1. Field names should not start with a digit. If any field name starts with a digit, use double quotes around it.
|
||||
2. Write only the SQL, no explanations or additional text.
|
||||
"""
|
||||
user_prompt = """
|
||||
Table name: {};
|
||||
Table of database fields are as follows:
|
||||
@ -639,6 +645,7 @@ Please write the SQL, only SQL, without any other explanations or text.
|
||||
sql = re.sub(r".*select ", "select ", sql.lower())
|
||||
sql = re.sub(r" +", " ", sql)
|
||||
sql = re.sub(r"([;;]|```).*", "", sql)
|
||||
sql = re.sub(r"&", "and", sql)
|
||||
if sql[: len("select ")] != "select ":
|
||||
return None, None
|
||||
if not re.search(r"((sum|avg|max|min)\(|group by )", sql.lower()):
|
||||
|
||||
@ -26,9 +26,8 @@ import trio
|
||||
import xxhash
|
||||
from peewee import fn, Case, JOIN
|
||||
|
||||
from api import settings
|
||||
from api.constants import IMG_BASE64_PREFIX, FILE_NAME_LEN_LIMIT
|
||||
from api.db import FileType, UserTenantRole, CanvasCategory
|
||||
from api.db import PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES, FileType, UserTenantRole, CanvasCategory
|
||||
from api.db.db_models import DB, Document, Knowledgebase, Task, Tenant, UserTenant, File2Document, File, UserCanvas, \
|
||||
User
|
||||
from api.db.db_utils import bulk_insert_into_db
|
||||
@ -36,13 +35,11 @@ from api.db.services.common_service import CommonService
|
||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from common.misc_utils import get_uuid
|
||||
from common.time_utils import current_timestamp, get_format_time
|
||||
from common.constants import LLMType, ParserType, StatusEnum, TaskStatus
|
||||
from common.constants import LLMType, ParserType, StatusEnum, TaskStatus, SVR_CONSUMER_GROUP_NAME
|
||||
from rag.nlp import rag_tokenizer, search
|
||||
from rag.settings import get_svr_queue_name, SVR_CONSUMER_GROUP_NAME
|
||||
from rag.utils.redis_conn import REDIS_CONN
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from rag.utils.doc_store_conn import OrderByExpr
|
||||
|
||||
from common import settings
|
||||
|
||||
class DocumentService(CommonService):
|
||||
model = Document
|
||||
@ -318,11 +315,11 @@ class DocumentService(CommonService):
|
||||
all_chunk_ids.extend(chunk_ids)
|
||||
page += 1
|
||||
for cid in all_chunk_ids:
|
||||
if STORAGE_IMPL.obj_exist(doc.kb_id, cid):
|
||||
STORAGE_IMPL.rm(doc.kb_id, cid)
|
||||
if settings.STORAGE_IMPL.obj_exist(doc.kb_id, cid):
|
||||
settings.STORAGE_IMPL.rm(doc.kb_id, cid)
|
||||
if doc.thumbnail and not doc.thumbnail.startswith(IMG_BASE64_PREFIX):
|
||||
if STORAGE_IMPL.obj_exist(doc.kb_id, doc.thumbnail):
|
||||
STORAGE_IMPL.rm(doc.kb_id, doc.thumbnail)
|
||||
if settings.STORAGE_IMPL.obj_exist(doc.kb_id, doc.thumbnail):
|
||||
settings.STORAGE_IMPL.rm(doc.kb_id, doc.thumbnail)
|
||||
settings.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)
|
||||
|
||||
graph_source = settings.docStoreConn.getFields(
|
||||
@ -375,12 +372,16 @@ class DocumentService(CommonService):
|
||||
def get_unfinished_docs(cls):
|
||||
fields = [cls.model.id, cls.model.process_begin_at, cls.model.parser_config, cls.model.progress_msg,
|
||||
cls.model.run, cls.model.parser_id]
|
||||
unfinished_task_query = Task.select(Task.doc_id).where(
|
||||
(Task.progress >= 0) & (Task.progress < 1)
|
||||
)
|
||||
|
||||
docs = cls.model.select(*fields) \
|
||||
.where(
|
||||
cls.model.status == StatusEnum.VALID.value,
|
||||
~(cls.model.type == FileType.VIRTUAL.value),
|
||||
cls.model.progress < 1,
|
||||
cls.model.progress > 0)
|
||||
(((cls.model.progress < 1) & (cls.model.progress > 0)) |
|
||||
(cls.model.id.in_(unfinished_task_query)))) # including unfinished tasks like GraphRAG, RAPTOR and Mindmap
|
||||
return list(docs.dicts())
|
||||
|
||||
@classmethod
|
||||
@ -622,12 +623,17 @@ class DocumentService(CommonService):
|
||||
|
||||
@classmethod
|
||||
@DB.connection_context()
|
||||
def begin2parse(cls, docid):
|
||||
cls.update_by_id(
|
||||
docid, {"progress": random.random() * 1 / 100.,
|
||||
"progress_msg": "Task is queued...",
|
||||
"process_begin_at": get_format_time()
|
||||
})
|
||||
def begin2parse(cls, doc_id, keep_progress=False):
|
||||
info = {
|
||||
"progress_msg": "Task is queued...",
|
||||
"process_begin_at": get_format_time(),
|
||||
}
|
||||
if not keep_progress:
|
||||
info["progress"] = random.random() * 1 / 100.
|
||||
info["run"] = TaskStatus.RUNNING.value
|
||||
# keep the doc in DONE state when keep_progress=True for GraphRAG, RAPTOR and Mindmap tasks
|
||||
|
||||
cls.update_by_id(doc_id, info)
|
||||
|
||||
@classmethod
|
||||
@DB.connection_context()
|
||||
@ -686,8 +692,13 @@ class DocumentService(CommonService):
|
||||
bad = 0
|
||||
e, doc = DocumentService.get_by_id(d["id"])
|
||||
status = doc.run # TaskStatus.RUNNING.value
|
||||
doc_progress = doc.progress if doc and doc.progress else 0.0
|
||||
special_task_running = False
|
||||
priority = 0
|
||||
for t in tsks:
|
||||
task_type = (t.task_type or "").lower()
|
||||
if task_type in PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES:
|
||||
special_task_running = True
|
||||
if 0 <= t.progress < 1:
|
||||
finished = False
|
||||
if t.progress == -1:
|
||||
@ -704,13 +715,15 @@ class DocumentService(CommonService):
|
||||
prg = 1
|
||||
status = TaskStatus.DONE.value
|
||||
|
||||
# only for special tasks on already-parsed docs that are still unfinished
|
||||
freeze_progress = special_task_running and doc_progress >= 1 and not finished
|
||||
msg = "\n".join(sorted(msg))
|
||||
info = {
|
||||
"process_duration": datetime.timestamp(
|
||||
datetime.now()) -
|
||||
d["process_begin_at"].timestamp(),
|
||||
"run": status}
|
||||
if prg != 0:
|
||||
if prg != 0 and not freeze_progress:
|
||||
info["progress"] = prg
|
||||
if msg:
|
||||
info["progress_msg"] = msg
|
||||
@ -757,6 +770,14 @@ class DocumentService(CommonService):
|
||||
.where((cls.model.kb_id == kb_id) & (cls.model.run == TaskStatus.CANCEL))
|
||||
.scalar()
|
||||
)
|
||||
downloaded = (
|
||||
cls.model.select(fn.COUNT(1))
|
||||
.where(
|
||||
cls.model.kb_id == kb_id,
|
||||
cls.model.source_type != "local"
|
||||
)
|
||||
.scalar()
|
||||
)
|
||||
|
||||
row = (
|
||||
cls.model.select(
|
||||
@ -793,6 +814,7 @@ class DocumentService(CommonService):
|
||||
"finished": int(row["finished"]),
|
||||
"failed": int(row["failed"]),
|
||||
"cancelled": int(cancelled),
|
||||
"downloaded": int(downloaded)
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@ -839,7 +861,7 @@ def queue_raptor_o_graphrag_tasks(sample_doc_id, ty, priority, fake_doc_id="", d
|
||||
"to_page": 100000000,
|
||||
"task_type": ty,
|
||||
"progress_msg": datetime.now().strftime("%H:%M:%S") + " created task " + ty,
|
||||
"begin_at": datetime.now(),
|
||||
"begin_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
}
|
||||
|
||||
task = new_task()
|
||||
@ -851,13 +873,13 @@ def queue_raptor_o_graphrag_tasks(sample_doc_id, ty, priority, fake_doc_id="", d
|
||||
|
||||
task["doc_id"] = fake_doc_id
|
||||
task["doc_ids"] = doc_ids
|
||||
DocumentService.begin2parse(sample_doc_id["id"])
|
||||
assert REDIS_CONN.queue_product(get_svr_queue_name(priority), message=task), "Can't access Redis. Please check the Redis' status."
|
||||
DocumentService.begin2parse(sample_doc_id["id"], keep_progress=True)
|
||||
assert REDIS_CONN.queue_product(settings.get_svr_queue_name(priority), message=task), "Can't access Redis. Please check the Redis' status."
|
||||
return task["id"]
|
||||
|
||||
|
||||
def get_queue_length(priority):
|
||||
group_info = REDIS_CONN.queue_info(get_svr_queue_name(priority), SVR_CONSUMER_GROUP_NAME)
|
||||
group_info = REDIS_CONN.queue_info(settings.get_svr_queue_name(priority), SVR_CONSUMER_GROUP_NAME)
|
||||
if not group_info:
|
||||
return 0
|
||||
return int(group_info.get("lag", 0) or 0)
|
||||
@ -939,7 +961,7 @@ def doc_upload_and_parse(conversation_id, file_objs, user_id):
|
||||
else:
|
||||
d["image"].save(output_buffer, format='JPEG')
|
||||
|
||||
STORAGE_IMPL.put(kb.id, d["id"], output_buffer.getvalue())
|
||||
settings.STORAGE_IMPL.put(kb.id, d["id"], output_buffer.getvalue())
|
||||
d["img_id"] = "{}-{}".format(kb.id, d["id"])
|
||||
d.pop("image", None)
|
||||
docs.append(d)
|
||||
@ -1005,4 +1027,3 @@ def doc_upload_and_parse(conversation_id, file_objs, user_id):
|
||||
doc_id, kb.id, token_counts[doc_id], chunk_counts[doc_id], 0)
|
||||
|
||||
return [d["id"] for d, _ in files]
|
||||
|
||||
|
||||
@ -33,7 +33,7 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||
from api.db.services.task_service import TaskService
|
||||
from api.utils.file_utils import filename_type, read_potential_broken_pdf, thumbnail_img
|
||||
from rag.llm.cv_model import GptV4
|
||||
from rag.utils.storage_factory import STORAGE_IMPL
|
||||
from common import settings
|
||||
|
||||
|
||||
class FileService(CommonService):
|
||||
@ -440,13 +440,13 @@ class FileService(CommonService):
|
||||
raise RuntimeError("This type of file has not been supported yet!")
|
||||
|
||||
location = filename
|
||||
while STORAGE_IMPL.obj_exist(kb.id, location):
|
||||
while settings.STORAGE_IMPL.obj_exist(kb.id, location):
|
||||
location += "_"
|
||||
|
||||
blob = file.read()
|
||||
if filetype == FileType.PDF.value:
|
||||
blob = read_potential_broken_pdf(blob)
|
||||
STORAGE_IMPL.put(kb.id, location, blob)
|
||||
settings.STORAGE_IMPL.put(kb.id, location, blob)
|
||||
|
||||
doc_id = get_uuid()
|
||||
|
||||
@ -454,7 +454,7 @@ class FileService(CommonService):
|
||||
thumbnail_location = ""
|
||||
if img is not None:
|
||||
thumbnail_location = f"thumbnail_{doc_id}.png"
|
||||
STORAGE_IMPL.put(kb.id, thumbnail_location, img)
|
||||
settings.STORAGE_IMPL.put(kb.id, thumbnail_location, img)
|
||||
|
||||
doc = {
|
||||
"id": doc_id,
|
||||
@ -534,12 +534,12 @@ class FileService(CommonService):
|
||||
@staticmethod
|
||||
def get_blob(user_id, location):
|
||||
bname = f"{user_id}-downloads"
|
||||
return STORAGE_IMPL.get(bname, location)
|
||||
return settings.STORAGE_IMPL.get(bname, location)
|
||||
|
||||
@staticmethod
|
||||
def put_blob(user_id, location, blob):
|
||||
bname = f"{user_id}-downloads"
|
||||
return STORAGE_IMPL.put(bname, location, blob)
|
||||
return settings.STORAGE_IMPL.put(bname, location, blob)
|
||||
|
||||
@classmethod
|
||||
@DB.connection_context()
|
||||
@@ -570,7 +570,7 @@ class FileService(CommonService):
  deleted_file_count = FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
  File2DocumentService.delete_by_document_id(doc_id)
  if deleted_file_count > 0:
- STORAGE_IMPL.rm(b, n)
+ settings.STORAGE_IMPL.rm(b, n)

  doc_parser = doc.parser_id
  if doc_parser == ParserType.TABLE:

@@ -201,6 +201,7 @@ class KnowledgebaseService(CommonService):
  # will get all permitted kb, be cautious.
  fields = [
  cls.model.name,
  cls.model.avatar,
  cls.model.language,
  cls.model.permission,
  cls.model.doc_num,

@@ -29,7 +29,7 @@ class LLMService(CommonService):

  def get_init_tenant_llm(user_id):
- from api import settings
+ from common import settings
  tenant_llm = []

  seen = set()

@@ -159,7 +159,7 @@ class PipelineOperationLogService(CommonService):
  document_name=document.name,
  document_suffix=document.suffix,
  document_type=document.type,
- source_from="", # TODO: add in the future
+ source_from=document.source_type.split("/")[0],
  progress=document.progress,
  progress_msg=document.progress_msg,
  process_begin_at=document.process_begin_at,

@@ -31,10 +31,8 @@ from common.misc_utils import get_uuid
  from common.time_utils import current_timestamp
  from common.constants import StatusEnum, TaskStatus
  from deepdoc.parser.excel_parser import RAGFlowExcelParser
- from rag.settings import get_svr_queue_name
- from rag.utils.storage_factory import STORAGE_IMPL
  from rag.utils.redis_conn import REDIS_CONN
- from api import settings
+ from common import settings
  from rag.nlp import search

  CANVAS_DEBUG_DOC_ID = "dataflow_x"

@@ -359,7 +357,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
  parse_task_array = []

  if doc["type"] == FileType.PDF.value:
- file_bin = STORAGE_IMPL.get(bucket, name)
+ file_bin = settings.STORAGE_IMPL.get(bucket, name)
  do_layout = doc["parser_config"].get("layout_recognize", "DeepDOC")
  pages = PdfParser.total_page_number(doc["name"], file_bin)
  if pages is None:

@@ -381,7 +379,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
  parse_task_array.append(task)

  elif doc["parser_id"] == "table":
- file_bin = STORAGE_IMPL.get(bucket, name)
+ file_bin = settings.STORAGE_IMPL.get(bucket, name)
  rn = RAGFlowExcelParser.row_number(doc["name"], file_bin)
  for i in range(0, rn, 3000):
  task = new_task()

@@ -428,7 +426,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
  unfinished_task_array = [task for task in parse_task_array if task["progress"] < 1.0]
  for unfinished_task in unfinished_task_array:
  assert REDIS_CONN.queue_product(
- get_svr_queue_name(priority), message=unfinished_task
+ settings.get_svr_queue_name(priority), message=unfinished_task
  ), "Can't access Redis. Please check the Redis' status."

@@ -518,7 +516,7 @@ def queue_dataflow(tenant_id:str, flow_id:str, task_id:str, doc_id:str=CANVAS_DE
  task["file"] = file

  if not REDIS_CONN.queue_product(
- get_svr_queue_name(priority), message=task
+ settings.get_svr_queue_name(priority), message=task
  ):
  return False, "Can't access Redis. Please check the Redis' status."
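The queue_tasks()/queue_dataflow() hunks above keep the same Redis queueing call but now take the queue name from settings instead of rag.settings. A rough, self-contained sketch of that flow; RedisStub and the per-priority suffix are assumptions made for illustration (only the base name rag_flow_svr_queue appears elsewhere in this diff), not the real REDIS_CONN:

    # Queueing pattern sketch: unfinished parse tasks are pushed onto a named
    # Redis queue resolved per priority. The stub keeps messages in memory so
    # the example runs without a Redis server.
    class RedisStub:
        def __init__(self):
            self.queues = {}

        def queue_product(self, queue: str, message: dict) -> bool:
            self.queues.setdefault(queue, []).append(message)
            return True


    def get_svr_queue_name(priority: int) -> str:
        # Assumed naming scheme: base queue name plus a per-priority suffix.
        return "rag_flow_svr_queue" if priority == 0 else f"rag_flow_svr_queue_{priority}"


    REDIS_CONN = RedisStub()

    parse_task_array = [{"doc_id": "d1", "progress": 0.0}, {"doc_id": "d2", "progress": 1.0}]
    unfinished = [t for t in parse_task_array if t["progress"] < 1.0]
    for task in unfinished:
        ok = REDIS_CONN.queue_product(get_svr_queue_name(priority=1), message=task)
        assert ok, "Can't access Redis. Please check the Redis' status."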
@@ -16,7 +16,7 @@
  import os
  import logging
  from langfuse import Langfuse
- from api import settings
+ from common import settings
  from common.constants import LLMType
  from api.db.db_models import DB, LLMFactories, TenantLLM
  from api.db.services.common_service import CommonService

@@ -27,7 +27,7 @@ from api.db.services.common_service import CommonService
  from common.misc_utils import get_uuid
  from common.time_utils import current_timestamp, datetime_format
  from common.constants import StatusEnum
- from rag.settings import MINIO
+ from common import settings

  class UserService(CommonService):

@@ -221,7 +221,7 @@ class TenantService(CommonService):
  @DB.connection_context()
  def user_gateway(cls, tenant_id):
  hash_obj = hashlib.sha256(tenant_id.encode("utf-8"))
- return int(hash_obj.hexdigest(), 16)%len(MINIO)
+ return int(hash_obj.hexdigest(), 16)%len(settings.MINIO)

  class UserTenantService(CommonService):
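TenantService.user_gateway() above maps a tenant id onto one of the configured MinIO endpoints by hashing. A quick worked example; the endpoint list below is made up and merely stands in for whatever settings.MINIO actually holds:

    # Stable tenant -> gateway index mapping via SHA-256.
    import hashlib

    minio_endpoints = ["minio-0:9000", "minio-1:9000", "minio-2:9000"]  # illustrative only

    def user_gateway(tenant_id: str) -> int:
        hash_obj = hashlib.sha256(tenant_id.encode("utf-8"))
        # A huge integer taken modulo the endpoint count gives a deterministic
        # index in range(len(minio_endpoints)) for the same tenant every time.
        return int(hash_obj.hexdigest(), 16) % len(minio_endpoints)

    print(user_gateway("tenant-42"))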
@@ -32,17 +32,15 @@ import threading
  import uuid

  from werkzeug.serving import run_simple
- from api import settings
  from api.apps import app, smtp_mail_server
  from api.db.runtime_config import RuntimeConfig
  from api.db.services.document_service import DocumentService
  from common.file_utils import get_project_base_directory

+ from common import settings
  from api.db.db_models import init_database_tables as init_web_db
  from api.db.init_data import init_web_data
- from api.versions import get_ragflow_version
+ from common.versions import get_ragflow_version
  from common.config_utils import show_configs
- from rag.settings import print_rag_settings
  from rag.utils.mcp_tool_call_conn import shutdown_all_mcp_sessions
  from rag.utils.redis_conn import RedisDistributedLock

@@ -92,7 +90,7 @@ if __name__ == '__main__':
  )
  show_configs()
  settings.init_settings()
- print_rag_settings()
+ settings.print_rag_settings()

  if RAGFLOW_DEBUGPY_LISTEN > 0:
  logging.info(f"debugpy listen on {RAGFLOW_DEBUGPY_LISTEN}")
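The ragflow_server hunk keeps the startup order show_configs() -> settings.init_settings() -> settings.print_rag_settings(). A toy sketch of why that order matters; the Settings class here is an invented stand-in, not common.settings:

    # Everything downstream (storage, doc store, queues) reads shared state off
    # the settings object, so it has to be populated before anything uses it.
    class Settings:
        def __init__(self):
            self.STORAGE_IMPL = None

        def init_settings(self):
            # Real code builds the MinIO/Elasticsearch/Redis clients here.
            self.STORAGE_IMPL = object()

        def print_rag_settings(self):
            print("doc engine, storage and queue settings ...")


    settings = Settings()
    settings.init_settings()        # must run before anything touches settings.STORAGE_IMPL
    settings.print_rag_settings()
    assert settings.STORAGE_IMPL is not None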
api/settings.py (222 lines changed)

@@ -13,225 +13,3 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- import json
- import os
- import secrets
- from datetime import date
-
- import rag.utils
- import rag.utils.es_conn
- import rag.utils.infinity_conn
- import rag.utils.opensearch_conn
- from api.constants import RAG_FLOW_SERVICE_NAME
- from common.config_utils import decrypt_database_config, get_base_config
- from common.file_utils import get_project_base_directory
- from rag.nlp import search
-
- LLM = None
- LLM_FACTORY = None
- LLM_BASE_URL = None
- CHAT_MDL = ""
- EMBEDDING_MDL = ""
- RERANK_MDL = ""
- ASR_MDL = ""
- IMAGE2TEXT_MDL = ""
- CHAT_CFG = ""
- EMBEDDING_CFG = ""
- RERANK_CFG = ""
- ASR_CFG = ""
- IMAGE2TEXT_CFG = ""
- API_KEY = None
- PARSERS = None
- HOST_IP = None
- HOST_PORT = None
- SECRET_KEY = None
- FACTORY_LLM_INFOS = None
- ALLOWED_LLM_FACTORIES = None
-
- DATABASE_TYPE = os.getenv("DB_TYPE", "mysql")
- DATABASE = decrypt_database_config(name=DATABASE_TYPE)
-
- # authentication
- AUTHENTICATION_CONF = None
-
- # client
- CLIENT_AUTHENTICATION = None
- HTTP_APP_KEY = None
- GITHUB_OAUTH = None
- FEISHU_OAUTH = None
- OAUTH_CONFIG = None
- DOC_ENGINE = None
- docStoreConn = None
-
- retriever = None
- kg_retriever = None
-
- # user registration switch
- REGISTER_ENABLED = 1
-
-
- # sandbox-executor-manager
- SANDBOX_ENABLED = 0
- SANDBOX_HOST = None
- STRONG_TEST_COUNT = int(os.environ.get("STRONG_TEST_COUNT", "8"))
-
- SMTP_CONF = None
- MAIL_SERVER = ""
- MAIL_PORT = 000
- MAIL_USE_SSL = True
- MAIL_USE_TLS = False
- MAIL_USERNAME = ""
- MAIL_PASSWORD = ""
- MAIL_DEFAULT_SENDER = ()
- MAIL_FRONTEND_URL = ""
-
-
- def get_or_create_secret_key():
- secret_key = os.environ.get("RAGFLOW_SECRET_KEY")
- if secret_key and len(secret_key) >= 32:
- return secret_key
-
- # Check if there's a configured secret key
- configured_key = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("secret_key")
- if configured_key and configured_key != str(date.today()) and len(configured_key) >= 32:
- return configured_key
-
- # Generate a new secure key and warn about it
- import logging
-
- new_key = secrets.token_hex(32)
- logging.warning(f"SECURITY WARNING: Using auto-generated SECRET_KEY. Generated key: {new_key}")
- return new_key
-
-
- def init_settings():
- global LLM, LLM_FACTORY, LLM_BASE_URL, DATABASE_TYPE, DATABASE, FACTORY_LLM_INFOS, REGISTER_ENABLED, ALLOWED_LLM_FACTORIES
- DATABASE_TYPE = os.getenv("DB_TYPE", "mysql")
- DATABASE = decrypt_database_config(name=DATABASE_TYPE)
- LLM = get_base_config("user_default_llm", {}) or {}
- LLM_DEFAULT_MODELS = LLM.get("default_models", {}) or {}
- LLM_FACTORY = LLM.get("factory", "") or ""
- LLM_BASE_URL = LLM.get("base_url", "") or ""
- ALLOWED_LLM_FACTORIES = LLM.get("allowed_factories", None)
- try:
- REGISTER_ENABLED = int(os.environ.get("REGISTER_ENABLED", "1"))
- except Exception:
- pass
-
- try:
- with open(os.path.join(get_project_base_directory(), "conf", "llm_factories.json"), "r") as f:
- FACTORY_LLM_INFOS = json.load(f)["factory_llm_infos"]
- except Exception:
- FACTORY_LLM_INFOS = []
-
- global CHAT_MDL, EMBEDDING_MDL, RERANK_MDL, ASR_MDL, IMAGE2TEXT_MDL
- global CHAT_CFG, EMBEDDING_CFG, RERANK_CFG, ASR_CFG, IMAGE2TEXT_CFG
-
- global API_KEY, PARSERS, HOST_IP, HOST_PORT, SECRET_KEY
- API_KEY = LLM.get("api_key")
- PARSERS = LLM.get(
- "parsers", "naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One,audio:Audio,email:Email,tag:Tag"
- )
-
- chat_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("chat_model", CHAT_MDL))
- embedding_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("embedding_model", EMBEDDING_MDL))
- rerank_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("rerank_model", RERANK_MDL))
- asr_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("asr_model", ASR_MDL))
- image2text_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("image2text_model", IMAGE2TEXT_MDL))
-
- CHAT_CFG = _resolve_per_model_config(chat_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
- EMBEDDING_CFG = _resolve_per_model_config(embedding_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
- RERANK_CFG = _resolve_per_model_config(rerank_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
- ASR_CFG = _resolve_per_model_config(asr_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
- IMAGE2TEXT_CFG = _resolve_per_model_config(image2text_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-
- CHAT_MDL = CHAT_CFG.get("model", "") or ""
- EMBEDDING_MDL = os.getenv("TEI_MODEL", "BAAI/bge-small-en-v1.5") if "tei-" in os.getenv("COMPOSE_PROFILES", "") else ""
- RERANK_MDL = RERANK_CFG.get("model", "") or ""
- ASR_MDL = ASR_CFG.get("model", "") or ""
- IMAGE2TEXT_MDL = IMAGE2TEXT_CFG.get("model", "") or ""
-
- HOST_IP = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("host", "127.0.0.1")
- HOST_PORT = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("http_port")
-
- SECRET_KEY = get_or_create_secret_key()
-
- global AUTHENTICATION_CONF, CLIENT_AUTHENTICATION, HTTP_APP_KEY, GITHUB_OAUTH, FEISHU_OAUTH, OAUTH_CONFIG
- # authentication
- AUTHENTICATION_CONF = get_base_config("authentication", {})
-
- # client
- CLIENT_AUTHENTICATION = AUTHENTICATION_CONF.get("client", {}).get("switch", False)
- HTTP_APP_KEY = AUTHENTICATION_CONF.get("client", {}).get("http_app_key")
- GITHUB_OAUTH = get_base_config("oauth", {}).get("github")
- FEISHU_OAUTH = get_base_config("oauth", {}).get("feishu")
-
- OAUTH_CONFIG = get_base_config("oauth", {})
-
- global DOC_ENGINE, docStoreConn, retriever, kg_retriever
- DOC_ENGINE = os.environ.get("DOC_ENGINE", "elasticsearch")
- # DOC_ENGINE = os.environ.get('DOC_ENGINE', "opensearch")
- lower_case_doc_engine = DOC_ENGINE.lower()
- if lower_case_doc_engine == "elasticsearch":
- docStoreConn = rag.utils.es_conn.ESConnection()
- elif lower_case_doc_engine == "infinity":
- docStoreConn = rag.utils.infinity_conn.InfinityConnection()
- elif lower_case_doc_engine == "opensearch":
- docStoreConn = rag.utils.opensearch_conn.OSConnection()
- else:
- raise Exception(f"Not supported doc engine: {DOC_ENGINE}")
-
- retriever = search.Dealer(docStoreConn)
- from graphrag import search as kg_search
-
- kg_retriever = kg_search.KGSearch(docStoreConn)
-
- if int(os.environ.get("SANDBOX_ENABLED", "0")):
- global SANDBOX_HOST
- SANDBOX_HOST = os.environ.get("SANDBOX_HOST", "sandbox-executor-manager")
-
- global SMTP_CONF, MAIL_SERVER, MAIL_PORT, MAIL_USE_SSL, MAIL_USE_TLS
- global MAIL_USERNAME, MAIL_PASSWORD, MAIL_DEFAULT_SENDER, MAIL_FRONTEND_URL
- SMTP_CONF = get_base_config("smtp", {})
-
- MAIL_SERVER = SMTP_CONF.get("mail_server", "")
- MAIL_PORT = SMTP_CONF.get("mail_port", 000)
- MAIL_USE_SSL = SMTP_CONF.get("mail_use_ssl", True)
- MAIL_USE_TLS = SMTP_CONF.get("mail_use_tls", False)
- MAIL_USERNAME = SMTP_CONF.get("mail_username", "")
- MAIL_PASSWORD = SMTP_CONF.get("mail_password", "")
- mail_default_sender = SMTP_CONF.get("mail_default_sender", [])
- if mail_default_sender and len(mail_default_sender) >= 2:
- MAIL_DEFAULT_SENDER = (mail_default_sender[0], mail_default_sender[1])
- MAIL_FRONTEND_URL = SMTP_CONF.get("mail_frontend_url", "")
-
-
- def _parse_model_entry(entry):
- if isinstance(entry, str):
- return {"name": entry, "factory": None, "api_key": None, "base_url": None}
- if isinstance(entry, dict):
- name = entry.get("name") or entry.get("model") or ""
- return {
- "name": name,
- "factory": entry.get("factory"),
- "api_key": entry.get("api_key"),
- "base_url": entry.get("base_url"),
- }
- return {"name": "", "factory": None, "api_key": None, "base_url": None}
-
-
- def _resolve_per_model_config(entry_dict, backup_factory, backup_api_key, backup_base_url):
- name = (entry_dict.get("name") or "").strip()
- m_factory = entry_dict.get("factory") or backup_factory or ""
- m_api_key = entry_dict.get("api_key") or backup_api_key or ""
- m_base_url = entry_dict.get("base_url") or backup_base_url or ""
-
- if name and "@" not in name and m_factory:
- name = f"{name}@{m_factory}"
-
- return {
- "model": name,
- "factory": m_factory,
- "api_key": m_api_key,
- "base_url": m_base_url,
- }
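For reference, a worked example of the per-model configuration helpers deleted above (which, judging by the import changes throughout this diff, are expected to live under common.settings from now on). The helper bodies are copied from the hunk; the model entry and factory name are made up:

    # Per-model config resolution: an entry may override factory/api_key/base_url,
    # otherwise the tenant-wide defaults are used, and the factory is folded into
    # the model name as "<name>@<factory>".
    def _parse_model_entry(entry):
        if isinstance(entry, str):
            return {"name": entry, "factory": None, "api_key": None, "base_url": None}
        if isinstance(entry, dict):
            name = entry.get("name") or entry.get("model") or ""
            return {"name": name, "factory": entry.get("factory"),
                    "api_key": entry.get("api_key"), "base_url": entry.get("base_url")}
        return {"name": "", "factory": None, "api_key": None, "base_url": None}


    def _resolve_per_model_config(entry_dict, backup_factory, backup_api_key, backup_base_url):
        name = (entry_dict.get("name") or "").strip()
        m_factory = entry_dict.get("factory") or backup_factory or ""
        m_api_key = entry_dict.get("api_key") or backup_api_key or ""
        m_base_url = entry_dict.get("base_url") or backup_base_url or ""
        if name and "@" not in name and m_factory:
            name = f"{name}@{m_factory}"
        return {"model": name, "factory": m_factory, "api_key": m_api_key, "base_url": m_base_url}


    entry = _parse_model_entry({"name": "qwen-plus", "api_key": "sk-xxx"})
    print(_resolve_per_model_config(entry, "Tongyi-Qianwen", "tenant-key", ""))
    # -> {'model': 'qwen-plus@Tongyi-Qianwen', 'factory': 'Tongyi-Qianwen',
    #     'api_key': 'sk-xxx', 'base_url': ''}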
@@ -34,7 +34,6 @@ from flask import (
  )
  from peewee import OperationalError

- from api import settings
  from common.constants import ActiveEnum
  from api.db.db_models import APIToken
  from api.utils.json_encode import CustomJSONEncoder

@@ -42,7 +41,7 @@ from rag.utils.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_
  from api.db.services.tenant_llm_service import LLMFactoriesService
  from common.connection_utils import timeout
  from common.constants import RetCode

+ from common import settings

  requests.models.complexjson.dumps = functools.partial(json.dumps, cls=CustomJSONEncoder)

@@ -626,7 +625,7 @@ async def is_strong_enough(chat_model, embedding_model):

  def get_allowed_llm_factories() -> list:
- factories = LLMFactoriesService.get_all()
+ factories = list(LLMFactoriesService.get_all(reverse=True, order_by="rank"))
  if settings.ALLOWED_LLM_FACTORIES is None:
  return factories
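The get_allowed_llm_factories() change above orders factories by rank (highest first) before applying the optional allow-list. Conceptually it behaves like this sketch; the factory records and allow-list are invented, and the real code goes through LLMFactoriesService and settings.ALLOWED_LLM_FACTORIES:

    # Rank-ordered factories, optionally filtered by an allow-list.
    factories = [
        {"name": "OpenAI", "rank": 10},
        {"name": "Ollama", "rank": 5},
        {"name": "Tongyi-Qianwen", "rank": 8},
    ]
    allowed = {"OpenAI", "Tongyi-Qianwen"}  # None would mean "no restriction"

    ordered = sorted(factories, key=lambda f: f["rank"], reverse=True)
    result = ordered if allowed is None else [f for f in ordered if f["name"] in allowed]
    print([f["name"] for f in result])  # ['OpenAI', 'Tongyi-Qianwen']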
@@ -13,17 +13,17 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
  from datetime import datetime
  import json
  import os
  import requests
  from timeit import default_timer as timer

- from api import settings
  from api.db.db_models import DB
  from rag import settings as rag_settings
  from rag.utils.redis_conn import REDIS_CONN
- from rag.utils.storage_factory import STORAGE_IMPL
  from rag.utils.es_conn import ESConnection
  from rag.utils.infinity_conn import InfinityConnection
+ from common import settings

  def _ok_nok(ok: bool) -> str:

@@ -62,7 +62,7 @@ def check_doc_engine() -> tuple[bool, dict]:
  def check_storage() -> tuple[bool, dict]:
  st = timer()
  try:
- STORAGE_IMPL.health()
+ settings.STORAGE_IMPL.health()
  return True, {"elapsed": f"{(timer() - st) * 1000.0:.1f}"}
  except Exception as e:
  return False, {"elapsed": f"{(timer() - st) * 1000.0:.1f}", "error": str(e)}
@@ -120,7 +120,7 @@ def get_mysql_status():
  def check_minio_alive():
  start_time = timer()
  try:
- response = requests.get(f'http://{rag_settings.MINIO["host"]}/minio/health/live')
+ response = requests.get(f'http://{settings.MINIO["host"]}/minio/health/live')
  if response.status_code == 200:
  return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
  else:

@@ -148,7 +148,10 @@ def get_redis_info():
  def check_ragflow_server_alive():
  start_time = timer()
  try:
- response = requests.get(f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping')
+ url = f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping'
+ if '0.0.0.0' in url:
+ url = url.replace('0.0.0.0', '127.0.0.1')
+ response = requests.get(url)
  if response.status_code == 200:
  return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
  else:
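The check_ragflow_server_alive() change builds the self-ping URL first so that a bind-all host can be rewritten to loopback before the request goes out. A tiny standalone illustration (host and port values are made up):

    # 0.0.0.0 is a listen address, not a destination; dial loopback instead.
    host_ip, host_port = "0.0.0.0", 9380

    url = f"http://{host_ip}:{host_port}/v1/system/ping"
    if "0.0.0.0" in url:
        url = url.replace("0.0.0.0", "127.0.0.1")
    print(url)  # http://127.0.0.1:9380/v1/system/ping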
@@ -160,6 +163,26 @@ def check_ragflow_server_alive():
  }

+ def check_task_executor_alive():
+ task_executor_heartbeats = {}
+ try:
+ task_executors = REDIS_CONN.smembers("TASKEXE")
+ now = datetime.now().timestamp()
+ for task_executor_id in task_executors:
+ heartbeats = REDIS_CONN.zrangebyscore(task_executor_id, now - 60 * 30, now)
+ heartbeats = [json.loads(heartbeat) for heartbeat in heartbeats]
+ task_executor_heartbeats[task_executor_id] = heartbeats
+ if task_executor_heartbeats:
+ return {"status": "alive", "message": task_executor_heartbeats}
+ else:
+ return {"status": "timeout", "message": "Not found any task executor."}
+ except Exception as e:
+ return {
+ "status": "timeout",
+ "message": f"error: {str(e)}"
+ }

  def run_health_checks() -> tuple[dict, bool]:
  result: dict[str, str | dict] = {}
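The new check_task_executor_alive() reads per-executor heartbeats from Redis: executor ids live in the TASKEXE set and each executor keeps a sorted set of JSON heartbeats scored by timestamp. The sketch below shows that scheme with plain redis-py rather than RAGFlow's REDIS_CONN wrapper, so the method names are redis-py's, the heartbeat payload fields are made up, and a reachable Redis is required to actually run it:

    import json
    import time

    import redis

    r = redis.Redis(host="localhost", port=6379, decode_responses=True)

    executor_id = "task_executor_0"          # illustrative id
    r.sadd("TASKEXE", executor_id)           # executor registers itself
    now = time.time()
    # One heartbeat: a JSON blob stored as a sorted-set member scored by its timestamp.
    r.zadd(executor_id, {json.dumps({"pending": 0, "now": now}): now})

    # What the health check effectively does for each registered executor:
    recent = r.zrangebyscore(executor_id, now - 60 * 30, now)
    print([json.loads(h) for h in recent])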
@@ -18,7 +18,7 @@ from enum import Enum, IntEnum
  from strenum import StrEnum

  SERVICE_CONF = "service_conf.yaml"

  RAG_FLOW_SERVICE_NAME = "ragflow"

  class CustomEnum(Enum):
  @classmethod

@@ -113,7 +113,7 @@ class FileSource(StrEnum):
  DISCORD = "discord"
  CONFLUENCE = "confluence"
  GMAIL = "gmail"
- GOOGLE_DRIVER = "google_driver"
+ GOOGLE_DRIVE = "google_drive"
  JIRA = "jira"
  SHAREPOINT = "sharepoint"
  SLACK = "slack"

@@ -137,6 +137,14 @@ class MCPServerType(StrEnum):

  VALID_MCP_SERVER_TYPES = {MCPServerType.SSE, MCPServerType.STREAMABLE_HTTP}

+ class Storage(Enum):
+ MINIO = 1
+ AZURE_SPN = 2
+ AZURE_SAS = 3
+ AWS_S3 = 4
+ OSS = 5
+ OPENDAL = 6

  # environment
  # ENV_STRONG_TEST_COUNT = "STRONG_TEST_COUNT"
  # ENV_RAGFLOW_SECRET_KEY = "RAGFLOW_SECRET_KEY"

@@ -181,3 +189,8 @@ VALID_MCP_SERVER_TYPES = {MCPServerType.SSE, MCPServerType.STREAMABLE_HTTP}
  # ENV_MAX_CONCURRENT_MINIO = "MAX_CONCURRENT_MINIO"
  # ENV_WORKER_HEARTBEAT_TIMEOUT = "WORKER_HEARTBEAT_TIMEOUT"
  # ENV_TRACE_MALLOC_ENABLED = "TRACE_MALLOC_ENABLED"

+ PAGERANK_FLD = "pagerank_fea"
+ SVR_QUEUE_NAME = "rag_flow_svr_queue"
+ SVR_CONSUMER_GROUP_NAME = "rag_flow_svr_task_broker"
+ TAG_FLD = "tag_feas"
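The Storage enum added above gives storage selection a single, typed switch point. A minimal sketch of how a factory might dispatch on it; the stub backend class and the mapping below are illustrative, not RAGFlow's storage_factory:

    from enum import Enum


    class Storage(Enum):  # copied from the hunk above
        MINIO = 1
        AZURE_SPN = 2
        AZURE_SAS = 3
        AWS_S3 = 4
        OSS = 5
        OPENDAL = 6


    class MinioStub:
        def health(self):
            return True


    _BACKENDS = {Storage.MINIO: MinioStub}  # other members would map to their own clients


    def make_storage(kind: str):
        member = Storage[kind.upper()]   # e.g. "minio" -> Storage.MINIO
        return _BACKENDS[member]()       # KeyError here means "unsupported backend"


    print(make_storage("minio").health())  # True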
@@ -10,7 +10,7 @@ from .notion_connector import NotionConnector
  from .confluence_connector import ConfluenceConnector
  from .discord_connector import DiscordConnector
  from .dropbox_connector import DropboxConnector
- from .google_drive_connector import GoogleDriveConnector
+ from .google_drive.connector import GoogleDriveConnector
  from .jira_connector import JiraConnector
  from .sharepoint_connector import SharePointConnector
  from .teams_connector import TeamsConnector

@@ -47,4 +47,4 @@ __all__ = [
  "CredentialExpiredError",
  "InsufficientPermissionsError",
  "UnexpectedValidationError"
- ]
+ ]
Some files were not shown because too many files have changed in this diff.