Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-04 03:25:30 +08:00.

Compare commits: 99 commits, 4e76220e25 ... v0.22.0
| SHA1 | Author | Date | |
|---|---|---|---|
| a36a0fe71c | |||
| a81f6d1b24 | |||
| 8406a5ea47 | |||
| 20b6dafbd8 | |||
| 33cc9cafa9 | |||
| 6567ecf15a | |||
| 3a7322f5b2 | |||
| 829e5f287b | |||
| 1e8efa2631 | |||
| e7f7c09b0b | |||
| 8ae562504b | |||
| bacc9d3ab9 | |||
| d226764ed0 | |||
| 39120d49cf | |||
| 27211a9b34 | |||
| e9de25c973 | |||
| 09e971dcc8 | |||
| 883df22aa2 | |||
| 2bd7abadd3 | |||
| 435479adb3 | |||
| 2c727a4a9c | |||
| a15f522dc9 | |||
| de53498b39 | |||
| 72740eb5b9 | |||
| c30ffb5716 | |||
| 6dcff7db97 | |||
| 9213568692 | |||
| d81e4095de | |||
| 8ddeaca3d6 | |||
| f441f8ffc2 | |||
| 522c7b7ac6 | |||
| 377c0fb4fa | |||
| 7dd9758056 | |||
| 26cf5131c9 | |||
| 93207f83ba | |||
| f77604db26 | |||
| dd5b8e2e1a | |||
| 83ff8e8009 | |||
| 7db6cb8ca3 | |||
| ba6470a7a5 | |||
| df16a80f25 | |||
| 29ea059f90 | |||
| a191933f81 | |||
| 6e1ebb2855 | |||
| 68b952abb1 | |||
| 0879b6af2c | |||
| 2b9145948f | |||
| 726473fd39 | |||
| d207291217 | |||
| bf382e5c4d | |||
| 4338e706c6 | |||
| 86af330f06 | |||
| d016a06fd5 | |||
| 7423a5806e | |||
| b6cd282ccd | |||
| 82ca2e0378 | |||
| 1cd54832b5 | |||
| 660386d3b5 | |||
| 4cdaa77545 | |||
| 9fcc4946e2 | |||
| 98e9d68c75 | |||
| 8f34824aa4 | |||
| 9a6808230a | |||
| c7bd0a755c | |||
| dd1c8c5779 | |||
| 526ba3388f | |||
| cb95072ecf | |||
| f6aeebc608 | |||
| 307f53dae8 | |||
| fa98cc2bb9 | |||
| c58d95ed69 | |||
| edbc396bc6 | |||
| b137de1def | |||
| 2cb1046cbf | |||
| a880beb1f6 | |||
| 34283d4db4 | |||
| 5629fbd2ca | |||
| b7aa6d6c4f | |||
| 0b7b88592f | |||
| 42edecc98f | |||
| af98763e27 | |||
| 5a8fbc5a81 | |||
| 0cd8024c34 | |||
| 3bd1fefe1f | |||
| e18c408759 | |||
| 23b81eae77 | |||
| 66c01c7274 | |||
| 4b8ce08050 | |||
| ca30ef83bf | |||
| d469ae6d50 | |||
| f581a1c4e5 | |||
| 15c75bbf15 | |||
| adbb8319e0 | |||
| f98b24c9bf | |||
| 87c9a054d3 | |||
| cd6ed4b380 | |||
| f29a3dd651 | |||
| e658beee38 | |||
| 17ea5c1dee |
.github/workflows/release.yml (vendored): 28 changed lines

@@ -19,7 +19,7 @@ jobs:
  runs-on: [ "self-hosted", "ragflow-test" ]
  steps:
  - name: Ensure workspace ownership
- run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
+ run: echo "chown -R ${USER} ${GITHUB_WORKSPACE}" && sudo chown -R ${USER} ${GITHUB_WORKSPACE}

  # https://github.com/actions/checkout/blob/v3/README.md
  - name: Check out code

@@ -31,37 +31,37 @@ jobs:
  - name: Prepare release body
  run: |
- if [[ $GITHUB_EVENT_NAME == 'create' ]]; then
+ if [[ ${GITHUB_EVENT_NAME} == "create" ]]; then
  RELEASE_TAG=${GITHUB_REF#refs/tags/}
- if [[ $RELEASE_TAG == 'nightly' ]]; then
+ if [[ ${RELEASE_TAG} == "nightly" ]]; then
  PRERELEASE=true
  else
  PRERELEASE=false
  fi
- echo "Workflow triggered by create tag: $RELEASE_TAG"
+ echo "Workflow triggered by create tag: ${RELEASE_TAG}"
  else
  RELEASE_TAG=nightly
  PRERELEASE=true
  echo "Workflow triggered by schedule"
  fi
- echo "RELEASE_TAG=$RELEASE_TAG" >> $GITHUB_ENV
+ echo "RELEASE_TAG=${RELEASE_TAG}" >> ${GITHUB_ENV}
- echo "PRERELEASE=$PRERELEASE" >> $GITHUB_ENV
+ echo "PRERELEASE=${PRERELEASE}" >> ${GITHUB_ENV}
  RELEASE_DATETIME=$(date --rfc-3339=seconds)
- echo Release $RELEASE_TAG created from $GITHUB_SHA at $RELEASE_DATETIME > release_body.md
+ echo Release ${RELEASE_TAG} created from ${GITHUB_SHA} at ${RELEASE_DATETIME} > release_body.md

  - name: Move the existing mutable tag
  # https://github.com/softprops/action-gh-release/issues/171
  run: |
  git fetch --tags
- if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
+ if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
  # Determine if a given tag exists and matches a specific Git commit.
  # actions/checkout@v4 fetch-tags doesn't work when triggered by schedule
- if [ "$(git rev-parse -q --verify "refs/tags/$RELEASE_TAG")" = "$GITHUB_SHA" ]; then
+ if [ "$(git rev-parse -q --verify "refs/tags/${RELEASE_TAG}")" = "${GITHUB_SHA}" ]; then
- echo "mutable tag $RELEASE_TAG exists and matches $GITHUB_SHA"
+ echo "mutable tag ${RELEASE_TAG} exists and matches ${GITHUB_SHA}"
  else
- git tag -f $RELEASE_TAG $GITHUB_SHA
+ git tag -f ${RELEASE_TAG} ${GITHUB_SHA}
- git push -f origin $RELEASE_TAG:refs/tags/$RELEASE_TAG
+ git push -f origin ${RELEASE_TAG}:refs/tags/${RELEASE_TAG}
- echo "created/moved mutable tag $RELEASE_TAG to $GITHUB_SHA"
+ echo "created/moved mutable tag ${RELEASE_TAG} to ${GITHUB_SHA}"
  fi
  fi

@@ -87,7 +87,7 @@ jobs:
  - name: Build and push image
  run: |
- echo ${{ secrets.DOCKERHUB_TOKEN }} | sudo docker login --username infiniflow --password-stdin
+ sudo docker login --username infiniflow --password-stdin <<< ${{ secrets.DOCKERHUB_TOKEN }}
  sudo docker build --build-arg NEED_MIRROR=1 -t infiniflow/ragflow:${RELEASE_TAG} -f Dockerfile .
  sudo docker tag infiniflow/ragflow:${RELEASE_TAG} infiniflow/ragflow:latest
  sudo docker push infiniflow/ragflow:${RELEASE_TAG}
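For context on the login change above: the release workflow now feeds the registry token to `docker login --password-stdin` through a bash here-string instead of an `echo` pipeline, so the handoff stays inside the current shell. A minimal sketch, with a placeholder token value (the real workflow injects the GitHub secret):

```bash
#!/usr/bin/env bash
# Placeholder for illustration only; in the workflow this comes from secrets.DOCKERHUB_TOKEN.
DOCKERHUB_TOKEN="example-token"

# Previous form: the secret travels through a pipeline from echo to docker login.
echo "${DOCKERHUB_TOKEN}" | docker login --username infiniflow --password-stdin

# New form: a here-string supplies the secret on stdin without spawning a pipeline.
docker login --username infiniflow --password-stdin <<< "${DOCKERHUB_TOKEN}"
```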
.github/workflows/tests.yml (vendored): 43 changed lines

@@ -9,8 +9,11 @@ on:
  - 'docs/**'
  - '*.md'
  - '*.mdx'
- pull_request:
- types: [ labeled, synchronize, reopened ]
+ # The only difference between pull_request and pull_request_target is the context in which the workflow runs:
+ # — pull_request_target workflows use the workflow files from the default branch, and secrets are available.
+ # — pull_request workflows use the workflow files from the pull request branch, and secrets are unavailable.
+ pull_request_target:
+ types: [ synchronize, ready_for_review ]
  paths-ignore:
  - 'docs/**'
  - '*.md'

@@ -28,7 +31,7 @@ jobs:
  name: ragflow_tests
  # https://docs.github.com/en/actions/using-jobs/using-conditions-to-control-job-execution
  # https://github.com/orgs/community/discussions/26261
- if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci') }}
+ if: ${{ github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'ci') }}
  runs-on: [ "self-hosted", "ragflow-test" ]
  steps:
  # https://github.com/hmarr/debug-action

@@ -37,19 +40,20 @@ jobs:
  - name: Ensure workspace ownership
  run: |
  echo "Workflow triggered by ${{ github.event_name }}"
- echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
+ echo "chown -R ${USER} ${GITHUB_WORKSPACE}" && sudo chown -R ${USER} ${GITHUB_WORKSPACE}

  # https://github.com/actions/checkout/issues/1781
  - name: Check out code
  uses: actions/checkout@v4
  with:
+ ref: ${{ (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') && format('refs/pull/{0}/merge', github.event.pull_request.number) || github.sha }}
  fetch-depth: 0
  fetch-tags: true

  - name: Check workflow duplication
- if: ${{ !cancelled() && !failure() && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci')) }}
+ if: ${{ !cancelled() && !failure() }}
  run: |
- if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "schedule" ]]; then
+ if [[ ${GITHUB_EVENT_NAME} != "pull_request_target" && ${GITHUB_EVENT_NAME} != "schedule" ]]; then
  HEAD=$(git rev-parse HEAD)
  # Find a PR that introduced a given commit
  gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"

@@ -67,14 +71,14 @@ jobs:
  gh run cancel ${GITHUB_RUN_ID}
  while true; do
  status=$(gh run view ${GITHUB_RUN_ID} --json status -q .status)
- [ "$status" = "completed" ] && break
+ [ "${status}" = "completed" ] && break
  sleep 5
  done
  exit 1
  fi
  fi
  fi
- else
+ elif [[ ${GITHUB_EVENT_NAME} == "pull_request_target" ]]; then
  PR_NUMBER=${{ github.event.pull_request.number }}
  PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
  # Calculate the hash of the current workspace content

@@ -93,18 +97,18 @@ jobs:
  - name: Build ragflow:nightly
  run: |
- RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
+ RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-${HOME}}
  RAGFLOW_IMAGE=infiniflow/ragflow:${GITHUB_RUN_ID}
- echo "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> $GITHUB_ENV
+ echo "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> ${GITHUB_ENV}
  sudo docker pull ubuntu:22.04
  sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t ${RAGFLOW_IMAGE} .
- if [[ "$GITHUB_EVENT_NAME" == "schedule" ]]; then
+ if [[ ${GITHUB_EVENT_NAME} == "schedule" ]]; then
  export HTTP_API_TEST_LEVEL=p3
  else
  export HTTP_API_TEST_LEVEL=p2
  fi
- echo "HTTP_API_TEST_LEVEL=${HTTP_API_TEST_LEVEL}" >> $GITHUB_ENV
+ echo "HTTP_API_TEST_LEVEL=${HTTP_API_TEST_LEVEL}" >> ${GITHUB_ENV}
- echo "RAGFLOW_CONTAINER=${GITHUB_RUN_ID}-ragflow-cpu-1" >> $GITHUB_ENV
+ echo "RAGFLOW_CONTAINER=${GITHUB_RUN_ID}-ragflow-cpu-1" >> ${GITHUB_ENV}

  - name: Start ragflow:nightly
  run: |

@@ -154,7 +158,7 @@ jobs:
  echo -e "COMPOSE_PROFILES=\${COMPOSE_PROFILES},tei-cpu" >> docker/.env
  echo -e "TEI_MODEL=BAAI/bge-small-en-v1.5" >> docker/.env
  echo -e "RAGFLOW_IMAGE=${RAGFLOW_IMAGE}" >> docker/.env
- echo "HOST_ADDRESS=http://host.docker.internal:${SVR_HTTP_PORT}" >> $GITHUB_ENV
+ echo "HOST_ADDRESS=http://host.docker.internal:${SVR_HTTP_PORT}" >> ${GITHUB_ENV}

  sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} up -d
  uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python

@@ -189,7 +193,8 @@ jobs:
  - name: Stop ragflow:nightly
  if: always() # always run this step even if previous steps failed
  run: |
- sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
+ sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v || true
+ sudo docker ps -a --filter "label=com.docker.compose.project=${GITHUB_RUN_ID}" -q | xargs -r sudo docker rm -f

  - name: Start ragflow:nightly
  run: |

@@ -226,5 +231,9 @@ jobs:
  - name: Stop ragflow:nightly
  if: always() # always run this step even if previous steps failed
  run: |
- sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
- sudo docker rmi -f ${RAGFLOW_IMAGE:-NO_IMAGE} || true
+ # Sometimes `docker compose down` fail due to hang container, heavy load etc. Need to remove such containers to release resources(for example, listen ports).
+ sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v || true
+ sudo docker ps -a --filter "label=com.docker.compose.project=${GITHUB_RUN_ID}" -q | xargs -r sudo docker rm -f
+ if [[ -n ${RAGFLOW_IMAGE} ]]; then
+ sudo docker rmi -f ${RAGFLOW_IMAGE}
+ fi
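The two "Stop ragflow:nightly" steps above now tolerate a hung `docker compose down` and then force-remove whatever survived. A minimal sketch of that cleanup pattern, assuming a compose project named after the run ID (the value below is a placeholder; GitHub Actions provides the real one):

```bash
#!/usr/bin/env bash
GITHUB_RUN_ID="12345"   # placeholder; set by GitHub Actions in the real workflow

# Attempt a normal teardown, but do not fail the step if compose hangs or errors out.
sudo docker compose -f docker/docker-compose.yml -p "${GITHUB_RUN_ID}" down -v || true

# Force-remove any containers that survived, matched by the compose project label.
# `xargs -r` skips the rm call entirely when the filter returns nothing.
sudo docker ps -a --filter "label=com.docker.compose.project=${GITHUB_RUN_ID}" -q \
  | xargs -r sudo docker rm -f

# Finally drop the per-run image, if one was built for this run.
if [[ -n "${RAGFLOW_IMAGE:-}" ]]; then
  sudo docker rmi -f "${RAGFLOW_IMAGE}"
fi
```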
README.md: 31 changed lines

@@ -22,7 +22,7 @@
  <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -61,8 +61,7 @@
  - 🔎 [System Architecture](#-system-architecture)
  - 🎬 [Get Started](#-get-started)
  - 🔧 [Configurations](#-configurations)
- - 🔧 [Build a docker image without embedding models](#-build-a-docker-image-without-embedding-models)
+ - 🔧 [Build a Docker image](#-build-a-docker-image)
- - 🔧 [Build a docker image including embedding models](#-build-a-docker-image-including-embedding-models)
  - 🔨 [Launch service from source for development](#-launch-service-from-source-for-development)
  - 📚 [Documentation](#-documentation)
  - 📜 [Roadmap](#-roadmap)

@@ -86,6 +85,7 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
  ## 🔥 Latest Updates

+ - 2025-11-12 Supports data synchronization from Confluence, AWS S3, Discord, Google Drive.
  - 2025-10-23 Supports MinerU & Docling as document parsing methods.
  - 2025-10-15 Supports orchestrable ingestion pipeline.
  - 2025-08-08 Supports OpenAI's latest GPT-5 series models.

@@ -93,7 +93,6 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).
  - 2025-05-23 Adds a Python/JavaScript code executor component to Agent.
  - 2025-05-05 Supports cross-language query.
  - 2025-03-19 Supports using a multi-modal model to make sense of images within PDF or DOCX files.
- - 2025-02-28 Combined with Internet search (Tavily), supports reasoning like Deep Research for any LLMs.
  - 2024-12-18 Upgrades Document Layout Analysis model in DeepDoc.
  - 2024-08-22 Support text to SQL statements through RAG.

@@ -189,25 +188,29 @@ releases! 🌟
  > All Docker images are built for x86 platforms. We don't currently offer Docker images for ARM64.
  > If you are on an ARM64 platform, follow [this guide](https://ragflow.io/docs/dev/build_docker_image) to build a Docker image compatible with your system.

- > The command below downloads the `v0.21.1-slim` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.21.1-slim`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.
+ > The command below downloads the `v0.22.0` edition of the RAGFlow Docker image. See the following table for descriptions of different RAGFlow editions. To download a RAGFlow edition different from `v0.22.0`, update the `RAGFLOW_IMAGE` variable accordingly in **docker/.env** before using `docker compose` to start the server.

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases), e.g.: git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

+ > Note: Prior to `v0.22.0`, we provided both images with embedding models and slim images without embedding models. Details as follows:
+
  | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
  | ----------------- | --------------- | --------------------- | ------------------------ |
  | v0.21.1 | ≈9 | ✔️ | Stable release |
  | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |

- > Note: Starting with `v0.22.0`, we ship only the slim edition and no longer append the **-slim** suffix to the image tag.
+ > Starting with `v0.22.0`, we ship only the slim edition and no longer append the **-slim** suffix to the image tag.

  4. Check the server status after having the server up and running:

@@ -288,7 +291,7 @@ RAGFlow uses Elasticsearch by default for storing full text and vectors. To swit
  > [!WARNING]
  > Switching to Infinity on a Linux/arm64 machine is not yet officially supported.

- ## 🔧 Build a Docker image without embedding models
+ ## 🔧 Build a Docker image

  This image is approximately 2 GB in size and relies on external LLM and embedding services.
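To act on the README's instruction about choosing an edition, the usual flow is to edit `RAGFLOW_IMAGE` in **docker/.env** before starting the stack. A minimal sketch, assuming you are in the repository root; the `sed` expression is only illustrative (editing the file by hand works just as well) and the tag shown is an example:

```bash
#!/usr/bin/env bash
cd ragflow/docker

# Point RAGFLOW_IMAGE at the edition you want; docker/.env already defines this variable.
sed -i 's|^RAGFLOW_IMAGE=.*|RAGFLOW_IMAGE=infiniflow/ragflow:v0.22.0|' .env

# Start (or restart) the server with the chosen image.
docker compose -f docker-compose.yml up -d
```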
README_id.md: 31 changed lines

@@ -22,7 +22,7 @@
  <img alt="Lencana Daring" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Rilis%20Terbaru" alt="Rilis Terbaru">

@@ -61,8 +61,7 @@
  - 🔎 [Arsitektur Sistem](#-arsitektur-sistem)
  - 🎬 [Mulai](#-mulai)
  - 🔧 [Konfigurasi](#-konfigurasi)
- - 🔧 [Membangun Image Docker tanpa Model Embedding](#-membangun-image-docker-tanpa-model-embedding)
+ - 🔧 [Membangun Image Docker](#-membangun-docker-image)
- - 🔧 [Membangun Image Docker dengan Model Embedding](#-membangun-image-docker-dengan-model-embedding)
  - 🔨 [Meluncurkan aplikasi dari Sumber untuk Pengembangan](#-meluncurkan-aplikasi-dari-sumber-untuk-pengembangan)
  - 📚 [Dokumentasi](#-dokumentasi)
  - 📜 [Peta Jalan](#-peta-jalan)

@@ -86,6 +85,7 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
  ## 🔥 Pembaruan Terbaru

+ - 2025-11-12 Mendukung sinkronisasi data dari Confluence, AWS S3, Discord, Google Drive.
  - 2025-10-23 Mendukung MinerU & Docling sebagai metode penguraian dokumen.
  - 2025-10-15 Dukungan untuk jalur data yang terorkestrasi.
  - 2025-08-08 Mendukung model seri GPT-5 terbaru dari OpenAI.

@@ -93,7 +93,6 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
  - 2025-05-23 Menambahkan komponen pelaksana kode Python/JS ke Agen.
  - 2025-05-05 Mendukung kueri lintas bahasa.
  - 2025-03-19 Mendukung penggunaan model multi-modal untuk memahami gambar di dalam file PDF atau DOCX.
- - 2025-02-28 dikombinasikan dengan pencarian Internet (TAVILY), mendukung penelitian mendalam untuk LLM apa pun.
  - 2024-12-18 Meningkatkan model Analisis Tata Letak Dokumen di DeepDoc.
  - 2024-08-22 Dukungan untuk teks ke pernyataan SQL melalui RAG.

@@ -187,25 +186,29 @@ Coba demo kami di [https://demo.ragflow.io](https://demo.ragflow.io).
  > Semua gambar Docker dibangun untuk platform x86. Saat ini, kami tidak menawarkan gambar Docker untuk ARM64.
  > Jika Anda menggunakan platform ARM64, [silakan gunakan panduan ini untuk membangun gambar Docker yang kompatibel dengan sistem Anda](https://ragflow.io/docs/dev/build_docker_image).

- > Perintah di bawah ini mengunduh edisi v0.21.1 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.21.1, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.
+ > Perintah di bawah ini mengunduh edisi v0.22.0 dari gambar Docker RAGFlow. Silakan merujuk ke tabel berikut untuk deskripsi berbagai edisi RAGFlow. Untuk mengunduh edisi RAGFlow yang berbeda dari v0.22.0, perbarui variabel RAGFLOW_IMAGE di docker/.env sebelum menggunakan docker compose untuk memulai server.

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # Opsional: gunakan tag stabil (lihat releases: https://github.com/infiniflow/ragflow/releases), contoh: git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

- | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
- | ----------------- | --------------- | --------------------- | -------------------------- |
- | v0.21.1 | ≈9 | ✔️ | Stable release |
- | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |
- > Catatan: Mulai dari `v0.22.0`, kami hanya menyediakan edisi slim dan tidak lagi menambahkan akhiran **-slim** pada tag image.
+ > Catatan: Sebelum `v0.22.0`, kami menyediakan image dengan model embedding dan image slim tanpa model embedding. Detailnya sebagai berikut:
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
+ | ----------------- | --------------- | --------------------- | ------------------------ |
+ | v0.21.1 | ≈9 | ✔️ | Stable release |
+ | v0.21.1-slim | ≈2 | ❌ | Stable release |
+ > Mulai dari `v0.22.0`, kami hanya menyediakan edisi slim dan tidak lagi menambahkan akhiran **-slim** pada tag image.

  1. Periksa status server setelah server aktif dan berjalan:

@@ -260,7 +263,7 @@ Pembaruan konfigurasi ini memerlukan reboot semua kontainer agar efektif:
  > $ docker compose -f docker-compose.yml up -d
  > ```

- ## 🔧 Membangun Docker Image tanpa Model Embedding
+ ## 🔧 Membangun Docker Image

  Image ini berukuran sekitar 2 GB dan bergantung pada aplikasi LLM eksternal dan embedding.
README_ja.md: 30 changed lines

@@ -22,7 +22,7 @@
  <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -66,6 +66,7 @@
  ## 🔥 最新情報

+ - 2025-11-12 Confluence、AWS S3、Discord、Google Drive からのデータ同期をサポートします。
  - 2025-10-23 ドキュメント解析方法として MinerU と Docling をサポートします。
  - 2025-10-15 オーケストレーションされたデータパイプラインのサポート。
  - 2025-08-08 OpenAI の最新 GPT-5 シリーズモデルをサポートします。

@@ -73,7 +74,6 @@
  - 2025-05-23 エージェントに Python/JS コードエグゼキュータコンポーネントを追加しました。
  - 2025-05-05 言語間クエリをサポートしました。
  - 2025-03-19 PDFまたはDOCXファイル内の画像を理解するために、多モーダルモデルを使用することをサポートします。
- - 2025-02-28 インターネット検索 (TAVILY) と組み合わせて、あらゆる LLM の詳細な調査をサポートします。
  - 2024-12-18 DeepDoc のドキュメント レイアウト分析モデルをアップグレードします。
  - 2024-08-22 RAG を介して SQL ステートメントへのテキストをサポートします。

@@ -166,28 +166,32 @@
  > 現在、公式に提供されているすべての Docker イメージは x86 アーキテクチャ向けにビルドされており、ARM64 用の Docker イメージは提供されていません。
  > ARM64 アーキテクチャのオペレーティングシステムを使用している場合は、[このドキュメント](https://ragflow.io/docs/dev/build_docker_image)を参照して Docker イメージを自分でビルドしてください。

- > 以下のコマンドは、RAGFlow Docker イメージの v0.21.1 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.21.1 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。
+ > 以下のコマンドは、RAGFlow Docker イメージの v0.22.0 エディションをダウンロードします。異なる RAGFlow エディションの説明については、以下の表を参照してください。v0.22.0 とは異なるエディションをダウンロードするには、docker/.env ファイルの RAGFLOW_IMAGE 変数を適宜更新し、docker compose を使用してサーバーを起動してください。

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # 任意: 安定版タグを利用 (一覧: https://github.com/infiniflow/ragflow/releases) 例: git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

- | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
- | ----------------- | --------------- | --------------------- | -------------------------- |
- | v0.21.1 | ≈9 | ✔️ | Stable release |
- | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |
- > 注意:`v0.22.0` 以降、当プロジェクトでは slim エディションのみを提供し、イメージタグに **-slim** サフィックスを付けなくなりました。
+ > 注意:`v0.22.0` より前のバージョンでは、embedding モデルを含むイメージと、embedding モデルを含まない slim イメージの両方を提供していました。詳細は以下の通りです:
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
+ | ----------------- | --------------- | --------------------- | ------------------------ |
+ | v0.21.1 | ≈9 | ✔️ | Stable release |
+ | v0.21.1-slim | ≈2 | ❌ | Stable release |
+ > `v0.22.0` 以降、当プロジェクトでは slim エディションのみを提供し、イメージタグに **-slim** サフィックスを付けなくなりました。

  1. サーバーを立ち上げた後、サーバーの状態を確認する:

  ```bash
  $ docker logs -f docker-ragflow-cpu-1
  ```

@@ -259,7 +263,7 @@ RAGFlow はデフォルトで Elasticsearch を使用して全文とベクトル
  > Linux/arm64 マシンでの Infinity への切り替えは正式にサポートされていません。
  >

- ## 🔧 ソースコードで Docker イメージを作成(埋め込みモデルなし)
+ ## 🔧 ソースコードで Docker イメージを作成

  この Docker イメージのサイズは約 1GB で、外部の大モデルと埋め込みサービスに依存しています。
README_ko.md: 30 changed lines

@@ -22,7 +22,7 @@
  <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -67,6 +67,7 @@
  ## 🔥 업데이트

+ - 2025-11-12 Confluence, AWS S3, Discord, Google Drive에서 데이터 동기화를 지원합니다.
  - 2025-10-23 문서 파싱 방법으로 MinerU 및 Docling을 지원합니다.
  - 2025-10-15 조정된 데이터 파이프라인 지원.
  - 2025-08-08 OpenAI의 최신 GPT-5 시리즈 모델을 지원합니다.

@@ -74,7 +75,6 @@
  - 2025-05-23 Agent에 Python/JS 코드 실행기 구성 요소를 추가합니다.
  - 2025-05-05 언어 간 쿼리를 지원합니다.
  - 2025-03-19 PDF 또는 DOCX 파일 내의 이미지를 이해하기 위해 다중 모드 모델을 사용하는 것을 지원합니다.
- - 2025-02-28 인터넷 검색(TAVILY)과 결합되어 모든 LLM에 대한 심층 연구를 지원합니다.
  - 2024-12-18 DeepDoc의 문서 레이아웃 분석 모델 업그레이드.
  - 2024-08-22 RAG를 통해 SQL 문에 텍스트를 지원합니다.

@@ -168,25 +168,29 @@
  > 모든 Docker 이미지는 x86 플랫폼을 위해 빌드되었습니다. 우리는 현재 ARM64 플랫폼을 위한 Docker 이미지를 제공하지 않습니다.
  > ARM64 플랫폼을 사용 중이라면, [시스템과 호환되는 Docker 이미지를 빌드하려면 이 가이드를 사용해 주세요](https://ragflow.io/docs/dev/build_docker_image).

- > 아래 명령어는 RAGFlow Docker 이미지의 v0.21.1 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.21.1과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.
+ > 아래 명령어는 RAGFlow Docker 이미지의 v0.22.0 버전을 다운로드합니다. 다양한 RAGFlow 버전에 대한 설명은 다음 표를 참조하십시오. v0.22.0과 다른 RAGFlow 버전을 다운로드하려면, docker/.env 파일에서 RAGFLOW_IMAGE 변수를 적절히 업데이트한 후 docker compose를 사용하여 서버를 시작하십시오.

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # Optional: use a stable tag (see releases: https://github.com/infiniflow/ragflow/releases), e.g.: git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

- | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
- | ----------------- | --------------- | --------------------- | ------------------------ |
- | v0.21.1 | ≈9 | ✔️ | Stable release |
- | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |
- > 참고: `v0.22.0`부터는 slim 에디션만 배포하며 이미지 태그에 **-slim** 접미사를 더 이상 붙이지 않습니다.
+ > 참고: `v0.22.0` 이전 버전에서는 embedding 모델이 포함된 이미지와 embedding 모델이 포함되지 않은 slim 이미지를 모두 제공했습니다. 자세한 내용은 다음과 같습니다:
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
+ | ----------------- | --------------- | --------------------- | ------------------------ |
+ | v0.21.1 | ≈9 | ✔️ | Stable release |
+ | v0.21.1-slim | ≈2 | ❌ | Stable release |
+ > `v0.22.0`부터는 slim 에디션만 배포하며 이미지 태그에 **-slim** 접미사를 더 이상 붙이지 않습니다.

  1. 서버가 시작된 후 서버 상태를 확인하세요:

@@ -253,7 +257,7 @@ RAGFlow 는 기본적으로 Elasticsearch 를 사용하여 전체 텍스트 및
  > [!WARNING]
  > Linux/arm64 시스템에서 Infinity로 전환하는 것은 공식적으로 지원되지 않습니다.

- ## 🔧 소스 코드로 Docker 이미지를 컴파일합니다(임베딩 모델 포함하지 않음)
+ ## 🔧 소스 코드로 Docker 이미지를 컴파일합니다

  이 Docker 이미지의 크기는 약 1GB이며, 외부 대형 모델과 임베딩 서비스에 의존합니다.
Portuguese README:

@@ -22,7 +22,7 @@
  <img alt="Badge Estático" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Última%20Relese" alt="Última Versão">

@@ -86,6 +86,7 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
  ## 🔥 Últimas Atualizações

+ - 12-11-2025 Suporta a sincronização de dados do Confluence, AWS S3, Discord e Google Drive.
  - 23-10-2025 Suporta MinerU e Docling como métodos de análise de documentos.
  - 15-10-2025 Suporte para pipelines de dados orquestrados.
  - 08-08-2025 Suporta a mais recente série GPT-5 da OpenAI.

@@ -93,7 +94,6 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
  - 23-05-2025 Adicione o componente executor de código Python/JS ao Agente.
  - 05-05-2025 Suporte a consultas entre idiomas.
  - 19-03-2025 Suporta o uso de um modelo multi-modal para entender imagens dentro de arquivos PDF ou DOCX.
- - 28-02-2025 combinado com a pesquisa na Internet (T AVI LY), suporta pesquisas profundas para qualquer LLM.
  - 18-12-2024 Atualiza o modelo de Análise de Layout de Documentos no DeepDoc.
  - 22-08-2024 Suporta conversão de texto para comandos SQL via RAG.

@@ -186,25 +186,29 @@ Experimente nossa demo em [https://demo.ragflow.io](https://demo.ragflow.io).
  > Todas as imagens Docker são construídas para plataformas x86. Atualmente, não oferecemos imagens Docker para ARM64.
  > Se você estiver usando uma plataforma ARM64, por favor, utilize [este guia](https://ragflow.io/docs/dev/build_docker_image) para construir uma imagem Docker compatível com o seu sistema.

- > O comando abaixo baixa a edição`v0.21.1` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.21.1`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.
+ > O comando abaixo baixa a edição`v0.22.0` da imagem Docker do RAGFlow. Consulte a tabela a seguir para descrições de diferentes edições do RAGFlow. Para baixar uma edição do RAGFlow diferente da `v0.22.0`, atualize a variável `RAGFLOW_IMAGE` conforme necessário no **docker/.env** antes de usar `docker compose` para iniciar o servidor.

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # Opcional: use uma tag estável (veja releases: https://github.com/infiniflow/ragflow/releases), ex.: git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

- | Tag da imagem RAGFlow | Tamanho da imagem (GB) | Possui modelos de incorporação? | Estável? |
- | --------------------- | ---------------------- | --------------------------------- | ------------------------------ |
- | v0.21.1 | ≈9 | ✔️ | Lançamento estável |
- | v0.21.1-slim | ≈2 | ❌ | Lançamento estável |
- | nightly | ≈2 | ❌ | Construção noturna instável |
- > Observação: A partir da`v0.22.0`, distribuímos apenas a edição slim e não adicionamos mais o sufixo **-slim** às tags das imagens.
+ > Nota: Antes da `v0.22.0`, fornecíamos imagens com modelos de embedding e imagens slim sem modelos de embedding. Detalhes a seguir:
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
+ | ----------------- | --------------- | --------------------- | ------------------------ |
+ | v0.21.1 | ≈9 | ✔️ | Stable release |
+ | v0.21.1-slim | ≈2 | ❌ | Stable release |
+ > A partir da `v0.22.0`, distribuímos apenas a edição slim e não adicionamos mais o sufixo **-slim** às tags das imagens.

  4. Verifique o status do servidor após tê-lo iniciado:

@@ -274,9 +278,9 @@ O RAGFlow usa o Elasticsearch por padrão para armazenar texto completo e vetore
  ```

  > [!ATENÇÃO]
  > A mudança para o Infinity em uma máquina Linux/arm64 ainda não é oficialmente suportada.

- ## 🔧 Criar uma imagem Docker sem modelos de incorporação
+ ## 🔧 Criar uma imagem Docker

  Esta imagem tem cerca de 2 GB de tamanho e depende de serviços externos de LLM e incorporação.
Traditional Chinese README:

@@ -22,7 +22,7 @@
  <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -85,6 +85,7 @@
  ## 🔥 近期更新

+ - 2025-11-12 支援從 Confluence、AWS S3、Discord、Google Drive 進行資料同步。
  - 2025-10-23 支援 MinerU 和 Docling 作為文件解析方法。
  - 2025-10-15 支援可編排的資料管道。
  - 2025-08-08 支援 OpenAI 最新的 GPT-5 系列模型。

@@ -92,7 +93,6 @@
  - 2025-05-23 為 Agent 新增 Python/JS 程式碼執行器元件。
  - 2025-05-05 支援跨語言查詢。
  - 2025-03-19 PDF和DOCX中的圖支持用多模態大模型去解析得到描述.
- - 2025-02-28 結合網路搜尋(Tavily),對於任意大模型實現類似 Deep Research 的推理功能.
  - 2024-12-18 升級了 DeepDoc 的文檔佈局分析模型。
  - 2024-08-22 支援用 RAG 技術實現從自然語言到 SQL 語句的轉換。

@@ -185,25 +185,29 @@
  > 所有 Docker 映像檔都是為 x86 平台建置的。目前,我們不提供 ARM64 平台的 Docker 映像檔。
  > 如果您使用的是 ARM64 平台,請使用 [這份指南](https://ragflow.io/docs/dev/build_docker_image) 來建置適合您系統的 Docker 映像檔。

- > 執行以下指令會自動下載 RAGFlow slim Docker 映像 `v0.21.1`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.21.1` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。
+ > 執行以下指令會自動下載 RAGFlow Docker 映像 `v0.22.0`。請參考下表查看不同 Docker 發行版的說明。如需下載不同於 `v0.22.0` 的 Docker 映像,請在執行 `docker compose` 啟動服務之前先更新 **docker/.env** 檔案內的 `RAGFLOW_IMAGE` 變數。

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # 可選:使用穩定版標籤(查看發佈:https://github.com/infiniflow/ragflow/releases),例:git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

- | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
- | ----------------- | --------------- | --------------------- | -------------------------- |
- | v0.21.1 | ≈9 | ✔️ | Stable release |
- | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |
- > 注意:自 `v0.22.0` 起,我們僅發佈 slim 版本,並且不再在映像標籤後附加 **-slim** 後綴。
+ > 注意:在 `v0.22.0` 之前的版本,我們會同時提供包含 embedding 模型的映像和不含 embedding 模型的 slim 映像。具體如下:
+ | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
+ | ----------------- | --------------- | --------------------- | ------------------------ |
+ | v0.21.1 | ≈9 | ✔️ | Stable release |
+ | v0.21.1-slim | ≈2 | ❌ | Stable release |
+ > 從 `v0.22.0` 開始,我們只發佈 slim 版本,並且不再在映像標籤後附加 **-slim** 後綴。

  > [!TIP]
  > 如果你遇到 Docker 映像檔拉不下來的問題,可以在 **docker/.env** 檔案內根據變數 `RAGFLOW_IMAGE` 的註解提示選擇華為雲或阿里雲的對應映像。

@@ -285,7 +289,7 @@ RAGFlow 預設使用 Elasticsearch 儲存文字和向量資料. 如果要切換
  > [!WARNING]
  > Infinity 目前官方並未正式支援在 Linux/arm64 架構下的機器上運行.

- ## 🔧 原始碼編譯 Docker 映像(不含 embedding 模型)
+ ## 🔧 原始碼編譯 Docker 映像

  本 Docker 映像大小約 2 GB 左右並且依賴外部的大模型和 embedding 服務。
README_zh.md: 20 changed lines

@@ -22,7 +22,7 @@
  <img alt="Static Badge" src="https://img.shields.io/badge/Online-Demo-4e6b99">
  </a>
  <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
- <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.21.1">
+ <img src="https://img.shields.io/docker/pulls/infiniflow/ragflow?label=Docker%20Pulls&color=0db7ed&logo=docker&logoColor=white&style=flat-square" alt="docker pull infiniflow/ragflow:v0.22.0">
  </a>
  <a href="https://github.com/infiniflow/ragflow/releases/latest">
  <img src="https://img.shields.io/github/v/release/infiniflow/ragflow?color=blue&label=Latest%20Release" alt="Latest Release">

@@ -85,6 +85,7 @@
  ## 🔥 近期更新

+ - 2025-11-12 支持从 Confluence、AWS S3、Discord、Google Drive 进行数据同步。
  - 2025-10-23 支持 MinerU 和 Docling 作为文档解析方法。
  - 2025-10-15 支持可编排的数据管道。
  - 2025-08-08 支持 OpenAI 最新的 GPT-5 系列模型。

@@ -92,7 +93,6 @@
  - 2025-05-23 Agent 新增 Python/JS 代码执行器组件。
  - 2025-05-05 支持跨语言查询。
  - 2025-03-19 PDF 和 DOCX 中的图支持用多模态大模型去解析得到描述.
- - 2025-02-28 结合互联网搜索(Tavily),对于任意大模型实现类似 Deep Research 的推理功能.
  - 2024-12-18 升级了 DeepDoc 的文档布局分析模型。
  - 2024-08-22 支持用 RAG 技术实现从自然语言到 SQL 语句的转换。

@@ -186,25 +186,29 @@
  > 请注意,目前官方提供的所有 Docker 镜像均基于 x86 架构构建,并不提供基于 ARM64 的 Docker 镜像。
  > 如果你的操作系统是 ARM64 架构,请参考[这篇文档](https://ragflow.io/docs/dev/build_docker_image)自行构建 Docker 镜像。

- > 运行以下命令会自动下载 RAGFlow slim Docker 镜像 `v0.21.1`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.21.1` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。
+ > 运行以下命令会自动下载 RAGFlow Docker 镜像 `v0.22.0`。请参考下表查看不同 Docker 发行版的描述。如需下载不同于 `v0.22.0` 的 Docker 镜像,请在运行 `docker compose` 启动服务之前先更新 **docker/.env** 文件内的 `RAGFLOW_IMAGE` 变量。

  ```bash
  $ cd ragflow/docker
- # Use CPU for embedding and DeepDoc tasks:
+ # 可选:使用稳定版本标签(查看发布:https://github.com/infiniflow/ragflow/releases),例如:git checkout v0.22.0
+ # Use CPU for DeepDoc tasks:
  $ docker compose -f docker-compose.yml up -d

- # To use GPU to accelerate embedding and DeepDoc tasks:
+ # To use GPU to accelerate DeepDoc tasks:
  # sed -i '1i DEVICE=gpu' .env
  # docker compose -f docker-compose.yml up -d
  ```

+ > 注意:在 `v0.22.0` 之前的版本,我们会同时提供包含 embedding 模型的镜像和不含 embedding 模型的 slim 镜像。具体如下:
+
  | RAGFlow image tag | Image size (GB) | Has embedding models? | Stable? |
  | ----------------- | --------------- | --------------------- | ------------------------ |
  | v0.21.1 | ≈9 | ✔️ | Stable release |
  | v0.21.1-slim | ≈2 | ❌ | Stable release |
- | nightly | ≈2 | ❌ | _Unstable_ nightly build |

- > 注意:从 `v0.22.0` 开始,我们只发布 slim 版本,并且不再在镜像标签后附加 **-slim** 后缀。
+ > 从 `v0.22.0` 开始,我们只发布 slim 版本,并且不再在镜像标签后附加 **-slim** 后缀。

  > [!TIP]
  > 如果你遇到 Docker 镜像拉不下来的问题,可以在 **docker/.env** 文件内根据变量 `RAGFLOW_IMAGE` 的注释提示选择华为云或者阿里云的相应镜像。

@@ -284,7 +288,7 @@ RAGFlow 默认使用 Elasticsearch 存储文本和向量数据. 如果要切换
  > [!WARNING]
  > Infinity 目前官方并未正式支持在 Linux/arm64 架构下的机器上运行.

- ## 🔧 源码编译 Docker 镜像(不含 embedding 模型)
+ ## 🔧 源码编译 Docker 镜像

  本 Docker 镜像大小约 2 GB 左右并且依赖外部的大模型和 embedding 服务。
@ -48,7 +48,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
1. Ensure the Admin Service is running.
2. Install ragflow-cli.
```bash
pip install ragflow-cli==0.21.1
pip install ragflow-cli==0.22.0
```
3. Launch the CLI client:
```bash
@ -23,6 +23,7 @@ from Cryptodome.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
from typing import Dict, List, Any
from lark import Lark, Transformer, Tree
import requests
import getpass

GRAMMAR = r"""
start: command
@ -51,6 +52,7 @@ sql_command: list_services
| revoke_permission
| alter_user_role
| show_user_permission
| show_version

// meta command definition
meta_command: "\\" meta_command_name [meta_args]
@ -92,6 +94,7 @@ FOR: "FOR"i
RESOURCES: "RESOURCES"i
ON: "ON"i
SET: "SET"i
VERSION: "VERSION"i

list_services: LIST SERVICES ";"
show_service: SHOW SERVICE NUMBER ";"
@ -120,6 +123,8 @@ revoke_permission: REVOKE action_list ON identifier FROM ROLE identifier ";"
alter_user_role: ALTER USER quoted_string SET ROLE identifier ";"
show_user_permission: SHOW USER PERMISSION quoted_string ";"

show_version: SHOW VERSION ";"

action_list: identifier ("," identifier)*

identifier: WORD
@ -246,6 +251,9 @@ class AdminTransformer(Transformer):
user_name = items[3]
return {"type": "show_user_permission", "user_name": user_name}

def show_version(self, items):
return {"type": "show_version"}

def action_list(self, items):
return items

@ -359,7 +367,7 @@ class AdminCLI(Cmd):
if single_command:
admin_passwd = arguments['password']
else:
admin_passwd = input(f"password for {self.admin_account}: ").strip()
admin_passwd = getpass.getpass(f"password for {self.admin_account}: ").strip()
try:
self.admin_password = encrypt(admin_passwd)
response = self.session.post(url, json={'email': self.admin_account, 'password': self.admin_password})
@ -370,7 +378,7 @@ class AdminCLI(Cmd):
self.session.headers.update({
'Content-Type': 'application/json',
'Authorization': response.headers['Authorization'],
'User-Agent': 'RAGFlow-CLI/0.21.1'
'User-Agent': 'RAGFlow-CLI/0.22.0'
})
print("Authentication successful.")
return True
@ -384,6 +392,21 @@ class AdminCLI(Cmd):
print(str(e))
print(f"Can't access {self.host}, port: {self.port}")

def _format_service_detail_table(self, data):
if not any([isinstance(v, list) for v in data.values()]):
# normal table
return data
# handle task_executor heartbeats map, for example {'name': [{'done': 2, 'now': timestamp1}, {'done': 3, 'now': timestamp2}]
task_executor_list = []
for k, v in data.items():
# display latest status
heartbeats = sorted(v, key=lambda x: x["now"], reverse=True)
task_executor_list.append({
"task_executor_name": k,
**heartbeats[0],
})
return task_executor_list

def _print_table_simple(self, data):
if not data:
print("No data to print")
@ -555,6 +578,8 @@ class AdminCLI(Cmd):
self._alter_user_role(command_dict)
case 'show_user_permission':
self._show_user_permission(command_dict)
case 'show_version':
self._show_version(command_dict)
case 'meta':
self._handle_meta_command(command_dict)
case _:
@ -585,7 +610,8 @@ class AdminCLI(Cmd):
if isinstance(res_data['message'], str):
print(res_data['message'])
else:
self._print_table_simple(res_data['message'])
data = self._format_service_detail_table(res_data['message'])
self._print_table_simple(data)
else:
print(f"Service {res_data['service_name']} is down, {res_data['message']}")
else:
@ -622,7 +648,9 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
table_data.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get user {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@ -695,7 +723,10 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
for t in table_data:
t.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get all datasets of {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@ -707,7 +738,10 @@ class AdminCLI(Cmd):
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
table_data = res_json['data']
for t in table_data:
t.pop('avatar')
self._print_table_simple(table_data)
else:
print(f"Fail to get all agents of {user_name}, code: {res_json['code']}, message: {res_json['message']}")

@ -861,6 +895,16 @@ class AdminCLI(Cmd):
print(
f"Fail to show user: {user_name_str} permission, code: {res_json['code']}, message: {res_json['message']}")

def _show_version(self, command):
print("show_version")
url = f'http://{self.host}:{self.port}/api/v1/admin/version'
response = self.session.get(url)
res_json = response.json()
if response.status_code == 200:
self._print_table_simple(res_json['data'])
else:
print(f"Fail to show version, code: {res_json['code']}, message: {res_json['message']}")

def _handle_meta_command(self, command):
meta_command = command['command']
args = command.get('args', [])
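The new `_format_service_detail_table` only flattens mappings whose values are lists of heartbeats; plain key/value tables pass through untouched. A minimal, self-contained sketch of that behaviour on fabricated data (field names follow the diff above):

```python
# Illustrative only: the same flattening the new _format_service_detail_table
# performs, shown on fabricated heartbeat data.
def format_service_detail_table(data: dict):
    if not any(isinstance(v, list) for v in data.values()):
        return data  # plain key/value table, print as-is
    latest = []
    for name, beats in data.items():
        # keep only the most recent heartbeat per task executor
        beats = sorted(beats, key=lambda x: x["now"], reverse=True)
        latest.append({"task_executor_name": name, **beats[0]})
    return latest

sample = {"executor-0": [{"done": 2, "now": 1731400000}, {"done": 3, "now": 1731400030}]}
print(format_service_detail_table(sample))
# -> [{'task_executor_name': 'executor-0', 'done': 3, 'now': 1731400030}]
```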
@ -1,6 +1,6 @@
[project]
name = "ragflow-cli"
version = "0.21.1"
version = "0.22.0"
description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring. "
authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
license = { text = "Apache License, Version 2.0" }
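With the client's `User-Agent` and package version both moving to `0.22.0`, a quick way to confirm the locally installed client matches the release is sketched below; this uses only the standard library and is not part of the repository:

```python
# A small sketch (not from the repo) for verifying the installed client version
# matches the expected release before launching the CLI.
from importlib.metadata import version, PackageNotFoundError

EXPECTED = "0.22.0"

try:
    installed = version("ragflow-cli")
except PackageNotFoundError:
    installed = None

if installed != EXPECTED:
    print(f"ragflow-cli {installed or 'not installed'}; expected {EXPECTED} — run: pip install ragflow-cli=={EXPECTED}")
else:
    print(f"ragflow-cli {installed} OK")
```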
@ -26,11 +26,12 @@ from routes import admin_bp
from common.log_utils import init_root_logger
from common.constants import SERVICE_CONF
from common.config_utils import show_configs
from api import settings
from common import settings
from config import load_configurations, SERVICE_CONFIGS
from auth import init_default_admin, setup_auth
from flask_session import Session
from flask_login import LoginManager
from common.versions import get_ragflow_version

stop_event = threading.Event()

@ -52,6 +53,7 @@ if __name__ == '__main__':
os.environ.get("MAX_CONTENT_LENGTH", 1024 * 1024 * 1024)
)
Session(app)
logging.info(f'RAGFlow version: {get_ragflow_version()}')
show_configs()
login_manager = LoginManager()
login_manager.init_app(app)
@ -67,7 +69,7 @@ if __name__ == '__main__':
port=9381,
application=app,
threaded=True,
use_reloader=True,
use_reloader=False,
use_debugger=True,
)
except Exception:
@ -23,15 +23,15 @@ from flask import request, jsonify
from flask_login import current_user, login_user
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer

from api import settings
from api.common.exceptions import AdminException, UserNotFoundError
from api.db.init_data import encode_to_base64
from api.common.base64 import encode_to_base64
from api.db.services import UserService
from common.constants import ActiveEnum, StatusEnum
from api.utils.crypt import decrypt
from common.misc_utils import get_uuid
from common.time_utils import current_timestamp, datetime_format, get_format_time
from common.connection_utils import construct_response
from common import settings


def setup_auth(login_manager):
@ -183,11 +183,13 @@ class RAGFlowServerConfig(BaseConfig):


class TaskExecutorConfig(BaseConfig):
message_queue_type: str

def to_dict(self) -> dict[str, Any]:
result = super().to_dict()
if 'extra' not in result:
result['extra'] = dict()
result['extra']['message_queue_type'] = self.message_queue_type
return result


@ -299,6 +301,15 @@ def load_configurations(config_path: str) -> list[BaseConfig]:
id_count += 1
case "admin":
pass
case "task_executor":
name: str = 'task_executor'
host: str = v.get('host', '')
port: int = v.get('port', 0)
message_queue_type: str = v.get('message_queue_type')
config = TaskExecutorConfig(id=id_count, name=name, host=host, port=port, message_queue_type=message_queue_type,
service_type="task_executor", detail_func_name="check_task_executor_alive")
configurations.append(config)
id_count += 1
case _:
logging.warning(f"Unknown configuration key: {k}")
continue
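A rough, standalone illustration of how the new `task_executor` entry ends up exposing its `message_queue_type` under `extra` when serialized; the real `TaskExecutorConfig` inherits its base `to_dict` from `BaseConfig`, which this sketch only approximates:

```python
# Self-contained sketch; the dataclass and field set here are stand-ins.
from dataclasses import dataclass

@dataclass
class TaskExecutorConfigSketch:
    id: int
    name: str
    host: str
    port: int
    message_queue_type: str

    def to_dict(self) -> dict:
        result = {"id": self.id, "name": self.name, "host": self.host, "port": self.port}
        # mirror the diff: tuck the queue type under an "extra" sub-dict
        result.setdefault("extra", {})["message_queue_type"] = self.message_queue_type
        return result

print(TaskExecutorConfigSketch(3, "task_executor", "", 0, "redis").to_dict())
```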
@ -24,6 +24,7 @@ from responses import success_response, error_response
from services import UserMgr, ServiceMgr, UserServiceMgr
from roles import RoleMgr
from api.common.exceptions import AdminException
from common.versions import get_ragflow_version

admin_bp = Blueprint('admin', __name__, url_prefix='/api/v1/admin')

@ -369,3 +370,13 @@ def get_user_permission(user_name: str):
return success_response(res)
except Exception as e:
return error_response(str(e), 500)

@admin_bp.route('/version', methods=['GET'])
@login_required
@check_admin_auth
def show_version():
try:
res = {"version": get_ragflow_version()}
return success_response(res)
except Exception as e:
return error_response(str(e), 500)
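The new endpoint can be exercised with a plain HTTP GET once authenticated. The snippet below is a hypothetical client call, not part of the diff: host, port and the `Authorization` header value are placeholders (the CLI obtains the token at login), and the exact response envelope is whatever `success_response` produces around `{"version": ...}`:

```python
# Hypothetical call against the new admin version endpoint.
import requests

def fetch_version(host: str = "127.0.0.1", port: int = 9381, token: str = "<token-from-login>"):
    url = f"http://{host}:{port}/api/v1/admin/version"
    resp = requests.get(url, headers={"Authorization": token}, timeout=10)
    resp.raise_for_status()
    # the payload carries {"version": ...} inside the service's usual response envelope
    return resp.json()

if __name__ == "__main__":
    print(fetch_version())
```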
@ -52,6 +52,7 @@ class UserMgr:
result = []
for user in users:
result.append({
'avatar': user.avatar,
'email': user.email,
'language': user.language,
'last_login_time': user.last_login_time,
@ -170,7 +171,8 @@ class UserServiceMgr:
return [{
'title': r['title'],
'permission': r['permission'],
'canvas_category': r['canvas_category'].split('_')[0]
'canvas_category': r['canvas_category'].split('_')[0],
'avatar': r['avatar']
} for r in res]


@ -190,6 +192,10 @@ class ServiceMgr:
config_dict['status'] = 'timeout'
except Exception:
config_dict['status'] = 'timeout'
if not config_dict['host']:
config_dict['host'] = '-'
if not config_dict['port']:
config_dict['port'] = '-'
result.append(config_dict)
return result
101
agent/canvas.py
@ -26,7 +26,9 @@ from typing import Any, Union, Tuple
from agent.component import component_class
from agent.component.base import ComponentBase
from api.db.services.file_service import FileService
from api.db.services.task_service import has_canceled
from common.misc_utils import get_uuid, hash_str2int
from common.exceptions import TaskCanceledException
from rag.prompts.generator import chunks_format
from rag.utils.redis_conn import REDIS_CONN

@ -126,6 +128,7 @@ class Graph:
self.components[k]["obj"].reset()
try:
REDIS_CONN.delete(f"{self.task_id}-logs")
REDIS_CONN.delete(f"{self.task_id}-cancel")
except Exception as e:
logging.exception(e)

@ -153,6 +156,33 @@ class Graph:
def get_tenant_id(self):
return self._tenant_id

def get_value_with_variable(self,value: str) -> Any:
pat = re.compile(r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*")
out_parts = []
last = 0

for m in pat.finditer(value):
out_parts.append(value[last:m.start()])
key = m.group(1)
v = self.get_variable_value(key)
if v is None:
rep = ""
elif isinstance(v, partial):
buf = []
for chunk in v():
buf.append(chunk)
rep = "".join(buf)
elif isinstance(v, str):
rep = v
else:
rep = json.dumps(v, ensure_ascii=False)

out_parts.append(rep)
last = m.end()

out_parts.append(value[last:])
return("".join(out_parts))

def get_variable_value(self, exp: str) -> Any:
exp = exp.strip("{").strip("}").strip(" ").strip("{").strip("}")
if exp.find("@") < 0:
@ -169,7 +199,7 @@ class Graph:
if not rest:
return root_val
return self.get_variable_param_value(root_val,rest)

def get_variable_param_value(self, obj: Any, path: str) -> Any:
cur = obj
if not path:
@ -188,6 +218,17 @@ class Graph:
cur = getattr(cur, key, None)
return cur

def is_canceled(self) -> bool:
return has_canceled(self.task_id)

def cancel_task(self) -> bool:
try:
REDIS_CONN.set(f"{self.task_id}-cancel", "x")
except Exception as e:
logging.exception(e)
return False
return True


class Canvas(Graph):

@ -212,7 +253,7 @@ class Canvas(Graph):
"sys.conversation_turns": 0,
"sys.files": []
}

self.retrieval = self.dsl["retrieval"]
self.memory = self.dsl.get("memory", [])

@ -229,18 +270,19 @@ class Canvas(Graph):
self.retrieval = []
self.memory = []
for k in self.globals.keys():
if k.startswith("sys."):
if isinstance(self.globals[k], str):
self.globals[k] = ""
elif isinstance(self.globals[k], int):
self.globals[k] = 0
elif isinstance(self.globals[k], float):
self.globals[k] = 0
elif isinstance(self.globals[k], list):
self.globals[k] = []
elif isinstance(self.globals[k], dict):
self.globals[k] = {}
else:
self.globals[k] = None

def run(self, **kwargs):
st = time.perf_counter()
@ -250,6 +292,14 @@ class Canvas(Graph):
for k, cpn in self.components.items():
self.components[k]["obj"].reset(True)

if kwargs.get("webhook_payload"):
for k, cpn in self.components.items():
if self.components[k]["obj"].component_name.lower() == "webhook":
for kk, vv in kwargs["webhook_payload"].items():
self.components[k]["obj"].set_output(kk, vv)

self.components[k]["obj"].reset(True)

for k in kwargs.keys():
if k in ["query", "user_id", "files"] and kwargs[k]:
if k == "files":
@ -275,10 +325,20 @@ class Canvas(Graph):
self.path.append("begin")
self.retrieval.append({"chunks": [], "doc_aggs": []})

if self.is_canceled():
msg = f"Task {self.task_id} has been canceled before starting."
logging.info(msg)
raise TaskCanceledException(msg)

yield decorate("workflow_started", {"inputs": kwargs.get("inputs")})
self.retrieval.append({"chunks": {}, "doc_aggs": {}})

def _run_batch(f, t):
if self.is_canceled():
msg = f"Task {self.task_id} has been canceled during batch execution."
logging.info(msg)
raise TaskCanceledException(msg)

with ThreadPoolExecutor(max_workers=5) as executor:
thr = []
i = f
@ -289,7 +349,7 @@ class Canvas(Graph):
i += 1
else:
for _, ele in cpn.get_input_elements().items():
if isinstance(ele, dict) and ele.get("_cpn_id") and ele.get("_cpn_id") not in self.path[:i]:
if isinstance(ele, dict) and ele.get("_cpn_id") and ele.get("_cpn_id") not in self.path[:i] and self.path[0].lower().find("userfillup") < 0:
self.path.pop(i)
t -= 1
break
@ -420,9 +480,10 @@ class Canvas(Graph):
for c in path:
o = self.get_component_obj(c)
if o.component_name.lower() == "userfillup":
o.invoke()
another_inputs.update(o.get_input_elements())
if o.get_param("enable_tips"):
tips = o.get_param("tips")
tips = o.output("tips")
self.path = path
yield decorate("user_inputs", {"inputs": another_inputs, "tips": tips})
return
@ -436,6 +497,14 @@ class Canvas(Graph):
"created_at": st,
})
self.history.append(("assistant", self.get_component_obj(self.path[-1]).output()))
elif "Task has been canceled" in self.error:
yield decorate("workflow_finished",
{
"inputs": kwargs.get("inputs"),
"outputs": "Task has been canceled",
"elapsed_time": time.perf_counter() - st,
"created_at": st,
})

def is_reff(self, exp: str) -> bool:
exp = exp.strip("{").strip("}")
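The cancellation handshake added to `Graph`/`Canvas` boils down to a Redis key per task: `cancel_task` writes `{task_id}-cancel`, `is_canceled` (via `has_canceled`) checks it, and `reset` now deletes it alongside the `-logs` key. A toy round-trip with an in-memory stand-in for `REDIS_CONN`, illustrative only:

```python
# Dict-backed stand-in for REDIS_CONN; the real code talks to Redis.
class FakeRedis:
    def __init__(self):
        self.store = {}
    def set(self, k, v):
        self.store[k] = v
    def get(self, k):
        return self.store.get(k)
    def delete(self, k):
        self.store.pop(k, None)

REDIS = FakeRedis()

def cancel_task(task_id: str) -> bool:
    REDIS.set(f"{task_id}-cancel", "x")
    return True

def is_canceled(task_id: str) -> bool:
    return REDIS.get(f"{task_id}-cancel") is not None

task = "demo-task"
print(is_canceled(task))              # False
cancel_task(task)
print(is_canceled(task))              # True
REDIS.delete(f"{task}-cancel")        # what reset() now does alongside the -logs key
print(is_canceled(task))              # False
```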
@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import importlib
import inspect
@ -50,9 +49,10 @@ del _package_path, _import_submodules, _extract_classes_from_module


def component_class(class_name):
for mdl in ["agent.component", "agent.tools", "rag.flow"]:
for module_name in ["agent.component", "agent.tools", "rag.flow"]:
try:
return getattr(importlib.import_module(mdl), class_name)
return getattr(importlib.import_module(module_name), class_name)
except Exception:
# logging.warning(f"Can't import module: {module_name}, error: {e}")
pass
assert False, f"Can't import {class_name}"
@ -139,6 +139,9 @@ class Agent(LLM, ToolBase):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 20*60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Agent processing"):
return

if kwargs.get("user_prompt"):
usr_pmt = ""
if kwargs.get("reasoning"):
@ -152,6 +155,8 @@ class Agent(LLM, ToolBase):
self._param.prompts = [{"role": "user", "content": usr_pmt}]

if not self.tools:
if self.check_if_canceled("Agent processing"):
return
return LLM._invoke(self, **kwargs)

prompt, msg, user_defined_prompt = self._prepare_prompt_variables()
@ -171,6 +176,8 @@ class Agent(LLM, ToolBase):
use_tools = []
ans = ""
for delta_ans, tk in self._react_with_tools_streamly(prompt, msg, use_tools, user_defined_prompt):
if self.check_if_canceled("Agent processing"):
return
ans += delta_ans

if ans.find("**ERROR**") >= 0:
@ -191,12 +198,16 @@ class Agent(LLM, ToolBase):
answer_without_toolcall = ""
use_tools = []
for delta_ans,_ in self._react_with_tools_streamly(prompt, msg, use_tools, user_defined_prompt):
if self.check_if_canceled("Agent streaming"):
return

if delta_ans.find("**ERROR**") >= 0:
if self.get_exception_default_value():
self.set_output("content", self.get_exception_default_value())
yield self.get_exception_default_value()
else:
self.set_output("_ERROR", delta_ans)
return
answer_without_toolcall += delta_ans
yield delta_ans

@ -271,6 +282,8 @@ class Agent(LLM, ToolBase):
st = timer()
txt = ""
for delta_ans in self._gen_citations(entire_txt):
if self.check_if_canceled("Agent streaming"):
return
yield delta_ans, 0
txt += delta_ans

@ -286,6 +299,8 @@ class Agent(LLM, ToolBase):
task_desc = analyze_task(self.chat_mdl, prompt, user_request, tool_metas, user_defined_prompt)
self.callback("analyze_task", {}, task_desc, elapsed_time=timer()-st)
for _ in range(self._param.max_rounds + 1):
if self.check_if_canceled("Agent streaming"):
return
response, tk = next_step(self.chat_mdl, hist, tool_metas, task_desc, user_defined_prompt)
# self.callback("next_step", {}, str(response)[:256]+"...")
token_count += tk
@ -333,6 +348,8 @@ Instructions:
6. Focus on delivering VALUE with the information already gathered
Respond immediately with your final comprehensive answer.
"""
if self.check_if_canceled("Agent final instruction"):
return
append_user_content(hist, final_instruction)

for txt, tkcnt in complete():
@ -393,7 +393,7 @@ class ComponentParamBase(ABC):
class ComponentBase(ABC):
component_name: str
thread_limiter = trio.CapacityLimiter(int(os.environ.get('MAX_CONCURRENT_CHATS', 10)))
variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z:0-9_.-]+|sys\.[a-z_]+)\} *\}*"
variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"

def __str__(self):
"""
@ -417,6 +417,20 @@ class ComponentBase(ABC):
self._param = param
self._param.check()

def is_canceled(self) -> bool:
return self._canvas.is_canceled()

def check_if_canceled(self, message: str = "") -> bool:
if self.is_canceled():
task_id = getattr(self._canvas, 'task_id', 'unknown')
log_message = f"Task {task_id} has been canceled"
if message:
log_message += f" during {message}"
logging.info(log_message)
self.set_output("_ERROR", "Task has been canceled")
return True
return False

def invoke(self, **kwargs) -> dict[str, Any]:
self.set_output("_created_time", time.perf_counter())
try:
@ -514,6 +528,7 @@ class ComponentBase(ABC):
def get_param(self, name):
if hasattr(self._param, name):
return getattr(self._param, name)
return None

def debug(self, **kwargs):
return self._invoke(**kwargs)
@ -521,7 +536,7 @@ class ComponentBase(ABC):
def get_parent(self) -> Union[object, None]:
pid = self._canvas.get_component(self._id).get("parent_id")
if not pid:
return
return None
return self._canvas.get_component(pid)["obj"]

def get_upstream(self) -> List[str]:
@ -546,7 +561,7 @@ class ComponentBase(ABC):

def exception_handler(self):
if not self._param.exception_method:
return
return None
return {
"goto": self._param.exception_goto,
"default_value": self._param.exception_default_value
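A quick check of what the widened `variable_ref_patt` now accepts: component `@` references plus `sys.*` and the new `env.*` names. The sample strings below are made up and the script is illustrative, not part of the diff:

```python
import re

# The new pattern, copied from the diff above.
variable_ref_patt = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z0-9_.]+|sys\.[A-Za-z0-9_.]+|env\.[A-Za-z0-9_.]+)\} *\}*"

samples = [
    "{Agent:Alpha@content}",   # component output reference
    "{{sys.query}}",           # double-braced system variable
    "{env.API_BASE}",          # environment-style reference, newly allowed
]
for s in samples:
    m = re.search(variable_ref_patt, s)
    print(s, "->", m.group(1) if m else None)
```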
@ -37,7 +37,13 @@ class Begin(UserFillUp):
component_name = "Begin"

def _invoke(self, **kwargs):
if self.check_if_canceled("Begin processing"):
return

for k, v in kwargs.get("inputs", {}).items():
if self.check_if_canceled("Begin processing"):
return

if isinstance(v, dict) and v.get("type", "").lower().find("file") >=0:
if v.get("optional") and v.get("value", None) is None:
v = None
@ -98,6 +98,9 @@ class Categorize(LLM, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Categorize processing"):
return

msg = self._canvas.get_history(self._param.message_history_window_size)
if not msg:
msg = [{"role": "user", "content": ""}]
@ -114,10 +117,18 @@ class Categorize(LLM, ABC):
---- Real Data ----
{} →
""".format(" | ".join(["{}: \"{}\"".format(c["role"].upper(), re.sub(r"\n", "", c["content"], flags=re.DOTALL)) for c in msg]))

if self.check_if_canceled("Categorize processing"):
return

ans = chat_mdl.chat(self._param.sys_prompt, [{"role": "user", "content": user_prompt}], self._param.gen_conf())
logging.info(f"input: {user_prompt}, answer: {str(ans)}")
if ERROR_PREFIX in ans:
raise Exception(ans)

if self.check_if_canceled("Categorize processing"):
return

# Count the number of times each category appears in the answer.
category_counts = {}
for c in self._param.category_description.keys():
@ -10,7 +10,7 @@ class DataOperationsParam(ComponentParamBase):
"""
def __init__(self):
super().__init__()
self.inputs = []
self.query = []
self.operations = "literal_eval"
self.select_keys = []
self.filter_values=[]
@ -35,18 +35,19 @@ class DataOperations(ComponentBase,ABC):
def get_input_form(self) -> dict[str, dict]:
return {
k: {"name": o.get("name", ""), "type": "line"}
for input_item in (self._param.inputs or [])
for input_item in (self._param.query or [])
for k, o in self.get_input_elements_from_text(input_item).items()
}

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
def _invoke(self, **kwargs):
self.input_objects=[]
inputs = getattr(self._param, "inputs", None)
inputs = getattr(self._param, "query", None)
if not isinstance(inputs, (list, tuple)):
inputs = [inputs]
for input_ref in self._param.inputs:
for input_ref in inputs:
input_object=self._canvas.get_variable_value(input_ref)
self.set_input_value(input_ref, input_object)
if input_object is None:
continue
if isinstance(input_object,dict):
@ -57,7 +58,7 @@ class DataOperations(ComponentBase,ABC):
continue
if self._param.operations == "select_keys":
self._select_keys()
elif self._param.operations == "literal_eval":
elif self._param.operations == "recursive_eval":
self._literal_eval()
elif self._param.operations == "combine":
self._combine()
@ -100,7 +101,7 @@ class DataOperations(ComponentBase,ABC):

def _combine(self):
result={}
for obj in self.input_objects():
for obj in self.input_objects:
for key, value in obj.items():
if key not in result:
result[key] = value
@ -123,6 +124,7 @@ class DataOperations(ComponentBase,ABC):
key = rule.get("key")
op = (rule.get("operator") or "equals").lower()
target = self.norm(rule.get("value"))
target = self._canvas.get_value_with_variable(target) or target
if key not in obj:
return False
val = obj.get(key, None)
@ -142,7 +144,7 @@ class DataOperations(ComponentBase,ABC):
def _filter_values(self):
results=[]
rules = (getattr(self._param, "filter_values", None) or [])
for obj in self.input_objects():
for obj in self.input_objects:
if not rules:
results.append(obj)
continue
@ -154,7 +156,7 @@ class DataOperations(ComponentBase,ABC):
def _append_or_update(self):
results=[]
updates = getattr(self._param, "updates", []) or []
for obj in self.input_objects():
for obj in self.input_objects:
new_obj = dict(obj)
for item in updates:
if not isinstance(item, dict):
@ -162,7 +164,7 @@ class DataOperations(ComponentBase,ABC):
k = (item.get("key") or "").strip()
if not k:
continue
new_obj[k] = item.get("value")
new_obj[k] = self._canvas.get_value_with_variable(item.get("value")) or item.get("value")
results.append(new_obj)
self.set_output("result", results)
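Illustrative only: the `equals` filter path with the new variable lookup applied to the rule's value. `resolve` stands in for `self._canvas.get_value_with_variable`, the comparison is simplified to string equality, and the variable map is fabricated:

```python
def resolve(value, variables):
    # stand-in for self._canvas.get_value_with_variable(...)
    return variables.get(value, value)

def match(obj: dict, rule: dict, variables: dict) -> bool:
    key = rule.get("key")
    op = (rule.get("operator") or "equals").lower()
    target = resolve(rule.get("value"), variables)
    if key not in obj:
        return False
    if op == "equals":
        return str(obj[key]) == str(target)
    return False

rows = [{"status": "ok"}, {"status": "failed"}]
rule = {"key": "status", "operator": "equals", "value": "{env.WANTED}"}
print([r for r in rows if match(r, rule, {"{env.WANTED}": "ok"})])
# -> [{'status': 'ok'}]
```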
@ -13,7 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from agent.component.base import ComponentBase, ComponentParamBase
import json
import re
from functools import partial

from agent.component.base import ComponentParamBase, ComponentBase


class UserFillUpParam(ComponentParamBase):
@ -31,10 +35,35 @@ class UserFillUp(ComponentBase):
component_name = "UserFillUp"

def _invoke(self, **kwargs):
if self.check_if_canceled("UserFillUp processing"):
return

if self._param.enable_tips:
content = self._param.tips
for k, v in self.get_input_elements_from_text(self._param.tips).items():
v = v["value"]
ans = ""
if isinstance(v, partial):
for t in v():
ans += t
elif isinstance(v, list):
ans = ",".join([str(vv) for vv in v])
elif not isinstance(v, str):
try:
ans = json.dumps(v, ensure_ascii=False)
except Exception:
pass
else:
ans = v
if not ans:
ans = ""
content = re.sub(r"\{%s\}"%k, ans, content)

self.set_output("tips", content)
for k, v in kwargs.get("inputs", {}).items():
if self.check_if_canceled("UserFillUp processing"):
return
self.set_output(k, v)

def thoughts(self) -> str:
return "Waiting for your input..."
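A reduced sketch of the new tips rendering in `UserFillUp`: each `{placeholder}` in the tips text is replaced by the referenced value, with lists joined by commas and other non-strings serialized to JSON. This version resolves values from a plain dict and uses `re.escape`, which the original does not; it is an approximation, not the repository code:

```python
import json
import re

def render_tips(tips: str, values: dict) -> str:
    content = tips
    for k, v in values.items():
        if isinstance(v, list):
            ans = ",".join(str(x) for x in v)
        elif not isinstance(v, str):
            ans = json.dumps(v, ensure_ascii=False)
        else:
            ans = v
        # substitute every {placeholder} occurrence with its rendered value
        content = re.sub(r"\{%s\}" % re.escape(k), ans, content)
    return content

print(render_tips("Please confirm {sys.query} ({count} items)", {"sys.query": "refund", "count": 3}))
# -> Please confirm refund (3 items)
```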
@ -56,6 +56,9 @@ class Invoke(ComponentBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Invoke processing"):
return

args = {}
for para in self._param.variables:
if para.get("value"):
@ -89,6 +92,9 @@ class Invoke(ComponentBase, ABC):

last_e = ""
for _ in range(self._param.max_retries + 1):
if self.check_if_canceled("Invoke processing"):
return

try:
if method == "get":
response = requests.get(url=url, params=args, headers=headers, proxies=proxies, timeout=self._param.timeout)
@ -121,6 +127,9 @@ class Invoke(ComponentBase, ABC):

return self.output("result")
except Exception as e:
if self.check_if_canceled("Invoke processing"):
return

last_e = e
logging.exception(f"Http request error: {e}")
time.sleep(self._param.delay_after_error)
@ -56,6 +56,9 @@ class Iteration(ComponentBase, ABC):
return cid

def _invoke(self, **kwargs):
if self.check_if_canceled("Iteration processing"):
return

arr = self._canvas.get_variable_value(self._param.items_ref)
if not isinstance(arr, list):
self.set_output("_ERROR", self._param.items_ref + " must be an array, but its type is "+str(type(arr)))
@ -33,6 +33,9 @@ class IterationItem(ComponentBase, ABC):
self._idx = 0

def _invoke(self, **kwargs):
if self.check_if_canceled("IterationItem processing"):
return

parent = self.get_parent()
arr = self._canvas.get_variable_value(parent._param.items_ref)
if not isinstance(arr, list):
@ -40,12 +43,17 @@ class IterationItem(ComponentBase, ABC):
raise Exception(parent._param.items_ref + " must be an array, but its type is "+str(type(arr)))

if self._idx > 0:
if self.check_if_canceled("IterationItem processing"):
return
self.output_collation()

if self._idx >= len(arr):
self._idx = -1
return

if self.check_if_canceled("IterationItem processing"):
return

self.set_output("item", arr[self._idx])
self.set_output("index", self._idx)

@ -80,4 +88,4 @@ class IterationItem(ComponentBase, ABC):
return self._idx == -1

def thoughts(self) -> str:
return "Next turn..."
@ -207,6 +207,9 @@ class LLM(ComponentBase):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("LLM processing"):
return

def clean_formated_answer(ans: str) -> str:
ans = re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)
ans = re.sub(r"^.*```json", "", ans, flags=re.DOTALL)
@ -216,13 +219,16 @@ class LLM(ComponentBase):
error: str = ""
output_structure=None
try:
output_structure = None#self._param.outputs['structured']
output_structure = self._param.outputs['structured']
except Exception:
pass
if output_structure:
schema=json.dumps(output_structure, ensure_ascii=False, indent=2)
prompt += structured_output_prompt(schema)
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("LLM processing"):
return

_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
error = ""
ans = self._generate(msg)
@ -248,6 +254,9 @@ class LLM(ComponentBase):
return

for _ in range(self._param.max_retries+1):
if self.check_if_canceled("LLM processing"):
return

_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
error = ""
ans = self._generate(msg)
@ -269,6 +278,9 @@ class LLM(ComponentBase):
_, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
answer = ""
for ans in self._generate_streamly(msg):
if self.check_if_canceled("LLM streaming"):
return

if ans.find("**ERROR**") >= 0:
if self.get_exception_default_value():
self.set_output("content", self.get_exception_default_value())
@ -287,4 +299,4 @@ class LLM(ComponentBase):

def thoughts(self) -> str:
_, msg,_ = self._prepare_prompt_variables()
return "⌛Give me a moment—starting from: \n\n" + re.sub(r"(User's query:|[\\]+)", '', msg[-1]['content'], flags=re.DOTALL) + "\n\nI’ll figure out our best next move."
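Toy demonstration of the clean-up applied before parsing structured output now that `output_structure` is actually read from `self._param.outputs['structured']`: everything up to `</think>` and the opening JSON code fence is stripped. Only the first two substitutions appear in the diff; the final fence-stripping regex below is an assumption added to make the sketch self-contained:

```python
import json
import re

def clean_formated_answer(ans: str) -> str:
    # strip any reasoning prefix and the opening fence, as in the diff
    ans = re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)
    ans = re.sub(r"^.*```json", "", ans, flags=re.DOTALL)
    # assumed closing-fence strip, not shown in the diff
    return re.sub(r"```\n*$", "", ans, flags=re.DOTALL)

raw = "reasoning...</think>```json\n{\"answer\": 42}\n```"
print(json.loads(clean_formated_answer(raw)))
# -> {'answer': 42}
```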
@@ -89,6 +89,9 @@ class Message(ComponentBase):
         all_content = ""
         cache = {}
         for r in re.finditer(self.variable_ref_patt, rand_cnt, flags=re.DOTALL):
+            if self.check_if_canceled("Message streaming"):
+                return
+
             all_content += rand_cnt[s: r.start()]
             yield rand_cnt[s: r.start()]
             s = r.end()
@@ -99,26 +102,33 @@ class Message(ComponentBase):
                 continue

             v = self._canvas.get_variable_value(exp)
-            if not v:
+            if v is None:
                 v = ""
             if isinstance(v, partial):
                 cnt = ""
                 for t in v():
+                    if self.check_if_canceled("Message streaming"):
+                        return
+
                     all_content += t
                     cnt += t
                     yield t
+                self.set_input_value(exp, cnt)
                 continue
             elif not isinstance(v, str):
                 try:
-                    v = json.dumps(v, ensure_ascii=False, indent=2)
+                    v = json.dumps(v, ensure_ascii=False)
                 except Exception:
                     v = str(v)
             yield v
+            self.set_input_value(exp, v)
             all_content += v
             cache[exp] = v

         if s < len(rand_cnt):
+            if self.check_if_canceled("Message streaming"):
+                return
             all_content += rand_cnt[s: ]
             yield rand_cnt[s: ]

@@ -132,6 +142,9 @@ class Message(ComponentBase):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("Message processing"):
+            return
+
         rand_cnt = random.choice(self._param.content)
         if self._param.stream and not self._is_jinjia2(rand_cnt):
             self.set_output("content", partial(self._stream, rand_cnt))
@@ -144,6 +157,9 @@ class Message(ComponentBase):
             except Exception:
                 pass

+        if self.check_if_canceled("Message processing"):
+            return
+
         for n, v in kwargs.items():
             content = re.sub(n, v, content)
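One change in the Message hunk is easy to miss: `if not v:` became `if v is None:` before the empty-string fallback. Treating every falsy value as missing would also blank out legitimate values such as 0, False, or an empty list, whereas only an unresolved variable should fall back to "". A quick standalone illustration (plain Python, independent of RAGFlow):

# Why `v is None` is the safer guard for "variable not found".
for v in [None, 0, False, "", [], "text"]:
    by_truthiness = "" if not v else v        # old check: 0 / False / [] are lost
    by_identity = "" if v is None else v      # new check: only None falls back
    print(repr(v), "->", repr(by_truthiness), "vs", repr(by_identity))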
@@ -63,17 +63,24 @@ class StringTransform(Message, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("StringTransform processing"):
+            return
+
         if self._param.method == "split":
             self._split(kwargs.get("line"))
         else:
             self._merge(kwargs)

     def _split(self, line:str|None = None):
+        if self.check_if_canceled("StringTransform split processing"):
+            return
+
         var = self._canvas.get_variable_value(self._param.split_ref) if not line else line
         if not var:
             var = ""
         assert isinstance(var, str), "The input variable is not a string: {}".format(type(var))
         self.set_input_value(self._param.split_ref, var)

         res = []
         for i,s in enumerate(re.split(r"(%s)"%("|".join([re.escape(d) for d in self._param.delimiters])), var, flags=re.DOTALL)):
             if i % 2 == 1:
@@ -82,6 +89,9 @@ class StringTransform(Message, ABC):
         self.set_output("result", res)

     def _merge(self, kwargs:dict[str, str] = {}):
+        if self.check_if_canceled("StringTransform merge processing"):
+            return
+
         script = self._param.script
         script, kwargs = self.get_kwargs(script, kwargs, self._param.delimiters[0])
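_split relies on a detail of re.split: because the delimiter pattern is wrapped in a capturing group, the separators themselves come back at the odd indexes of the result, which is why the loop treats `i % 2 == 1` specially. A standalone sketch of that behaviour (the delimiters are chosen arbitrarily for illustration):

import re

delimiters = [",", ";"]                       # hypothetical delimiter list
pattern = r"(%s)" % "|".join(re.escape(d) for d in delimiters)

parts = re.split(pattern, "a,b;c", flags=re.DOTALL)
print(parts)                                              # ['a', ',', 'b', ';', 'c']
print([p for i, p in enumerate(parts) if i % 2 == 0])     # only the pieces: ['a', 'b', 'c']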
@@ -63,9 +63,18 @@ class Switch(ComponentBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("Switch processing"):
+            return
+
         for cond in self._param.conditions:
+            if self.check_if_canceled("Switch processing"):
+                return
+
             res = []
             for item in cond["items"]:
+                if self.check_if_canceled("Switch processing"):
+                    return
+
                 if not item["cpn_id"]:
                     continue
                 cpn_v = self._canvas.get_variable_value(item["cpn_id"])
@@ -128,4 +137,4 @@ class Switch(ComponentBase, ABC):
         raise ValueError('Not supported operator' + operator)

     def thoughts(self) -> str:
         return "I’m weighing a few options and will pick the next step shortly."
agent/component/varaiable_aggregator.py (new file, 84 lines)
@@ -0,0 +1,84 @@
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any
import os

from common.connection_utils import timeout
from agent.component.base import ComponentBase, ComponentParamBase


class VariableAggregatorParam(ComponentParamBase):
    """
    Parameters for VariableAggregator

    - groups: list of dicts {"group_name": str, "variables": [variable selectors]}
    """

    def __init__(self):
        super().__init__()
        # each group expects: {"group_name": str, "variables": List[str]}
        self.groups = []

    def check(self):
        self.check_empty(self.groups, "[VariableAggregator] groups")
        for g in self.groups:
            if not g.get("group_name"):
                raise ValueError("[VariableAggregator] group_name can not be empty!")
            if not g.get("variables"):
                raise ValueError(
                    f"[VariableAggregator] variables of group `{g.get('group_name')}` can not be empty"
                )
            if not isinstance(g.get("variables"), list):
                raise ValueError(
                    f"[VariableAggregator] variables of group `{g.get('group_name')}` should be a list of strings"
                )

    def get_input_form(self) -> dict[str, dict]:
        return {
            "variables": {
                "name": "Variables",
                "type": "list",
            }
        }


class VariableAggregator(ComponentBase):
    component_name = "VariableAggregator"

    @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 3)))
    def _invoke(self, **kwargs):
        # Group mode: for each group, pick the first available variable
        for group in self._param.groups:
            gname = group.get("group_name")

            # record candidate selectors within this group
            self.set_input_value(f"{gname}.variables", list(group.get("variables", [])))
            for selector in group.get("variables", []):
                val = self._canvas.get_variable_value(selector['value'])
                if val:
                    self.set_output(gname, val)
                    break

    @staticmethod
    def _to_object(value: Any) -> Any:
        # Try to convert value to serializable object if it has to_object()
        try:
            return value.to_object()  # type: ignore[attr-defined]
        except Exception:
            return value

    def thoughts(self) -> str:
        return "Aggregating variables from canvas and grouping as configured."
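To make the new component concrete, a hedged configuration sketch: the selector values below are invented for illustration, but they follow the shape _invoke expects. Each group names one output and lists candidate variable selectors, and the first selector that resolves to a truthy value wins.

# Illustrative groups config (component ids / selectors are made up):
groups = [
    {
        "group_name": "answer",
        "variables": [
            {"value": "Agent:PlanA@content"},   # preferred source
            {"value": "Agent:PlanB@content"},   # fallback if PlanA produced nothing
        ],
    },
]
# With this config, _invoke() would call set_output("answer", <first non-empty value>),
# so downstream nodes can reference the aggregated "answer" output regardless of which
# upstream agent actually produced the text.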
agent/component/webhook.py (new file, 38 lines)
@@ -0,0 +1,38 @@
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from agent.component.base import ComponentParamBase, ComponentBase


class WebhookParam(ComponentParamBase):

    """
    Define the Begin component parameters.
    """
    def __init__(self):
        super().__init__()

    def get_input_form(self) -> dict[str, dict]:
        return getattr(self, "inputs")


class Webhook(ComponentBase):
    component_name = "Webhook"

    def _invoke(self, **kwargs):
        pass

    def thoughts(self) -> str:
        return ""
agent/templates/user_interaction.json (new file, 519 lines)
@@ -0,0 +1,519 @@
{
|
||||||
|
"id": 27,
|
||||||
|
"title": {
|
||||||
|
"en": "Interactive Agent",
|
||||||
|
"zh": "可交互的 Agent"
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"en": "During the Agent’s execution, users can actively intervene and interact with the Agent to adjust or guide its output, ensuring the final result aligns with their intentions.",
|
||||||
|
"zh": "在 Agent 的运行过程中,用户可以随时介入,与 Agent 进行交互,以调整或引导生成结果,使最终输出更符合预期。"
|
||||||
|
},
|
||||||
|
"canvas_type": "Agent",
|
||||||
|
"dsl": {
|
||||||
|
"components": {
|
||||||
|
"Agent:LargeFliesMelt": {
|
||||||
|
"downstream": [
|
||||||
|
"UserFillUp:GoldBroomsRelate"
|
||||||
|
],
|
||||||
|
"obj": {
|
||||||
|
"component_name": "Agent",
|
||||||
|
"params": {
|
||||||
|
"cite": true,
|
||||||
|
"delay_after_error": 1,
|
||||||
|
"description": "",
|
||||||
|
"exception_default_value": "",
|
||||||
|
"exception_goto": [],
|
||||||
|
"exception_method": "",
|
||||||
|
"frequencyPenaltyEnabled": false,
|
||||||
|
"frequency_penalty": 0.7,
|
||||||
|
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||||
|
"maxTokensEnabled": false,
|
||||||
|
"max_retries": 3,
|
||||||
|
"max_rounds": 1,
|
||||||
|
"max_tokens": 256,
|
||||||
|
"mcp": [],
|
||||||
|
"message_history_window_size": 12,
|
||||||
|
"outputs": {
|
||||||
|
"content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"structured": {}
|
||||||
|
},
|
||||||
|
"presencePenaltyEnabled": false,
|
||||||
|
"presence_penalty": 0.4,
|
||||||
|
"prompts": [
|
||||||
|
{
|
||||||
|
"content": "User query:{sys.query}",
|
||||||
|
"role": "user"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sys_prompt": "<role>\nYou are the Planning Agent in a multi-agent RAG workflow.\nYour sole job is to design a crisp, executable Search Plan for the next agent. Do not search or answer the user’s question.\n</role>\n<objectives>\nUnderstand the user’s task and decompose it into evidence-seeking steps.\nProduce high-quality queries and retrieval settings tailored to the task type (fact lookup, multi-hop reasoning, comparison, statistics, how-to, etc.).\nIdentify missing information that would materially change the plan (≤3 concise questions).\nOptimize for source trustworthiness, diversity, and recency; define stopping criteria to avoid over-searching.\nAnswer in 150 words.\n<objectives>",
|
||||||
|
"temperature": 0.1,
|
||||||
|
"temperatureEnabled": false,
|
||||||
|
"tools": [],
|
||||||
|
"topPEnabled": false,
|
||||||
|
"top_p": 0.3,
|
||||||
|
"user_prompt": "",
|
||||||
|
"visual_files_var": ""
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"upstream": [
|
||||||
|
"begin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"Agent:TangyWordsType": {
|
||||||
|
"downstream": [
|
||||||
|
"Message:FreshWallsStudy"
|
||||||
|
],
|
||||||
|
"obj": {
|
||||||
|
"component_name": "Agent",
|
||||||
|
"params": {
|
||||||
|
"cite": true,
|
||||||
|
"delay_after_error": 1,
|
||||||
|
"description": "",
|
||||||
|
"exception_default_value": "",
|
||||||
|
"exception_goto": [],
|
||||||
|
"exception_method": "",
|
||||||
|
"frequencyPenaltyEnabled": false,
|
||||||
|
"frequency_penalty": 0.7,
|
||||||
|
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||||
|
"maxTokensEnabled": false,
|
||||||
|
"max_retries": 3,
|
||||||
|
"max_rounds": 1,
|
||||||
|
"max_tokens": 256,
|
||||||
|
"mcp": [],
|
||||||
|
"message_history_window_size": 12,
|
||||||
|
"outputs": {
|
||||||
|
"content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"structured": {}
|
||||||
|
},
|
||||||
|
"presencePenaltyEnabled": false,
|
||||||
|
"presence_penalty": 0.4,
|
||||||
|
"prompts": [
|
||||||
|
{
|
||||||
|
"content": "Search Plan: {Agent:LargeFliesMelt@content}\n\n\n\nAwait Response feedback:{UserFillUp:GoldBroomsRelate@instructions}\n",
|
||||||
|
"role": "user"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sys_prompt": "<role>\nYou are the Search Agent.\nYour job is to execute the approved Search Plan, integrate the Await Response feedback, retrieve evidence, and produce a well-grounded answer.\n</role>\n<objectives>\nTranslate the plan + feedback into concrete searches.\nCollect diverse, trustworthy, and recent evidence meeting the plan’s evidence bar.\nSynthesize a concise answer; include citations next to claims they support.\nIf evidence is insufficient or conflicting, clearly state limitations and propose next steps.\n</objectives>\n <tools>\nRetrieval: You must use Retrieval to do the search.\n </tools>\n",
|
||||||
|
"temperature": 0.1,
|
||||||
|
"temperatureEnabled": false,
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"component_name": "Retrieval",
|
||||||
|
"name": "Retrieval",
|
||||||
|
"params": {
|
||||||
|
"cross_languages": [],
|
||||||
|
"description": "",
|
||||||
|
"empty_response": "",
|
||||||
|
"kb_ids": [],
|
||||||
|
"keywords_similarity_weight": 0.7,
|
||||||
|
"outputs": {
|
||||||
|
"formalized_content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"json": {
|
||||||
|
"type": "Array<Object>",
|
||||||
|
"value": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"rerank_id": "",
|
||||||
|
"similarity_threshold": 0.2,
|
||||||
|
"toc_enhance": false,
|
||||||
|
"top_k": 1024,
|
||||||
|
"top_n": 8,
|
||||||
|
"use_kg": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"topPEnabled": false,
|
||||||
|
"top_p": 0.3,
|
||||||
|
"user_prompt": "",
|
||||||
|
"visual_files_var": ""
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"upstream": [
|
||||||
|
"UserFillUp:GoldBroomsRelate"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"Message:FreshWallsStudy": {
|
||||||
|
"downstream": [],
|
||||||
|
"obj": {
|
||||||
|
"component_name": "Message",
|
||||||
|
"params": {
|
||||||
|
"content": [
|
||||||
|
"{Agent:TangyWordsType@content}"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"upstream": [
|
||||||
|
"Agent:TangyWordsType"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"UserFillUp:GoldBroomsRelate": {
|
||||||
|
"downstream": [
|
||||||
|
"Agent:TangyWordsType"
|
||||||
|
],
|
||||||
|
"obj": {
|
||||||
|
"component_name": "UserFillUp",
|
||||||
|
"params": {
|
||||||
|
"enable_tips": true,
|
||||||
|
"inputs": {
|
||||||
|
"instructions": {
|
||||||
|
"name": "instructions",
|
||||||
|
"optional": false,
|
||||||
|
"options": [],
|
||||||
|
"type": "paragraph"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"instructions": {
|
||||||
|
"name": "instructions",
|
||||||
|
"optional": false,
|
||||||
|
"options": [],
|
||||||
|
"type": "paragraph"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tips": "Here is my search plan:\n{Agent:LargeFliesMelt@content}\nAre you okay with it?"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"upstream": [
|
||||||
|
"Agent:LargeFliesMelt"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"begin": {
|
||||||
|
"downstream": [
|
||||||
|
"Agent:LargeFliesMelt"
|
||||||
|
],
|
||||||
|
"obj": {
|
||||||
|
"component_name": "Begin",
|
||||||
|
"params": {}
|
||||||
|
},
|
||||||
|
"upstream": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"globals": {
|
||||||
|
"sys.conversation_turns": 0,
|
||||||
|
"sys.files": [],
|
||||||
|
"sys.query": "",
|
||||||
|
"sys.user_id": ""
|
||||||
|
},
|
||||||
|
"graph": {
|
||||||
|
"edges": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"isHovered": false
|
||||||
|
},
|
||||||
|
"id": "xy-edge__beginstart-Agent:LargeFliesMeltend",
|
||||||
|
"source": "begin",
|
||||||
|
"sourceHandle": "start",
|
||||||
|
"target": "Agent:LargeFliesMelt",
|
||||||
|
"targetHandle": "end"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"isHovered": false
|
||||||
|
},
|
||||||
|
"id": "xy-edge__Agent:LargeFliesMeltstart-UserFillUp:GoldBroomsRelateend",
|
||||||
|
"source": "Agent:LargeFliesMelt",
|
||||||
|
"sourceHandle": "start",
|
||||||
|
"target": "UserFillUp:GoldBroomsRelate",
|
||||||
|
"targetHandle": "end"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"isHovered": false
|
||||||
|
},
|
||||||
|
"id": "xy-edge__UserFillUp:GoldBroomsRelatestart-Agent:TangyWordsTypeend",
|
||||||
|
"source": "UserFillUp:GoldBroomsRelate",
|
||||||
|
"sourceHandle": "start",
|
||||||
|
"target": "Agent:TangyWordsType",
|
||||||
|
"targetHandle": "end"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "xy-edge__Agent:TangyWordsTypetool-Tool:NastyBatsGoend",
|
||||||
|
"source": "Agent:TangyWordsType",
|
||||||
|
"sourceHandle": "tool",
|
||||||
|
"target": "Tool:NastyBatsGo",
|
||||||
|
"targetHandle": "end"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "xy-edge__Agent:TangyWordsTypestart-Message:FreshWallsStudyend",
|
||||||
|
"source": "Agent:TangyWordsType",
|
||||||
|
"sourceHandle": "start",
|
||||||
|
"target": "Message:FreshWallsStudy",
|
||||||
|
"targetHandle": "end"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"label": "Begin",
|
||||||
|
"name": "begin"
|
||||||
|
},
|
||||||
|
"dragging": false,
|
||||||
|
"id": "begin",
|
||||||
|
"measured": {
|
||||||
|
"height": 50,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 154.9008789064451,
|
||||||
|
"y": 119.51001744285344
|
||||||
|
},
|
||||||
|
"selected": false,
|
||||||
|
"sourcePosition": "left",
|
||||||
|
"targetPosition": "right",
|
||||||
|
"type": "beginNode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"form": {
|
||||||
|
"cite": true,
|
||||||
|
"delay_after_error": 1,
|
||||||
|
"description": "",
|
||||||
|
"exception_default_value": "",
|
||||||
|
"exception_goto": [],
|
||||||
|
"exception_method": "",
|
||||||
|
"frequencyPenaltyEnabled": false,
|
||||||
|
"frequency_penalty": 0.7,
|
||||||
|
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||||
|
"maxTokensEnabled": false,
|
||||||
|
"max_retries": 3,
|
||||||
|
"max_rounds": 1,
|
||||||
|
"max_tokens": 256,
|
||||||
|
"mcp": [],
|
||||||
|
"message_history_window_size": 12,
|
||||||
|
"outputs": {
|
||||||
|
"content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"structured": {}
|
||||||
|
},
|
||||||
|
"presencePenaltyEnabled": false,
|
||||||
|
"presence_penalty": 0.4,
|
||||||
|
"prompts": [
|
||||||
|
{
|
||||||
|
"content": "User query:{sys.query}",
|
||||||
|
"role": "user"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sys_prompt": "<role>\nYou are the Planning Agent in a multi-agent RAG workflow.\nYour sole job is to design a crisp, executable Search Plan for the next agent. Do not search or answer the user’s question.\n</role>\n<objectives>\nUnderstand the user’s task and decompose it into evidence-seeking steps.\nProduce high-quality queries and retrieval settings tailored to the task type (fact lookup, multi-hop reasoning, comparison, statistics, how-to, etc.).\nIdentify missing information that would materially change the plan (≤3 concise questions).\nOptimize for source trustworthiness, diversity, and recency; define stopping criteria to avoid over-searching.\nAnswer in 150 words.\n<objectives>",
|
||||||
|
"temperature": 0.1,
|
||||||
|
"temperatureEnabled": false,
|
||||||
|
"tools": [],
|
||||||
|
"topPEnabled": false,
|
||||||
|
"top_p": 0.3,
|
||||||
|
"user_prompt": "",
|
||||||
|
"visual_files_var": ""
|
||||||
|
},
|
||||||
|
"label": "Agent",
|
||||||
|
"name": "Planning Agent"
|
||||||
|
},
|
||||||
|
"dragging": false,
|
||||||
|
"id": "Agent:LargeFliesMelt",
|
||||||
|
"measured": {
|
||||||
|
"height": 90,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 443.96309330796714,
|
||||||
|
"y": 104.61370811205677
|
||||||
|
},
|
||||||
|
"selected": false,
|
||||||
|
"sourcePosition": "right",
|
||||||
|
"targetPosition": "left",
|
||||||
|
"type": "agentNode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"form": {
|
||||||
|
"enable_tips": true,
|
||||||
|
"inputs": {
|
||||||
|
"instructions": {
|
||||||
|
"name": "instructions",
|
||||||
|
"optional": false,
|
||||||
|
"options": [],
|
||||||
|
"type": "paragraph"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"instructions": {
|
||||||
|
"name": "instructions",
|
||||||
|
"optional": false,
|
||||||
|
"options": [],
|
||||||
|
"type": "paragraph"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tips": "Here is my search plan:\n{Agent:LargeFliesMelt@content}\nAre you okay with it?"
|
||||||
|
},
|
||||||
|
"label": "UserFillUp",
|
||||||
|
"name": "Await Response"
|
||||||
|
},
|
||||||
|
"dragging": false,
|
||||||
|
"id": "UserFillUp:GoldBroomsRelate",
|
||||||
|
"measured": {
|
||||||
|
"height": 50,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 683.3409492927474,
|
||||||
|
"y": 116.76274137645598
|
||||||
|
},
|
||||||
|
"selected": false,
|
||||||
|
"sourcePosition": "right",
|
||||||
|
"targetPosition": "left",
|
||||||
|
"type": "ragNode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"form": {
|
||||||
|
"cite": true,
|
||||||
|
"delay_after_error": 1,
|
||||||
|
"description": "",
|
||||||
|
"exception_default_value": "",
|
||||||
|
"exception_goto": [],
|
||||||
|
"exception_method": "",
|
||||||
|
"frequencyPenaltyEnabled": false,
|
||||||
|
"frequency_penalty": 0.7,
|
||||||
|
"llm_id": "qwen-turbo@Tongyi-Qianwen",
|
||||||
|
"maxTokensEnabled": false,
|
||||||
|
"max_retries": 3,
|
||||||
|
"max_rounds": 1,
|
||||||
|
"max_tokens": 256,
|
||||||
|
"mcp": [],
|
||||||
|
"message_history_window_size": 12,
|
||||||
|
"outputs": {
|
||||||
|
"content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"structured": {}
|
||||||
|
},
|
||||||
|
"presencePenaltyEnabled": false,
|
||||||
|
"presence_penalty": 0.4,
|
||||||
|
"prompts": [
|
||||||
|
{
|
||||||
|
"content": "Search Plan: {Agent:LargeFliesMelt@content}\n\n\n\nAwait Response feedback:{UserFillUp:GoldBroomsRelate@instructions}\n",
|
||||||
|
"role": "user"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sys_prompt": "<role>\nYou are the Search Agent.\nYour job is to execute the approved Search Plan, integrate the Await Response feedback, retrieve evidence, and produce a well-grounded answer.\n</role>\n<objectives>\nTranslate the plan + feedback into concrete searches.\nCollect diverse, trustworthy, and recent evidence meeting the plan’s evidence bar.\nSynthesize a concise answer; include citations next to claims they support.\nIf evidence is insufficient or conflicting, clearly state limitations and propose next steps.\n</objectives>\n <tools>\nRetrieval: You must use Retrieval to do the search.\n </tools>\n",
|
||||||
|
"temperature": 0.1,
|
||||||
|
"temperatureEnabled": false,
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"component_name": "Retrieval",
|
||||||
|
"name": "Retrieval",
|
||||||
|
"params": {
|
||||||
|
"cross_languages": [],
|
||||||
|
"description": "",
|
||||||
|
"empty_response": "",
|
||||||
|
"kb_ids": [],
|
||||||
|
"keywords_similarity_weight": 0.7,
|
||||||
|
"outputs": {
|
||||||
|
"formalized_content": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"json": {
|
||||||
|
"type": "Array<Object>",
|
||||||
|
"value": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"rerank_id": "",
|
||||||
|
"similarity_threshold": 0.2,
|
||||||
|
"toc_enhance": false,
|
||||||
|
"top_k": 1024,
|
||||||
|
"top_n": 8,
|
||||||
|
"use_kg": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"topPEnabled": false,
|
||||||
|
"top_p": 0.3,
|
||||||
|
"user_prompt": "",
|
||||||
|
"visual_files_var": ""
|
||||||
|
},
|
||||||
|
"label": "Agent",
|
||||||
|
"name": "Search Agent"
|
||||||
|
},
|
||||||
|
"dragging": false,
|
||||||
|
"id": "Agent:TangyWordsType",
|
||||||
|
"measured": {
|
||||||
|
"height": 90,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 944.6411255659472,
|
||||||
|
"y": 99.84499066368488
|
||||||
|
},
|
||||||
|
"selected": true,
|
||||||
|
"sourcePosition": "right",
|
||||||
|
"targetPosition": "left",
|
||||||
|
"type": "agentNode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"form": {
|
||||||
|
"description": "This is an agent for a specific task.",
|
||||||
|
"user_prompt": "This is the order you need to send to the agent."
|
||||||
|
},
|
||||||
|
"label": "Tool",
|
||||||
|
"name": "flow.tool_0"
|
||||||
|
},
|
||||||
|
"id": "Tool:NastyBatsGo",
|
||||||
|
"measured": {
|
||||||
|
"height": 50,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 862.6411255659472,
|
||||||
|
"y": 239.84499066368488
|
||||||
|
},
|
||||||
|
"sourcePosition": "right",
|
||||||
|
"targetPosition": "left",
|
||||||
|
"type": "toolNode"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"form": {
|
||||||
|
"content": [
|
||||||
|
"{Agent:TangyWordsType@content}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"label": "Message",
|
||||||
|
"name": "Message"
|
||||||
|
},
|
||||||
|
"dragging": false,
|
||||||
|
"id": "Message:FreshWallsStudy",
|
||||||
|
"measured": {
|
||||||
|
"height": 50,
|
||||||
|
"width": 200
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 1216.7057997987163,
|
||||||
|
"y": 120.48541298149814
|
||||||
|
},
|
||||||
|
"selected": false,
|
||||||
|
"sourcePosition": "right",
|
||||||
|
"targetPosition": "left",
|
||||||
|
"type": "messageNode"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"history": [],
|
||||||
|
"messages": [],
|
||||||
|
"path": [],
|
||||||
|
"retrieval": [],
|
||||||
|
"variables": {}
|
||||||
|
},
|
||||||
|
"avatar":
|
||||||
|
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAACXBIWXMAABYlAAAWJQFJUiTwAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA1FSURBVHgBzVppcFRVFv7e672TTjrprGTrkI1tgFjiAI4TmgEGcEFgVFAHxr2mLFe0cKxyBOeHljVjwVDqzDhVTqksOlOiuCBrCwk7hCAkLAGy70kn3eklvb03571e8rrTCQGi5em6ecu9fe85557znXNuh0EUHTGb5/Ast4TnmXsB3oifBTFVxEsVy7PrZ5lM9RE9oZvTZrPeAe51Gvh85ABGMuonJh6Ra9Mzw2KDm2PXm0ymPoS6ReZ5n5n6p4f45QOsj0ihEXxwdGg9Pta3Ax2R98ErH+wIzMFjFFTlhcwkCMEKT4qE+Nc5np8e5Aa8MAf94UNtmEl5DPYFrpHPER8+xn3U3KNkXqDpKtFaSGi1Wm188MH7655+6inoExLR0toW0N211R+8jJ198YHtiFyDH5EHE8Oy7Iccx/1BeJ4yZRI2bXgHSfokyGQsLH19YDgax4RXGJz450A8v1HGMMw62soMEgRxcVqcrDyG9X95E339fSidPh05Odnot9vB0Qe84CB8wMQglSe8HWPI3HAmJVmLYTOYJ55YxVedOYcTxyux59svsfqpJ5GWkorGhhZYLL0oKSnC2pfX4DemOaIg9n473B63ZCFGOu2YU4QIgikIJiF5yaxcuZTPzzfC42WQnZWJLdu2IiXFgG+/3oOs7Ey0t3fC7/cjMyMdU6dOwpyyMswtmwO9PhmdnYE+5hrcS9Eq8MzSHRfuvT7Dj5qbiKeGY4fKsWL1ShQWFKG1uQVz55Thh5pqVJ87D6fLDY/HA7/PD87PiYxMnToRLzz7LI0zoamphQTxYewpKIzUB6OUJcIoOTEOVpjhcroRH69FQ2MLHANOZKZn4PcPP4DikgKoVWrI5CzkcjmUSiUuXriKRx77I2bMuh0tba1IStKL84TXDTU26hp9z0QzJYVVbhBuJdDL8Vy4yeRK+TrBNHp7e9HR0QWXawDNTa3o7ulB2R2zER+nQXZ2FhRKBVwOl7BloO+IVwGpBtxebPv0v0hNN+CO2beHHT4cD6I9Pvo+iobElmtYk6xkQuE6nvOjpaWNmHeDJxPp7bVCp4vHvv0HsXfvQVSeOgOtRoOFi+aJu+C0O6FVa2AwJEOhkMPhcODEiVNwuJ0w5ubRLsbB7/UPMsvH5HTYxvNBwSUAIT5yQcG4wTlk3+/5bt2cX5chIzMdbW3tOH/+EtRaNex2B1RkKkuXLkJCog41NZdw8MBhFBUVICU1WTQDwZwMKcloJJN74snHMeByoq7uCr1XECBkEzB4I3ge6p4xkp3hiOFj+rdcYFpg5vaZs3H3nXehh0znyPFjWLv2NcyeNQPbtn0hDlRr1GKc2L1rP5QqJeQyMiMZRH8QqKujDa+9+gruWf4A4siPlLQzeXlG+Dl/hAjR2B7KnURQEnyDC4kyXPoS5dN7dn7FR0umJ4d8aNUjBKEdePSxh8iMDuDixcsiCglbK5PJ4PMFUEe4F6B01zfbMeD1iTx8vPUTMi0lXn7uJXT1dIr+Eph/EFViAMqoKGBag89sZFIVuMaTplsJWZYtvwv7yQdqai5CQWYhoMyECUWYv8BE2s0RJ8gk03v/3Q24UHsJ27/YjnaKDa4BD1INBpQfrgDlWoFZRRPgcdNBW4hlLBNGM3mEdMGrmzDfSNt/4OBhdLR1Yv26tXj77U0iEqUYUvDvf34AjUpOaUclssaNQ2cXoZdzAC++/Cds3vwZLl+oxtZPt2INPVefOYH6hsbALtwkDabtg8jG8pK0OURtbR34dMtHaKhrImlZvPXW35FsSMKKFfeivOIwsnKMWLDwbrjdbtEHOB8HFfnF8YoD+Md7m7Bv3y7s2rtfXOjkqUqMFYnMR5tQsCdCQwK+t5MQ+3d/g4cfXiHCZHeXBZ98/D/R5gX0sfRZsGTZAzDNW4SG5kbk5uSSL3DIzc1GaekMkNyQ0Z+PN29FYmICxooEPkNN5Hv3zh0jhgq9PhFXrtSh+nw1Kg4dIah04Oix02JfQUEepk2bgs8//0bcocWLfotXXlqD7u5uzF1wp7hIcnIyKr7fjSZKTyDV5HXkO9EkrfjksQZI62Brnw2pqQbMz5yH5fcug4/zUtCqxIcffSRmsI2NrSKjlp5ebNnyGWG/B2++8QYW3TlfTAhZGQOej8EAw0SYbcTaGLk6kxZR8miGY4wWGRAY6+7pFu1PSLH/88G/yAcG8NcNG3H85CkxSgrBT4gPDqcDE4sn4GjKCbBCsGAki7KDO8AybIBRugqLMFFMhuPDCALJI9CBGfxidKkYfi+Mp0mFGCEkUy888wy0Wq3YJyf/sFOa0W/rR093DzLS0tDXZw3WHkPnE9li5IhzdMCu0EGupnn8/sgxLEISBJTMISKSsVKnCDEtvYY+ovLIKQVGQk1waCF56yDs7+zsotjRDlu/DR6fF4sXL0RN9QWwFD/ik1JJwdwQ7XEspSKudqSqFUhsqcWAtUccH4tC/Ilrh/hlmbB84RcjtZgTSwQMPQtayhqXiTPvvIDHJyXjtWVlUCUkQ6ZQBHZDXIvMR6akOmMALtoxTUom9K2XaPesYl9wsmsSG2aQYTDih4ndQvl89LOb0o72wlkonTYZ9y25BwffWYOexstQJ+jhJ8377b1Ic9ngam4A53GC9zqgpCrPd7YcnFwZqB4l64aVHNXYmBoMDkbMgmMUJNqqH6rEFChvW0wMWjFv0RKom37AuX3bkcnbUKCOQ3LqOFgb69Gu1GGgs5HiiwYTbp0JS8UOMBpdxLrDRXI2WruICmrRGh5VEz9kIj4PtIZMqGdQ1HZZEZ+ZhxmFuVD73WT/bhx5/8+wZk1AvHEqrAYjCdoHH9UUxRMnoefIDggFHitjR9QVK9VyQIBYzNxAHhPEeI6EkJM2vfm3QSnzwpA/FZaOeuzdvAn9GUYUlC2Gj2zfkJoPRfpkNFdW0DgWeVlGKJqr4SKQkFNmOxwHbDSTTFjxTFAuaQ9z7Y8EIUK+RdgIXq1DUsFM2Pva0dfWDGu/C8ZxBhiaTyGXbF/ltsBSfRI+dQKhmB8KbTwUcYlgG2vQ095CZaxy0KwxaOLscEyEtyWYx4ey4ZhloFQ/XLD5o5pgDl46V+q4igOU6PVyGtxy10ooEtPQfKYc+z/cBBfrRM7EKZR3dYs7x/IK6JJT4K2vRq8QTxgpfwgaaizn4MN/pFsyfAsWKRFNFlCPCNRC3k7PjVZKCpvrULZ8BV7927vobmxG7dG9OHK4HDYqW6/W1cI3wCAtdyql8c0U/Z3wUuGUWzwFF8w7oIzTiZYptQ2WCUdoVnKVoFL4lgn3D+5UYKx45YVdCupEsmNhUyRUUhrywGWUoGhCKdpbWmGpO42LZ8/BR/EgnmqNxb97FCqFH9UHd8JG6YjT1U9w7IOP6pNJhUYc3f0VtCSElBt5KDeJFEKKnbwElQLh
X0oMI70O52rBBM3nRELRL9HQeh5x8KGy3IzjVbWwWfvxwc7dsFssuHz2NBo7W1Ciywdnt4KOQ+ChkxC1SoWmc+XwmBZRYaUWq0Oe80H26KpV60TNhTQW0fiAVoNFBBPSMs9E+ED08/CNxgm2HZcCy5XjkGt16Dp/Fms3vUfiyGFtv4KK/ftQWFxEhwEMvFQwyTihWFKIJ4Ie+u6V0+XkUjzcDhvcNJ9ckkxgSN6LoNfzsYA08t3ooTYA1UrjrXD+cAC/mj+X0CYJ/cT8oe++ppQiBf2OfjE5tFD2qzekIk9GdXpGDtRUSMnpN4wByptSUhOhdLVK6gFecu4SfY3Nxo0TbX18qhHujHqkjy9G84WT6GptwZUOC2qqziM7Lx16jRI54/MJwTzgKYdiBSgWYopSQ0edJXB6/NhyslYCo+F8Q5KDYGiUHVTiTYkAn9MGfcks9NFRpnAUqc/MIgY5LF1xH9Ioe1247H5MnjQNxZNvAUMptttuQyYdlrXRaYmccqnaLhvSEjWRFdlozEDK+I0KIVZiAsRSkaSdOBfeumPobWkUS9RcOqZJjtchJbsEjt42eEhQpS5ZFBiETHVX68jnPHR040YGHW1GFjQxXED67mbq2IhpJTvJu21QjZ8BFaXRPnq2kP3rE3RouUBVHsGzlQ4PEhLi4CB/aCLo9dLhmRDFs+k3jEt0GC3sQD01Y2jCoatJb6MGXEOe0QnMUPCyI+OWBdDk/AK1x3ejraFeLJYG6DTE7XIhjiC0z9IFJ9UNOq2AOz4UZ2bD6hyokj22enU+zTETN0LMyO16MliOzEmpVsH4i1lIzy2kn0+14CjAUSYHi80uFj8cOfL4wiJkpKfT4QKHLB37HVNuNs9hWc6MkVU5IiL9GCSccMsVKjrOl5HZuKnWtqKj4TI6as+AcffTkaUKfl6WL9rEoQP7NpAenhvNxEOO92LGjx+BGCGfUkKh0ZIv21Bz9vTGJ59+8XlRALPZrFexvLAL0zEGNNLJ80h910FVmoQkU2lpaeBfDYT/OXBzjIm0uhFjQMwN9o2KOH6jhngVmI853xGz2QgZ1lH2MI3yIXFHeP4nNP7YVE9cfMlw7BezTKbvpR3/Bx465XnKBextAAAAAElFTkSuQmCC"
|
||||||
|
}
|
||||||
@@ -16,7 +16,7 @@
 import argparse
 import os
 from agent.canvas import Canvas
-from api import settings
+from common import settings

 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
@@ -63,12 +63,18 @@ class ArXiv(ToolBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("ArXiv processing"):
+            return
+
         if not kwargs.get("query"):
             self.set_output("formalized_content", "")
             return ""

         last_e = ""
         for _ in range(self._param.max_retries+1):
+            if self.check_if_canceled("ArXiv processing"):
+                return
+
             try:
                 sort_choices = {"relevance": arxiv.SortCriterion.Relevance,
                                 "lastUpdatedDate": arxiv.SortCriterion.LastUpdatedDate,
@@ -79,12 +85,20 @@ class ArXiv(ToolBase, ABC):
                     max_results=self._param.top_n,
                     sort_by=sort_choices[self._param.sort_by]
                 )
-                self._retrieve_chunks(list(arxiv_client.results(search)),
+                results = list(arxiv_client.results(search))
+
+                if self.check_if_canceled("ArXiv processing"):
+                    return
+
+                self._retrieve_chunks(results,
                                       get_title=lambda r: r.title,
                                       get_url=lambda r: r.pdf_url,
                                       get_content=lambda r: r.summary)
                 return self.output("formalized_content")
             except Exception as e:
+                if self.check_if_canceled("ArXiv processing"):
+                    return
+
                 last_e = e
                 logging.exception(f"ArXiv error: {e}")
                 time.sleep(self._param.delay_after_error)
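Several of the search tools (ArXiv, DuckDuckGo, GitHub, Google, GoogleScholar) now share the same retry skeleton: up to max_retries + 1 attempts, a cancellation check at the top of each attempt and after each slow call, and delay_after_error seconds of backoff between failures. A condensed, framework-free sketch of that skeleton (the parameter names are the tool-param fields visible in the hunks; the fetch function is a stand-in):

import logging
import time

def retry_with_cancel(fetch, is_canceled, max_retries=3, delay_after_error=1.0):
    """Call fetch() until it succeeds, honouring cancellation between attempts."""
    last_e = ""
    for _ in range(max_retries + 1):
        if is_canceled():
            return None                      # canceled: stop without raising
        try:
            return fetch()                   # the slow, failure-prone call
        except Exception as e:
            if is_canceled():
                return None
            last_e = e
            logging.exception("retryable error: %s", e)
            time.sleep(delay_after_error)    # simple fixed backoff, as in the diff
    raise RuntimeError(f"all retries failed: {last_e}")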
@@ -125,6 +125,9 @@ class ToolBase(ComponentBase):
         return self._param.get_meta()

     def invoke(self, **kwargs):
+        if self.check_if_canceled("Tool processing"):
+            return
+
         self.set_output("_created_time", time.perf_counter())
         try:
             res = self._invoke(**kwargs)
@@ -170,4 +173,4 @@ class ToolBase(ComponentBase):
         self.set_output("formalized_content", "\n".join(kb_prompt({"chunks": chunks, "doc_aggs": aggs}, 200000, True)))

     def thoughts(self) -> str:
         return self._canvas.get_component_name(self._id) + " is running..."
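invoke stamps _created_time with time.perf_counter() before running the tool, a monotonic start mark that later code can subtract from to get elapsed seconds. A minimal illustration of that measurement pattern (the output key is the one used above; everything else here is a toy):

import time

outputs = {}
outputs["_created_time"] = time.perf_counter()   # monotonic start mark, not wall-clock time

time.sleep(0.25)                                  # stand-in for the tool's real work

elapsed = time.perf_counter() - outputs["_created_time"]
print(f"tool finished in {elapsed:.3f}s")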
@@ -21,8 +21,8 @@ from strenum import StrEnum
 from typing import Optional
 from pydantic import BaseModel, Field, field_validator
 from agent.tools.base import ToolParamBase, ToolBase, ToolMeta
-from api import settings
 from common.connection_utils import timeout
+from common import settings


 class Language(StrEnum):
@@ -131,10 +131,14 @@ class CodeExec(ToolBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("CodeExec processing"):
+            return
+
         lang = kwargs.get("lang", self._param.lang)
         script = kwargs.get("script", self._param.script)
         arguments = {}
         for k, v in self._param.arguments.items():
+
             if kwargs.get(k):
                 arguments[k] = kwargs[k]
                 continue
@@ -149,15 +153,28 @@ class CodeExec(ToolBase, ABC):
     def _execute_code(self, language: str, code: str, arguments: dict):
         import requests

+        if self.check_if_canceled("CodeExec execution"):
+            return
+
         try:
             code_b64 = self._encode_code(code)
             code_req = CodeExecutionRequest(code_b64=code_b64, language=language, arguments=arguments).model_dump()
         except Exception as e:
+            if self.check_if_canceled("CodeExec execution"):
+                return
+
             self.set_output("_ERROR", "construct code request error: " + str(e))

         try:
+            if self.check_if_canceled("CodeExec execution"):
+                return "Task has been canceled"
+
             resp = requests.post(url=f"http://{settings.SANDBOX_HOST}:9385/run", json=code_req, timeout=int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
             logging.info(f"http://{settings.SANDBOX_HOST}:9385/run, code_req: {code_req}, resp.status_code {resp.status_code}:")

+            if self.check_if_canceled("CodeExec execution"):
+                return "Task has been canceled"
+
             if resp.status_code != 200:
                 resp.raise_for_status()
             body = resp.json()
@@ -173,16 +190,25 @@ class CodeExec(ToolBase, ABC):
                 logging.info(f"http://{settings.SANDBOX_HOST}:9385/run -> {rt}")
                 if isinstance(rt, tuple):
                     for i, (k, o) in enumerate(self._param.outputs.items()):
+                        if self.check_if_canceled("CodeExec execution"):
+                            return
+
                         if k.find("_") == 0:
                             continue
                         o["value"] = rt[i]
                 elif isinstance(rt, dict):
                     for i, (k, o) in enumerate(self._param.outputs.items()):
+                        if self.check_if_canceled("CodeExec execution"):
+                            return
+
                         if k not in rt or k.find("_") == 0:
                             continue
                         o["value"] = rt[k]
                 else:
                     for i, (k, o) in enumerate(self._param.outputs.items()):
+                        if self.check_if_canceled("CodeExec execution"):
+                            return
+
                         if k.find("_") == 0:
                             continue
                         o["value"] = rt
@@ -190,6 +216,9 @@ class CodeExec(ToolBase, ABC):
                 self.set_output("_ERROR", "There is no response from sandbox")

         except Exception as e:
+            if self.check_if_canceled("CodeExec execution"):
+                return
+
             self.set_output("_ERROR", "Exception executing code: " + str(e))

         return self.output()
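The sandbox round-trip above amounts to: base64-encode the script, wrap it with the language and arguments, POST it to the executor's /run endpoint, and map the returned value(s) onto the component's declared outputs. A rough standalone sketch of the request side (the endpoint path and field names mirror the diff; where the host comes from and the exact response schema are assumptions):

import base64
import os
import requests

def run_in_sandbox(script: str, language: str = "python", arguments: dict | None = None):
    # Mirror of the request construction in CodeExec._execute_code (simplified).
    code_req = {
        "code_b64": base64.b64encode(script.encode("utf-8")).decode("ascii"),
        "language": language,
        "arguments": arguments or {},
    }
    sandbox_host = os.environ.get("SANDBOX_HOST", "sandbox")   # assumption: host normally comes from settings
    resp = requests.post(
        f"http://{sandbox_host}:9385/run",
        json=code_req,
        timeout=int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10 * 60)),
    )
    resp.raise_for_status()
    return resp.json()      # the component then copies fields of the result into its outputs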
@@ -29,7 +29,7 @@ class CrawlerParam(ToolParamBase):
         super().__init__()
         self.proxy = None
         self.extract_type = "markdown"

     def check(self):
         self.check_valid_value(self.extract_type, "Type of content from the crawler", ['html', 'markdown', 'content'])

@@ -47,18 +47,24 @@ class Crawler(ToolBase, ABC):
             result = asyncio.run(self.get_web(ans))

             return Crawler.be_output(result)

         except Exception as e:
             return Crawler.be_output(f"An unexpected error occurred: {str(e)}")

     async def get_web(self, url):
+        if self.check_if_canceled("Crawler async operation"):
+            return
+
         proxy = self._param.proxy if self._param.proxy else None
         async with AsyncWebCrawler(verbose=True, proxy=proxy) as crawler:
             result = await crawler.arun(
                 url=url,
                 bypass_cache=True
             )

+            if self.check_if_canceled("Crawler async operation"):
+                return
+
             if self._param.extract_type == 'html':
                 return result.cleaned_html
             elif self._param.extract_type == 'markdown':
@@ -46,11 +46,16 @@ class DeepL(ComponentBase, ABC):
     component_name = "DeepL"

     def _run(self, history, **kwargs):
+        if self.check_if_canceled("DeepL processing"):
+            return
         ans = self.get_input()
         ans = " - ".join(ans["content"]) if "content" in ans else ""
         if not ans:
             return DeepL.be_output("")

+        if self.check_if_canceled("DeepL processing"):
+            return
+
         try:
             translator = deepl.Translator(self._param.auth_key)
             result = translator.translate_text(ans, source_lang=self._param.source_lang,
@@ -58,4 +63,6 @@ class DeepL(ComponentBase, ABC):

             return DeepL.be_output(result.text)
         except Exception as e:
+            if self.check_if_canceled("DeepL processing"):
+                return
             DeepL.be_output("**Error**:" + str(e))
@@ -75,17 +75,30 @@ class DuckDuckGo(ToolBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("DuckDuckGo processing"):
+            return
+
         if not kwargs.get("query"):
             self.set_output("formalized_content", "")
             return ""

         last_e = ""
         for _ in range(self._param.max_retries+1):
+            if self.check_if_canceled("DuckDuckGo processing"):
+                return
+
             try:
                 if kwargs.get("topic", "general") == "general":
                     with DDGS() as ddgs:
+                        if self.check_if_canceled("DuckDuckGo processing"):
+                            return
+
                         # {'title': '', 'href': '', 'body': ''}
                         duck_res = ddgs.text(kwargs["query"], max_results=self._param.top_n)

+                        if self.check_if_canceled("DuckDuckGo processing"):
+                            return
+
                         self._retrieve_chunks(duck_res,
                                               get_title=lambda r: r["title"],
                                               get_url=lambda r: r.get("href", r.get("url")),
@@ -94,8 +107,15 @@ class DuckDuckGo(ToolBase, ABC):
                         return self.output("formalized_content")
                 else:
                     with DDGS() as ddgs:
+                        if self.check_if_canceled("DuckDuckGo processing"):
+                            return
+
                         # {'date': '', 'title': '', 'body': '', 'url': '', 'image': '', 'source': ''}
                         duck_res = ddgs.news(kwargs["query"], max_results=self._param.top_n)

+                        if self.check_if_canceled("DuckDuckGo processing"):
+                            return
+
                         self._retrieve_chunks(duck_res,
                                               get_title=lambda r: r["title"],
                                               get_url=lambda r: r.get("href", r.get("url")),
@@ -103,6 +123,9 @@ class DuckDuckGo(ToolBase, ABC):
                 self.set_output("json", duck_res)
                 return self.output("formalized_content")
             except Exception as e:
+                if self.check_if_canceled("DuckDuckGo processing"):
+                    return
+
                 last_e = e
                 logging.exception(f"DuckDuckGo error: {e}")
                 time.sleep(self._param.delay_after_error)
@@ -101,19 +101,27 @@ class Email(ToolBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("Email processing"):
+            return
+
         if not kwargs.get("to_email"):
             self.set_output("success", False)
             return ""

         last_e = ""
         for _ in range(self._param.max_retries+1):
+            if self.check_if_canceled("Email processing"):
+                return
+
             try:
                 # Parse JSON string passed from upstream
                 email_data = kwargs

                 # Validate required fields
                 if "to_email" not in email_data:
-                    return Email.be_output("Missing required field: to_email")
+                    self.set_output("_ERROR", "Missing required field: to_email")
+                    self.set_output("success", False)
+                    return False

                 # Create email object
                 msg = MIMEMultipart('alternative')
@@ -133,6 +141,9 @@ class Email(ToolBase, ABC):
                 # Connect to SMTP server and send
                 logging.info(f"Connecting to SMTP server {self._param.smtp_server}:{self._param.smtp_port}")

+                if self.check_if_canceled("Email processing"):
+                    return
+
                 context = smtplib.ssl.create_default_context()
                 with smtplib.SMTP(self._param.smtp_server, self._param.smtp_port) as server:
                     server.ehlo()
@@ -149,6 +160,10 @@ class Email(ToolBase, ABC):

                 # Send email
                 logging.info(f"Sending email to recipients: {recipients}")

+                if self.check_if_canceled("Email processing"):
+                    return
+
                 try:
                     server.send_message(msg, self._param.email, recipients)
                     success = True
@@ -81,6 +81,8 @@ class ExeSQL(ToolBase, ABC):

     @timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
     def _invoke(self, **kwargs):
+        if self.check_if_canceled("ExeSQL processing"):
+            return
         def convert_decimals(obj):
             from decimal import Decimal
@@ -96,6 +98,9 @@ class ExeSQL(ToolBase, ABC):
         if not sql:
             raise Exception("SQL for `ExeSQL` MUST not be empty.")

+        if self.check_if_canceled("ExeSQL processing"):
+            return
+
         vars = self.get_input_elements_from_text(sql)
         args = {}
         for k, o in vars.items():
@@ -108,6 +113,9 @@ class ExeSQL(ToolBase, ABC):
             self.set_input_value(k, args[k])
         sql = self.string_format(sql, args)

+        if self.check_if_canceled("ExeSQL processing"):
+            return
+
         sqls = sql.split(";")
         if self._param.db_type in ["mysql", "mariadb"]:
             db = pymysql.connect(db=self._param.database, user=self._param.username, host=self._param.host,
@@ -181,6 +189,10 @@ class ExeSQL(ToolBase, ABC):
         sql_res = []
         formalized_content = []
         for single_sql in sqls:
+            if self.check_if_canceled("ExeSQL processing"):
+                ibm_db.close(conn)
+                return
+
             single_sql = single_sql.replace("```", "").strip()
             if not single_sql:
                 continue
@@ -190,6 +202,9 @@ class ExeSQL(ToolBase, ABC):
                 rows = []
                 row = ibm_db.fetch_assoc(stmt)
                 while row and len(rows) < self._param.max_records:
+                    if self.check_if_canceled("ExeSQL processing"):
+                        ibm_db.close(conn)
+                        return
                     rows.append(row)
                     row = ibm_db.fetch_assoc(stmt)

@@ -220,6 +235,11 @@ class ExeSQL(ToolBase, ABC):
         sql_res = []
         formalized_content = []
         for single_sql in sqls:
+            if self.check_if_canceled("ExeSQL processing"):
+                cursor.close()
+                db.close()
+                return
+
             single_sql = single_sql.replace('```','')
             if not single_sql:
                 continue
@@ -244,6 +264,9 @@ class ExeSQL(ToolBase, ABC):
             sql_res.append(convert_decimals(single_res.to_dict(orient='records')))
             formalized_content.append(single_res.to_markdown(index=False, floatfmt=".6f"))

+        cursor.close()
+        db.close()
+
         self.set_output("json", sql_res)
         self.set_output("formalized_content", "\n\n".join(formalized_content))
         return self.output("formalized_content")
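The ExeSQL changes also tighten resource handling: every early return inside the row loops now closes the open connection (or cursor and db) before bailing out, and the cursor/db pair is closed once the loop finishes. An equivalent way to express the same guarantee, shown only as a sketch, is to let a try/finally own the cleanup so no early-return path can leak the handle:

# Sketch: cleanup owned by finally, so cancellation can simply `return`.
def run_statements(db, sqls, is_canceled):
    cursor = db.cursor()
    results = []
    try:
        for single_sql in sqls:
            if is_canceled():
                return None          # no explicit close needed on this path
            cursor.execute(single_sql)
            results.append(cursor.fetchall())
        return results
    finally:
        cursor.close()               # runs on success, cancel, and error alike
        db.close()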
@ -59,17 +59,27 @@ class GitHub(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("GitHub processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("GitHub processing"):
return

try:
url = 'https://api.github.com/search/repositories?q=' + kwargs["query"] + '&sort=stars&order=desc&per_page=' + str(
self._param.top_n)
headers = {"Content-Type": "application/vnd.github+json", "X-GitHub-Api-Version": '2022-11-28'}
response = requests.get(url=url, headers=headers).json()

if self.check_if_canceled("GitHub processing"):
return

self._retrieve_chunks(response['items'],
get_title=lambda r: r["name"],
get_url=lambda r: r["html_url"],
@ -77,6 +87,9 @@ class GitHub(ToolBase, ABC):
self.set_output("json", response['items'])
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("GitHub processing"):
return

last_e = e
logging.exception(f"GitHub error: {e}")
time.sleep(self._param.delay_after_error)
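Note (illustrative, not part of the diff): the search tools changed here share one retry loop shape: bail out on cancellation, remember the last error, sleep between attempts, and re-raise if every attempt failed. A hedged sketch of that loop with hypothetical parameter names:

import logging
import time

def fetch_with_retries(fetch, max_retries=2, delay_after_error=1.0, is_canceled=lambda: False):
    # Sketch of the retry loop shared by the tools above (not the project's actual helper).
    last_e = None
    for _ in range(max_retries + 1):
        if is_canceled():
            return None  # cancellation wins over retries
        try:
            return fetch()
        except Exception as e:
            if is_canceled():
                return None
            last_e = e
            logging.exception("fetch error: %s", e)
            time.sleep(delay_after_error)
    if last_e:
        raise last_e

Usage would look like fetch_with_retries(lambda: requests.get(url).json(), is_canceled=task.check_if_canceled is not possible directly since that takes a stage name; a small lambda such as lambda: task.check_if_canceled("fetching") adapts it.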
@ -118,6 +118,9 @@ class Google(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Google processing"):
return

if not kwargs.get("q"):
self.set_output("formalized_content", "")
return ""
@ -132,8 +135,15 @@ class Google(ToolBase, ABC):
}
last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("Google processing"):
return

try:
search = GoogleSearch(params).get_dict()

if self.check_if_canceled("Google processing"):
return

self._retrieve_chunks(search["organic_results"],
get_title=lambda r: r["title"],
get_url=lambda r: r["link"],
@ -142,6 +152,9 @@ class Google(ToolBase, ABC):
self.set_output("json", search["organic_results"])
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("Google processing"):
return

last_e = e
logging.exception(f"Google error: {e}")
time.sleep(self._param.delay_after_error)
@ -65,15 +65,25 @@ class GoogleScholar(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("GoogleScholar processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("GoogleScholar processing"):
return

try:
scholar_client = scholarly.search_pubs(kwargs["query"], patents=self._param.patents, year_low=self._param.year_low,
year_high=self._param.year_high, sort_by=self._param.sort_by)

if self.check_if_canceled("GoogleScholar processing"):
return

self._retrieve_chunks(scholar_client,
get_title=lambda r: r['bib']['title'],
get_url=lambda r: r["pub_url"],
@ -82,6 +92,9 @@ class GoogleScholar(ToolBase, ABC):
self.set_output("json", list(scholar_client))
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("GoogleScholar processing"):
return

last_e = e
logging.exception(f"GoogleScholar error: {e}")
time.sleep(self._param.delay_after_error)
@ -50,6 +50,9 @@ class Jin10(ComponentBase, ABC):
component_name = "Jin10"

def _run(self, history, **kwargs):
if self.check_if_canceled("Jin10 processing"):
return

ans = self.get_input()
ans = " - ".join(ans["content"]) if "content" in ans else ""
if not ans:
@ -58,6 +61,9 @@ class Jin10(ComponentBase, ABC):
jin10_res = []
headers = {'secret-key': self._param.secret_key}
try:
if self.check_if_canceled("Jin10 processing"):
return

if self._param.type == "flash":
params = {
'category': self._param.flash_type,
@ -69,6 +75,8 @@ class Jin10(ComponentBase, ABC):
headers=headers, data=json.dumps(params))
response = response.json()
for i in response['data']:
if self.check_if_canceled("Jin10 processing"):
return
jin10_res.append({"content": i['data']['content']})
if self._param.type == "calendar":
params = {
@ -79,6 +87,8 @@ class Jin10(ComponentBase, ABC):
headers=headers, data=json.dumps(params))

response = response.json()
if self.check_if_canceled("Jin10 processing"):
return
jin10_res.append({"content": pd.DataFrame(response['data']).to_markdown()})
if self._param.type == "symbols":
params = {
@ -90,8 +100,12 @@ class Jin10(ComponentBase, ABC):
url='https://open-data-api.jin10.com/data-api/' + self._param.symbols_datatype + '?type=' + self._param.symbols_type,
headers=headers, data=json.dumps(params))
response = response.json()
if self.check_if_canceled("Jin10 processing"):
return
if self._param.symbols_datatype == "symbols":
for i in response['data']:
if self.check_if_canceled("Jin10 processing"):
return
i['Commodity Code'] = i['c']
i['Stock Exchange'] = i['e']
i['Commodity Name'] = i['n']
@ -99,6 +113,8 @@ class Jin10(ComponentBase, ABC):
del i['c'], i['e'], i['n'], i['t']
if self._param.symbols_datatype == "quotes":
for i in response['data']:
if self.check_if_canceled("Jin10 processing"):
return
i['Selling Price'] = i['a']
i['Buying Price'] = i['b']
i['Commodity Code'] = i['c']
@ -120,8 +136,12 @@ class Jin10(ComponentBase, ABC):
url='https://open-data-api.jin10.com/data-api/news',
headers=headers, data=json.dumps(params))
response = response.json()
if self.check_if_canceled("Jin10 processing"):
return
jin10_res.append({"content": pd.DataFrame(response['data']).to_markdown()})
except Exception as e:
if self.check_if_canceled("Jin10 processing"):
return
return Jin10.be_output("**ERROR**: " + str(e))

if not jin10_res:
@ -71,23 +71,40 @@ class PubMed(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("PubMed processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("PubMed processing"):
return

try:
Entrez.email = self._param.email
pubmedids = Entrez.read(Entrez.esearch(db='pubmed', retmax=self._param.top_n, term=kwargs["query"]))['IdList']

if self.check_if_canceled("PubMed processing"):
return

pubmedcnt = ET.fromstring(re.sub(r'<(/?)b>|<(/?)i>', '', Entrez.efetch(db='pubmed', id=",".join(pubmedids),
retmode="xml").read().decode("utf-8")))

if self.check_if_canceled("PubMed processing"):
return

self._retrieve_chunks(pubmedcnt.findall("PubmedArticle"),
get_title=lambda child: child.find("MedlineCitation").find("Article").find("ArticleTitle").text,
get_url=lambda child: "https://pubmed.ncbi.nlm.nih.gov/" + child.find("MedlineCitation").find("PMID").text,
get_content=lambda child: self._format_pubmed_content(child),)
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("PubMed processing"):
return

last_e = e
logging.exception(f"PubMed error: {e}")
time.sleep(self._param.delay_after_error)
@ -58,12 +58,18 @@ class QWeather(ComponentBase, ABC):
component_name = "QWeather"

def _run(self, history, **kwargs):
if self.check_if_canceled("Qweather processing"):
return

ans = self.get_input()
ans = "".join(ans["content"]) if "content" in ans else ""
if not ans:
return QWeather.be_output("")

try:
if self.check_if_canceled("Qweather processing"):
return

response = requests.get(
url="https://geoapi.qweather.com/v2/city/lookup?location=" + ans + "&key=" + self._param.web_apikey).json()
if response["code"] == "200":
@ -71,16 +77,23 @@ class QWeather(ComponentBase, ABC):
else:
return QWeather.be_output("**Error**" + self._param.error_code[response["code"]])

if self.check_if_canceled("Qweather processing"):
return

base_url = "https://api.qweather.com/v7/" if self._param.user_type == 'paid' else "https://devapi.qweather.com/v7/"

if self._param.type == "weather":
url = base_url + "weather/" + self._param.time_period + "?location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
response = requests.get(url=url).json()
if self.check_if_canceled("Qweather processing"):
return
if response["code"] == "200":
if self._param.time_period == "now":
return QWeather.be_output(str(response["now"]))
else:
qweather_res = [{"content": str(i) + "\n"} for i in response["daily"]]
if self.check_if_canceled("Qweather processing"):
return
if not qweather_res:
return QWeather.be_output("")

@ -92,6 +105,8 @@ class QWeather(ComponentBase, ABC):
elif self._param.type == "indices":
url = base_url + "indices/1d?type=0&location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
response = requests.get(url=url).json()
if self.check_if_canceled("Qweather processing"):
return
if response["code"] == "200":
indices_res = response["daily"][0]["date"] + "\n" + "\n".join(
[i["name"] + ": " + i["category"] + ", " + i["text"] for i in response["daily"]])
@ -103,9 +118,13 @@ class QWeather(ComponentBase, ABC):
elif self._param.type == "airquality":
url = base_url + "air/now?location=" + location_id + "&key=" + self._param.web_apikey + "&lang=" + self._param.lang
response = requests.get(url=url).json()
if self.check_if_canceled("Qweather processing"):
return
if response["code"] == "200":
return QWeather.be_output(str(response["now"]))
else:
return QWeather.be_output("**Error**" + self._param.error_code[response["code"]])
except Exception as e:
if self.check_if_canceled("Qweather processing"):
return
return QWeather.be_output("**Error**" + str(e))
@ -24,8 +24,7 @@ from api.db.services.document_service import DocumentService
from api.db.services.dialog_service import meta_filter
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMBundle
from api import settings
from common import settings
from common import globals
from common.connection_utils import timeout
from rag.app.tag import label_question
from rag.prompts.generator import cross_languages, kb_prompt, gen_meta_filter
@ -83,8 +82,12 @@ class Retrieval(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Retrieval processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", self._param.empty_response)
return

kb_ids: list[str] = []
for id in self._param.kb_ids:
@ -123,7 +126,7 @@ class Retrieval(ToolBase, ABC):
vars = self.get_input_elements_from_text(kwargs["query"])
vars = {k:o["value"] for k,o in vars.items()}
query = self.string_format(kwargs["query"], vars)

doc_ids=[]
if self._param.meta_data_filter!={}:
metas = DocumentService.get_meta_by_kbs(kb_ids)
@ -136,7 +139,7 @@ class Retrieval(ToolBase, ABC):
elif self._param.meta_data_filter.get("method") == "manual":
filters=self._param.meta_data_filter["manual"]
for flt in filters:
pat = re.compile(r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z:0-9_.-]+|sys\.[a-z_]+)\} *\}*")
pat = re.compile(self.variable_ref_patt)
s = flt["value"]
out_parts = []
last = 0
@ -171,7 +174,7 @@ class Retrieval(ToolBase, ABC):

if kbs:
query = re.sub(r"^user[::\s]*", "", query, flags=re.IGNORECASE)
kbinfos = globals.retriever.retrieval(
kbinfos = settings.retriever.retrieval(
query,
embd_mdl,
[kb.tenant_id for kb in kbs],
@ -185,9 +188,14 @@ class Retrieval(ToolBase, ABC):
rerank_mdl=rerank_mdl,
rank_feature=label_question(query, kbs),
)
if self.check_if_canceled("Retrieval processing"):
return

if self._param.toc_enhance:
chat_mdl = LLMBundle(self._canvas._tenant_id, LLMType.CHAT)
cks = globals.retriever.retrieval_by_toc(query, kbinfos["chunks"], [kb.tenant_id for kb in kbs], chat_mdl, self._param.top_n)
cks = settings.retriever.retrieval_by_toc(query, kbinfos["chunks"], [kb.tenant_id for kb in kbs], chat_mdl, self._param.top_n)
if self.check_if_canceled("Retrieval processing"):
return
if cks:
kbinfos["chunks"] = cks
if self._param.use_kg:
@ -196,6 +204,8 @@ class Retrieval(ToolBase, ABC):
kb_ids,
embd_mdl,
LLMBundle(self._canvas.get_tenant_id(), LLMType.CHAT))
if self.check_if_canceled("Retrieval processing"):
return
if ck["content_with_weight"]:
kbinfos["chunks"].insert(0, ck)
else:
@ -203,6 +213,8 @@ class Retrieval(ToolBase, ABC):

if self._param.use_kg and kbs:
ck = settings.kg_retriever.retrieval(query, [kb.tenant_id for kb in kbs], filtered_kb_ids, embd_mdl, LLMBundle(kbs[0].tenant_id, LLMType.CHAT))
if self.check_if_canceled("Retrieval processing"):
return
if ck["content_with_weight"]:
ck["content"] = ck["content_with_weight"]
del ck["content_with_weight"]
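Note (illustrative, not part of the diff): the manual meta-data filter branch above walks a filter value with a compiled pattern (now self.variable_ref_patt) and splices resolved variable references back into the string via out_parts/last. A hedged sketch of that substitution idea, reusing the pattern text that the hunk replaces and a hypothetical resolve callback:

import re

# Pattern copied from the removed line above; variable_ref_patt is assumed to be equivalent.
VARIABLE_REF_PATT = r"\{* *\{([a-zA-Z:0-9]+@[A-Za-z:0-9_.-]+|sys\.[a-z_]+)\} *\}*"

def substitute_refs(value, resolve):
    # Replace every {component@field} or {sys.*} reference with resolve(ref).
    pat = re.compile(VARIABLE_REF_PATT)
    out_parts, last = [], 0
    for m in pat.finditer(value):
        out_parts.append(value[last:m.start()])      # literal text before the reference
        out_parts.append(str(resolve(m.group(1))))   # resolved reference value
        last = m.end()
    out_parts.append(value[last:])                   # trailing literal text
    return "".join(out_parts)

# Example: substitute_refs("status is {flow:1@state}", lambda ref: "ok") returns "status is ok".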
@ -79,6 +79,9 @@ class SearXNG(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("SearXNG processing"):
return

# Gracefully handle try-run without inputs
query = kwargs.get("query")
if not query or not isinstance(query, str) or not query.strip():
@ -93,6 +96,9 @@ class SearXNG(ToolBase, ABC):

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("SearXNG processing"):
return

try:
search_params = {
'q': query,
@ -110,6 +116,9 @@ class SearXNG(ToolBase, ABC):
)
response.raise_for_status()

if self.check_if_canceled("SearXNG processing"):
return

data = response.json()

if not data or not isinstance(data, dict):
@ -121,6 +130,9 @@ class SearXNG(ToolBase, ABC):

results = results[:self._param.top_n]

if self.check_if_canceled("SearXNG processing"):
return

self._retrieve_chunks(results,
get_title=lambda r: r.get("title", ""),
get_url=lambda r: r.get("url", ""),
@ -130,10 +142,16 @@ class SearXNG(ToolBase, ABC):
return self.output("formalized_content")

except requests.RequestException as e:
if self.check_if_canceled("SearXNG processing"):
return

last_e = f"Network error: {e}"
logging.exception(f"SearXNG network error: {e}")
time.sleep(self._param.delay_after_error)
except Exception as e:
if self.check_if_canceled("SearXNG processing"):
return

last_e = str(e)
logging.exception(f"SearXNG error: {e}")
time.sleep(self._param.delay_after_error)
@ -103,6 +103,9 @@ class TavilySearch(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("TavilySearch processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""
@ -113,10 +116,16 @@ class TavilySearch(ToolBase, ABC):
if fld not in kwargs:
kwargs[fld] = getattr(self._param, fld)
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("TavilySearch processing"):
return

try:
kwargs["include_images"] = False
kwargs["include_raw_content"] = False
res = self.tavily_client.search(**kwargs)
if self.check_if_canceled("TavilySearch processing"):
return

self._retrieve_chunks(res["results"],
get_title=lambda r: r["title"],
get_url=lambda r: r["url"],
@ -125,6 +134,9 @@ class TavilySearch(ToolBase, ABC):
self.set_output("json", res["results"])
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("TavilySearch processing"):
return

last_e = e
logging.exception(f"Tavily error: {e}")
time.sleep(self._param.delay_after_error)
@ -201,6 +213,9 @@ class TavilyExtract(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 10*60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("TavilyExtract processing"):
return

self.tavily_client = TavilyClient(api_key=self._param.api_key)
last_e = None
for fld in ["urls", "extract_depth", "format"]:
@ -209,12 +224,21 @@ class TavilyExtract(ToolBase, ABC):
if kwargs.get("urls") and isinstance(kwargs["urls"], str):
kwargs["urls"] = kwargs["urls"].split(",")
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("TavilyExtract processing"):
return

try:
kwargs["include_images"] = False
res = self.tavily_client.extract(**kwargs)
if self.check_if_canceled("TavilyExtract processing"):
return

self.set_output("json", res["results"])
return self.output("json")
except Exception as e:
if self.check_if_canceled("TavilyExtract processing"):
return

last_e = e
logging.exception(f"Tavily error: {e}")
if last_e:
@ -43,12 +43,18 @@ class TuShare(ComponentBase, ABC):
component_name = "TuShare"

def _run(self, history, **kwargs):
if self.check_if_canceled("TuShare processing"):
return

ans = self.get_input()
ans = ",".join(ans["content"]) if "content" in ans else ""
if not ans:
return TuShare.be_output("")

try:
if self.check_if_canceled("TuShare processing"):
return

tus_res = []
params = {
"api_name": "news",
@ -58,12 +64,18 @@ class TuShare(ComponentBase, ABC):
}
response = requests.post(url="http://api.tushare.pro", data=json.dumps(params).encode('utf-8'))
response = response.json()
if self.check_if_canceled("TuShare processing"):
return
if response['code'] != 0:
return TuShare.be_output(response['msg'])
df = pd.DataFrame(response['data']['items'])
df.columns = response['data']['fields']
if self.check_if_canceled("TuShare processing"):
return
tus_res.append({"content": (df[df['content'].str.contains(self._param.keyword, case=False)]).to_markdown()})
except Exception as e:
if self.check_if_canceled("TuShare processing"):
return
return TuShare.be_output("**ERROR**: " + str(e))

if not tus_res:
@ -70,19 +70,31 @@ class WenCai(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 12)))
def _invoke(self, **kwargs):
if self.check_if_canceled("WenCai processing"):
return

if not kwargs.get("query"):
self.set_output("report", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("WenCai processing"):
return

try:
wencai_res = []
res = pywencai.get(query=kwargs["query"], query_type=self._param.query_type, perpage=self._param.top_n)
if self.check_if_canceled("WenCai processing"):
return

if isinstance(res, pd.DataFrame):
wencai_res.append(res.to_markdown())
elif isinstance(res, dict):
for item in res.items():
if self.check_if_canceled("WenCai processing"):
return

if isinstance(item[1], list):
wencai_res.append(item[0] + "\n" + pd.DataFrame(item[1]).to_markdown())
elif isinstance(item[1], str):
@ -100,6 +112,9 @@ class WenCai(ToolBase, ABC):
self.set_output("report", "\n\n".join(wencai_res))
return self.output("report")
except Exception as e:
if self.check_if_canceled("WenCai processing"):
return

last_e = e
logging.exception(f"WenCai error: {e}")
time.sleep(self._param.delay_after_error)
@ -66,17 +66,26 @@ class Wikipedia(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("Wikipedia processing"):
return

if not kwargs.get("query"):
self.set_output("formalized_content", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("Wikipedia processing"):
return

try:
wikipedia.set_lang(self._param.language)
wiki_engine = wikipedia
pages = []
for p in wiki_engine.search(kwargs["query"], results=self._param.top_n):
if self.check_if_canceled("Wikipedia processing"):
return

try:
pages.append(wikipedia.page(p))
except Exception:
@ -87,6 +96,9 @@ class Wikipedia(ToolBase, ABC):
get_content=lambda r: r.summary)
return self.output("formalized_content")
except Exception as e:
if self.check_if_canceled("Wikipedia processing"):
return

last_e = e
logging.exception(f"Wikipedia error: {e}")
time.sleep(self._param.delay_after_error)
@ -74,15 +74,24 @@ class YahooFinance(ToolBase, ABC):

@timeout(int(os.environ.get("COMPONENT_EXEC_TIMEOUT", 60)))
def _invoke(self, **kwargs):
if self.check_if_canceled("YahooFinance processing"):
return

if not kwargs.get("stock_code"):
self.set_output("report", "")
return ""

last_e = ""
for _ in range(self._param.max_retries+1):
if self.check_if_canceled("YahooFinance processing"):
return

yohoo_res = []
try:
msft = yf.Ticker(kwargs["stock_code"])
if self.check_if_canceled("YahooFinance processing"):
return

if self._param.info:
yohoo_res.append("# Information:\n" + pd.Series(msft.info).to_markdown() + "\n")
if self._param.history:
@ -100,6 +109,9 @@ class YahooFinance(ToolBase, ABC):
self.set_output("report", "\n\n".join(yohoo_res))
return self.output("report")
except Exception as e:
if self.check_if_canceled("YahooFinance processing"):
return

last_e = e
logging.exception(f"YahooFinance error: {e}")
time.sleep(self._param.delay_after_error)
@ -33,7 +33,7 @@ from api.utils import commands
from flask_mail import Mail
from flask_session import Session
from flask_login import LoginManager
from api import settings
from common import settings
from api.utils.api_utils import server_error_response
from api.constants import API_VERSION
@ -40,14 +40,13 @@ from api.utils.api_utils import server_error_response, get_data_error_result, ge
from api.utils.file_utils import filename_type, thumbnail
from rag.app.tag import label_question
from rag.prompts.generator import keyword_extraction
from rag.utils.storage_factory import STORAGE_IMPL
from common.time_utils import current_timestamp, datetime_format

from api.db.services.canvas_service import UserCanvasService
from agent.canvas import Canvas
from functools import partial
from pathlib import Path
from common import globals
from common import settings


@manager.route('/new_token', methods=['POST']) # noqa: F821
@ -428,10 +427,10 @@ def upload():
message="This type of file has not been supported yet!")

location = filename
while STORAGE_IMPL.obj_exist(kb_id, location):
while settings.STORAGE_IMPL.obj_exist(kb_id, location):
location += "_"
blob = request.files['file'].read()
STORAGE_IMPL.put(kb_id, location, blob)
settings.STORAGE_IMPL.put(kb_id, location, blob)
doc = {
"id": get_uuid(),
"kb_id": kb.id,
@ -467,10 +466,7 @@ def upload():
if "run" in form_data.keys():
if request.form.get("run").strip() == "1":
try:
info = {"run": 1, "progress": 0}
info = {"run": 1, "progress": 0, "progress_msg": "", "chunk_num": 0, "token_num": 0}
info["progress_msg"] = ""
info["chunk_num"] = 0
info["token_num"] = 0
DocumentService.update_by_id(doc["id"], info)
# if str(req["run"]) == TaskStatus.CANCEL.value:
tenant_id = DocumentService.get_tenant_id(doc["id"])
@ -538,7 +534,7 @@ def list_chunks():
)
kb_ids = KnowledgebaseService.get_kb_ids(tenant_id)

res = globals.retriever.chunk_list(doc_id, tenant_id, kb_ids)
res = settings.retriever.chunk_list(doc_id, tenant_id, kb_ids)
res = [
{
"content": res_item["content_with_weight"],
@ -564,7 +560,7 @@ def get_chunk(chunk_id):
try:
tenant_id = objs[0].tenant_id
kb_ids = KnowledgebaseService.get_kb_ids(tenant_id)
chunk = globals.docStoreConn.get(chunk_id, search.index_name(tenant_id), kb_ids)
chunk = settings.docStoreConn.get(chunk_id, search.index_name(tenant_id), kb_ids)
if chunk is None:
return server_error_response(Exception("Chunk not found"))
k = []
@ -699,7 +695,7 @@ def document_rm():
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
File2DocumentService.delete_by_document_id(doc_id)

STORAGE_IMPL.rm(b, n)
settings.STORAGE_IMPL.rm(b, n)
except Exception as e:
errors += str(e)

@ -727,8 +723,7 @@ def completion_faq():
if "quote" not in req:
req["quote"] = True

msg = []
msg = [{"role": "user", "content": req["word"]}]
msg.append({"role": "user", "content": req["word"]})
if not msg[-1].get("id"):
msg[-1]["id"] = get_uuid()
message_id = msg[-1]["id"]
@ -792,7 +787,7 @@ def completion_faq():
if ans["reference"]["chunks"][chunk_idx]["img_id"]:
try:
bkt, nm = ans["reference"]["chunks"][chunk_idx]["img_id"].split("-")
response = STORAGE_IMPL.get(bkt, nm)
response = settings.STORAGE_IMPL.get(bkt, nm)
data_type_picture["url"] = base64.b64encode(response).decode('utf-8')
data.append(data_type_picture)
break
@ -837,7 +832,7 @@ def completion_faq():
if ans["reference"]["chunks"][chunk_idx]["img_id"]:
try:
bkt, nm = ans["reference"]["chunks"][chunk_idx]["img_id"].split("-")
response = STORAGE_IMPL.get(bkt, nm)
response = settings.STORAGE_IMPL.get(bkt, nm)
data_type_picture["url"] = base64.b64encode(response).decode('utf-8')
data.append(data_type_picture)
break
@ -886,7 +881,7 @@ def retrieval():
if req.get("keyword", False):
chat_mdl = LLMBundle(kbs[0].tenant_id, LLMType.CHAT)
question += keyword_extraction(chat_mdl, question)
ranks = globals.retriever.retrieval(question, embd_mdl, kbs[0].tenant_id, kb_ids, page, size,
ranks = settings.retriever.retrieval(question, embd_mdl, kbs[0].tenant_id, kb_ids, page, size,
similarity_threshold, vector_similarity_weight, top,
doc_ids, rerank_mdl=rerank_mdl, highlight= highlight,
rank_feature=label_question(question, kbs))
@ -34,7 +34,7 @@ class GithubOAuthClient(OAuthClient):

def fetch_user_info(self, access_token, **kwargs):
"""
Fetch github user info.
Fetch GitHub user info.
"""
user_info = {}
try:
@ -43,7 +43,8 @@ class OIDCClient(OAuthClient):
self.jwks_uri = config['jwks_uri']


def _load_oidc_metadata(self, issuer):
@staticmethod
def _load_oidc_metadata(issuer):
"""
Load OIDC metadata from `/.well-known/openid-configuration`.
"""
@ -45,7 +45,7 @@ from api.utils.file_utils import filename_type, read_potential_broken_pdf
from rag.flow.pipeline import Pipeline
from rag.nlp import search
from rag.utils.redis_conn import REDIS_CONN
from common import globals
from common import settings


@manager.route('/templates', methods=['GET']) # noqa: F821
@ -156,7 +156,7 @@ def run():
return get_json_result(data={"message_id": task_id})

try:
canvas = Canvas(cvs.dsl, current_user.id, req["id"])
canvas = Canvas(cvs.dsl, current_user.id)
except Exception as e:
return server_error_response(e)

@ -168,8 +168,10 @@ def run():

cvs.dsl = json.loads(str(canvas))
UserCanvasService.update_by_id(req["id"], cvs.to_dict())

except Exception as e:
logging.exception(e)
canvas.cancel_task()
yield "data:" + json.dumps({"code": 500, "message": str(e), "data": False}, ensure_ascii=False) + "\n\n"

resp = Response(sse(), mimetype="text/event-stream")
@ -177,6 +179,7 @@ def run():
resp.headers.add_header("Connection", "keep-alive")
resp.headers.add_header("X-Accel-Buffering", "no")
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
resp.call_on_close(lambda: canvas.cancel_task())
return resp


@ -192,8 +195,8 @@ def rerun():
if 0 < doc["progress"] < 1:
return get_data_error_result(message=f"`{doc['name']}` is processing...")

if globals.docStoreConn.indexExist(search.index_name(current_user.id), doc["kb_id"]):
if settings.docStoreConn.indexExist(search.index_name(current_user.id), doc["kb_id"]):
globals.docStoreConn.delete({"doc_id": doc["id"]}, search.index_name(current_user.id), doc["kb_id"])
settings.docStoreConn.delete({"doc_id": doc["id"]}, search.index_name(current_user.id), doc["kb_id"])
doc["progress_msg"] = ""
doc["chunk_num"] = 0
doc["token_num"] = 0
@ -410,27 +413,27 @@ def test_db_connect():
ibm_db.close(conn)
return get_json_result(data="Database Connection Successful!")
elif req["db_type"] == 'trino':
def _parse_catalog_schema(db: str):
def _parse_catalog_schema(db_name: str):
if not db:
if not db_name:
return None, None
if "." in db:
if "." in db_name:
c, s = db.split(".", 1)
catalog_name, schema_name = db_name.split(".", 1)
elif "/" in db:
elif "/" in db_name:
c, s = db.split("/", 1)
catalog_name, schema_name = db_name.split("/", 1)
else:
c, s = db, "default"
catalog_name, schema_name = db_name, "default"
return c, s
return catalog_name, schema_name
try:
import trino
import os
from trino.auth import BasicAuthentication
except Exception:
except Exception as e:
return server_error_response("Missing dependency 'trino'. Please install: pip install trino")
return server_error_response(f"Missing dependency 'trino'. Please install: pip install trino, detail: {e}")

catalog, schema = _parse_catalog_schema(req["database"])
if not catalog:
return server_error_response("For Trino, 'database' must be 'catalog.schema' or at least 'catalog'.")

http_scheme = "https" if os.environ.get("TRINO_USE_TLS", "0") == "1" else "http"

auth = None
@ -479,7 +482,6 @@ def getlistversion(canvas_id):
@login_required
def getversion( version_id):
try:

e, version = UserCanvasVersionService.get_by_id(version_id)
if version:
return get_json_result(data=version.to_dict())
@ -546,11 +548,11 @@ def trace():
cvs_id = request.args.get("canvas_id")
msg_id = request.args.get("message_id")
try:
bin = REDIS_CONN.get(f"{cvs_id}-{msg_id}-logs")
binary = REDIS_CONN.get(f"{cvs_id}-{msg_id}-logs")
if not bin:
if not binary:
return get_json_result(data={})

return get_json_result(data=json.loads(bin.encode("utf-8")))
return get_json_result(data=json.loads(binary.encode("utf-8")))
except Exception as e:
logging.exception(e)

@ -604,4 +606,4 @@ def download():
id = request.args.get("id")
created_by = request.args.get("created_by")
blob = FileService.get_blob(created_by, id)
return flask.make_response(blob)
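Note (illustrative, not part of the diff): the run() changes above wire task cancellation to the SSE response itself via resp.call_on_close, so a client disconnect cancels the canvas task instead of leaving it running. A minimal, self-contained Flask sketch of that hook with a hypothetical cancellation flag standing in for canvas.cancel_task():

import json
import time
from flask import Flask, Response

app = Flask(__name__)

@app.route("/run")
def run():
    canceled = {"flag": False}  # stands in for the canvas task's cancellation state

    def sse():
        for i in range(10):
            if canceled["flag"]:
                return  # stop producing events once the client is gone
            yield "data:" + json.dumps({"step": i}, ensure_ascii=False) + "\n\n"
            time.sleep(1)

    resp = Response(sse(), mimetype="text/event-stream")
    resp.headers.add_header("Cache-Control", "no-cache")
    # Mirrors the change above: cancel the long-running task when the response is closed.
    resp.call_on_close(lambda: canceled.update(flag=True))
    return resp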
@ -21,7 +21,6 @@ import xxhash
from flask import request
from flask_login import current_user, login_required

from api import settings
from api.db.services.dialog_service import meta_filter
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
@ -33,10 +32,9 @@ from rag.app.qa import beAdoc, rmPrefix
from rag.app.tag import label_question
from rag.nlp import rag_tokenizer, search
from rag.prompts.generator import gen_meta_filter, cross_languages, keyword_extraction
from rag.settings import PAGERANK_FLD
from common.string_utils import remove_redundant_spaces
from common.constants import RetCode, LLMType, ParserType
from common.constants import RetCode, LLMType, ParserType, PAGERANK_FLD
from common import globals
from common import settings


@manager.route('/list', methods=['POST']) # noqa: F821
@ -61,7 +59,7 @@ def list_chunk():
}
if "available_int" in req:
query["available_int"] = int(req["available_int"])
sres = globals.retriever.search(query, search.index_name(tenant_id), kb_ids, highlight=["content_ltks"])
sres = settings.retriever.search(query, search.index_name(tenant_id), kb_ids, highlight=["content_ltks"])
res = {"total": sres.total, "chunks": [], "doc": doc.to_dict()}
for id in sres.ids:
d = {
@ -99,7 +97,7 @@ def get():
return get_data_error_result(message="Tenant not found!")
for tenant in tenants:
kb_ids = KnowledgebaseService.get_kb_ids(tenant.tenant_id)
chunk = globals.docStoreConn.get(chunk_id, search.index_name(tenant.tenant_id), kb_ids)
chunk = settings.docStoreConn.get(chunk_id, search.index_name(tenant.tenant_id), kb_ids)
if chunk:
break
if chunk is None:
@ -171,7 +169,7 @@ def set():
v, c = embd_mdl.encode([doc.name, req["content_with_weight"] if not d.get("question_kwd") else "\n".join(d["question_kwd"])])
v = 0.1 * v[0] + 0.9 * v[1] if doc.parser_id != ParserType.QA else v[1]
d["q_%d_vec" % len(v)] = v.tolist()
globals.docStoreConn.update({"id": req["chunk_id"]}, d, search.index_name(tenant_id), doc.kb_id)
settings.docStoreConn.update({"id": req["chunk_id"]}, d, search.index_name(tenant_id), doc.kb_id)
return get_json_result(data=True)
except Exception as e:
return server_error_response(e)
@ -187,7 +185,7 @@ def switch():
if not e:
return get_data_error_result(message="Document not found!")
for cid in req["chunk_ids"]:
if not globals.docStoreConn.update({"id": cid},
if not settings.docStoreConn.update({"id": cid},
{"available_int": int(req["available_int"])},
search.index_name(DocumentService.get_tenant_id(req["doc_id"])),
doc.kb_id):
@ -201,13 +199,12 @@ def switch():
@login_required
@validate_request("chunk_ids", "doc_id")
def rm():
from rag.utils.storage_factory import STORAGE_IMPL
req = request.json
try:
e, doc = DocumentService.get_by_id(req["doc_id"])
if not e:
return get_data_error_result(message="Document not found!")
if not globals.docStoreConn.delete({"id": req["chunk_ids"]},
if not settings.docStoreConn.delete({"id": req["chunk_ids"]},
search.index_name(DocumentService.get_tenant_id(req["doc_id"])),
doc.kb_id):
return get_data_error_result(message="Chunk deleting failure")
@ -215,8 +212,8 @@ def rm():
chunk_number = len(deleted_chunk_ids)
DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
for cid in deleted_chunk_ids:
if STORAGE_IMPL.obj_exist(doc.kb_id, cid):
if settings.STORAGE_IMPL.obj_exist(doc.kb_id, cid):
STORAGE_IMPL.rm(doc.kb_id, cid)
settings.STORAGE_IMPL.rm(doc.kb_id, cid)
return get_json_result(data=True)
except Exception as e:
return server_error_response(e)
@ -271,7 +268,7 @@ def create():
v, c = embd_mdl.encode([doc.name, req["content_with_weight"] if not d["question_kwd"] else "\n".join(d["question_kwd"])])
v = 0.1 * v[0] + 0.9 * v[1]
d["q_%d_vec" % len(v)] = v.tolist()
globals.docStoreConn.insert([d], search.index_name(tenant_id), doc.kb_id)
settings.docStoreConn.insert([d], search.index_name(tenant_id), doc.kb_id)

DocumentService.increment_chunk_num(
doc.id, doc.kb_id, c, 1, 0)
|
||||||
@ -347,7 +344,7 @@ def retrieval_test():
|
|||||||
question += keyword_extraction(chat_mdl, question)
|
question += keyword_extraction(chat_mdl, question)
|
||||||
|
|
||||||
labels = label_question(question, [kb])
|
labels = label_question(question, [kb])
|
||||||
ranks = globals.retriever.retrieval(question, embd_mdl, tenant_ids, kb_ids, page, size,
|
ranks = settings.retriever.retrieval(question, embd_mdl, tenant_ids, kb_ids, page, size,
|
||||||
float(req.get("similarity_threshold", 0.0)),
|
float(req.get("similarity_threshold", 0.0)),
|
||||||
float(req.get("vector_similarity_weight", 0.3)),
|
float(req.get("vector_similarity_weight", 0.3)),
|
||||||
top,
|
top,
|
||||||
@ -386,7 +383,7 @@ def knowledge_graph():
|
|||||||
"doc_ids": [doc_id],
|
"doc_ids": [doc_id],
|
||||||
"knowledge_graph_kwd": ["graph", "mind_map"]
|
"knowledge_graph_kwd": ["graph", "mind_map"]
|
||||||
}
|
}
|
||||||
sres = globals.retriever.search(req, search.index_name(tenant_id), kb_ids)
|
sres = settings.retriever.search(req, search.index_name(tenant_id), kb_ids)
|
||||||
obj = {"graph": {}, "mind_map": {}}
|
obj = {"graph": {}, "mind_map": {}}
|
||||||
for id in sres.ids[:2]:
|
for id in sres.ids[:2]:
|
||||||
ty = sres.field[id]["knowledge_graph_kwd"]
|
ty = sres.field[id]["knowledge_graph_kwd"]
|
||||||
|
|||||||
@ -13,16 +13,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import logging
import time
import uuid
from html import escape
from typing import Any

from flask import request
from flask import make_response, request
from flask_login import login_required, current_user
from flask_login import current_user, login_required
from google_auth_oauthlib.flow import Flow

from api.db import InputType
from api.db.services.connector_service import ConnectorService, Connector2KbService, SyncLogsService
from api.db.services.connector_service import ConnectorService, SyncLogsService
from api.utils.api_utils import get_json_result, validate_request, get_data_error_result
from api.utils.api_utils import get_data_error_result, get_json_result, validate_request
from common.misc_utils import get_uuid
from common.constants import RetCode, TaskStatus
from common.data_source.config import GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI, DocumentSource
from common.data_source.google_util.constant import GOOGLE_DRIVE_WEB_OAUTH_POPUP_TEMPLATE, GOOGLE_SCOPES
from common.misc_utils import get_uuid
from rag.utils.redis_conn import REDIS_CONN


@manager.route("/set", methods=["POST"])  # noqa: F821
@login_required
@ -42,12 +52,12 @@ def set_connector():
"config": req["config"],
"refresh_freq": int(req.get("refresh_freq", 30)),
"prune_freq": int(req.get("prune_freq", 720)),
"timeout_secs": int(req.get("timeout_secs", 60*29)),
"timeout_secs": int(req.get("timeout_secs", 60 * 29)),
"status": TaskStatus.SCHEDULE
"status": TaskStatus.SCHEDULE,
}
conn["status"] = TaskStatus.SCHEDULE
ConnectorService.save(**conn)

ConnectorService.save(**conn)
time.sleep(1)
e, conn = ConnectorService.get_by_id(req["id"])

@ -73,7 +83,8 @@ def get_connector(connector_id):
@login_required
def list_logs(connector_id):
req = request.args.to_dict(flat=True)
return get_json_result(data=SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15))))
arr, total = SyncLogsService.list_sync_tasks(connector_id, int(req.get("page", 1)), int(req.get("page_size", 15)))
return get_json_result(data={"total": total, "logs": arr})


@manager.route("/<connector_id>/resume", methods=["PUT"])  # noqa: F821
@ -87,14 +98,14 @@ def resume(connector_id):
return get_json_result(data=True)


@manager.route("/<connector_id>/link", methods=["POST"])  # noqa: F821
@manager.route("/<connector_id>/rebuild", methods=["PUT"])  # noqa: F821
@validate_request("kb_ids")
@login_required
def link_kb(connector_id):
@validate_request("kb_id")
def rebuild(connector_id):
req = request.json
errors = Connector2KbService.link_kb(connector_id, req["kb_ids"], current_user.id)
err = ConnectorService.rebuild(req["kb_id"], connector_id, current_user.id)
if errors:
if err:
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
return get_json_result(data=False, message=err, code=RetCode.SERVER_ERROR)
return get_json_result(data=True)


@ -103,4 +114,182 @@ def link_kb(connector_id):
def rm_connector(connector_id):
ConnectorService.resume(connector_id, TaskStatus.CANCEL)
ConnectorService.delete_by_id(connector_id)
return get_json_result(data=True)


GOOGLE_WEB_FLOW_STATE_PREFIX = "google_drive_web_flow_state"
GOOGLE_WEB_FLOW_RESULT_PREFIX = "google_drive_web_flow_result"
WEB_FLOW_TTL_SECS = 15 * 60


def _web_state_cache_key(flow_id: str) -> str:
return f"{GOOGLE_WEB_FLOW_STATE_PREFIX}:{flow_id}"


def _web_result_cache_key(flow_id: str) -> str:
return f"{GOOGLE_WEB_FLOW_RESULT_PREFIX}:{flow_id}"


def _load_credentials(payload: str | dict[str, Any]) -> dict[str, Any]:
if isinstance(payload, dict):
return payload
try:
return json.loads(payload)
except json.JSONDecodeError as exc:  # pragma: no cover - defensive
raise ValueError("Invalid Google credentials JSON.") from exc


def _get_web_client_config(credentials: dict[str, Any]) -> dict[str, Any]:
web_section = credentials.get("web")
if not isinstance(web_section, dict):
raise ValueError("Google OAuth JSON must include a 'web' client configuration to use browser-based authorization.")
return {"web": web_section}


def _render_web_oauth_popup(flow_id: str, success: bool, message: str):
status = "success" if success else "error"
auto_close = "window.close();" if success else ""
escaped_message = escape(message)
payload_json = json.dumps(
{
"type": "ragflow-google-drive-oauth",
"status": status,
"flowId": flow_id or "",
"message": message,
}
)
html = GOOGLE_DRIVE_WEB_OAUTH_POPUP_TEMPLATE.format(
heading="Authorization complete" if success else "Authorization failed",
message=escaped_message,
payload_json=payload_json,
auto_close=auto_close,
)
response = make_response(html, 200)
response.headers["Content-Type"] = "text/html; charset=utf-8"
return response


@manager.route("/google-drive/oauth/web/start", methods=["POST"])  # noqa: F821
@login_required
@validate_request("credentials")
def start_google_drive_web_oauth():
if not GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI:
return get_json_result(
code=RetCode.SERVER_ERROR,
message="Google Drive OAuth redirect URI is not configured on the server.",
)

req = request.json or {}
raw_credentials = req.get("credentials", "")
try:
credentials = _load_credentials(raw_credentials)
except ValueError as exc:
return get_json_result(code=RetCode.ARGUMENT_ERROR, message=str(exc))

if credentials.get("refresh_token"):
return get_json_result(
code=RetCode.ARGUMENT_ERROR,
message="Uploaded credentials already include a refresh token.",
)

try:
client_config = _get_web_client_config(credentials)
except ValueError as exc:
return get_json_result(code=RetCode.ARGUMENT_ERROR, message=str(exc))

flow_id = str(uuid.uuid4())
try:
flow = Flow.from_client_config(client_config, scopes=GOOGLE_SCOPES[DocumentSource.GOOGLE_DRIVE])
flow.redirect_uri = GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI
authorization_url, _ = flow.authorization_url(
access_type="offline",
include_granted_scopes="true",
prompt="consent",
state=flow_id,
)
except Exception as exc:  # pragma: no cover - defensive
logging.exception("Failed to create Google OAuth flow: %s", exc)
return get_json_result(
code=RetCode.SERVER_ERROR,
message="Failed to initialize Google OAuth flow. Please verify the uploaded client configuration.",
)

cache_payload = {
"user_id": current_user.id,
"client_config": client_config,
"created_at": int(time.time()),
}
REDIS_CONN.set_obj(_web_state_cache_key(flow_id), cache_payload, WEB_FLOW_TTL_SECS)

return get_json_result(
data={
"flow_id": flow_id,
"authorization_url": authorization_url,
"expires_in": WEB_FLOW_TTL_SECS,
}
)


@manager.route("/google-drive/oauth/web/callback", methods=["GET"])  # noqa: F821
def google_drive_web_oauth_callback():
state_id = request.args.get("state")
error = request.args.get("error")
error_description = request.args.get("error_description") or error

if not state_id:
return _render_web_oauth_popup("", False, "Missing OAuth state parameter.")

state_cache = REDIS_CONN.get(_web_state_cache_key(state_id))
if not state_cache:
return _render_web_oauth_popup(state_id, False, "Authorization session expired. Please restart from the main window.")

state_obj = json.loads(state_cache)
client_config = state_obj.get("client_config")
if not client_config:
REDIS_CONN.delete(_web_state_cache_key(state_id))
return _render_web_oauth_popup(state_id, False, "Authorization session was invalid. Please retry.")

if error:
REDIS_CONN.delete(_web_state_cache_key(state_id))
return _render_web_oauth_popup(state_id, False, error_description or "Authorization was cancelled.")

code = request.args.get("code")
if not code:
return _render_web_oauth_popup(state_id, False, "Missing authorization code from Google.")

try:
flow = Flow.from_client_config(client_config, scopes=GOOGLE_SCOPES[DocumentSource.GOOGLE_DRIVE])
flow.redirect_uri = GOOGLE_DRIVE_WEB_OAUTH_REDIRECT_URI
flow.fetch_token(code=code)
except Exception as exc:  # pragma: no cover - defensive
logging.exception("Failed to exchange Google OAuth code: %s", exc)
REDIS_CONN.delete(_web_state_cache_key(state_id))
return _render_web_oauth_popup(state_id, False, "Failed to exchange tokens with Google. Please retry.")

creds_json = flow.credentials.to_json()
result_payload = {
"user_id": state_obj.get("user_id"),
"credentials": creds_json,
}
REDIS_CONN.set_obj(_web_result_cache_key(state_id), result_payload, WEB_FLOW_TTL_SECS)
REDIS_CONN.delete(_web_state_cache_key(state_id))

return _render_web_oauth_popup(state_id, True, "Authorization completed successfully.")


@manager.route("/google-drive/oauth/web/result", methods=["POST"])  # noqa: F821
@login_required
@validate_request("flow_id")
def poll_google_drive_web_result():
req = request.json or {}
flow_id = req.get("flow_id")
cache_raw = REDIS_CONN.get(_web_result_cache_key(flow_id))
if not cache_raw:
return get_json_result(code=RetCode.RUNNING, message="Authorization is still pending.")

result = json.loads(cache_raw)
if result.get("user_id") != current_user.id:
return get_json_result(code=RetCode.PERMISSION_ERROR, message="You are not allowed to access this authorization result.")

REDIS_CONN.delete(_web_result_cache_key(flow_id))
return get_json_result(data={"credentials": result.get("credentials")})
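
The three routes added above implement a popup-based Google Drive authorization flow: the client starts a web flow with the uploaded OAuth "web" client JSON, the user consents in a popup that lands on the callback route, and the client polls the result route until the exchanged credentials (now including a refresh token) appear in Redis. Below is a minimal caller sketch in Python, assuming the connector blueprint is mounted at /v1/connector, that the requests session is already authenticated, and that responses use the usual {code, message, data} envelope; all three are assumptions not shown in this diff.

import time
import webbrowser

import requests

BASE = "http://localhost:9380/v1/connector"  # assumed mount point, not shown in this diff


def authorize_google_drive(session: requests.Session, client_json: str) -> str:
    # Kick off the web flow with the uploaded OAuth "web" client configuration.
    start = session.post(f"{BASE}/google-drive/oauth/web/start", json={"credentials": client_json}).json()
    flow = start["data"]
    webbrowser.open(flow["authorization_url"])  # the user grants consent in the popup window

    # Poll until the callback has stored the exchanged credentials in Redis, or the flow expires.
    deadline = time.time() + flow["expires_in"]
    while time.time() < deadline:
        res = session.post(f"{BASE}/google-drive/oauth/web/result", json={"flow_id": flow["flow_id"]}).json()
        data = res.get("data") or {}
        if data.get("credentials"):
            return data["credentials"]  # JSON string that now includes a refresh token
        time.sleep(2)
    raise TimeoutError("Google Drive authorization was not completed in time.")
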
@ -47,8 +47,7 @@ from common.constants import RetCode, VALID_TASK_STATUS, ParserType, TaskStatus
from api.utils.web_utils import CONTENT_TYPE_MAP, html2pdf, is_valid_url
from deepdoc.parser.html_parser import RAGFlowHtmlParser
from rag.nlp import search, rag_tokenizer
from rag.utils.storage_factory import STORAGE_IMPL
from common import settings
from common import globals


@manager.route("/upload", methods=["POST"])  # noqa: F821
@ -119,9 +118,9 @@ def web_crawl():
raise RuntimeError("This type of file has not been supported yet!")

location = filename
while STORAGE_IMPL.obj_exist(kb_id, location):
while settings.STORAGE_IMPL.obj_exist(kb_id, location):
location += "_"
STORAGE_IMPL.put(kb_id, location, blob)
settings.STORAGE_IMPL.put(kb_id, location, blob)
doc = {
"id": get_uuid(),
"kb_id": kb.id,
@ -261,6 +260,8 @@ def list_docs():
for doc_item in docs:
if doc_item["thumbnail"] and not doc_item["thumbnail"].startswith(IMG_BASE64_PREFIX):
doc_item["thumbnail"] = f"/v1/document/image/{kb_id}-{doc_item['thumbnail']}"
if doc_item.get("source_type"):
doc_item["source_type"] = doc_item["source_type"].split("/")[0]

return get_json_result(data={"total": tol, "docs": docs})
except Exception as e:
@ -367,7 +368,7 @@ def change_status():
continue

status_int = int(status)
if not globals.docStoreConn.update({"doc_id": doc_id}, {"available_int": status_int}, search.index_name(kb.tenant_id), doc.kb_id):
if not settings.docStoreConn.update({"doc_id": doc_id}, {"available_int": status_int}, search.index_name(kb.tenant_id), doc.kb_id):
result[doc_id] = {"error": "Database error (docStore update)!"}
result[doc_id] = {"status": status}
except Exception as e:
@ -432,8 +433,8 @@ def run():
DocumentService.update_by_id(id, info)
if req.get("delete", False):
TaskService.filter_delete([Task.doc_id == id])
if globals.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
if settings.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
globals.docStoreConn.delete({"doc_id": id}, search.index_name(tenant_id), doc.kb_id)
settings.docStoreConn.delete({"doc_id": id}, search.index_name(tenant_id), doc.kb_id)

if str(req["run"]) == TaskStatus.RUNNING.value:
doc = doc.to_dict()
@ -479,8 +480,8 @@ def rename():
"title_tks": title_tks,
"title_sm_tks": rag_tokenizer.fine_grained_tokenize(title_tks),
}
if globals.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
if settings.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
globals.docStoreConn.update(
settings.docStoreConn.update(
{"doc_id": req["doc_id"]},
es_body,
search.index_name(tenant_id),
@ -501,7 +502,7 @@ def get(doc_id):
return get_data_error_result(message="Document not found!")

b, n = File2DocumentService.get_storage_address(doc_id=doc_id)
response = flask.make_response(STORAGE_IMPL.get(b, n))
response = flask.make_response(settings.STORAGE_IMPL.get(b, n))

ext = re.search(r"\.([^.]+)$", doc.name.lower())
ext = ext.group(1) if ext else None
@ -541,8 +542,8 @@ def change_parser():
tenant_id = DocumentService.get_tenant_id(req["doc_id"])
if not tenant_id:
return get_data_error_result(message="Tenant not found!")
if globals.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
if settings.docStoreConn.indexExist(search.index_name(tenant_id), doc.kb_id):
globals.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)
settings.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)

try:
if "pipeline_id" in req and req["pipeline_id"] != "":
@ -577,7 +578,7 @@ def get_image(image_id):
if len(arr) != 2:
return get_data_error_result(message="Image not found.")
bkt, nm = image_id.split("-")
response = flask.make_response(STORAGE_IMPL.get(bkt, nm))
response = flask.make_response(settings.STORAGE_IMPL.get(bkt, nm))
response.headers.set("Content-Type", "image/JPEG")
return response
except Exception as e:
@ -34,7 +34,7 @@ from api.db.services.file_service import FileService
from api.utils.api_utils import get_json_result
from api.utils.file_utils import filename_type
from api.utils.web_utils import CONTENT_TYPE_MAP
from rag.utils.storage_factory import STORAGE_IMPL
from common import settings


@manager.route('/upload', methods=['POST'])  # noqa: F821
@ -95,14 +95,14 @@ def upload():
# file type
filetype = filename_type(file_obj_names[file_len - 1])
location = file_obj_names[file_len - 1]
while STORAGE_IMPL.obj_exist(last_folder.id, location):
while settings.STORAGE_IMPL.obj_exist(last_folder.id, location):
location += "_"
blob = file_obj.read()
filename = duplicate_name(
FileService.query,
name=file_obj_names[file_len - 1],
parent_id=last_folder.id)
STORAGE_IMPL.put(last_folder.id, location, blob)
settings.STORAGE_IMPL.put(last_folder.id, location, blob)
file = {
"id": get_uuid(),
"parent_id": last_folder.id,
@ -245,7 +245,7 @@ def rm():
def _delete_single_file(file):
try:
if file.location:
STORAGE_IMPL.rm(file.parent_id, file.location)
settings.STORAGE_IMPL.rm(file.parent_id, file.location)
except Exception:
logging.exception(f"Fail to remove object: {file.parent_id}/{file.location}")

@ -346,10 +346,10 @@ def get(file_id):
if not check_file_team_permission(file, current_user.id):
return get_json_result(data=False, message='No authorization.', code=RetCode.AUTHENTICATION_ERROR)

blob = STORAGE_IMPL.get(file.parent_id, file.location)
blob = settings.STORAGE_IMPL.get(file.parent_id, file.location)
if not blob:
b, n = File2DocumentService.get_storage_address(file_id=file_id)
blob = STORAGE_IMPL.get(b, n)
blob = settings.STORAGE_IMPL.get(b, n)

response = flask.make_response(blob)
ext = re.search(r"\.([^.]+)$", file.name.lower())
@ -428,11 +428,11 @@ def move():
filename = source_file_entry.name

new_location = filename
while STORAGE_IMPL.obj_exist(dest_folder.id, new_location):
while settings.STORAGE_IMPL.obj_exist(dest_folder.id, new_location):
new_location += "_"

try:
STORAGE_IMPL.move(old_parent_id, old_location, dest_folder.id, new_location)
settings.STORAGE_IMPL.move(old_parent_id, old_location, dest_folder.id, new_location)
except Exception as storage_err:
raise RuntimeError(f"Move file failed at storage layer: {str(storage_err)}")

@ -37,12 +37,10 @@ from api.db.db_models import File
from api.utils.api_utils import get_json_result
from rag.nlp import search
from api.constants import DATASET_NAME_LIMIT
from rag.settings import PAGERANK_FLD
from rag.utils.redis_conn import REDIS_CONN
from rag.utils.storage_factory import STORAGE_IMPL
from rag.utils.doc_store_conn import OrderByExpr
from rag.utils.doc_store_conn import OrderByExpr
from common.constants import RetCode, PipelineTaskType, StatusEnum, VALID_TASK_STATUS, FileSource, LLMType, PAGERANK_FLD
from common.constants import RetCode, PipelineTaskType, StatusEnum, VALID_TASK_STATUS, FileSource, LLMType
from common import settings
from common import globals

@manager.route('/create', methods=['post'])  # noqa: F821
@login_required
@ -54,7 +52,7 @@ def create():
tenant_id = current_user.id,
parser_id = req.pop("parser_id", None),
**req
)

try:
if not KnowledgebaseService.save(**req):
@ -104,24 +102,32 @@ def update():
message="Duplicated knowledgebase name.")

del req["kb_id"]
connectors = []
if "connectors" in req:
connectors = req["connectors"]
del req["connectors"]
if not KnowledgebaseService.update_by_id(kb.id, req):
return get_data_error_result()

if kb.pagerank != req.get("pagerank", 0):
if req.get("pagerank", 0) > 0:
globals.docStoreConn.update({"kb_id": kb.id}, {PAGERANK_FLD: req["pagerank"]},
settings.docStoreConn.update({"kb_id": kb.id}, {PAGERANK_FLD: req["pagerank"]},
search.index_name(kb.tenant_id), kb.id)
else:
# Elasticsearch requires PAGERANK_FLD be non-zero!
globals.docStoreConn.update({"exists": PAGERANK_FLD}, {"remove": PAGERANK_FLD},
settings.docStoreConn.update({"exists": PAGERANK_FLD}, {"remove": PAGERANK_FLD},
search.index_name(kb.tenant_id), kb.id)

e, kb = KnowledgebaseService.get_by_id(kb.id)
if not e:
return get_data_error_result(
message="Database error (Knowledgebase rename)!")
errors = Connector2KbService.link_connectors(kb.id, [conn for conn in connectors], current_user.id)
if errors:
logging.error("Link KB errors: ", errors)
kb = kb.to_dict()
kb.update(req)
kb["connectors"] = connectors

return get_json_result(data=kb)
except Exception as e:
@ -225,10 +231,10 @@ def rm():
return get_data_error_result(
message="Database error (Knowledgebase removal)!")
for kb in kbs:
globals.docStoreConn.delete({"kb_id": kb.id}, search.index_name(kb.tenant_id), kb.id)
settings.docStoreConn.delete({"kb_id": kb.id}, search.index_name(kb.tenant_id), kb.id)
globals.docStoreConn.deleteIdx(search.index_name(kb.tenant_id), kb.id)
settings.docStoreConn.deleteIdx(search.index_name(kb.tenant_id), kb.id)
if hasattr(STORAGE_IMPL, 'remove_bucket'):
if hasattr(settings.STORAGE_IMPL, 'remove_bucket'):
STORAGE_IMPL.remove_bucket(kb.id)
settings.STORAGE_IMPL.remove_bucket(kb.id)
return get_json_result(data=True)
except Exception as e:
return server_error_response(e)
@ -247,7 +253,7 @@ def list_tags(kb_id):
tenants = UserTenantService.get_tenants_by_user_id(current_user.id)
tags = []
for tenant in tenants:
tags += globals.retriever.all_tags(tenant["tenant_id"], [kb_id])
tags += settings.retriever.all_tags(tenant["tenant_id"], [kb_id])
return get_json_result(data=tags)


@ -266,7 +272,7 @@ def list_tags_from_kbs():
tenants = UserTenantService.get_tenants_by_user_id(current_user.id)
tags = []
for tenant in tenants:
tags += globals.retriever.all_tags(tenant["tenant_id"], kb_ids)
tags += settings.retriever.all_tags(tenant["tenant_id"], kb_ids)
return get_json_result(data=tags)


@ -283,7 +289,7 @@ def rm_tags(kb_id):
e, kb = KnowledgebaseService.get_by_id(kb_id)

for t in req["tags"]:
globals.docStoreConn.update({"tag_kwd": t, "kb_id": [kb_id]},
settings.docStoreConn.update({"tag_kwd": t, "kb_id": [kb_id]},
{"remove": {"tag_kwd": t}},
search.index_name(kb.tenant_id),
kb_id)
@ -302,7 +308,7 @@ def rename_tags(kb_id):
)
e, kb = KnowledgebaseService.get_by_id(kb_id)

globals.docStoreConn.update({"tag_kwd": req["from_tag"], "kb_id": [kb_id]},
settings.docStoreConn.update({"tag_kwd": req["from_tag"], "kb_id": [kb_id]},
{"remove": {"tag_kwd": req["from_tag"].strip()}, "add": {"tag_kwd": req["to_tag"]}},
search.index_name(kb.tenant_id),
kb_id)
@ -325,9 +331,9 @@ def knowledge_graph(kb_id):
}

obj = {"graph": {}, "mind_map": {}}
if not globals.docStoreConn.indexExist(search.index_name(kb.tenant_id), kb_id):
if not settings.docStoreConn.indexExist(search.index_name(kb.tenant_id), kb_id):
return get_json_result(data=obj)
sres = globals.retriever.search(req, search.index_name(kb.tenant_id), [kb_id])
sres = settings.retriever.search(req, search.index_name(kb.tenant_id), [kb_id])
if not len(sres.ids):
return get_json_result(data=obj)

@ -359,7 +365,7 @@ def delete_knowledge_graph(kb_id):
code=RetCode.AUTHENTICATION_ERROR
)
_, kb = KnowledgebaseService.get_by_id(kb_id)
globals.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)
settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)

return get_json_result(data=True)

@ -565,7 +571,7 @@ def trace_graphrag():

ok, task = TaskService.get_by_id(task_id)
if not ok:
return get_error_data_result(message="GraphRAG Task Not Found or Error Occurred")
return get_json_result(data={})

return get_json_result(data=task.to_dict())

@ -731,13 +737,13 @@ def delete_kb_task():
task_id = kb.graphrag_task_id
kb_task_finish_at = "graphrag_task_finish_at"
cancel_task(task_id)
globals.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)
settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)
case PipelineTaskType.RAPTOR:
kb_task_id_field = "raptor_task_id"
task_id = kb.raptor_task_id
kb_task_finish_at = "raptor_task_finish_at"
cancel_task(task_id)
globals.docStoreConn.delete({"raptor_kwd": ["raptor"]}, search.index_name(kb.tenant_id), kb_id)
settings.docStoreConn.delete({"raptor_kwd": ["raptor"]}, search.index_name(kb.tenant_id), kb_id)
case PipelineTaskType.MINDMAP:
kb_task_id_field = "mindmap_task_id"
task_id = kb.mindmap_task_id
@ -774,14 +780,14 @@ def check_embedding():

def _to_1d(x):
a = np.asarray(x, dtype=np.float32)
return a.reshape(-1)

def _cos_sim(a, b, eps=1e-12):
a = _to_1d(a)
b = _to_1d(b)
na = np.linalg.norm(a)
nb = np.linalg.norm(b)
if na < eps or nb < eps:
return 0.0
return float(np.dot(a, b) / (na * nb))

@ -819,7 +825,7 @@ def check_embedding():
indexNames=index_nm, knowledgebaseIds=[kb_id]
)
ids = docStoreConn.getChunkIds(res1)
if not ids:
continue

cid = ids[0]
@ -849,7 +855,7 @@ def check_embedding():
tenant_id = kb.tenant_id

emb_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, embd_id)
samples = sample_random_chunks_with_vectors(globals.docStoreConn, tenant_id=tenant_id, kb_id=kb_id, n=n)
samples = sample_random_chunks_with_vectors(settings.docStoreConn, tenant_id=tenant_id, kb_id=kb_id, n=n)

results, eff_sims = [], []
for ck in samples:
@ -863,7 +869,7 @@ def check_embedding():
continue

try:
qv, _ = emb_mdl.encode_queries(txt)
sim = _cos_sim(qv, ck["vector"])
except Exception:
return get_error_data_result(message="embedding failure")
@ -892,13 +898,3 @@ def check_embedding():
return get_json_result(code=RetCode.NOT_EFFECTIVE, message="failed", data={"summary": summary, "results": results})


@manager.route("/<kb_id>/link", methods=["POST"])  # noqa: F821
@validate_request("connector_ids")
@login_required
def link_connector(kb_id):
req = request.json
errors = Connector2KbService.link_connectors(kb_id, req["connector_ids"], current_user.id)
if errors:
return get_json_result(data=False, message=errors, code=RetCode.SERVER_ERROR)
return get_json_result(data=True)

@ -33,7 +33,7 @@ from rag.llm import EmbeddingModel, ChatModel, RerankModel, CvModel, TTSModel
def factories():
try:
fac = get_allowed_llm_factories()
fac = [f.to_dict() for f in fac if f.name not in ["Youdao", "FastEmbed", "BAAI"]]
fac = [f.to_dict() for f in fac if f.name not in ["Youdao", "FastEmbed", "BAAI", "Builtin"]]
llms = LLMService.get_all()
mdl_types = {}
for m in llms:
@ -128,7 +128,7 @@ def add_llm():
api_key = req.get("api_key", "x")
llm_name = req.get("llm_name")

if factory not in get_allowed_llm_factories():
if factory not in [f.name for f in get_allowed_llm_factories()]:
return get_data_error_result(message=f"LLM factory {factory} is not allowed")

def apikey_json(keys):
@ -348,7 +348,7 @@ def list_app():
facts = set([o.to_dict()["llm_factory"] for o in objs if o.api_key and o.status == StatusEnum.VALID.value])
status = {(o.llm_name + "@" + o.llm_factory) for o in objs if o.status == StatusEnum.VALID.value}
llms = LLMService.get_all()
llms = [m.to_dict() for m in llms if m.status == StatusEnum.VALID.value and m.fid not in weighted and (m.llm_name + "@" + m.fid) in status]
llms = [m.to_dict() for m in llms if m.status == StatusEnum.VALID.value and m.fid not in weighted and (m.fid == 'Builtin' or (m.llm_name + "@" + m.fid) in status)]
for m in llms:
m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in self_deployed
if "tei-" in os.getenv("COMPOSE_PROFILES", "") and m["model_type"] == LLMType.EMBEDDING and m["fid"] == "Builtin" and m["llm_name"] == os.getenv("TEI_MODEL", ""):
@ -358,7 +358,7 @@ def list_app():
for o in objs:
if o.llm_name + "@" + o.llm_factory in llm_set:
continue
llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True})
llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True, "status": StatusEnum.VALID.value})

res = {}
for m in llms:
@ -15,15 +15,19 @@
#

import json
import logging
import time
from typing import Any, cast

from agent.canvas import Canvas
from api.db import CanvasCategory
from api.db.services.canvas_service import UserCanvasService
from api.db.services.user_canvas_version import UserCanvasVersionService
from common.constants import RetCode
from common.misc_utils import get_uuid
from api.utils.api_utils import get_data_error_result, get_error_data_result, get_json_result, token_required
from api.utils.api_utils import get_result
from flask import request
from flask import request, Response


@manager.route('/agents', methods=['GET'])  # noqa: F821
@ -127,3 +131,49 @@ def delete_agent(tenant_id: str, agent_id: str):

UserCanvasService.delete_by_id(agent_id)
return get_json_result(data=True)


@manager.route('/webhook/<agent_id>', methods=['POST'])  # noqa: F821
@token_required
def webhook(tenant_id: str, agent_id: str):
req = request.json
if not UserCanvasService.accessible(req["id"], tenant_id):
return get_json_result(
data=False, message='Only owner of canvas authorized for this operation.',
code=RetCode.OPERATING_ERROR)

e, cvs = UserCanvasService.get_by_id(req["id"])
if not e:
return get_data_error_result(message="canvas not found.")

if not isinstance(cvs.dsl, str):
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)

if cvs.canvas_category == CanvasCategory.DataFlow:
return get_data_error_result(message="Dataflow can not be triggered by webhook.")

try:
canvas = Canvas(cvs.dsl, tenant_id, agent_id)
except Exception as e:
return get_json_result(
data=False, message=str(e),
code=RetCode.EXCEPTION_ERROR)

def sse():
nonlocal canvas
try:
for ans in canvas.run(query=req.get("query", ""), files=req.get("files", []), user_id=req.get("user_id", tenant_id), webhook_payload=req):
yield "data:" + json.dumps(ans, ensure_ascii=False) + "\n\n"

cvs.dsl = json.loads(str(canvas))
UserCanvasService.update_by_id(req["id"], cvs.to_dict())
except Exception as e:
logging.exception(e)
yield "data:" + json.dumps({"code": 500, "message": str(e), "data": False}, ensure_ascii=False) + "\n\n"

resp = Response(sse(), mimetype="text/event-stream")
resp.headers.add_header("Cache-control", "no-cache")
resp.headers.add_header("Connection", "keep-alive")
resp.headers.add_header("X-Accel-Buffering", "no")
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
return resp
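
The new webhook route above streams the agent's output back as server-sent events. Below is a minimal caller sketch in Python, assuming the SDK blueprint is mounted under /api/v1, that @token_required accepts a Bearer API token, and that the agent id is also passed as "id" in the JSON body because the handler reads req["id"] for the ownership check; the base URL and header name are assumptions not shown in this diff.

import json

import requests

BASE = "http://localhost:9380/api/v1"  # assumed mount point, not shown in this diff
API_KEY = "ragflow-xxxxxx"  # assumed Bearer token accepted by @token_required


def trigger_webhook(agent_id: str, query: str, payload: dict | None = None):
    # The handler reads req["id"] for the permission check, so include it in the body.
    body = {"id": agent_id, "query": query, **(payload or {})}
    with requests.post(
        f"{BASE}/webhook/{agent_id}",
        headers={"Authorization": f"Bearer {API_KEY}"},
        json=body,
        stream=True,
    ) as resp:
        # Each SSE frame is a single "data: {...}" line followed by a blank line.
        for line in resp.iter_lines(decode_unicode=True):
            if line and line.startswith("data:"):
                yield json.loads(line[len("data:"):])
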
@ -47,8 +47,8 @@ from api.utils.validation_utils import (
validate_and_parse_request_args,
)
from rag.nlp import search
from rag.settings import PAGERANK_FLD
from common.constants import PAGERANK_FLD
from common import globals
from common import settings


@manager.route("/datasets", methods=["POST"])  # noqa: F821
@ -360,11 +360,11 @@ def update(tenant_id, dataset_id):
return get_error_argument_result(message="'pagerank' can only be set when doc_engine is elasticsearch")

if req["pagerank"] > 0:
globals.docStoreConn.update({"kb_id": kb.id}, {PAGERANK_FLD: req["pagerank"]},
settings.docStoreConn.update({"kb_id": kb.id}, {PAGERANK_FLD: req["pagerank"]},
search.index_name(kb.tenant_id), kb.id)
else:
# Elasticsearch requires PAGERANK_FLD be non-zero!
globals.docStoreConn.update({"exists": PAGERANK_FLD}, {"remove": PAGERANK_FLD},
settings.docStoreConn.update({"exists": PAGERANK_FLD}, {"remove": PAGERANK_FLD},
search.index_name(kb.tenant_id), kb.id)

if not KnowledgebaseService.update_by_id(kb.id, req):
@ -493,9 +493,9 @@ def knowledge_graph(tenant_id, dataset_id):
}

obj = {"graph": {}, "mind_map": {}}
if not globals.docStoreConn.indexExist(search.index_name(kb.tenant_id), dataset_id):
if not settings.docStoreConn.indexExist(search.index_name(kb.tenant_id), dataset_id):
return get_result(data=obj)
sres = globals.retriever.search(req, search.index_name(kb.tenant_id), [dataset_id])
sres = settings.retriever.search(req, search.index_name(kb.tenant_id), [dataset_id])
if not len(sres.ids):
return get_result(data=obj)

@ -528,7 +528,7 @@ def delete_knowledge_graph(tenant_id, dataset_id):
code=RetCode.AUTHENTICATION_ERROR
)
_, kb = KnowledgebaseService.get_by_id(dataset_id)
globals.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]},
settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]},
search.index_name(kb.tenant_id), dataset_id)

return get_result(data=True)
@ -20,12 +20,11 @@ from flask import request, jsonify
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMBundle
from api import settings
from api.utils.api_utils import validate_request, build_error_result, apikey_required
from rag.app.tag import label_question
from api.db.services.dialog_service import meta_filter, convert_conditions
from common.constants import RetCode, LLMType
from common import globals
from common import settings

@manager.route('/dify/retrieval', methods=['POST'])  # noqa: F821
@apikey_required
@ -138,7 +137,7 @@ def retrieval(tenant_id):
# print("doc_ids", doc_ids)
if not doc_ids and metadata_condition is not None:
doc_ids = ['-999']
ranks = globals.retriever.retrieval(
ranks = settings.retriever.retrieval(
question,
embd_mdl,
kb.tenant_id,
@ -24,7 +24,6 @@ from flask import request, send_file
from peewee import OperationalError
from pydantic import BaseModel, Field, validator

from api import settings
from api.constants import FILE_NAME_LEN_LIMIT
from api.db import FileType
from api.db.db_models import File, Task
@ -41,10 +40,9 @@ from rag.app.qa import beAdoc, rmPrefix
from rag.app.tag import label_question
from rag.nlp import rag_tokenizer, search
from rag.prompts.generator import cross_languages, keyword_extraction
from rag.utils.storage_factory import STORAGE_IMPL
from common.string_utils import remove_redundant_spaces
from common.constants import RetCode, LLMType, ParserType, TaskStatus, FileSource
from common import globals
from common import settings

MAXIMUM_OF_UPLOADING_FILES = 256

@ -308,7 +306,7 @@ def update_doc(tenant_id, dataset_id, document_id):
)
if not e:
return get_error_data_result(message="Document not found!")
globals.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), dataset_id)
settings.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), dataset_id)

if "enabled" in req:
status = int(req["enabled"])
@ -317,7 +315,7 @@ def update_doc(tenant_id, dataset_id, document_id):
if not DocumentService.update_by_id(doc.id, {"status": str(status)}):
return get_error_data_result(message="Database error (Document update)!")

globals.docStoreConn.update({"doc_id": doc.id}, {"available_int": status}, search.index_name(kb.tenant_id), doc.kb_id)
settings.docStoreConn.update({"doc_id": doc.id}, {"available_int": status}, search.index_name(kb.tenant_id), doc.kb_id)
return get_result(data=True)
except Exception as e:
return server_error_response(e)
@ -402,7 +400,7 @@ def download(tenant_id, dataset_id, document_id):
return get_error_data_result(message=f"The dataset not own the document {document_id}.")
# The process of downloading
doc_id, doc_location = File2DocumentService.get_storage_address(doc_id=document_id)  # minio address
file_stream = STORAGE_IMPL.get(doc_id, doc_location)
file_stream = settings.STORAGE_IMPL.get(doc_id, doc_location)
if not file_stream:
return construct_json_result(message="This file is empty.", code=RetCode.DATA_ERROR)
file = BytesIO(file_stream)
@ -672,7 +670,7 @@ def delete(tenant_id, dataset_id):
)
File2DocumentService.delete_by_document_id(doc_id)

STORAGE_IMPL.rm(b, n)
settings.STORAGE_IMPL.rm(b, n)
success_count += 1
except Exception as e:
errors += str(e)
@ -756,7 +754,7 @@ def parse(tenant_id, dataset_id):
return get_error_data_result("Can't parse document that is currently being processed")
info = {"run": "1", "progress": 0, "progress_msg": "", "chunk_num": 0, "token_num": 0}
DocumentService.update_by_id(id, info)
globals.docStoreConn.delete({"doc_id": id}, search.index_name(tenant_id), dataset_id)
settings.docStoreConn.delete({"doc_id": id}, search.index_name(tenant_id), dataset_id)
TaskService.filter_delete([Task.doc_id == id])
e, doc = DocumentService.get_by_id(id)
doc = doc.to_dict()
@ -836,7 +834,7 @@ def stop_parsing(tenant_id, dataset_id):
return get_error_data_result("Can't stop parsing document with progress at 0 or 1")
info = {"run": "2", "progress": 0, "chunk_num": 0}
DocumentService.update_by_id(id, info)
globals.docStoreConn.delete({"doc_id": doc[0].id}, search.index_name(tenant_id), dataset_id)
settings.docStoreConn.delete({"doc_id": doc[0].id}, search.index_name(tenant_id), dataset_id)
success_count += 1
if duplicate_messages:
if success_count > 0:
@ -969,7 +967,7 @@ def list_chunks(tenant_id, dataset_id, document_id):

res = {"total": 0, "chunks": [], "doc": renamed_doc}
if req.get("id"):
chunk = globals.docStoreConn.get(req.get("id"), search.index_name(tenant_id), [dataset_id])
chunk = settings.docStoreConn.get(req.get("id"), search.index_name(tenant_id), [dataset_id])
if not chunk:
return get_result(message=f"Chunk not found: {dataset_id}/{req.get('id')}", code=RetCode.NOT_FOUND)
k = []
@ -996,8 +994,8 @@ def list_chunks(tenant_id, dataset_id, document_id):
res["chunks"].append(final_chunk)
_ = Chunk(**final_chunk)

elif globals.docStoreConn.indexExist(search.index_name(tenant_id), dataset_id):
elif settings.docStoreConn.indexExist(search.index_name(tenant_id), dataset_id):
sres = globals.retriever.search(query, search.index_name(tenant_id), [dataset_id], emb_mdl=None, highlight=True)
sres = settings.retriever.search(query, search.index_name(tenant_id), [dataset_id], emb_mdl=None, highlight=True)
sres = settings.retriever.search(query, search.index_name(tenant_id), [dataset_id], emb_mdl=None, highlight=True)
|
||||||
res["total"] = sres.total
|
res["total"] = sres.total
|
||||||
for id in sres.ids:
|
for id in sres.ids:
|
||||||
d = {
|
d = {
|
||||||
@ -1121,7 +1119,7 @@ def add_chunk(tenant_id, dataset_id, document_id):
|
|||||||
v, c = embd_mdl.encode([doc.name, req["content"] if not d["question_kwd"] else "\n".join(d["question_kwd"])])
|
v, c = embd_mdl.encode([doc.name, req["content"] if not d["question_kwd"] else "\n".join(d["question_kwd"])])
|
||||||
v = 0.1 * v[0] + 0.9 * v[1]
|
v = 0.1 * v[0] + 0.9 * v[1]
|
||||||
d["q_%d_vec" % len(v)] = v.tolist()
|
d["q_%d_vec" % len(v)] = v.tolist()
|
||||||
globals.docStoreConn.insert([d], search.index_name(tenant_id), dataset_id)
|
settings.docStoreConn.insert([d], search.index_name(tenant_id), dataset_id)
|
||||||
|
|
||||||
DocumentService.increment_chunk_num(doc.id, doc.kb_id, c, 1, 0)
|
DocumentService.increment_chunk_num(doc.id, doc.kb_id, c, 1, 0)
|
||||||
# rename keys
|
# rename keys
|
||||||
@ -1202,7 +1200,7 @@ def rm_chunk(tenant_id, dataset_id, document_id):
|
|||||||
if "chunk_ids" in req:
|
if "chunk_ids" in req:
|
||||||
unique_chunk_ids, duplicate_messages = check_duplicate_ids(req["chunk_ids"], "chunk")
|
unique_chunk_ids, duplicate_messages = check_duplicate_ids(req["chunk_ids"], "chunk")
|
||||||
condition["id"] = unique_chunk_ids
|
condition["id"] = unique_chunk_ids
|
||||||
chunk_number = globals.docStoreConn.delete(condition, search.index_name(tenant_id), dataset_id)
|
chunk_number = settings.docStoreConn.delete(condition, search.index_name(tenant_id), dataset_id)
|
||||||
if chunk_number != 0:
|
if chunk_number != 0:
|
||||||
DocumentService.decrement_chunk_num(document_id, dataset_id, 1, chunk_number, 0)
|
DocumentService.decrement_chunk_num(document_id, dataset_id, 1, chunk_number, 0)
|
||||||
if "chunk_ids" in req and chunk_number != len(unique_chunk_ids):
|
if "chunk_ids" in req and chunk_number != len(unique_chunk_ids):
|
||||||
@ -1274,7 +1272,7 @@ def update_chunk(tenant_id, dataset_id, document_id, chunk_id):
|
|||||||
schema:
|
schema:
|
||||||
type: object
|
type: object
|
||||||
"""
|
"""
|
||||||
chunk = globals.docStoreConn.get(chunk_id, search.index_name(tenant_id), [dataset_id])
|
chunk = settings.docStoreConn.get(chunk_id, search.index_name(tenant_id), [dataset_id])
|
||||||
if chunk is None:
|
if chunk is None:
|
||||||
return get_error_data_result(f"Can't find this chunk {chunk_id}")
|
return get_error_data_result(f"Can't find this chunk {chunk_id}")
|
||||||
if not KnowledgebaseService.accessible(kb_id=dataset_id, user_id=tenant_id):
|
if not KnowledgebaseService.accessible(kb_id=dataset_id, user_id=tenant_id):
|
||||||
@ -1319,7 +1317,7 @@ def update_chunk(tenant_id, dataset_id, document_id, chunk_id):
|
|||||||
v, c = embd_mdl.encode([doc.name, d["content_with_weight"] if not d.get("question_kwd") else "\n".join(d["question_kwd"])])
|
v, c = embd_mdl.encode([doc.name, d["content_with_weight"] if not d.get("question_kwd") else "\n".join(d["question_kwd"])])
|
||||||
v = 0.1 * v[0] + 0.9 * v[1] if doc.parser_id != ParserType.QA else v[1]
|
v = 0.1 * v[0] + 0.9 * v[1] if doc.parser_id != ParserType.QA else v[1]
|
||||||
d["q_%d_vec" % len(v)] = v.tolist()
|
d["q_%d_vec" % len(v)] = v.tolist()
|
||||||
globals.docStoreConn.update({"id": chunk_id}, d, search.index_name(tenant_id), dataset_id)
|
settings.docStoreConn.update({"id": chunk_id}, d, search.index_name(tenant_id), dataset_id)
|
||||||
return get_result()
|
return get_result()
|
||||||
|
|
||||||
|
|
||||||
@ -1465,7 +1463,7 @@ def retrieval_test(tenant_id):
|
|||||||
chat_mdl = LLMBundle(kb.tenant_id, LLMType.CHAT)
|
chat_mdl = LLMBundle(kb.tenant_id, LLMType.CHAT)
|
||||||
question += keyword_extraction(chat_mdl, question)
|
question += keyword_extraction(chat_mdl, question)
|
||||||
|
|
||||||
ranks = globals.retriever.retrieval(
|
ranks = settings.retriever.retrieval(
|
||||||
question,
|
question,
|
||||||
embd_mdl,
|
embd_mdl,
|
||||||
tenant_ids,
|
tenant_ids,
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ from api.db.services import duplicate_name
from api.db.services.file_service import FileService
from api.utils.api_utils import get_json_result
from api.utils.file_utils import filename_type
-from rag.utils.storage_factory import STORAGE_IMPL
+from common import settings


@manager.route('/file/upload', methods=['POST']) # noqa: F821
@@ -126,7 +126,7 @@ def upload(tenant_id):

filetype = filename_type(file_obj_names[file_len - 1])
location = file_obj_names[file_len - 1]
-while STORAGE_IMPL.obj_exist(last_folder.id, location):
+while settings.STORAGE_IMPL.obj_exist(last_folder.id, location):
location += "_"
blob = file_obj.read()
filename = duplicate_name(FileService.query, name=file_obj_names[file_len - 1], parent_id=last_folder.id)
@@ -142,7 +142,7 @@ def upload(tenant_id):
"size": len(blob),
}
file = FileService.insert(file)
-STORAGE_IMPL.put(last_folder.id, location, blob)
+settings.STORAGE_IMPL.put(last_folder.id, location, blob)
file_res.append(file.to_json())
return get_json_result(data=file_res)
except Exception as e:
@@ -497,10 +497,10 @@ def rm(tenant_id):
e, file = FileService.get_by_id(inner_file_id)
if not e:
return get_json_result(message="File not found!", code=404)
-STORAGE_IMPL.rm(file.parent_id, file.location)
+settings.STORAGE_IMPL.rm(file.parent_id, file.location)
FileService.delete_folder_by_pf_id(tenant_id, file_id)
else:
-STORAGE_IMPL.rm(file.parent_id, file.location)
+settings.STORAGE_IMPL.rm(file.parent_id, file.location)
if not FileService.delete(file):
return get_json_result(message="Database error (File removal)!", code=500)

@@ -614,10 +614,10 @@ def get(tenant_id, file_id):
if not e:
return get_json_result(message="Document not found!", code=404)

-blob = STORAGE_IMPL.get(file.parent_id, file.location)
+blob = settings.STORAGE_IMPL.get(file.parent_id, file.location)
if not blob:
b, n = File2DocumentService.get_storage_address(file_id=file_id)
-blob = STORAGE_IMPL.get(b, n)
+blob = settings.STORAGE_IMPL.get(b, n)

response = flask.make_response(blob)
ext = re.search(r"\.([^.]+)$", file.name)

@@ -21,7 +21,6 @@ import tiktoken
from flask import Response, jsonify, request

from agent.canvas import Canvas
-from api import settings
from api.db.db_models import APIToken
from api.db.services.api_service import API4ConversationService
from api.db.services.canvas_service import UserCanvasService, completion_openai
@@ -41,7 +40,7 @@ from rag.app.tag import label_question
from rag.prompts.template import load_prompt
from rag.prompts.generator import cross_languages, gen_meta_filter, keyword_extraction, chunks_format
from common.constants import RetCode, LLMType, StatusEnum
-from common import globals
+from common import settings

@manager.route("/chats/<chat_id>/sessions", methods=["POST"]) # noqa: F821
@token_required
@@ -1016,7 +1015,7 @@ def retrieval_test_embedded():
question += keyword_extraction(chat_mdl, question)

labels = label_question(question, [kb])
-ranks = globals.retriever.retrieval(
+ranks = settings.retriever.retrieval(
question, embd_mdl, tenant_ids, kb_ids, page, size, similarity_threshold, vector_similarity_weight, top,
doc_ids, rerank_mdl=rerank_mdl, highlight=req.get("highlight"), rank_feature=labels
)

@@ -23,22 +23,20 @@ from api.db.db_models import APIToken
from api.db.services.api_service import APITokenService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.user_service import UserTenantService
-from api import settings
from api.utils.api_utils import (
get_json_result,
get_data_error_result,
server_error_response,
generate_confirmation_token,
)
-from api.versions import get_ragflow_version
+from common.versions import get_ragflow_version
from common.time_utils import current_timestamp, datetime_format
-from rag.utils.storage_factory import STORAGE_IMPL, STORAGE_IMPL_TYPE
from timeit import default_timer as timer

from rag.utils.redis_conn import REDIS_CONN
from flask import jsonify
from api.utils.health_utils import run_health_checks
-from common import globals
+from common import settings


@manager.route("/version", methods=["GET"]) # noqa: F821
@@ -101,7 +99,7 @@ def status():
res = {}
st = timer()
try:
-res["doc_engine"] = globals.docStoreConn.health()
+res["doc_engine"] = settings.docStoreConn.health()
res["doc_engine"]["elapsed"] = "{:.1f}".format((timer() - st) * 1000.0)
except Exception as e:
res["doc_engine"] = {
@@ -113,15 +111,15 @@ def status():

st = timer()
try:
-STORAGE_IMPL.health()
+settings.STORAGE_IMPL.health()
res["storage"] = {
-"storage": STORAGE_IMPL_TYPE.lower(),
+"storage": settings.STORAGE_IMPL_TYPE.lower(),
"status": "green",
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
}
except Exception as e:
res["storage"] = {
-"storage": STORAGE_IMPL_TYPE.lower(),
+"storage": settings.STORAGE_IMPL_TYPE.lower(),
"status": "red",
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
"error": str(e),

@@ -17,7 +17,6 @@
from flask import request
from flask_login import login_required, current_user

-from api import settings
from api.apps import smtp_mail_server
from api.db import UserTenantRole
from api.db.db_models import UserTenant
@@ -28,6 +27,7 @@ from common.misc_utils import get_uuid
from common.time_utils import delta_seconds
from api.utils.api_utils import get_json_result, validate_request, server_error_response, get_data_error_result
from api.utils.web_utils import send_invite_email
+from common import settings


@manager.route("/<tenant_id>/user/list", methods=["GET"]) # noqa: F821

@@ -26,7 +26,6 @@ from flask import redirect, request, session, make_response
from flask_login import current_user, login_required, login_user, logout_user
from werkzeug.security import check_password_hash, generate_password_hash

-from api import settings
from api.apps.auth import get_auth_client
from api.db import FileType, UserTenantRole
from api.db.db_models import TenantLLM
@@ -58,7 +57,7 @@ from api.utils.web_utils import (
hash_code,
captcha_key,
)
-from common import globals
+from common import settings


@manager.route("/login", methods=["POST", "GET"]) # noqa: F821
@@ -624,7 +623,7 @@ def user_register(user_id, user):
"id": user_id,
"name": user["nickname"] + "‘s Kingdom",
"llm_id": settings.CHAT_MDL,
-"embd_id": globals.EMBEDDING_MDL,
+"embd_id": settings.EMBEDDING_MDL,
"asr_id": settings.ASR_MDL,
"parser_ids": settings.PARSERS,
"img2txt_id": settings.IMAGE2TEXT_MDL,

@@ -70,4 +70,7 @@ class PipelineTaskType(StrEnum):
VALID_PIPELINE_TASK_TYPES = {PipelineTaskType.PARSE, PipelineTaskType.DOWNLOAD, PipelineTaskType.RAPTOR, PipelineTaskType.GRAPH_RAG, PipelineTaskType.MINDMAP}


+PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES = {PipelineTaskType.RAPTOR.lower(), PipelineTaskType.GRAPH_RAG.lower(), PipelineTaskType.MINDMAP.lower()}


KNOWLEDGEBASE_FOLDER_NAME=".knowledgebase"

@@ -31,7 +31,7 @@ from peewee import InterfaceError, OperationalError, BigIntegerField, BooleanFie
from playhouse.migrate import MySQLMigrator, PostgresqlMigrator, migrate
from playhouse.pool import PooledMySQLDatabase, PooledPostgresqlDatabase

-from api import settings, utils
+from api import utils
from api.db import SerializedType
from api.utils.json_encode import json_dumps, json_loads
from api.utils.configs import deserialize_b64, serialize_b64
@@ -39,6 +39,7 @@ from api.utils.configs import deserialize_b64, serialize_b64
from common.time_utils import current_timestamp, timestamp_to_date, date_string_to_timestamp
from common.decorator import singleton
from common.constants import ParserType
+from common import settings


CONTINUOUS_FIELD_TYPE = {IntegerField, FloatField, DateTimeField}
@@ -668,6 +669,7 @@ class LLMFactories(DataBaseModel):
name = CharField(max_length=128, null=False, help_text="LLM factory name", primary_key=True)
logo = TextField(null=True, help_text="llm logo base64")
tags = CharField(max_length=255, null=False, help_text="LLM, Text Embedding, Image2Text, ASR", index=True)
+rank = IntegerField(default=0, index=False)
status = CharField(max_length=1, null=True, help_text="is it validate(0: wasted, 1: validate)", default="1", index=True)

def __str__(self):
@@ -1063,6 +1065,7 @@ class Connector2Kb(DataBaseModel):
id = CharField(max_length=32, primary_key=True)
connector_id = CharField(max_length=32, null=False, index=True)
kb_id = CharField(max_length=32, null=False, index=True)
+auto_parse = CharField(max_length=1, null=False, default="1", index=False)

class Meta:
db_table = "connector2kb"
@@ -1281,4 +1284,12 @@ def migrate_db():
migrate(migrator.add_column("tenant_llm", "status", CharField(max_length=1, null=False, help_text="is it validate(0: wasted, 1: validate)", default="1", index=True)))
except Exception:
pass
+try:
+migrate(migrator.add_column("connector2kb", "auto_parse", CharField(max_length=1, null=False, default="1", index=False)))
+except Exception:
+pass
+try:
+migrate(migrator.add_column("llm_factories", "rank", IntegerField(default=0, index=False)))
+except Exception:
+pass
logging.disable(logging.NOTSET)

@@ -29,10 +29,9 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.tenant_llm_service import LLMFactoriesService, TenantLLMService
from api.db.services.llm_service import LLMService, LLMBundle, get_init_tenant_llm
from api.db.services.user_service import TenantService, UserTenantService
-from api import settings
from common.constants import LLMType
from common.file_utils import get_project_base_directory
-from common import globals
+from common import settings
from api.common.base64 import encode_to_base64


@@ -50,7 +49,7 @@ def init_superuser():
"id": user_info["id"],
"name": user_info["nickname"] + "‘s Kingdom",
"llm_id": settings.CHAT_MDL,
-"embd_id": globals.EMBEDDING_MDL,
+"embd_id": settings.EMBEDDING_MDL,
"asr_id": settings.ASR_MDL,
"parser_ids": settings.PARSERS,
"img2txt_id": settings.IMAGE2TEXT_MDL
@@ -90,13 +89,7 @@ def init_superuser():


def init_llm_factory():
-try:
+LLMFactoriesService.filter_delete([1 == 1])
-LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
-LLMService.filter_delete([(LLM.fid == "cohere")])
-LLMFactoriesService.filter_delete([LLMFactories.name == "cohere"])
-except Exception:
-pass

factory_llm_infos = settings.FACTORY_LLM_INFOS
for factory_llm_info in factory_llm_infos:
info = deepcopy(factory_llm_info)

@@ -16,7 +16,6 @@
import logging
import uuid

-from api import settings
from api.utils.api_utils import group_by
from api.db import FileType, UserTenantRole
from api.db.services.api_service import APITokenService, API4ConversationService
@@ -35,10 +34,9 @@ from api.db.services.task_service import TaskService
from api.db.services.tenant_llm_service import TenantLLMService
from api.db.services.user_canvas_version import UserCanvasVersionService
from api.db.services.user_service import TenantService, UserService, UserTenantService
-from rag.utils.storage_factory import STORAGE_IMPL
from rag.nlp import search
from common.constants import ActiveEnum
-from common import globals
+from common import settings

def create_new_user(user_info: dict) -> dict:
"""
@@ -64,7 +62,7 @@ def create_new_user(user_info: dict) -> dict:
"id": user_id,
"name": user_info["nickname"] + "‘s Kingdom",
"llm_id": settings.CHAT_MDL,
-"embd_id": globals.EMBEDDING_MDL,
+"embd_id": settings.EMBEDDING_MDL,
"asr_id": settings.ASR_MDL,
"parser_ids": settings.PARSERS,
"img2txt_id": settings.IMAGE2TEXT_MDL,
@@ -159,8 +157,8 @@ def delete_user_data(user_id: str) -> dict:
if kb_ids:
# step1.1.1 delete files in storage, remove bucket
for kb_id in kb_ids:
-if STORAGE_IMPL.bucket_exists(kb_id):
+if settings.STORAGE_IMPL.bucket_exists(kb_id):
-STORAGE_IMPL.remove_bucket(kb_id)
+settings.STORAGE_IMPL.remove_bucket(kb_id)
done_msg += f"- Removed {len(kb_ids)} dataset's buckets.\n"
# step1.1.2 delete file and document info in db
doc_ids = DocumentService.get_all_doc_ids_by_kb_ids(kb_ids)
@@ -180,7 +178,7 @@ def delete_user_data(user_id: str) -> dict:
)
done_msg += f"- Deleted {file2doc_delete_res} document-file relation records.\n"
# step1.1.3 delete chunk in es
-r = globals.docStoreConn.delete({"kb_id": kb_ids},
+r = settings.docStoreConn.delete({"kb_id": kb_ids},
search.index_name(tenant_id), kb_ids)
done_msg += f"- Deleted {r} chunk records.\n"
kb_delete_res = KnowledgebaseService.delete_by_ids(kb_ids)
@@ -219,7 +217,7 @@ def delete_user_data(user_id: str) -> dict:
if created_files:
# step2.1.1.1 delete file in storage
for f in created_files:
-STORAGE_IMPL.rm(f.parent_id, f.location)
+settings.STORAGE_IMPL.rm(f.parent_id, f.location)
done_msg += f"- Deleted {len(created_files)} uploaded file.\n"
# step2.1.1.2 delete file record
file_delete_res = FileService.delete_by_ids([f.id for f in created_files])
@@ -238,7 +236,7 @@ def delete_user_data(user_id: str) -> dict:
kb_doc_info = {}
for _tenant_id, kb_doc in kb_grouped_doc.items():
for _kb_id, docs in kb_doc.items():
-chunk_delete_res += globals.docStoreConn.delete(
+chunk_delete_res += settings.docStoreConn.delete(
{"doc_id": [d["id"] for d in docs]},
search.index_name(_tenant_id), _kb_id
)

@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from api.versions import get_ragflow_version
+from common.versions import get_ragflow_version
from .reload_config_base import ReloadConfigBase


@@ -67,6 +67,7 @@ class UserCanvasService(CommonService):
# will get all permitted agents, be cautious
fields = [
cls.model.id,
+cls.model.avatar,
cls.model.title,
cls.model.permission,
cls.model.canvas_type,

@@ -90,7 +90,7 @@ class CommonService:
else:
query_records = cls.model.select()
if reverse is not None:
-if not order_by or not hasattr(cls, order_by):
+if not order_by or not hasattr(cls.model, order_by):
order_by = "create_time"
if reverse is True:
query_records = query_records.order_by(cls.model.getter_by(order_by).desc())

@@ -15,6 +15,7 @@
#
import logging
from datetime import datetime
+from typing import Tuple, List

from anthropic import BaseModel
from peewee import SQL, fn
@@ -39,17 +40,20 @@ class ConnectorService(CommonService):
if not task:
if status == TaskStatus.SCHEDULE:
SyncLogsService.schedule(connector_id, c2k.kb_id)
+ConnectorService.update_by_id(connector_id, {"status": status})
+return

if task.status == TaskStatus.DONE:
if status == TaskStatus.SCHEDULE:
SyncLogsService.schedule(connector_id, c2k.kb_id, task.poll_range_end, total_docs_indexed=task.total_docs_indexed)
+ConnectorService.update_by_id(connector_id, {"status": status})
+return

task = task.to_dict()
task["status"] = status
SyncLogsService.update_by_id(task["id"], task)
ConnectorService.update_by_id(connector_id, {"status": status})


@classmethod
def list(cls, tenant_id):
fields = [
@@ -62,26 +66,42 @@ class ConnectorService(CommonService):
cls.model.tenant_id == tenant_id
).dicts())

+@classmethod
+def rebuild(cls, kb_id:str, connector_id: str, tenant_id:str):
+e, conn = cls.get_by_id(connector_id)
+if not e:
+return
+SyncLogsService.filter_delete([SyncLogs.connector_id==connector_id, SyncLogs.kb_id==kb_id])
+docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}", kb_id=kb_id)
+err = FileService.delete_docs([d.id for d in docs], tenant_id)
+SyncLogsService.schedule(connector_id, kb_id, reindex=True)
+return err


class SyncLogsService(CommonService):
model = SyncLogs

@classmethod
-def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15):
+def list_sync_tasks(cls, connector_id=None, page_number=None, items_per_page=15) -> Tuple[List[dict], int]:
fields = [
cls.model.id,
cls.model.connector_id,
cls.model.kb_id,
+cls.model.update_date,
cls.model.poll_range_start,
cls.model.poll_range_end,
cls.model.new_docs_indexed,
+cls.model.total_docs_indexed,
cls.model.error_msg,
+cls.model.full_exception_trace,
cls.model.error_count,
Connector.name,
Connector.source,
Connector.tenant_id,
Connector.timeout_secs,
Knowledgebase.name.alias("kb_name"),
+Knowledgebase.avatar.alias("kb_avatar"),
+Connector2Kb.auto_parse,
cls.model.from_beginning.alias("reindex"),
cls.model.status
]
@@ -105,10 +125,11 @@ class SyncLogsService(CommonService):
)

query = query.distinct().order_by(cls.model.update_time.desc())
+totbal = query.count()
if page_number:
query = query.paginate(page_number, items_per_page)

-return list(query.dicts())
+return list(query.dicts()), totbal

@classmethod
def start(cls, id, connector_id):
@@ -122,13 +143,21 @@ class SyncLogsService(CommonService):

@classmethod
def schedule(cls, connector_id, kb_id, poll_range_start=None, reindex=False, total_docs_indexed=0):
+try:
+if cls.model.select().where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).count() > 100:
+rm_ids = [m.id for m in cls.model.select(cls.model.id).where(cls.model.kb_id == kb_id, cls.model.connector_id == connector_id).order_by(cls.model.update_time.asc()).limit(70)]
+deleted = cls.model.delete().where(cls.model.id.in_(rm_ids)).execute()
+logging.info(f"[SyncLogService] Cleaned {deleted} old logs.")
+except Exception as e:
+logging.exception(e)

try:
e = cls.query(kb_id=kb_id, connector_id=connector_id, status=TaskStatus.SCHEDULE)
if e:
logging.warning(f"{kb_id}--{connector_id} has already had a scheduling sync task which is abnormal.")
return None
reindex = "1" if reindex else "0"
-ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDUL})
+ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDULE})
return cls.save(**{
"id": get_uuid(),
"kb_id": kb_id, "status": TaskStatus.SCHEDULE, "connector_id": connector_id,
@@ -145,7 +174,7 @@ class SyncLogsService(CommonService):
full_exception_trace=cls.model.full_exception_trace + str(e)
) \
.where(cls.model.id == task.id).execute()
-ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDUL})
+ConnectorService.update_by_id(connector_id, {"status": TaskStatus.SCHEDULE})

@classmethod
def increase_docs(cls, id, min_update, max_update, doc_num, err_msg="", error_count=0):
@@ -161,7 +190,7 @@ class SyncLogsService(CommonService):
.where(cls.model.id == id).execute()

@classmethod
-def duplicate_and_parse(cls, kb, docs, tenant_id, src):
+def duplicate_and_parse(cls, kb, docs, tenant_id, src, auto_parse=True):
if not docs:
return None

@@ -173,15 +202,17 @@ class SyncLogsService(CommonService):
return self.blob

errs = []
-files = [FileObj(filename=d["semantic_identifier"]+f".{d['extension']}", blob=d["blob"]) for d in docs]
+files = [FileObj(filename=d["semantic_identifier"]+(f"{d['extension']}" if d["semantic_identifier"][::-1].find(d['extension'][::-1])<0 else ""), blob=d["blob"]) for d in docs]
doc_ids = []
err, doc_blob_pairs = FileService.upload_document(kb, files, tenant_id, src)
errs.extend(err)
-if not err:
kb_table_num_map = {}
for doc, _ in doc_blob_pairs:
-DocumentService.run(tenant_id, doc, kb_table_num_map)
+doc_ids.append(doc["id"])
-doc_ids.append(doc["id"])
+if not auto_parse or auto_parse == "0":
+continue
+DocumentService.run(tenant_id, doc, kb_table_num_map)

return errs, doc_ids

@@ -197,43 +228,21 @@ class Connector2KbService(CommonService):
model = Connector2Kb

@classmethod
-def link_kb(cls, conn_id:str, kb_ids: list[str], tenant_id:str):
+def link_connectors(cls, kb_id:str, connectors: list[dict], tenant_id:str):
-arr = cls.query(connector_id=conn_id)
-old_kb_ids = [a.kb_id for a in arr]
-for kb_id in kb_ids:
-if kb_id in old_kb_ids:
-continue
-cls.save(**{
-"id": get_uuid(),
-"connector_id": conn_id,
-"kb_id": kb_id
-})
-SyncLogsService.schedule(conn_id, kb_id, reindex=True)

-errs = []
-e, conn = ConnectorService.get_by_id(conn_id)
-for kb_id in old_kb_ids:
-if kb_id in kb_ids:
-continue
-cls.filter_delete([cls.model.kb_id==kb_id, cls.model.connector_id==conn_id])
-SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status==TaskStatus.SCHEDULE], {"status": TaskStatus.CANCEL})
-docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
-err = FileService.delete_docs([d.id for d in docs], tenant_id)
-if err:
-errs.append(err)
-return "\n".join(errs)

-@classmethod
-def link_connectors(cls, kb_id:str, connector_ids: list[str], tenant_id:str):
arr = cls.query(kb_id=kb_id)
old_conn_ids = [a.connector_id for a in arr]
-for conn_id in connector_ids:
+connector_ids = []
+for conn in connectors:
+conn_id = conn["id"]
+connector_ids.append(conn_id)
if conn_id in old_conn_ids:
+cls.filter_update([cls.model.connector_id==conn_id, cls.model.kb_id==kb_id], {"auto_parse": conn.get("auto_parse", "1")})
continue
cls.save(**{
"id": get_uuid(),
"connector_id": conn_id,
-"kb_id": kb_id
+"kb_id": kb_id,
+"auto_parse": conn.get("auto_parse", "1")
})
SyncLogsService.schedule(conn_id, kb_id, reindex=True)

@@ -243,11 +252,15 @@ class Connector2KbService(CommonService):
continue
cls.filter_delete([cls.model.kb_id==kb_id, cls.model.connector_id==conn_id])
e, conn = ConnectorService.get_by_id(conn_id)
-SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status==TaskStatus.SCHEDULE], {"status": TaskStatus.CANCEL})
+if not e:
-docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
+continue
-err = FileService.delete_docs([d.id for d in docs], tenant_id)
+#SyncLogsService.filter_delete([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id])
-if err:
+# Do not delete docs while unlinking.
-errs.append(err)
+SyncLogsService.filter_update([SyncLogs.connector_id==conn_id, SyncLogs.kb_id==kb_id, SyncLogs.status.in_([TaskStatus.SCHEDULE, TaskStatus.RUNNING])], {"status": TaskStatus.CANCEL})
+#docs = DocumentService.query(source_type=f"{conn.source}/{conn.id}")
+#err = FileService.delete_docs([d.id for d in docs], tenant_id)
+#if err:
+# errs.append(err)
return "\n".join(errs)

@classmethod
@@ -256,6 +269,7 @@ class Connector2KbService(CommonService):
Connector.id,
Connector.source,
Connector.name,
+cls.model.auto_parse,
Connector.status
]
return list(cls.model.select(*fields)\
@@ -265,3 +279,5 @@ class Connector2KbService(CommonService):
).dicts()
)


@@ -25,7 +25,6 @@ import trio
from langfuse import Langfuse
from peewee import fn
from agentic_reasoning import DeepResearcher
-from api import settings
from common.constants import LLMType, ParserType, StatusEnum
from api.db.db_models import DB, Dialog
from api.db.services.common_service import CommonService
@@ -44,7 +43,7 @@ from rag.prompts.generator import chunks_format, citation_prompt, cross_language
from common.token_utils import num_tokens_from_string
from rag.utils.tavily_conn import Tavily
from common.string_utils import remove_redundant_spaces
-from common import globals
+from common import settings


class DialogService(CommonService):
@@ -373,7 +372,7 @@ def chat(dialog, messages, stream=True, **kwargs):
chat_mdl.bind_tools(toolcall_session, tools)
bind_models_ts = timer()

-retriever = globals.retriever
+retriever = settings.retriever
questions = [m["content"] for m in messages if m["role"] == "user"][-3:]
attachments = kwargs["doc_ids"].split(",") if "doc_ids" in kwargs else []
if "doc_ids" in messages[-1]:
@@ -620,7 +619,12 @@ def chat(dialog, messages, stream=True, **kwargs):


def use_sql(question, field_map, tenant_id, chat_mdl, quota=True, kb_ids=None):
-sys_prompt = "You are a Database Administrator. You need to check the fields of the following tables based on the user's list of questions and write the SQL corresponding to the last question."
+sys_prompt = """
+You are a Database Administrator. You need to check the fields of the following tables based on the user's list of questions and write the SQL corresponding to the last question.
+Ensure that:
+1. Field names should not start with a digit. If any field name starts with a digit, use double quotes around it.
+2. Write only the SQL, no explanations or additional text.
+"""
user_prompt = """
Table name: {};
Table of database fields are as follows:
@@ -641,6 +645,7 @@ Please write the SQL, only SQL, without any other explanations or text.
sql = re.sub(r".*select ", "select ", sql.lower())
sql = re.sub(r" +", " ", sql)
sql = re.sub(r"([;;]|```).*", "", sql)
+sql = re.sub(r"&", "and", sql)
if sql[: len("select ")] != "select ":
return None, None
if not re.search(r"((sum|avg|max|min)\(|group by )", sql.lower()):
@@ -665,7 +670,7 @@ Please write the SQL, only SQL, without any other explanations or text.

logging.debug(f"{question} get SQL(refined): {sql}")
tried_times += 1
-return globals.retriever.sql_retrieval(sql, format="json"), sql
+return settings.retriever.sql_retrieval(sql, format="json"), sql

tbl, sql = get_table()
if tbl is None:
@@ -759,7 +764,7 @@ def ask(question, kb_ids, tenant_id, chat_llm_name=None, search_config={}):
embedding_list = list(set([kb.embd_id for kb in kbs]))

is_knowledge_graph = all([kb.parser_id == ParserType.KG for kb in kbs])
-retriever = globals.retriever if not is_knowledge_graph else settings.kg_retriever
+retriever = settings.retriever if not is_knowledge_graph else settings.kg_retriever

embd_mdl = LLMBundle(tenant_id, LLMType.EMBEDDING, embedding_list[0])
chat_mdl = LLMBundle(tenant_id, LLMType.CHAT, chat_llm_name)
@@ -855,7 +860,7 @@ def gen_mindmap(question, kb_ids, tenant_id, search_config={}):
if not doc_ids:
doc_ids = None

-ranks = globals.retriever.retrieval(
+ranks = settings.retriever.retrieval(
question=question,
embd_mdl=embd_mdl,
tenant_ids=tenant_ids,

@ -27,7 +27,7 @@ import xxhash
|
|||||||
from peewee import fn, Case, JOIN
|
from peewee import fn, Case, JOIN
|
||||||
|
|
||||||
from api.constants import IMG_BASE64_PREFIX, FILE_NAME_LEN_LIMIT
|
from api.constants import IMG_BASE64_PREFIX, FILE_NAME_LEN_LIMIT
|
||||||
from api.db import FileType, UserTenantRole, CanvasCategory
|
from api.db import PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES, FileType, UserTenantRole, CanvasCategory
|
||||||
from api.db.db_models import DB, Document, Knowledgebase, Task, Tenant, UserTenant, File2Document, File, UserCanvas, \
|
from api.db.db_models import DB, Document, Knowledgebase, Task, Tenant, UserTenant, File2Document, File, UserCanvas, \
|
||||||
User
|
User
|
||||||
from api.db.db_utils import bulk_insert_into_db
|
from api.db.db_utils import bulk_insert_into_db
|
||||||
@ -35,13 +35,11 @@ from api.db.services.common_service import CommonService
|
|||||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||||
from common.misc_utils import get_uuid
|
from common.misc_utils import get_uuid
|
||||||
from common.time_utils import current_timestamp, get_format_time
|
 from common.time_utils import current_timestamp, get_format_time
-from common.constants import LLMType, ParserType, StatusEnum, TaskStatus
+from common.constants import LLMType, ParserType, StatusEnum, TaskStatus, SVR_CONSUMER_GROUP_NAME
 from rag.nlp import rag_tokenizer, search
-from rag.settings import get_svr_queue_name, SVR_CONSUMER_GROUP_NAME
 from rag.utils.redis_conn import REDIS_CONN
-from rag.utils.storage_factory import STORAGE_IMPL
 from rag.utils.doc_store_conn import OrderByExpr
-from common import globals
+from common import settings
 
 class DocumentService(CommonService):
     model = Document
@@ -308,33 +306,33 @@ class DocumentService(CommonService):
             page_size = 1000
             all_chunk_ids = []
             while True:
-                chunks = globals.docStoreConn.search(["img_id"], [], {"doc_id": doc.id}, [], OrderByExpr(),
+                chunks = settings.docStoreConn.search(["img_id"], [], {"doc_id": doc.id}, [], OrderByExpr(),
                                                      page * page_size, page_size, search.index_name(tenant_id),
                                                      [doc.kb_id])
-                chunk_ids = globals.docStoreConn.getChunkIds(chunks)
+                chunk_ids = settings.docStoreConn.getChunkIds(chunks)
                 if not chunk_ids:
                     break
                 all_chunk_ids.extend(chunk_ids)
                 page += 1
             for cid in all_chunk_ids:
-                if STORAGE_IMPL.obj_exist(doc.kb_id, cid):
-                    STORAGE_IMPL.rm(doc.kb_id, cid)
+                if settings.STORAGE_IMPL.obj_exist(doc.kb_id, cid):
+                    settings.STORAGE_IMPL.rm(doc.kb_id, cid)
             if doc.thumbnail and not doc.thumbnail.startswith(IMG_BASE64_PREFIX):
-                if STORAGE_IMPL.obj_exist(doc.kb_id, doc.thumbnail):
-                    STORAGE_IMPL.rm(doc.kb_id, doc.thumbnail)
-            globals.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)
+                if settings.STORAGE_IMPL.obj_exist(doc.kb_id, doc.thumbnail):
+                    settings.STORAGE_IMPL.rm(doc.kb_id, doc.thumbnail)
+            settings.docStoreConn.delete({"doc_id": doc.id}, search.index_name(tenant_id), doc.kb_id)
 
-            graph_source = globals.docStoreConn.getFields(
-                globals.docStoreConn.search(["source_id"], [], {"kb_id": doc.kb_id, "knowledge_graph_kwd": ["graph"]}, [], OrderByExpr(), 0, 1, search.index_name(tenant_id), [doc.kb_id]), ["source_id"]
+            graph_source = settings.docStoreConn.getFields(
+                settings.docStoreConn.search(["source_id"], [], {"kb_id": doc.kb_id, "knowledge_graph_kwd": ["graph"]}, [], OrderByExpr(), 0, 1, search.index_name(tenant_id), [doc.kb_id]), ["source_id"]
             )
             if len(graph_source) > 0 and doc.id in list(graph_source.values())[0]["source_id"]:
-                globals.docStoreConn.update({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["entity", "relation", "graph", "subgraph", "community_report"], "source_id": doc.id},
+                settings.docStoreConn.update({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["entity", "relation", "graph", "subgraph", "community_report"], "source_id": doc.id},
                                             {"remove": {"source_id": doc.id}},
                                             search.index_name(tenant_id), doc.kb_id)
-                globals.docStoreConn.update({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["graph"]},
+                settings.docStoreConn.update({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["graph"]},
                                             {"removed_kwd": "Y"},
                                             search.index_name(tenant_id), doc.kb_id)
-                globals.docStoreConn.delete({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["entity", "relation", "graph", "subgraph", "community_report"], "must_not": {"exists": "source_id"}},
+                settings.docStoreConn.delete({"kb_id": doc.kb_id, "knowledge_graph_kwd": ["entity", "relation", "graph", "subgraph", "community_report"], "must_not": {"exists": "source_id"}},
                                             search.index_name(tenant_id), doc.kb_id)
         except Exception:
             pass
@@ -374,12 +372,16 @@ class DocumentService(CommonService):
     def get_unfinished_docs(cls):
         fields = [cls.model.id, cls.model.process_begin_at, cls.model.parser_config, cls.model.progress_msg,
                   cls.model.run, cls.model.parser_id]
+        unfinished_task_query = Task.select(Task.doc_id).where(
+            (Task.progress >= 0) & (Task.progress < 1)
+        )
+
         docs = cls.model.select(*fields) \
             .where(
                 cls.model.status == StatusEnum.VALID.value,
                 ~(cls.model.type == FileType.VIRTUAL.value),
-                cls.model.progress < 1,
-                cls.model.progress > 0)
+                (((cls.model.progress < 1) & (cls.model.progress > 0)) |
+                 (cls.model.id.in_(unfinished_task_query))))  # including unfinished tasks like GraphRAG, RAPTOR and Mindmap
         return list(docs.dicts())
 
     @classmethod
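Note: the hunk above widens get_unfinished_docs() so that a document whose own progress already reached 1 is still reported while one of its Task rows (a GraphRAG, RAPTOR or Mindmap follow-up) is unfinished. A minimal, self-contained sketch of the same peewee pattern, using stand-in Doc/Job models rather than RAGFlow's real Document/Task:

# Sketch only: Doc and Job stand in for the real models; the data is made up.
from peewee import SqliteDatabase, Model, CharField, IntegerField, FloatField

db = SqliteDatabase(":memory:")

class Doc(Model):
    name = CharField()
    progress = FloatField(default=0.0)

    class Meta:
        database = db

class Job(Model):
    doc_id = IntegerField()
    progress = FloatField(default=0.0)

    class Meta:
        database = db

db.create_tables([Doc, Job])

# Docs that are mid-parse, plus docs referenced by any unfinished Job row.
unfinished_jobs = Job.select(Job.doc_id).where((Job.progress >= 0) & (Job.progress < 1))
docs = Doc.select().where(
    ((Doc.progress > 0) & (Doc.progress < 1)) | (Doc.id.in_(unfinished_jobs))
)
print(list(docs.dicts()))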
@@ -621,12 +623,17 @@ class DocumentService(CommonService):
 
     @classmethod
     @DB.connection_context()
-    def begin2parse(cls, docid):
-        cls.update_by_id(
-            docid, {"progress": random.random() * 1 / 100.,
-                    "progress_msg": "Task is queued...",
-                    "process_begin_at": get_format_time()
-                    })
+    def begin2parse(cls, doc_id, keep_progress=False):
+        info = {
+            "progress_msg": "Task is queued...",
+            "process_begin_at": get_format_time(),
+        }
+        if not keep_progress:
+            info["progress"] = random.random() * 1 / 100.
+            info["run"] = TaskStatus.RUNNING.value
+        # keep the doc in DONE state when keep_progress=True for GraphRAG, RAPTOR and Mindmap tasks
+
+        cls.update_by_id(doc_id, info)
 
     @classmethod
     @DB.connection_context()
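Note: begin2parse() now accepts keep_progress so that follow-up tasks can re-queue a document without resetting its finished progress. A hedged usage sketch (the document id is made up):

# Fresh parse: progress resets to a small random value and run becomes RUNNING.
DocumentService.begin2parse("doc_123")

# GraphRAG/RAPTOR/Mindmap follow-up: only the queue message and the begin
# timestamp are touched; the document stays in its DONE state.
DocumentService.begin2parse("doc_123", keep_progress=True)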
@@ -685,8 +692,13 @@ class DocumentService(CommonService):
                 bad = 0
                 e, doc = DocumentService.get_by_id(d["id"])
                 status = doc.run  # TaskStatus.RUNNING.value
+                doc_progress = doc.progress if doc and doc.progress else 0.0
+                special_task_running = False
                 priority = 0
                 for t in tsks:
+                    task_type = (t.task_type or "").lower()
+                    if task_type in PIPELINE_SPECIAL_PROGRESS_FREEZE_TASK_TYPES:
+                        special_task_running = True
                     if 0 <= t.progress < 1:
                         finished = False
                     if t.progress == -1:
@@ -703,13 +715,15 @@ class DocumentService(CommonService):
                         prg = 1
                         status = TaskStatus.DONE.value
 
+                # only for special task and parsed docs and unfinised
+                freeze_progress = special_task_running and doc_progress >= 1 and not finished
                 msg = "\n".join(sorted(msg))
                 info = {
                     "process_duration": datetime.timestamp(
                         datetime.now()) -
                         d["process_begin_at"].timestamp(),
                     "run": status}
-                if prg != 0:
+                if prg != 0 and not freeze_progress:
                     info["progress"] = prg
                 if msg:
                     info["progress_msg"] = msg
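Note: with the two hunks above, update_progress() freezes the aggregated progress of an already-parsed document while a special task type is still running, so a GraphRAG or RAPTOR run cannot pull a finished document back below 100%. The guard boils down to one boolean; the values below are illustrative:

special_task_running = True   # a GraphRAG/RAPTOR/Mindmap task is in flight
doc_progress = 1.0            # the document itself finished parsing
finished = False              # the special task has not completed yet
prg, info = 0.4, {}

freeze_progress = special_task_running and doc_progress >= 1 and not finished
if prg != 0 and not freeze_progress:
    info["progress"] = prg    # skipped here, so the stored progress stays at 1.0
print(info)                   # {}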
@@ -756,6 +770,14 @@ class DocumentService(CommonService):
             .where((cls.model.kb_id == kb_id) & (cls.model.run == TaskStatus.CANCEL))
             .scalar()
         )
+        downloaded = (
+            cls.model.select(fn.COUNT(1))
+            .where(
+                cls.model.kb_id == kb_id,
+                cls.model.source_type != "local"
+            )
+            .scalar()
+        )
+
         row = (
             cls.model.select(
@@ -792,6 +814,7 @@ class DocumentService(CommonService):
             "finished": int(row["finished"]),
             "failed": int(row["failed"]),
             "cancelled": int(cancelled),
+            "downloaded": int(downloaded)
         }
 
     @classmethod
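Note: the statistics hunks above add a "downloaded" counter: documents whose source_type is not "local", i.e. files fetched by a connector rather than uploaded by hand. The resulting summary dict has the shape sketched here with made-up counts:

row = {"finished": 12, "failed": 1}
cancelled, downloaded = 2, 5   # downloaded = docs with source_type != "local"
stats = {
    "finished": int(row["finished"]),
    "failed": int(row["failed"]),
    "cancelled": int(cancelled),
    "downloaded": int(downloaded),
}
print(stats)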
@@ -838,7 +861,7 @@ def queue_raptor_o_graphrag_tasks(sample_doc_id, ty, priority, fake_doc_id="", d
         "to_page": 100000000,
         "task_type": ty,
         "progress_msg": datetime.now().strftime("%H:%M:%S") + " created task " + ty,
-        "begin_at": datetime.now(),
+        "begin_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
     }
 
     task = new_task()
@@ -850,13 +873,13 @@ def queue_raptor_o_graphrag_tasks(sample_doc_id, ty, priority, fake_doc_id="", d
 
     task["doc_id"] = fake_doc_id
     task["doc_ids"] = doc_ids
-    DocumentService.begin2parse(sample_doc_id["id"])
-    assert REDIS_CONN.queue_product(get_svr_queue_name(priority), message=task), "Can't access Redis. Please check the Redis' status."
+    DocumentService.begin2parse(sample_doc_id["id"], keep_progress=True)
+    assert REDIS_CONN.queue_product(settings.get_svr_queue_name(priority), message=task), "Can't access Redis. Please check the Redis' status."
     return task["id"]
 
 
 def get_queue_length(priority):
-    group_info = REDIS_CONN.queue_info(get_svr_queue_name(priority), SVR_CONSUMER_GROUP_NAME)
+    group_info = REDIS_CONN.queue_info(settings.get_svr_queue_name(priority), SVR_CONSUMER_GROUP_NAME)
     if not group_info:
         return 0
     return int(group_info.get("lag", 0) or 0)
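Note: queue naming now goes through the consolidated settings module (settings.get_svr_queue_name) and SVR_CONSUMER_GROUP_NAME moves to common.constants. get_queue_length() reads the Redis stream consumer group's "lag" field, which counts entries not yet delivered to the group; a defensive read equivalent to the hunk above:

def pending_messages(group_info: dict | None) -> int:
    # group_info is whatever REDIS_CONN.queue_info() returned; it can be None,
    # and the "lag" key may be missing or empty, so fall back to 0 in both cases.
    if not group_info:
        return 0
    return int(group_info.get("lag", 0) or 0)

print(pending_messages(None))          # 0
print(pending_messages({"lag": "7"}))  # 7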
@@ -938,7 +961,7 @@ def doc_upload_and_parse(conversation_id, file_objs, user_id):
             else:
                 d["image"].save(output_buffer, format='JPEG')
 
-            STORAGE_IMPL.put(kb.id, d["id"], output_buffer.getvalue())
+            settings.STORAGE_IMPL.put(kb.id, d["id"], output_buffer.getvalue())
             d["img_id"] = "{}-{}".format(kb.id, d["id"])
             d.pop("image", None)
             docs.append(d)
@@ -995,13 +1018,12 @@ def doc_upload_and_parse(conversation_id, file_objs, user_id):
         d["q_%d_vec" % len(v)] = v
     for b in range(0, len(cks), es_bulk_size):
         if try_create_idx:
-            if not globals.docStoreConn.indexExist(idxnm, kb_id):
-                globals.docStoreConn.createIdx(idxnm, kb_id, len(vects[0]))
+            if not settings.docStoreConn.indexExist(idxnm, kb_id):
+                settings.docStoreConn.createIdx(idxnm, kb_id, len(vects[0]))
             try_create_idx = False
-        globals.docStoreConn.insert(cks[b:b + es_bulk_size], idxnm, kb_id)
+        settings.docStoreConn.insert(cks[b:b + es_bulk_size], idxnm, kb_id)
 
     DocumentService.increment_chunk_num(
         doc_id, kb.id, token_counts[doc_id], chunk_counts[doc_id], 0)
 
     return [d["id"] for d, _ in files]
 
@@ -33,7 +33,7 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
 from api.db.services.task_service import TaskService
 from api.utils.file_utils import filename_type, read_potential_broken_pdf, thumbnail_img
 from rag.llm.cv_model import GptV4
-from rag.utils.storage_factory import STORAGE_IMPL
+from common import settings
 
 
 class FileService(CommonService):
@@ -440,13 +440,13 @@ class FileService(CommonService):
             raise RuntimeError("This type of file has not been supported yet!")
 
         location = filename
-        while STORAGE_IMPL.obj_exist(kb.id, location):
+        while settings.STORAGE_IMPL.obj_exist(kb.id, location):
             location += "_"
 
         blob = file.read()
         if filetype == FileType.PDF.value:
             blob = read_potential_broken_pdf(blob)
-        STORAGE_IMPL.put(kb.id, location, blob)
+        settings.STORAGE_IMPL.put(kb.id, location, blob)
 
         doc_id = get_uuid()
 
@@ -454,7 +454,7 @@ class FileService(CommonService):
         thumbnail_location = ""
         if img is not None:
             thumbnail_location = f"thumbnail_{doc_id}.png"
-            STORAGE_IMPL.put(kb.id, thumbnail_location, img)
+            settings.STORAGE_IMPL.put(kb.id, thumbnail_location, img)
 
         doc = {
             "id": doc_id,
@@ -534,12 +534,12 @@ class FileService(CommonService):
     @staticmethod
     def get_blob(user_id, location):
         bname = f"{user_id}-downloads"
-        return STORAGE_IMPL.get(bname, location)
+        return settings.STORAGE_IMPL.get(bname, location)
 
     @staticmethod
     def put_blob(user_id, location, blob):
         bname = f"{user_id}-downloads"
-        return STORAGE_IMPL.put(bname, location, blob)
+        return settings.STORAGE_IMPL.put(bname, location, blob)
 
     @classmethod
     @DB.connection_context()
@@ -570,7 +570,7 @@ class FileService(CommonService):
         deleted_file_count = FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
         File2DocumentService.delete_by_document_id(doc_id)
         if deleted_file_count > 0:
-            STORAGE_IMPL.rm(b, n)
+            settings.STORAGE_IMPL.rm(b, n)
 
         doc_parser = doc.parser_id
         if doc_parser == ParserType.TABLE:
@@ -201,6 +201,7 @@ class KnowledgebaseService(CommonService):
         # will get all permitted kb, be cautious.
         fields = [
             cls.model.name,
+            cls.model.avatar,
             cls.model.language,
             cls.model.permission,
             cls.model.doc_num,
@@ -29,15 +29,14 @@ class LLMService(CommonService):
 
 
 def get_init_tenant_llm(user_id):
-    from api import settings
-    from common import globals
+    from common import settings
     tenant_llm = []
 
     seen = set()
     factory_configs = []
     for factory_config in [
         settings.CHAT_CFG,
-        globals.EMBEDDING_CFG,
+        settings.EMBEDDING_CFG,
         settings.ASR_CFG,
         settings.IMAGE2TEXT_CFG,
         settings.RERANK_CFG,
@@ -159,7 +159,7 @@ class PipelineOperationLogService(CommonService):
             document_name=document.name,
             document_suffix=document.suffix,
             document_type=document.type,
-            source_from="",  # TODO: add in the future
+            source_from=document.source_type.split("/")[0],
             progress=document.progress,
             progress_msg=document.progress_msg,
             process_begin_at=document.process_begin_at,
@@ -31,10 +31,8 @@ from common.misc_utils import get_uuid
 from common.time_utils import current_timestamp
 from common.constants import StatusEnum, TaskStatus
 from deepdoc.parser.excel_parser import RAGFlowExcelParser
-from rag.settings import get_svr_queue_name
-from rag.utils.storage_factory import STORAGE_IMPL
 from rag.utils.redis_conn import REDIS_CONN
-from common import globals
+from common import settings
 from rag.nlp import search
 
 CANVAS_DEBUG_DOC_ID = "dataflow_x"
@@ -359,7 +357,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
     parse_task_array = []
 
     if doc["type"] == FileType.PDF.value:
-        file_bin = STORAGE_IMPL.get(bucket, name)
+        file_bin = settings.STORAGE_IMPL.get(bucket, name)
         do_layout = doc["parser_config"].get("layout_recognize", "DeepDOC")
         pages = PdfParser.total_page_number(doc["name"], file_bin)
         if pages is None:
@@ -381,7 +379,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
             parse_task_array.append(task)
 
     elif doc["parser_id"] == "table":
-        file_bin = STORAGE_IMPL.get(bucket, name)
+        file_bin = settings.STORAGE_IMPL.get(bucket, name)
         rn = RAGFlowExcelParser.row_number(doc["name"], file_bin)
         for i in range(0, rn, 3000):
             task = new_task()
@@ -418,7 +416,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
             if pre_task["chunk_ids"]:
                 pre_chunk_ids.extend(pre_task["chunk_ids"].split())
         if pre_chunk_ids:
-            globals.docStoreConn.delete({"id": pre_chunk_ids}, search.index_name(chunking_config["tenant_id"]),
+            settings.docStoreConn.delete({"id": pre_chunk_ids}, search.index_name(chunking_config["tenant_id"]),
                                         chunking_config["kb_id"])
         DocumentService.update_by_id(doc["id"], {"chunk_num": ck_num})
 
@@ -428,7 +426,7 @@ def queue_tasks(doc: dict, bucket: str, name: str, priority: int):
     unfinished_task_array = [task for task in parse_task_array if task["progress"] < 1.0]
     for unfinished_task in unfinished_task_array:
         assert REDIS_CONN.queue_product(
-            get_svr_queue_name(priority), message=unfinished_task
+            settings.get_svr_queue_name(priority), message=unfinished_task
        ), "Can't access Redis. Please check the Redis' status."
 
 
@@ -518,7 +516,7 @@ def queue_dataflow(tenant_id:str, flow_id:str, task_id:str, doc_id:str=CANVAS_DE
     task["file"] = file
 
     if not REDIS_CONN.queue_product(
-        get_svr_queue_name(priority), message=task
+        settings.get_svr_queue_name(priority), message=task
     ):
         return False, "Can't access Redis. Please check the Redis' status."
 
@@ -16,8 +16,7 @@
 import os
 import logging
 from langfuse import Langfuse
-from api import settings
-from common import globals
+from common import settings
 from common.constants import LLMType
 from api.db.db_models import DB, LLMFactories, TenantLLM
 from api.db.services.common_service import CommonService
@@ -115,7 +114,7 @@ class TenantLLMService(CommonService):
         if model_config:
             model_config = model_config.to_dict()
         elif llm_type == LLMType.EMBEDDING and fid == 'Builtin' and "tei-" in os.getenv("COMPOSE_PROFILES", "") and mdlnm == os.getenv('TEI_MODEL', ''):
-            embedding_cfg = globals.EMBEDDING_CFG
+            embedding_cfg = settings.EMBEDDING_CFG
             model_config = {"llm_factory": 'Builtin', "api_key": embedding_cfg["api_key"], "llm_name": mdlnm, "api_base": embedding_cfg["base_url"]}
         else:
             raise LookupError(f"Model({mdlnm}@{fid}) not authorized")
@@ -27,7 +27,7 @@ from api.db.services.common_service import CommonService
 from common.misc_utils import get_uuid
 from common.time_utils import current_timestamp, datetime_format
 from common.constants import StatusEnum
-from common import globals
+from common import settings
 
 
 class UserService(CommonService):
@@ -221,7 +221,7 @@ class TenantService(CommonService):
     @DB.connection_context()
     def user_gateway(cls, tenant_id):
         hash_obj = hashlib.sha256(tenant_id.encode("utf-8"))
-        return int(hash_obj.hexdigest(), 16)%len(globals.MINIO)
+        return int(hash_obj.hexdigest(), 16)%len(settings.MINIO)
 
 
 class UserTenantService(CommonService):
@@ -32,17 +32,15 @@ import threading
 import uuid
 
 from werkzeug.serving import run_simple
-from api import settings
 from api.apps import app, smtp_mail_server
 from api.db.runtime_config import RuntimeConfig
 from api.db.services.document_service import DocumentService
 from common.file_utils import get_project_base_directory
+from common import settings
 from api.db.db_models import init_database_tables as init_web_db
 from api.db.init_data import init_web_data
-from api.versions import get_ragflow_version
+from common.versions import get_ragflow_version
 from common.config_utils import show_configs
-from rag.settings import print_rag_settings
 from rag.utils.mcp_tool_call_conn import shutdown_all_mcp_sessions
 from rag.utils.redis_conn import RedisDistributedLock
 
@@ -92,7 +90,7 @@ if __name__ == '__main__':
     )
     show_configs()
     settings.init_settings()
-    print_rag_settings()
+    settings.print_rag_settings()
 
     if RAGFLOW_DEBUGPY_LISTEN > 0:
         logging.info(f"debugpy listen on {RAGFLOW_DEBUGPY_LISTEN}")
api/settings.py (223 changed lines)
@@ -13,226 +13,3 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import json
-import os
-import secrets
-from datetime import date
-
-import rag.utils
-import rag.utils.es_conn
-import rag.utils.infinity_conn
-import rag.utils.opensearch_conn
-from api.constants import RAG_FLOW_SERVICE_NAME
-from common.config_utils import decrypt_database_config, get_base_config
-from common.file_utils import get_project_base_directory
-from common import globals
-from rag.nlp import search
-
-LLM = None
-LLM_FACTORY = None
-LLM_BASE_URL = None
-CHAT_MDL = ""
-# EMBEDDING_MDL = "" has been moved to common/globals.py
-RERANK_MDL = ""
-ASR_MDL = ""
-IMAGE2TEXT_MDL = ""
-CHAT_CFG = ""
-# EMBEDDING_CFG = "" has been moved to common/globals.py
-RERANK_CFG = ""
-ASR_CFG = ""
-IMAGE2TEXT_CFG = ""
-API_KEY = None
-PARSERS = None
-HOST_IP = None
-HOST_PORT = None
-SECRET_KEY = None
-FACTORY_LLM_INFOS = None
-ALLOWED_LLM_FACTORIES = None
-
-DATABASE_TYPE = os.getenv("DB_TYPE", "mysql")
-DATABASE = decrypt_database_config(name=DATABASE_TYPE)
-
-# authentication
-AUTHENTICATION_CONF = None
-
-# client
-CLIENT_AUTHENTICATION = None
-HTTP_APP_KEY = None
-GITHUB_OAUTH = None
-FEISHU_OAUTH = None
-OAUTH_CONFIG = None
-# DOC_ENGINE = None has been moved to common/globals.py
-# docStoreConn = None has been moved to common/globals.py
-
-#retriever = None has been moved to common/globals.py
-kg_retriever = None
-
-# user registration switch
-REGISTER_ENABLED = 1
-
-
-# sandbox-executor-manager
-SANDBOX_ENABLED = 0
-SANDBOX_HOST = None
-STRONG_TEST_COUNT = int(os.environ.get("STRONG_TEST_COUNT", "8"))
-
-SMTP_CONF = None
-MAIL_SERVER = ""
-MAIL_PORT = 000
-MAIL_USE_SSL = True
-MAIL_USE_TLS = False
-MAIL_USERNAME = ""
-MAIL_PASSWORD = ""
-MAIL_DEFAULT_SENDER = ()
-MAIL_FRONTEND_URL = ""
-
-
-def get_or_create_secret_key():
-    secret_key = os.environ.get("RAGFLOW_SECRET_KEY")
-    if secret_key and len(secret_key) >= 32:
-        return secret_key
-
-    # Check if there's a configured secret key
-    configured_key = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("secret_key")
-    if configured_key and configured_key != str(date.today()) and len(configured_key) >= 32:
-        return configured_key
-
-    # Generate a new secure key and warn about it
-    import logging
-
-    new_key = secrets.token_hex(32)
-    logging.warning(f"SECURITY WARNING: Using auto-generated SECRET_KEY. Generated key: {new_key}")
-    return new_key
-
-
-def init_settings():
-    global LLM, LLM_FACTORY, LLM_BASE_URL, DATABASE_TYPE, DATABASE, FACTORY_LLM_INFOS, REGISTER_ENABLED, ALLOWED_LLM_FACTORIES
-    DATABASE_TYPE = os.getenv("DB_TYPE", "mysql")
-    DATABASE = decrypt_database_config(name=DATABASE_TYPE)
-    LLM = get_base_config("user_default_llm", {}) or {}
-    LLM_DEFAULT_MODELS = LLM.get("default_models", {}) or {}
-    LLM_FACTORY = LLM.get("factory", "") or ""
-    LLM_BASE_URL = LLM.get("base_url", "") or ""
-    ALLOWED_LLM_FACTORIES = LLM.get("allowed_factories", None)
-    try:
-        REGISTER_ENABLED = int(os.environ.get("REGISTER_ENABLED", "1"))
-    except Exception:
-        pass
-
-    try:
-        with open(os.path.join(get_project_base_directory(), "conf", "llm_factories.json"), "r") as f:
-            FACTORY_LLM_INFOS = json.load(f)["factory_llm_infos"]
-    except Exception:
-        FACTORY_LLM_INFOS = []
-
-    global CHAT_MDL, RERANK_MDL, ASR_MDL, IMAGE2TEXT_MDL
-    global CHAT_CFG, RERANK_CFG, ASR_CFG, IMAGE2TEXT_CFG
-
-    global API_KEY, PARSERS, HOST_IP, HOST_PORT, SECRET_KEY
-    API_KEY = LLM.get("api_key")
-    PARSERS = LLM.get(
-        "parsers", "naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One,audio:Audio,email:Email,tag:Tag"
-    )
-
-    chat_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("chat_model", CHAT_MDL))
-    embedding_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("embedding_model", globals.EMBEDDING_MDL))
-    rerank_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("rerank_model", RERANK_MDL))
-    asr_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("asr_model", ASR_MDL))
-    image2text_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("image2text_model", IMAGE2TEXT_MDL))
-
-    CHAT_CFG = _resolve_per_model_config(chat_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-    globals.EMBEDDING_CFG = _resolve_per_model_config(embedding_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-    RERANK_CFG = _resolve_per_model_config(rerank_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-    ASR_CFG = _resolve_per_model_config(asr_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-    IMAGE2TEXT_CFG = _resolve_per_model_config(image2text_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
-
-    CHAT_MDL = CHAT_CFG.get("model", "") or ""
-    globals.EMBEDDING_MDL = os.getenv("TEI_MODEL", "BAAI/bge-small-en-v1.5") if "tei-" in os.getenv("COMPOSE_PROFILES", "") else ""
-    RERANK_MDL = RERANK_CFG.get("model", "") or ""
-    ASR_MDL = ASR_CFG.get("model", "") or ""
-    IMAGE2TEXT_MDL = IMAGE2TEXT_CFG.get("model", "") or ""
-
-    HOST_IP = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("host", "127.0.0.1")
-    HOST_PORT = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("http_port")
-
-    SECRET_KEY = get_or_create_secret_key()
-
-    global AUTHENTICATION_CONF, CLIENT_AUTHENTICATION, HTTP_APP_KEY, GITHUB_OAUTH, FEISHU_OAUTH, OAUTH_CONFIG
-    # authentication
-    AUTHENTICATION_CONF = get_base_config("authentication", {})
-
-    # client
-    CLIENT_AUTHENTICATION = AUTHENTICATION_CONF.get("client", {}).get("switch", False)
-    HTTP_APP_KEY = AUTHENTICATION_CONF.get("client", {}).get("http_app_key")
-    GITHUB_OAUTH = get_base_config("oauth", {}).get("github")
-    FEISHU_OAUTH = get_base_config("oauth", {}).get("feishu")
-
-    OAUTH_CONFIG = get_base_config("oauth", {})
-
-    global kg_retriever
-    globals.DOC_ENGINE = os.environ.get("DOC_ENGINE", "elasticsearch")
-    # globals.DOC_ENGINE = os.environ.get('DOC_ENGINE', "opensearch")
-    lower_case_doc_engine = globals.DOC_ENGINE.lower()
-    if lower_case_doc_engine == "elasticsearch":
-        globals.docStoreConn = rag.utils.es_conn.ESConnection()
-    elif lower_case_doc_engine == "infinity":
-        globals.docStoreConn = rag.utils.infinity_conn.InfinityConnection()
-    elif lower_case_doc_engine == "opensearch":
-        globals.docStoreConn = rag.utils.opensearch_conn.OSConnection()
-    else:
-        raise Exception(f"Not supported doc engine: {globals.DOC_ENGINE}")
-
-    globals.retriever = search.Dealer(globals.docStoreConn)
-    from graphrag import search as kg_search
-
-    kg_retriever = kg_search.KGSearch(globals.docStoreConn)
-
-    if int(os.environ.get("SANDBOX_ENABLED", "0")):
-        global SANDBOX_HOST
-        SANDBOX_HOST = os.environ.get("SANDBOX_HOST", "sandbox-executor-manager")
-
-    global SMTP_CONF, MAIL_SERVER, MAIL_PORT, MAIL_USE_SSL, MAIL_USE_TLS
-    global MAIL_USERNAME, MAIL_PASSWORD, MAIL_DEFAULT_SENDER, MAIL_FRONTEND_URL
-    SMTP_CONF = get_base_config("smtp", {})
-
-    MAIL_SERVER = SMTP_CONF.get("mail_server", "")
-    MAIL_PORT = SMTP_CONF.get("mail_port", 000)
-    MAIL_USE_SSL = SMTP_CONF.get("mail_use_ssl", True)
-    MAIL_USE_TLS = SMTP_CONF.get("mail_use_tls", False)
-    MAIL_USERNAME = SMTP_CONF.get("mail_username", "")
-    MAIL_PASSWORD = SMTP_CONF.get("mail_password", "")
-    mail_default_sender = SMTP_CONF.get("mail_default_sender", [])
-    if mail_default_sender and len(mail_default_sender) >= 2:
-        MAIL_DEFAULT_SENDER = (mail_default_sender[0], mail_default_sender[1])
-    MAIL_FRONTEND_URL = SMTP_CONF.get("mail_frontend_url", "")
-
-
-def _parse_model_entry(entry):
-    if isinstance(entry, str):
-        return {"name": entry, "factory": None, "api_key": None, "base_url": None}
-    if isinstance(entry, dict):
-        name = entry.get("name") or entry.get("model") or ""
-        return {
-            "name": name,
-            "factory": entry.get("factory"),
-            "api_key": entry.get("api_key"),
-            "base_url": entry.get("base_url"),
-        }
-    return {"name": "", "factory": None, "api_key": None, "base_url": None}
-
-
-def _resolve_per_model_config(entry_dict, backup_factory, backup_api_key, backup_base_url):
-    name = (entry_dict.get("name") or "").strip()
-    m_factory = entry_dict.get("factory") or backup_factory or ""
-    m_api_key = entry_dict.get("api_key") or backup_api_key or ""
-    m_base_url = entry_dict.get("base_url") or backup_base_url or ""
-
-    if name and "@" not in name and m_factory:
-        name = f"{name}@{m_factory}"
-
-    return {
-        "model": name,
-        "factory": m_factory,
-        "api_key": m_api_key,
-        "base_url": m_base_url,
-    }
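Note: api/settings.py shrinks to its license header; the configuration globals and init_settings() live in the shared common.settings module, which is why the surrounding hunks replace `from api import settings` and `from common import globals` with one import. Inside the RAGFlow code base a call site therefore only changes its import line, for example:

# before (sketch):
#   from api import settings
#   from common import globals
#   conn = globals.docStoreConn

# after (sketch):
from common import settings

conn = settings.docStoreConn  # populated by settings.init_settings() at startup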
@@ -34,7 +34,6 @@ from flask import (
 )
 from peewee import OperationalError
 
-from api import settings
 from common.constants import ActiveEnum
 from api.db.db_models import APIToken
 from api.utils.json_encode import CustomJSONEncoder
@@ -42,7 +41,7 @@ from rag.utils.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_
 from api.db.services.tenant_llm_service import LLMFactoriesService
 from common.connection_utils import timeout
 from common.constants import RetCode
+from common import settings
 
 requests.models.complexjson.dumps = functools.partial(json.dumps, cls=CustomJSONEncoder)
 
@@ -626,7 +625,7 @@ async def is_strong_enough(chat_model, embedding_model):
 
 
 def get_allowed_llm_factories() -> list:
-    factories = list(LLMFactoriesService.get_all())
+    factories = list(LLMFactoriesService.get_all(reverse=True, order_by="rank"))
     if settings.ALLOWED_LLM_FACTORIES is None:
         return factories
 
@@ -13,17 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from datetime import datetime
+import json
 import os
 import requests
 from timeit import default_timer as timer
 
-from api import settings
 from api.db.db_models import DB
 from rag.utils.redis_conn import REDIS_CONN
-from rag.utils.storage_factory import STORAGE_IMPL
 from rag.utils.es_conn import ESConnection
 from rag.utils.infinity_conn import InfinityConnection
-from common import globals
+from common import settings
 
 
 def _ok_nok(ok: bool) -> str:
@@ -52,7 +52,7 @@ def check_redis() -> tuple[bool, dict]:
 def check_doc_engine() -> tuple[bool, dict]:
     st = timer()
     try:
-        meta = globals.docStoreConn.health()
+        meta = settings.docStoreConn.health()
         # treat any successful call as ok
         return True, {"elapsed": f"{(timer() - st) * 1000.0:.1f}", **(meta or {})}
     except Exception as e:
@@ -62,7 +62,7 @@ def check_doc_engine() -> tuple[bool, dict]:
 def check_storage() -> tuple[bool, dict]:
     st = timer()
     try:
-        STORAGE_IMPL.health()
+        settings.STORAGE_IMPL.health()
         return True, {"elapsed": f"{(timer() - st) * 1000.0:.1f}"}
     except Exception as e:
         return False, {"elapsed": f"{(timer() - st) * 1000.0:.1f}", "error": str(e)}
@@ -120,7 +120,7 @@ def get_mysql_status():
 def check_minio_alive():
     start_time = timer()
     try:
-        response = requests.get(f'http://{globals.MINIO["host"]}/minio/health/live')
+        response = requests.get(f'http://{settings.MINIO["host"]}/minio/health/live')
         if response.status_code == 200:
             return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
         else:
@@ -148,7 +148,10 @@ def get_redis_info():
 def check_ragflow_server_alive():
     start_time = timer()
     try:
-        response = requests.get(f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping')
+        url = f'http://{settings.HOST_IP}:{settings.HOST_PORT}/v1/system/ping'
+        if '0.0.0.0' in url:
+            url = url.replace('0.0.0.0', '127.0.0.1')
+        response = requests.get(url)
         if response.status_code == 200:
             return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
         else:
@@ -160,6 +163,26 @@ def check_ragflow_server_alive():
         }
 
 
+def check_task_executor_alive():
+    task_executor_heartbeats = {}
+    try:
+        task_executors = REDIS_CONN.smembers("TASKEXE")
+        now = datetime.now().timestamp()
+        for task_executor_id in task_executors:
+            heartbeats = REDIS_CONN.zrangebyscore(task_executor_id, now - 60 * 30, now)
+            heartbeats = [json.loads(heartbeat) for heartbeat in heartbeats]
+            task_executor_heartbeats[task_executor_id] = heartbeats
+        if task_executor_heartbeats:
+            return {"status": "alive", "message": task_executor_heartbeats}
+        else:
+            return {"status": "timeout", "message": "Not found any task executor."}
+    except Exception as e:
+        return {
+            "status": "timeout",
+            "message": f"error: {str(e)}"
+        }
+
 
 def run_health_checks() -> tuple[dict, bool]:
     result: dict[str, str | dict] = {}
 
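Note: the new check_task_executor_alive() health check lists registered executors from the "TASKEXE" Redis set and keeps only heartbeats scored within the last 30 minutes of each executor's sorted set. The window arithmetic is plain UNIX-timestamp math:

from datetime import datetime

WINDOW_SECONDS = 60 * 30          # 30-minute heartbeat window
now = datetime.now().timestamp()  # zset scores are UNIX timestamps
min_score, max_score = now - WINDOW_SECONDS, now
# REDIS_CONN.zrangebyscore(executor_id, min_score, max_score) then yields only
# the heartbeat payloads written inside that window.
print(min_score, max_score)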
@@ -18,7 +18,7 @@ from enum import Enum, IntEnum
 from strenum import StrEnum
 
 SERVICE_CONF = "service_conf.yaml"
+RAG_FLOW_SERVICE_NAME = "ragflow"
 
 class CustomEnum(Enum):
     @classmethod
@@ -113,7 +113,7 @@ class FileSource(StrEnum):
     DISCORD = "discord"
     CONFLUENCE = "confluence"
     GMAIL = "gmail"
-    GOOGLE_DRIVER = "google_driver"
+    GOOGLE_DRIVE = "google_drive"
     JIRA = "jira"
     SHAREPOINT = "sharepoint"
     SLACK = "slack"
@@ -137,6 +137,14 @@ class MCPServerType(StrEnum):
 
 VALID_MCP_SERVER_TYPES = {MCPServerType.SSE, MCPServerType.STREAMABLE_HTTP}
 
+class Storage(Enum):
+    MINIO = 1
+    AZURE_SPN = 2
+    AZURE_SAS = 3
+    AWS_S3 = 4
+    OSS = 5
+    OPENDAL = 6
+
 # environment
 # ENV_STRONG_TEST_COUNT = "STRONG_TEST_COUNT"
 # ENV_RAGFLOW_SECRET_KEY = "RAGFLOW_SECRET_KEY"
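Note: the Storage enum above gives the supported object-store backends stable names. One plausible use, sketched here (the STORAGE_IMPL environment variable and the lookup are illustrative, not taken from this diff), is resolving the configured backend by member name:

import os
from enum import Enum

class Storage(Enum):
    MINIO = 1
    AZURE_SPN = 2
    AZURE_SAS = 3
    AWS_S3 = 4
    OSS = 5
    OPENDAL = 6

# Hypothetical: pick the backend from an env var such as STORAGE_IMPL=MINIO.
backend = Storage[os.environ.get("STORAGE_IMPL", "MINIO")]
print(backend)  # Storage.MINIO unless overridden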
@@ -181,3 +189,8 @@ VALID_MCP_SERVER_TYPES = {MCPServerType.SSE, MCPServerType.STREAMABLE_HTTP}
 # ENV_MAX_CONCURRENT_MINIO = "MAX_CONCURRENT_MINIO"
 # ENV_WORKER_HEARTBEAT_TIMEOUT = "WORKER_HEARTBEAT_TIMEOUT"
 # ENV_TRACE_MALLOC_ENABLED = "TRACE_MALLOC_ENABLED"
+
+PAGERANK_FLD = "pagerank_fea"
+SVR_QUEUE_NAME = "rag_flow_svr_queue"
+SVR_CONSUMER_GROUP_NAME = "rag_flow_svr_task_broker"
+TAG_FLD = "tag_feas"
@@ -10,7 +10,7 @@ from .notion_connector import NotionConnector
 from .confluence_connector import ConfluenceConnector
 from .discord_connector import DiscordConnector
 from .dropbox_connector import DropboxConnector
-from .google_drive_connector import GoogleDriveConnector
+from .google_drive.connector import GoogleDriveConnector
 from .jira_connector import JiraConnector
 from .sharepoint_connector import SharePointConnector
 from .teams_connector import TeamsConnector
@@ -47,4 +47,4 @@ __all__ = [
     "CredentialExpiredError",
     "InsufficientPermissionsError",
     "UnexpectedValidationError"
 ]

Some files were not shown because too many files have changed in this diff.