ragflow/.github/workflows/tests.yml
Latest commit 82f572ff95 by Zhichang Yu, 2025-10-02 10:57:08 +08:00: Check workflow duplication (#10399)

### What problem does this PR solve?

Check workflow duplication

### Type of change

- [x] Other (please describe): CI

name: tests

on:
  push:
    branches:
      - 'main'
      - '*.*.*'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.mdx'
  pull_request:
    types: [ opened, synchronize, reopened, labeled ]
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.mdx'
  schedule:
    - cron: '0 16 * * *' # Runs daily at 16:00 UTC (00:00 UTC+8)

# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
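
# A newer run cancels any in-progress run in the same group: one group per PR
# number, or per ref for push and scheduled runs.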
jobs:
  ragflow_tests:
    name: ragflow_tests
    # https://docs.github.com/en/actions/using-jobs/using-conditions-to-control-job-execution
    # https://github.com/orgs/community/discussions/26261
    if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci') }}
    runs-on: [ "self-hosted", "debug" ]
    steps:
      # https://github.com/hmarr/debug-action
      #- uses: hmarr/debug-action@v2
      - name: Ensure workspace ownership
        run: |
          echo "Workflow triggered by ${{ github.event_name }}"
          echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE

      # https://github.com/actions/checkout/issues/1781
      - name: Check out code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          fetch-tags: true
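
      # Duplication check (PR #10399): a pull_request run records the hash of its
      # workspace tree and its run id under ${RUNNER_WORKSPACE_PREFIX}/artifacts/ on
      # the self-hosted runner; a later push or scheduled run looks up the merged PR
      # for HEAD and cancels itself when the tree hash matches, so identical content
      # is not tested twice.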
      - name: Check workflow duplication
        if: ${{ !cancelled() && !failure() && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci')) }}
        run: |
          if [[ ${{ github.event_name }} != 'pull_request' ]]; then
            HEAD=$(git rev-parse HEAD)
            # Find the PR that introduced the given commit
            gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"
            PR_NUMBER=$(gh pr list --search ${HEAD} --state merged --json number --jq .[0].number)
            echo "HEAD=${HEAD}"
            echo "PR_NUMBER=${PR_NUMBER}"
            if [[ -n ${PR_NUMBER} ]]; then
              PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
              if [[ -f ${PR_SHA_FP} ]]; then
                read -r PR_SHA PR_RUN_ID < "${PR_SHA_FP}"
                # Calculate the hash of the current workspace content
                HEAD_SHA=$(git rev-parse HEAD^{tree})
                if [[ ${HEAD_SHA} == ${PR_SHA} ]]; then
                  echo "Cancel myself since the workspace content hash is the same as that of merged PR #${PR_NUMBER}. See ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${PR_RUN_ID} for details."
                  gh run cancel ${GITHUB_RUN_ID}
                  # Wait until the cancellation takes effect
                  while true; do
                    status=$(gh run view ${GITHUB_RUN_ID} --json status -q .status)
                    [ "$status" = "completed" ] && break
                    sleep 5
                  done
                  exit 1
                fi
              fi
            fi
          else
            PR_NUMBER=${{ github.event.pull_request.number }}
            PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
            # Calculate the hash of the current workspace content
            PR_SHA=$(git rev-parse HEAD^{tree})
            echo "PR #${PR_NUMBER} workspace content hash: ${PR_SHA}"
            mkdir -p ${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}
            echo "${PR_SHA} ${GITHUB_RUN_ID}" > ${PR_SHA_FP}
          fi
      # https://github.com/astral-sh/ruff-action
      - name: Static check with Ruff
        uses: astral-sh/ruff-action@v3
        with:
          version: ">=0.11.x"
          args: "check"
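
      # Both images are built from the same Dockerfile: the LIGHTEN=1 build arg
      # selects the slim edition, and NEED_MIRROR=1 pulls dependencies through
      # mirror sources on the self-hosted runner.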
      - name: Build ragflow:nightly-slim
        run: |
          RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
          sudo docker pull ubuntu:22.04
          sudo docker build --progress=plain --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
      - name: Build ragflow:nightly
        run: |
          sudo docker build --progress=plain --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
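
      # The slim image is brought up and torn down only as a boot smoke test; the
      # test suites below run against the full nightly image.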
      - name: Start ragflow:nightly-slim
        run: |
          sudo docker compose -f docker/docker-compose.yml down --volumes --remove-orphans
          echo -e "\nRAGFLOW_IMAGE=infiniflow/ragflow:nightly-slim" >> docker/.env
          sudo docker compose -f docker/docker-compose.yml up -d
      - name: Stop ragflow:nightly-slim
        if: always() # always run this step even if previous steps failed
        run: |
          sudo docker compose -f docker/docker-compose.yml down -v
      - name: Start ragflow:nightly
        run: |
          echo -e "\nRAGFLOW_IMAGE=infiniflow/ragflow:nightly" >> docker/.env
          sudo docker compose -f docker/docker-compose.yml up -d
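
      # Test suites against Elasticsearch (the default doc engine). Scheduled runs
      # use the broader p3 test level; push and PR runs use p2.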
      - name: Run sdk tests against Elasticsearch
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
            export HTTP_API_TEST_LEVEL=p3
          else
            export HTTP_API_TEST_LEVEL=p2
          fi
          UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python && uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
      - name: Run frontend api tests against Elasticsearch
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          cd sdk/python && UV_LINK_MODE=copy uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
      - name: Run http api tests against Elasticsearch
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
            export HTTP_API_TEST_LEVEL=p3
          else
            export HTTP_API_TEST_LEVEL=p2
          fi
          UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
      - name: Stop ragflow:nightly
        if: always() # always run this step even if previous steps failed
        run: |
          sudo docker compose -f docker/docker-compose.yml down -v
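
      # Repeat the same test suites against the Infinity doc engine
      # (DOC_ENGINE=infinity).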
      - name: Start ragflow:nightly
        run: |
          sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml up -d
      - name: Run sdk tests against Infinity
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
            export HTTP_API_TEST_LEVEL=p3
          else
            export HTTP_API_TEST_LEVEL=p2
          fi
          UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python && DOC_ENGINE=infinity uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
      - name: Run frontend api tests against Infinity
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          cd sdk/python && UV_LINK_MODE=copy uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
      - name: Run http api tests against Infinity
        run: |
          export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
          export HOST_ADDRESS=http://host.docker.internal:9380
          until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
            echo "Waiting for service to be available..."
            sleep 5
          done
          if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
            export HTTP_API_TEST_LEVEL=p3
          else
            export HTTP_API_TEST_LEVEL=p2
          fi
          UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && DOC_ENGINE=infinity uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
      - name: Stop ragflow:nightly
        if: always() # always run this step even if previous steps failed
        run: |
          sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml down -v