Don't release full image (#10654)

### What problem does this PR solve?

- Introduced a `gpu` profile in `.env` (see the sketch after this list).
- Added `Dockerfile_tei`.
- Fixed `datrie`.
- Removed the `LIGHTEN` flag.
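
How the profiles fit together is easiest to see from `docker/.env`. A minimal sketch follows; only the `tei-cpu` profile name, the `TEI_MODEL` value, and the image tag come from the CI changes below, and the `gpu` profile line is an assumption about what this PR introduces:

```bash
# docker/.env (sketch): COMPOSE_PROFILES decides which optional services compose starts.
COMPOSE_PROFILES=${COMPOSE_PROFILES},tei-cpu   # CPU text-embeddings-inference service (used by CI below)
TEI_MODEL=BAAI/bge-small-en-v1.5               # embedding model served by TEI
# On a GPU host, the gpu profile would be enabled instead (profile name assumed):
# COMPOSE_PROFILES=${COMPOSE_PROFILES},gpu
RAGFLOW_IMAGE=infiniflow/ragflow:nightly
```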

### Type of change

- [x] Documentation Update
- [x] Refactoring
Author: Zhichang Yu (committed by GitHub)
Date: 2025-10-23 23:02:27 +08:00
Parent: 92739ea804 · Commit: 73144e278b
67 changed files with 2792 additions and 3608 deletions


@@ -49,20 +49,20 @@ jobs:
- name: Check workflow duplication
if: ${{ !cancelled() && !failure() && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci')) }}
run: |
if [[ ${{ github.event_name }} != 'pull_request' ]]; then
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "schedule" ]]; then
HEAD=$(git rev-parse HEAD)
# Find a PR that introduced a given commit
gh auth login --with-token <<< "${{ secrets.GITHUB_TOKEN }}"
PR_NUMBER=$(gh pr list --search ${HEAD} --state merged --json number --jq .[0].number)
echo "HEAD=${HEAD}"
echo "PR_NUMBER=${PR_NUMBER}"
if [[ -n ${PR_NUMBER} ]]; then
if [[ -n "${PR_NUMBER}" ]]; then
PR_SHA_FP=${RUNNER_WORKSPACE_PREFIX}/artifacts/${GITHUB_REPOSITORY}/PR_${PR_NUMBER}
if [[ -f ${PR_SHA_FP} ]]; then
if [[ -f "${PR_SHA_FP}" ]]; then
read -r PR_SHA PR_RUN_ID < "${PR_SHA_FP}"
# Calculate the hash of the current workspace content
HEAD_SHA=$(git rev-parse HEAD^{tree})
if [[ ${HEAD_SHA} == ${PR_SHA} ]]; then
if [[ "${HEAD_SHA}" == "${PR_SHA}" ]]; then
echo "Cancel myself since the workspace content hash is the same with PR #${PR_NUMBER} merged. See ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${PR_RUN_ID} for details."
gh run cancel ${GITHUB_RUN_ID}
while true; do
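
The check above hashes the checked-out tree (`git rev-parse HEAD^{tree}`) rather than the commit, because identical content produces an identical tree hash even when the commit ids differ (for example, a PR head versus the merge commit created on merge). A small standalone illustration of that property; the empty commit is purely hypothetical:

```bash
git rev-parse HEAD               # commit id: new for every commit, content change or not
git rev-parse 'HEAD^{tree}'      # tree id: changes only when tracked content changes

git commit --allow-empty -m "no content change"   # illustrative commit with the same tree

git rev-parse HEAD               # different commit id
git rev-parse 'HEAD^{tree}'      # same tree id as before
```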
@@ -91,122 +91,98 @@ jobs:
version: ">=0.11.x"
args: "check"
- name: Build ragflow:nightly-slim
- name: Build ragflow:nightly
run: |
RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
sudo docker pull ubuntu:22.04
sudo DOCKER_BUILDKIT=1 docker build --build-arg LIGHTEN=1 --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly-slim .
- name: Build ragflow:nightly
run: |
sudo DOCKER_BUILDKIT=1 docker build --build-arg NEED_MIRROR=1 -f Dockerfile -t infiniflow/ragflow:nightly .
- name: Start ragflow:nightly-slim
run: |
sudo docker compose -f docker/docker-compose.yml down --volumes --remove-orphans
echo -e "\nRAGFLOW_IMAGE=infiniflow/ragflow:nightly-slim" >> docker/.env
sudo docker compose -f docker/docker-compose.yml up -d
- name: Stop ragflow:nightly-slim
if: always() # always run this step even if previous steps failed
run: |
sudo docker compose -f docker/docker-compose.yml down -v
if [[ "$GITHUB_EVENT_NAME" == "schedule" ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
echo "HTTP_API_TEST_LEVEL=${HTTP_API_TEST_LEVEL}" >> $GITHUB_ENV
echo "RAGFLOW_CONTAINER=${GITHUB_RUN_ID}-ragflow-cpu-1" >> $GITHUB_ENV
- name: Start ragflow:nightly
run: |
echo -e "\nCOMPOSE_PROFILES=\${COMPOSE_PROFILES},tei-cpu" >> docker/.env
echo -e "\nTEI_MODEL=BAAI/bge-small-en-v1.5" >> docker/.env
echo -e "\nRAGFLOW_IMAGE=infiniflow/ragflow:nightly" >> docker/.env
sudo docker compose -f docker/docker-compose.yml up -d
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} up -d
uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python
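
The rewritten start step also passes `-p ${GITHUB_RUN_ID}`, giving each CI run its own compose project: container, network, and volume names are all prefixed with the project, which is why the container is addressed as `${GITHUB_RUN_ID}-ragflow-cpu-1` above and why concurrent runs on one runner no longer collide. A rough local equivalent, with `ci-12345` as a placeholder project name:

```bash
# Start, inspect, and tear down an isolated instance of the stack.
docker compose -f docker/docker-compose.yml -p ci-12345 up -d
docker compose -f docker/docker-compose.yml -p ci-12345 ps     # containers named ci-12345-<service>-1
docker compose -f docker/docker-compose.yml -p ci-12345 down -v
```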
- name: Run sdk tests against Elasticsearch
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python && uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
- name: Run frontend api tests against Elasticsearch
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
cd sdk/python && UV_LINK_MODE=copy uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
source .venv/bin/activate && pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py
- name: Run http api tests against Elasticsearch
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
source .venv/bin/activate && pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
- name: Stop ragflow:nightly
if: always() # always run this step even if previous steps failed
run: |
sudo docker compose -f docker/docker-compose.yml down -v
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
- name: Start ragflow:nightly
run: |
sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml up -d
sed -i '1i DOC_ENGINE=infinity' docker/.env
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} up -d
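
Selecting the document engine now goes through `docker/.env` instead of an inline variable on the compose command, so the same `docker compose ... -p ${GITHUB_RUN_ID} up -d` invocation works for both engines. A short sketch of the switch (the project name is a placeholder):

```bash
# Prepend DOC_ENGINE=infinity so compose brings up Infinity instead of Elasticsearch.
sed -i '1i DOC_ENGINE=infinity' docker/.env
head -n1 docker/.env    # -> DOC_ENGINE=infinity

# Recreate the stack under the same per-run project name.
docker compose -f docker/docker-compose.yml -p ci-12345 up -d
```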
- name: Run sdk tests against Infinity
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && uv pip install sdk/python && DOC_ENGINE=infinity uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_sdk_api
- name: Run frontend api tests against Infinity
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
cd sdk/python && UV_LINK_MODE=copy uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short sdk/python/test/test_frontend_api/get_email.py sdk/python/test/test_frontend_api/test_dataset.py
- name: Run http api tests against Infinity
run: |
export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
export HOST_ADDRESS=http://host.docker.internal:9380
until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
until sudo docker exec ${RAGFLOW_CONTAINER} curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
echo "Waiting for service to be available..."
sleep 5
done
if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
export HTTP_API_TEST_LEVEL=p3
else
export HTTP_API_TEST_LEVEL=p2
fi
UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen && DOC_ENGINE=infinity uv run --only-group test --no-default-groups pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
source .venv/bin/activate && DOC_ENGINE=infinity pytest -s --tb=short --level=${HTTP_API_TEST_LEVEL} test/testcases/test_http_api
- name: Stop ragflow:nightly
if: always() # always run this step even if previous steps failed
run: |
sudo DOC_ENGINE=infinity docker compose -f docker/docker-compose.yml down -v
sudo docker compose -f docker/docker-compose.yml -p ${GITHUB_RUN_ID} down -v
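
For reference, each test leg in this workflow reduces to roughly the same hand-run sequence; the host address below is the one the workflow uses inside the runner, and locally `http://127.0.0.1:9380` may be needed instead (an assumption):

```bash
export HOST_ADDRESS=http://host.docker.internal:9380
# Install only the test dependency group, plus the SDK under test.
UV_LINK_MODE=copy uv sync --python 3.10 --only-group test --no-default-groups --frozen
uv pip install sdk/python
source .venv/bin/activate
# Pick the level (p2 for PRs, p3 for scheduled runs) and the target suite per leg.
pytest -s --tb=short --level=p2 test/testcases/test_sdk_api
```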