From 49e3ad47bda8b87a85d74e4266a0f3251e58f0d6 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Tue, 2 Dec 2025 17:27:12 -0500
Subject: [PATCH 01/15] create workflow for external contributors
---
.github/workflows/pull_request_external.yml | 3827 +++++++++++++++++++
ci/praktika/yaml_generator.py | 15 +-
ci/workflows/pull_request.py | 2 +-
ci/workflows/pull_request_external.py | 113 +
4 files changed, 3955 insertions(+), 2 deletions(-)
create mode 100644 .github/workflows/pull_request_external.yml
create mode 100644 ci/workflows/pull_request_external.py
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
new file mode 100644
index 000000000000..ed45dd608085
--- /dev/null
+++ b/.github/workflows/pull_request_external.yml
@@ -0,0 +1,3827 @@
+# generated by praktika
+
+name: Community PR
+
+on:
+ workflow_dispatch:
+ inputs:
+ no_cache:
+ description: Run without cache
+ required: false
+ type: boolean
+ default: false
+ pull_request:
+ branches: ['antalya', 'releases/*', 'antalya-*']
+
+env:
+ # Force the stdout and stderr streams to be unbuffered
+ PYTHONUNBUFFERED: 1
+ DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}
+ DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
+ CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}
+
+# Allow updating GH commit statuses and PR comments to post an actual job reports link
+permissions: write-all  # NOTE(review): write-all on a pull_request-triggered workflow for external contributors is overly broad — scope to the minimum (statuses/PR comments) before enabling
+
+jobs:
+
+ config_workflow:
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
+ needs: []
+    if: ${{ github.actor == 'strtgbb' }}  # TODO(review): hardcoded test author gate — replace with a maintainer/approval check before rollout
+ name: "Config Workflow"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Config Workflow"
+
+ - name: Note report location to summary
+ env:
+ PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
+ COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ run: |
+ if [ "$PR_NUMBER" -eq 0 ]; then
+ PREFIX="REFs/$GITHUB_REF_NAME/$COMMIT_SHA"
+ else
+ PREFIX="PRs/$PR_NUMBER/$COMMIT_SHA"
+ fi
+ REPORT_LINK=https://s3.amazonaws.com/altinity-build-artifacts/$PREFIX/$GITHUB_RUN_ID/ci_run_report.html
+ echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY
+
+ - name: Dump env
+ run: env | sort
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ fast_test:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow]
+ name: "Fast test"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Fast test"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_debug:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_debug)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_debug)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_release:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_release)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_release)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_asan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_asan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_asan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_tsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_tsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_tsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_msan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_msan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_msan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_ubsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_ubsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_ubsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_amd_binary:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (amd_binary)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (amd_binary)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_arm_release:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (arm_release)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (arm_release)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_arm_coverage:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (arm_coverage)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (arm_coverage)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (arm_coverage)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (arm_coverage)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ build_arm_binary:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, fast_test]
+ name: "Build (arm_binary)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Build (arm_binary)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ unit_tests_asan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, build_amd_asan]
+ name: "Unit tests (asan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Unit tests (asan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ unit_tests_tsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, build_amd_tsan]
+ name: "Unit tests (tsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Unit tests (tsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ unit_tests_msan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, build_amd_msan]
+ name: "Unit tests (msan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Unit tests (msan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ unit_tests_ubsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+ needs: [config_workflow, build_amd_ubsan]
+ name: "Unit tests (ubsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Unit tests (ubsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_asan_distributed_plan_parallel_1_2:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_asan]
+ name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_asan_distributed_plan_parallel_2_2:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_asan]
+ name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_asan_distributed_plan_sequential:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ name: "Stateless tests (amd_asan, distributed plan, sequential)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_asan, distributed plan, sequential)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+  stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_asyncinsert_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+    name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_asyncinsert_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+    name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, build_amd_debug]
+    name: "Stateless tests (amd_debug, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+    name: "Stateless tests (amd_debug, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_sequential_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, sequential, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, sequential, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+    name: "Stateless tests (amd_msan, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+    name: "Stateless tests (amd_msan, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+    name: "Stateless tests (amd_msan, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_sequential_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+    name: "Stateless tests (amd_msan, sequential, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, sequential, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_ubsan_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+    name: "Stateless tests (amd_ubsan, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_ubsan, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_ubsan_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+    name: "Stateless tests (amd_ubsan, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_ubsan, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_distributed_plan_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+    name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_distributed_plan_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+    name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_sequential_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+    name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_arm_binary_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, build_arm_binary]
+    name: "Stateless tests (arm_binary, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (arm_binary, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Stateless tests (arm_binary, sequential): same shape as the parallel job, but
+  # additionally gated on four earlier stateless-test jobs (so sequential runs start
+  # only after the parallel wave) plus build_arm_binary.
+  # NOTE(review): './ci/tmp' is repeated 3x in the rm/mkdir lines below — redundant;
+  # fix belongs in ci/praktika/yaml_generator.py (this file is generated).
+  stateless_tests_arm_binary_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+    name: "Stateless tests (arm_binary, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (arm_binary, sequential)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Bugfix validation (integration tests): runs the praktika bugfix-validation job
+  # on an amd64 runner; gated on config_workflow and fast_test only.
+  # NOTE(review): './ci/tmp' repeated 3x in rm/mkdir below — redundant; fix in
+  # ci/praktika/yaml_generator.py (this file is generated).
+  bugfix_validation_integration_tests:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test]
+    name: "Bugfix validation (integration tests)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Bugfix validation (integration tests)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Bugfix validation (integration tests)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Bugfix validation (integration tests)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Bugfix validation (functional tests): runs on an aarch64 runner and is gated
+  # only on config_workflow — note the asymmetry with the integration-tests variant
+  # above, which also requires fast_test (presumably intentional; verify in the
+  # workflow definition in ci/workflows/pull_request_external.py).
+  # NOTE(review): './ci/tmp' repeated 3x in rm/mkdir below — redundant; fix in
+  # ci/praktika/yaml_generator.py (this file is generated).
+  bugfix_validation_functional_tests:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow]
+    name: "Bugfix validation (functional tests)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Bugfix validation (functional tests)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Bugfix validation (functional tests)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Bugfix validation (functional tests)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Stateless tests (amd_asan, flaky check): gated on config_workflow and
+  # build_amd_asan. Runs on the style-checker pool rather than func-tester —
+  # presumably because the flaky check is lightweight; confirm against other
+  # praktika workflows before changing.
+  # NOTE(review): './ci/tmp' repeated 3x in rm/mkdir below — redundant; fix in
+  # ci/praktika/yaml_generator.py (this file is generated).
+  stateless_tests_amd_asan_flaky_check:
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+    needs: [config_workflow, build_amd_asan]
+    name: "Stateless tests (amd_asan, flaky check)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_asan, flaky check)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Integration tests (amd_asan, old analyzer), shards 1/6 .. 6/6. All six jobs are
+  # identical apart from the shard index in the id/name/test_name/run-command; each
+  # is gated on config_workflow, four stateless-test jobs, and build_amd_asan.
+  # NOTE(review): './ci/tmp' repeated 3x in the rm/mkdir lines of every job —
+  # redundant; fix in ci/praktika/yaml_generator.py (this file is generated).
+  integration_tests_amd_asan_old_analyzer_1_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 1/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 1/6)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 2/6 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_asan_old_analyzer_2_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 2/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 2/6)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 3/6 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_asan_old_analyzer_3_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 3/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 3/6)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 4/6 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_asan_old_analyzer_4_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 4/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 4/6)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 5/6 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_asan_old_analyzer_5_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 5/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 5/6)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 6/6 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_asan_old_analyzer_6_6:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+    name: "Integration tests (amd_asan, old analyzer, 6/6)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_asan, old analyzer, 6/6)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Integration tests (amd_binary), shards 1/5 .. 5/5. All five jobs are identical
+  # apart from the shard index; each is gated on config_workflow, four
+  # stateless-test jobs, and build_amd_binary.
+  # NOTE(review): './ci/tmp' repeated 3x in the rm/mkdir lines of every job —
+  # redundant; fix in ci/praktika/yaml_generator.py (this file is generated).
+  integration_tests_amd_binary_1_5:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Integration tests (amd_binary, 1/5)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_binary, 1/5)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 2/5 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_binary_2_5:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Integration tests (amd_binary, 2/5)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_binary, 2/5)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 3/5 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_binary_3_5:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Integration tests (amd_binary, 3/5)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_binary, 3/5)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 4/5 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_binary_4_5:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Integration tests (amd_binary, 4/5)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_binary, 4/5)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 5/5 — identical to shard 1 apart from the shard index.
+  integration_tests_amd_binary_5_5:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+    name: "Integration tests (amd_binary, 5/5)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (amd_binary, 5/5)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Integration tests (arm_binary, distributed plan), shards 1/4 .. 4/4. All four
+  # jobs are identical apart from the shard index; each runs on an aarch64 runner
+  # and is gated on config_workflow, four stateless-test jobs, and build_arm_binary.
+  # NOTE(review): './ci/tmp' repeated 3x in the rm/mkdir lines of every job —
+  # redundant; fix in ci/praktika/yaml_generator.py (this file is generated).
+  integration_tests_arm_binary_distributed_plan_1_4:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+    name: "Integration tests (arm_binary, distributed plan, 1/4)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (arm_binary, distributed plan, 1/4)"
+
+      # Recreates the workflow config/status JSON consumed by praktika when sourced.
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      # `ts` (moreutils) timestamps log lines when available; otherwise plain tee.
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 2/4 — identical to shard 1 apart from the shard index.
+  integration_tests_arm_binary_distributed_plan_2_4:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+    name: "Integration tests (arm_binary, distributed plan, 2/4)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (arm_binary, distributed plan, 2/4)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 3/4 — identical to shard 1 apart from the shard index.
+  integration_tests_arm_binary_distributed_plan_3_4:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+    name: "Integration tests (arm_binary, distributed plan, 3/4)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (arm_binary, distributed plan, 3/4)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  # Shard 4/4 — identical to shard 1 apart from the shard index.
+  integration_tests_arm_binary_distributed_plan_4_4:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+    name: "Integration tests (arm_binary, distributed plan, 4/4)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Integration tests (arm_binary, distributed plan, 4/4)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+          mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+          ${{ needs.config_workflow.outputs.data }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+ integration_tests_amd_tsan_1_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 1/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 1/6)"
+
+ - name: Prepare env script
+ run: |
+ rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
+ mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_tsan_2_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 2/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 2/6)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_tsan_3_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 3/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 3/6)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_tsan_4_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 4/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 4/6)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_tsan_5_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 5/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 5/6)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_tsan_6_6:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "Integration tests (amd_tsan, 6/6)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_tsan, 6/6)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ integration_tests_amd_asan_flaky_check:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_asan]
+ name: "Integration tests (amd_asan, flaky check)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Integration tests (amd_asan, flaky check)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Integration tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Integration tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ docker_server_image:
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+ needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Docker server image"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Docker server image"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Docker server image' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Docker server image' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ docker_keeper_image:
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+ needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Docker keeper image"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Docker keeper image"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Docker keeper image' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Docker keeper image' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ install_packages_amd_debug:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ name: "Install packages (amd_debug)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Install packages (amd_debug)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ compatibility_check_release:
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+ needs: [config_workflow, build_amd_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Compatibility check (release)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Compatibility check (release)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Compatibility check (release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Compatibility check (release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ compatibility_check_aarch64:
+ runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
+ needs: [config_workflow, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Compatibility check (aarch64)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Compatibility check (aarch64)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Compatibility check (aarch64)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Compatibility check (aarch64)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stress_test_amd_debug:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_debug, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Stress test (amd_debug)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stress test (amd_debug)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stress test (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stress test (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stress_test_amd_tsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_tsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Stress test (amd_tsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stress test (amd_tsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stress test (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stress test (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stress_test_amd_ubsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_ubsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Stress test (amd_ubsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stress test (amd_ubsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stress test (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stress test (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ stress_test_amd_msan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, build_amd_msan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ name: "Stress test (amd_msan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "Stress test (amd_msan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'Stress test (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'Stress test (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ ast_fuzzer_amd_debug:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ name: "AST fuzzer (amd_debug)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "AST fuzzer (amd_debug)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'AST fuzzer (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'AST fuzzer (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ ast_fuzzer_amd_tsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "AST fuzzer (amd_tsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "AST fuzzer (amd_tsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'AST fuzzer (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'AST fuzzer (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ ast_fuzzer_amd_msan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ name: "AST fuzzer (amd_msan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "AST fuzzer (amd_msan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'AST fuzzer (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'AST fuzzer (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ ast_fuzzer_amd_ubsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ name: "AST fuzzer (amd_ubsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "AST fuzzer (amd_ubsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'AST fuzzer (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'AST fuzzer (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ buzzhouse_amd_debug:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ name: "BuzzHouse (amd_debug)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "BuzzHouse (amd_debug)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'BuzzHouse (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'BuzzHouse (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ buzzhouse_amd_tsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ name: "BuzzHouse (amd_tsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "BuzzHouse (amd_tsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'BuzzHouse (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'BuzzHouse (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ buzzhouse_amd_msan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ name: "BuzzHouse (amd_msan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "BuzzHouse (amd_msan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'BuzzHouse (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'BuzzHouse (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
+
+ buzzhouse_amd_ubsan:
+ runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+ needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ name: "BuzzHouse (amd_ubsan)"
+ outputs:
+ data: ${{ steps.run.outputs.DATA }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.CHECKOUT_REF }}
+
+ - name: Setup
+ uses: ./.github/actions/runner_setup
+ - name: Docker setup
+ uses: ./.github/actions/docker_setup
+ with:
+ test_name: "BuzzHouse (amd_ubsan)"
+
+ - name: Prepare env script
+ run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+ cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+ export PYTHONPATH=./ci:.:
+ cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
+ ${{ needs.config_workflow.outputs.data }}
+ EOF
+ cat > ./ci/tmp/workflow_status.json << 'EOF'
+ ${{ toJson(needs) }}
+ EOF
+ ENV_SETUP_SCRIPT_EOF
+
+ - name: Run
+ id: run
+ run: |
+ . ./ci/tmp/praktika_setup_env.sh
+ set -o pipefail
+ if command -v ts &> /dev/null; then
+ python3 -m praktika run 'BuzzHouse (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+ else
+ python3 -m praktika run 'BuzzHouse (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+ fi
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index 3793e6d07250..e7d9fa6d724c 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -342,6 +342,14 @@ def generate(self):
YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION_NOT_CANCELLED
)
+ # TODO: replace this hack with a proper configuration
+ if (
+ job_name == Settings.CI_CONFIG_JOB_NAME
+ and self.workflow_config.name == "Community PR"
+ ):
+ if_expression = "\n if: ${{ github.actor == 'strtgbb' }}"
+ job_addons.append("\n - name: Dump env\n run: env | sort\n")
+
secrets_envs = []
# note(strtgbb): This adds github secrets to praktika_setup_env.sh
# This makes the workflow very verbose and we don't need it
@@ -486,7 +494,12 @@ def generate(self):
VAR_NAME=secret.name
)
format_kwargs["ENV_SECRETS"] = GH_VAR_ENVS + SECRET_ENVS
- format_kwargs["ENV_SECRETS"] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV
+
+ if self.parser.config.secrets:
+ # Only add global env if there are secrets in workflow config
+ format_kwargs[
+ "ENV_SECRETS"
+ ] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV
template_1 = base_template.strip().format(
NAME=self.workflow_config.name,
diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py
index 6a30b58b651e..2185a889ad6e 100644
--- a/ci/workflows/pull_request.py
+++ b/ci/workflows/pull_request.py
@@ -56,7 +56,7 @@
JobConfigs.bugfix_validation_it_job.set_dependency(
[
# JobNames.STYLE_CHECK, # NOTE (strtgbb): we don't run style check
- # JobNames.FAST_TEST, # NOTE (strtgbb): we don't run fast tests
+ # JobNames.FAST_TEST, # NOTE (strtgbb): this takes too long, revisit later
# JobConfigs.tidy_build_arm_jobs[0].name, # NOTE (strtgbb): we don't run tidy build jobs
]
),
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
new file mode 100644
index 000000000000..858047ae0a82
--- /dev/null
+++ b/ci/workflows/pull_request_external.py
@@ -0,0 +1,113 @@
+from praktika import Workflow
+
+from ci.defs.defs import BASE_BRANCH, DOCKERS, SECRETS, ArtifactConfigs, JobNames
+from ci.defs.job_configs import JobConfigs
+from ci.jobs.scripts.workflow_hooks.filter_job import should_skip_job
+
+FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES = [
+ job.name
+ for job in JobConfigs.functional_tests_jobs
+ if any(
+ substr in job.name
+ for substr in (
+ "_debug, parallel",
+ "_binary, parallel",
+ "_asan, distributed plan, parallel",
+ )
+ )
+]
+
+REGULAR_BUILD_NAMES = [job.name for job in JobConfigs.build_jobs]
+
+workflow = Workflow.Config(
+ name="Community PR",
+ event=Workflow.Event.PULL_REQUEST,
+ base_branches=[BASE_BRANCH, "releases/*", "antalya-*"],
+ jobs=[
+ JobConfigs.fast_test,
+ *[job.set_dependency([JobNames.FAST_TEST]) for job in JobConfigs.build_jobs],
+ *JobConfigs.unittest_jobs,
+ *[
+ j.set_dependency(
+ FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+ if j.name not in FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+ else []
+ )
+ for j in JobConfigs.functional_tests_jobs
+ ],
+ JobConfigs.bugfix_validation_it_job.set_dependency([JobNames.FAST_TEST]),
+ JobConfigs.bugfix_validation_ft_pr_job,
+ *JobConfigs.stateless_tests_flaky_pr_jobs,
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.integration_test_jobs_required[:]
+ ],
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.integration_test_jobs_non_required
+ ],
+ JobConfigs.integration_test_asan_flaky_pr_job,
+ JobConfigs.docker_sever.set_dependency(
+ FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+ ),
+ JobConfigs.docker_keeper.set_dependency(
+ FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+ ),
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.install_check_jobs
+ ],
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.compatibility_test_jobs
+ ],
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.stress_test_jobs
+ ],
+ # *[
+ # job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ # for job in JobConfigs.upgrade_test_jobs
+ # ], # TODO: customize for our repo
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.ast_fuzzer_jobs
+ ],
+ *[
+ job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ for job in JobConfigs.buzz_fuzzer_jobs
+ ],
+ ],
+ artifacts=[
+ *ArtifactConfigs.unittests_binaries,
+ *ArtifactConfigs.clickhouse_binaries,
+ *ArtifactConfigs.clickhouse_stripped_binaries,
+ *ArtifactConfigs.clickhouse_debians,
+ *ArtifactConfigs.clickhouse_rpms,
+ *ArtifactConfigs.clickhouse_tgzs,
+ ArtifactConfigs.fuzzers,
+ ArtifactConfigs.fuzzers_corpus,
+ ],
+ dockers=DOCKERS,
+ disable_dockers_build=True,
+ enable_dockers_manifest_merge=False,
+ secrets=[],
+ enable_job_filtering_by_changes=True,
+ enable_cache=False,
+ enable_report=False,
+ enable_cidb=False,
+ enable_merge_ready_status=False,
+ enable_commit_status_on_failure=True,
+ pre_hooks=[
+ # "python3 ./ci/jobs/scripts/workflow_hooks/is_external_pr.py", TODO: implement
+ "python3 ./ci/jobs/scripts/workflow_hooks/store_data.py",
+ "python3 ./ci/jobs/scripts/workflow_hooks/version_log.py",
+ "python3 ./ci/jobs/scripts/workflow_hooks/parse_ci_tags.py",
+ ],
+ workflow_filter_hooks=[should_skip_job],
+ post_hooks=[],
+)
+
+WORKFLOWS = [
+ workflow,
+]
From 320e08411a95f7d121f3179133cd5bfcf3cb7543 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 11:19:26 -0500
Subject: [PATCH 02/15] move GH_TOKEN out of custom secrets
---
.github/workflows/backport_branches.yml | 2 +-
.github/workflows/master.yml | 2 +-
.github/workflows/merge_queue.yml | 2 +-
.github/workflows/nightly_fuzzers.yml | 1 +
.github/workflows/nightly_jepsen.yml | 1 +
.github/workflows/nightly_statistics.yml | 1 +
.github/workflows/pull_request.yml | 2 +-
.github/workflows/pull_request_external.yml | 1 +
.github/workflows/release_branches.yml | 2 +-
.github/workflows/release_builds.yml | 2 +-
.github/workflows/vectorsearchstress.yml | 1 +
ci/praktika/yaml_additional_templates.py | 1 -
ci/praktika/yaml_generator.py | 3 +++
ci/workflows/pull_request_external.py | 3 +--
14 files changed, 15 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml
index 549efdf6bc38..a36577eb6b54 100644
--- a/.github/workflows/backport_branches.yml
+++ b/.github/workflows/backport_branches.yml
@@ -16,6 +16,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}
@@ -32,7 +33,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post an actual job reports link
permissions: write-all
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 8d0d79f2ee13..0266a25e95f1 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -16,6 +16,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ""
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
@@ -31,7 +32,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post an actual job reports link
permissions: write-all
diff --git a/.github/workflows/merge_queue.yml b/.github/workflows/merge_queue.yml
index aa7066382f26..b1eec174dc98 100644
--- a/.github/workflows/merge_queue.yml
+++ b/.github/workflows/merge_queue.yml
@@ -8,6 +8,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
@@ -22,7 +23,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
jobs:
diff --git a/.github/workflows/nightly_fuzzers.yml b/.github/workflows/nightly_fuzzers.yml
index 84cb1e8e02b9..9d7a389e7318 100644
--- a/.github/workflows/nightly_fuzzers.yml
+++ b/.github/workflows/nightly_fuzzers.yml
@@ -11,6 +11,7 @@ concurrency:
env:
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
jobs:
diff --git a/.github/workflows/nightly_jepsen.yml b/.github/workflows/nightly_jepsen.yml
index 1ff46f516f75..4201db8788aa 100644
--- a/.github/workflows/nightly_jepsen.yml
+++ b/.github/workflows/nightly_jepsen.yml
@@ -11,6 +11,7 @@ concurrency:
env:
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
jobs:
diff --git a/.github/workflows/nightly_statistics.yml b/.github/workflows/nightly_statistics.yml
index 8a0e96858eb9..c9228837b508 100644
--- a/.github/workflows/nightly_statistics.yml
+++ b/.github/workflows/nightly_statistics.yml
@@ -11,6 +11,7 @@ concurrency:
env:
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
jobs:
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index ff2904b1d497..6689273f96d2 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -16,6 +16,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}
@@ -32,7 +33,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post an actual job reports link
permissions: write-all
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index ed45dd608085..f305a993537b 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -16,6 +16,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 12e370e00207..ff38e18379b8 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -16,6 +16,7 @@ on:
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ""
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
@@ -31,7 +32,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post an actual job reports link
permissions: write-all
diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml
index 00bfb1015b15..5a5e3b495bd1 100644
--- a/.github/workflows/release_builds.yml
+++ b/.github/workflows/release_builds.yml
@@ -7,6 +7,7 @@ on:
env:
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
@@ -21,7 +22,6 @@ env:
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
- GH_TOKEN: ${{ github.token }}
jobs:
diff --git a/.github/workflows/vectorsearchstress.yml b/.github/workflows/vectorsearchstress.yml
index e3d123cf315f..b151328d44fb 100644
--- a/.github/workflows/vectorsearchstress.yml
+++ b/.github/workflows/vectorsearchstress.yml
@@ -11,6 +11,7 @@ concurrency:
env:
PYTHONUNBUFFERED: 1
+ GH_TOKEN: ${{ github.token }}
CHECKOUT_REF: ""
jobs:
diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py
index a31ba2b7ea75..6eb522901061 100644
--- a/ci/praktika/yaml_additional_templates.py
+++ b/ci/praktika/yaml_additional_templates.py
@@ -8,7 +8,6 @@ class AltinityWorkflowTemplates:
AZURE_CONTAINER_NAME: ${{{{ secrets.AZURE_CONTAINER_NAME }}}}
AZURE_STORAGE_ACCOUNT_URL: "https://${{{{ secrets.AZURE_ACCOUNT_NAME }}}}.blob.core.windows.net/"
ROBOT_TOKEN: ${{{{ secrets.ROBOT_TOKEN }}}}
- GH_TOKEN: ${{{{ github.token }}}}
"""
# Additional pre steps for all jobs
JOB_SETUP_STEPS = """
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index e7d9fa6d724c..b48a3942adaa 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -60,15 +60,18 @@ class Templates:
permissions: write-all\
"""
TEMPLATE_ENV_CHECKOUT_REF_PR = """\
+ GH_TOKEN: ${{{{ github.token }}}}
DISABLE_CI_MERGE_COMMIT: ${{{{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}}}
DISABLE_CI_CACHE: ${{{{ github.event.inputs.no_cache || '0' }}}}
CHECKOUT_REF: ${{{{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}}}\
"""
TEMPLATE_ENV_CHECKOUT_REF_PUSH = """\
+ GH_TOKEN: ${{{{ github.token }}}}
DISABLE_CI_CACHE: ${{{{ github.event.inputs.no_cache || '0' }}}}
CHECKOUT_REF: ""\
"""
TEMPLATE_ENV_CHECKOUT_REF_DEFAULT = """\
+ GH_TOKEN: ${{{{ github.token }}}}
CHECKOUT_REF: ""\
"""
TEMPLATE_ENV_SECRET = """\
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
index 858047ae0a82..03ee24f5562a 100644
--- a/ci/workflows/pull_request_external.py
+++ b/ci/workflows/pull_request_external.py
@@ -97,9 +97,8 @@
enable_report=False,
enable_cidb=False,
enable_merge_ready_status=False,
- enable_commit_status_on_failure=True,
+ enable_commit_status_on_failure=False,
pre_hooks=[
- # "python3 ./ci/jobs/scripts/workflow_hooks/is_external_pr.py", TODO: implement
"python3 ./ci/jobs/scripts/workflow_hooks/store_data.py",
"python3 ./ci/jobs/scripts/workflow_hooks/version_log.py",
"python3 ./ci/jobs/scripts/workflow_hooks/parse_ci_tags.py",
From 9069d91f8cdfcbb51990c2b774fd0e84cf56e81a Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 11:19:56 -0500
Subject: [PATCH 03/15] remove dump env
---
.github/workflows/pull_request_external.yml | 3 ---
ci/praktika/yaml_generator.py | 1 -
2 files changed, 4 deletions(-)
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index f305a993537b..a0c0ddd871d1 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -59,9 +59,6 @@ jobs:
REPORT_LINK=https://s3.amazonaws.com/altinity-build-artifacts/$PREFIX/$GITHUB_RUN_ID/ci_run_report.html
echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY
- - name: Dump env
- run: env | sort
-
- name: Prepare env script
run: |
rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index b48a3942adaa..e373a3a10f62 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -351,7 +351,6 @@ def generate(self):
and self.workflow_config.name == "Community PR"
):
if_expression = "\n if: ${{ github.actor == 'strtgbb' }}"
- job_addons.append("\n - name: Dump env\n run: env | sort\n")
secrets_envs = []
# note(strtgbb): This adds github secrets to praktika_setup_env.sh
From 576be099308ac87686d4a4b3e7804894392c9838 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 11:24:47 -0500
Subject: [PATCH 04/15] remove permissions: write-all from workflows
It is dangerous to have this for untrusted workflows,
and trusted workflows should have it by default
---
.github/workflows/backport_branches.yml | 3 +--
.github/workflows/master.yml | 3 +--
.github/workflows/pull_request.yml | 3 +--
.github/workflows/pull_request_external.yml | 3 +--
.github/workflows/release_branches.yml | 3 +--
ci/praktika/yaml_generator.py | 6 ++----
6 files changed, 7 insertions(+), 14 deletions(-)
diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml
index a36577eb6b54..f814b5e3c16e 100644
--- a/.github/workflows/backport_branches.yml
+++ b/.github/workflows/backport_branches.yml
@@ -34,8 +34,7 @@ env:
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
jobs:
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 0266a25e95f1..9bae1567fa75 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -33,8 +33,7 @@ env:
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
jobs:
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index 6689273f96d2..28d1c7a6f9c5 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -34,8 +34,7 @@ env:
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
jobs:
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index a0c0ddd871d1..aa25166aae3c 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -21,8 +21,7 @@ env:
DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }}
CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
jobs:
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index ff38e18379b8..22f1b3a03a88 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -33,8 +33,7 @@ env:
AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
jobs:
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index e373a3a10f62..793c0a0f8e5c 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -55,10 +55,8 @@ class Templates:
jobs:
{JOBS}\
"""
- TEMPLATE_GH_TOKEN_PERMISSIONS = """\
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all\
-"""
+ # NOTE (strtgbb): This is dangerous to set for untrusted workflows, and for trusted workflows it should already be the default
+ TEMPLATE_GH_TOKEN_PERMISSIONS = ""
TEMPLATE_ENV_CHECKOUT_REF_PR = """\
GH_TOKEN: ${{{{ github.token }}}}
DISABLE_CI_MERGE_COMMIT: ${{{{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}}}
From f3ccdd3fa0ed1b62f620ed5ee12cdfea1807e109 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 11:32:25 -0500
Subject: [PATCH 05/15] disable job filtering and commit status for community
pr
---
ci/workflows/pull_request_external.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
index 03ee24f5562a..509a03244c41 100644
--- a/ci/workflows/pull_request_external.py
+++ b/ci/workflows/pull_request_external.py
@@ -92,7 +92,7 @@
disable_dockers_build=True,
enable_dockers_manifest_merge=False,
secrets=[],
- enable_job_filtering_by_changes=True,
+ enable_job_filtering_by_changes=False, # TODO: Change this back?
enable_cache=False,
enable_report=False,
enable_cidb=False,
@@ -107,6 +107,9 @@
post_hooks=[],
)
+for job in workflow.jobs:
+ job.enable_commit_status = False
+
WORKFLOWS = [
workflow,
]
From 42633ec2afb49386310534c7ae1e8528345bf5e2 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 12:37:25 -0500
Subject: [PATCH 06/15] Configure sccache to run without credentials for
community PR
---
ci/jobs/build_clickhouse.py | 7 +++++++
ci/jobs/fast_test.py | 7 +++++++
2 files changed, 14 insertions(+)
diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py
index 18dc461f5efd..57de3261677f 100644
--- a/ci/jobs/build_clickhouse.py
+++ b/ci/jobs/build_clickhouse.py
@@ -113,6 +113,13 @@ def main():
os.environ["SCCACHE_IDLE_TIMEOUT"] = "7200"
os.environ["SCCACHE_BUCKET"] = Settings.S3_ARTIFACT_PATH
os.environ["SCCACHE_S3_KEY_PREFIX"] = "ccache/sccache"
+ if "Community" in info.workflow_name:
+ print("NOTE: Community contribution - set sccache to run without AWS credentials")
+ os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1"
+ # NOTE (strtgbb): sccache will throw an error if AWS credentials are present with SCCACHE_S3_NO_CREDENTIALS=1
+ os.environ.pop("AWS_SECRET_ACCESS_KEY", None)
+ os.environ.pop("AWS_ACCESS_KEY_ID", None)
+
os.environ["CTCACHE_LOG_LEVEL"] = "debug"
os.environ["CTCACHE_DIR"] = f"{build_dir}/ccache/clang-tidy-cache"
os.environ["CTCACHE_S3_BUCKET"] = Settings.S3_ARTIFACT_PATH
diff --git a/ci/jobs/fast_test.py b/ci/jobs/fast_test.py
index 860a14f6ca02..684a169cc353 100644
--- a/ci/jobs/fast_test.py
+++ b/ci/jobs/fast_test.py
@@ -145,6 +145,13 @@ def main():
os.environ["SCCACHE_IDLE_TIMEOUT"] = "7200"
os.environ["SCCACHE_BUCKET"] = Settings.S3_ARTIFACT_PATH
os.environ["SCCACHE_S3_KEY_PREFIX"] = "ccache/sccache"
+ if "Community" in Info().workflow_name:
+ print("NOTE: Community contribution - set sccache to run without AWS credentials")
+ os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1"
+ # NOTE (strtgbb): sccache will throw an error if AWS credentials are present with SCCACHE_S3_NO_CREDENTIALS=1
+ os.environ.pop("AWS_SECRET_ACCESS_KEY", None)
+ os.environ.pop("AWS_ACCESS_KEY_ID", None)
+
Shell.check("sccache --show-stats", verbose=True)
Utils.add_to_PATH(f"{build_dir}/programs:{current_directory}/tests")
From f18f81589dbbff45e8d188547f0f17909fc2b804 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 14:31:27 -0500
Subject: [PATCH 07/15] switch from s3 artifacts to github artifacts
---
.github/workflows/pull_request_external.yml | 577 ++++++++++++++++++++
ci/praktika/parser.py | 3 +-
ci/praktika/yaml_generator.py | 4 +-
ci/workflows/pull_request_external.py | 13 +-
4 files changed, 592 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index aa25166aae3c..3c39318eee08 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -171,6 +171,19 @@ jobs:
python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_AMD_DEBUG
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact DEB_AMD_DEBUG
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_AMD_DEBUG
+ path: ci/tmp/*.deb
+
build_amd_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -215,6 +228,40 @@ jobs:
python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_AMD_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_RELEASE
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact CH_AMD_RELEASE_STRIPPED
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_RELEASE_STRIPPED
+ path: ci/tmp/build/programs/self-extracting/clickhouse-stripped
+
+
+ - name: Upload artifact DEB_AMD_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_AMD_RELEASE
+ path: ci/tmp/*.deb
+
+
+ - name: Upload artifact RPM_AMD_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: RPM_AMD_RELEASE
+ path: ci/tmp/*.rpm
+
+
+ - name: Upload artifact TGZ_AMD_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: TGZ_AMD_RELEASE
+ path: ci/tmp/*64.tgz*
+
build_amd_asan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -259,6 +306,26 @@ jobs:
python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact UNITTEST_AMD_ASAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: UNITTEST_AMD_ASAN
+ path: ci/tmp/build/src/unit_tests_dbms
+
+
+ - name: Upload artifact CH_AMD_ASAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact DEB_AMD_ASAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_AMD_ASAN
+ path: ci/tmp/*.deb
+
build_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -303,6 +370,26 @@ jobs:
python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact UNITTEST_AMD_TSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: UNITTEST_AMD_TSAN
+ path: ci/tmp/build/src/unit_tests_dbms
+
+
+ - name: Upload artifact CH_AMD_TSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact DEB_AMD_TSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_AMD_TSAN
+ path: ci/tmp/*.deb
+
build_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -347,6 +434,26 @@ jobs:
python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact UNITTEST_AMD_MSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: UNITTEST_AMD_MSAN
+ path: ci/tmp/build/src/unit_tests_dbms
+
+
+ - name: Upload artifact CH_AMD_MSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+      - name: Upload artifact DEB_AMD_MSAN
+        uses: actions/upload-artifact@v4
+        with:
+          name: DEB_AMD_MSAN
+          path: ci/tmp/*.deb
+
build_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -391,6 +498,26 @@ jobs:
python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact UNITTEST_AMD_UBSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: UNITTEST_AMD_UBSAN
+ path: ci/tmp/build/src/unit_tests_dbms
+
+
+ - name: Upload artifact CH_AMD_UBSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_UBSAN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact DEB_AMD_UBSAN
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_AMD_UBSAN
+ path: ci/tmp/*.deb
+
build_amd_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -435,6 +562,12 @@ jobs:
python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_AMD_BINARY
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
build_arm_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -479,6 +612,40 @@ jobs:
python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_ARM_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_ARM_RELEASE
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact CH_ARM_RELEASE_STRIPPED
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_ARM_RELEASE_STRIPPED
+ path: ci/tmp/build/programs/self-extracting/clickhouse-stripped
+
+
+ - name: Upload artifact DEB_ARM_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_ARM_RELEASE
+ path: ci/tmp/*.deb
+
+
+ - name: Upload artifact RPM_ARM_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: RPM_ARM_RELEASE
+ path: ci/tmp/*.rpm
+
+
+ - name: Upload artifact TGZ_ARM_RELEASE
+ uses: actions/upload-artifact@v4
+ with:
+ name: TGZ_ARM_RELEASE
+ path: ci/tmp/*64.tgz*
+
build_arm_coverage:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -523,6 +690,19 @@ jobs:
python3 -m praktika run 'Build (arm_coverage)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_COV_BIN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_COV_BIN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+ - name: Upload artifact DEB_COV
+ uses: actions/upload-artifact@v4
+ with:
+ name: DEB_COV
+ path: ci/tmp/*.deb
+
build_arm_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
@@ -567,6 +747,12 @@ jobs:
python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
+ - name: Upload artifact CH_ARM_BIN
+ uses: actions/upload-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ci/tmp/build/programs/self-extracting/clickhouse
+
unit_tests_asan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, build_amd_asan]
@@ -600,6 +786,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact UNITTEST_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: UNITTEST_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -644,6 +836,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact UNITTEST_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: UNITTEST_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -688,6 +886,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact UNITTEST_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: UNITTEST_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -732,6 +936,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact UNITTEST_AMD_UBSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: UNITTEST_AMD_UBSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -776,6 +986,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -820,6 +1036,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -864,6 +1086,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -908,6 +1136,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -952,6 +1186,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -996,6 +1236,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1040,6 +1286,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1084,6 +1336,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1128,6 +1386,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1172,6 +1436,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1216,6 +1486,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1260,6 +1536,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1304,6 +1586,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1348,6 +1636,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1392,6 +1686,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1436,6 +1736,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1480,6 +1786,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1524,6 +1836,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1568,6 +1886,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1612,6 +1936,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_UBSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_UBSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1656,6 +1986,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_UBSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_UBSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1700,6 +2036,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1744,6 +2086,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1788,6 +2136,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1832,6 +2186,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1876,6 +2236,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1920,6 +2286,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -1964,6 +2336,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2096,6 +2474,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2140,6 +2524,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2184,6 +2574,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2228,6 +2624,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2272,6 +2674,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2316,6 +2724,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2360,6 +2774,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2404,6 +2824,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2448,6 +2874,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2492,6 +2924,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2536,6 +2974,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2580,6 +3024,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_BINARY
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_BINARY
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2624,6 +3074,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2668,6 +3124,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2712,6 +3174,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2756,6 +3224,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_ARM_BIN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_ARM_BIN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2800,6 +3274,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2844,6 +3324,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2888,6 +3374,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2932,6 +3424,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -2976,6 +3474,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3020,6 +3524,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3064,6 +3574,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_ASAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_ASAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3196,6 +3712,19 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
+
+ - name: Download artifact DEB_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: DEB_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3504,6 +4033,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3548,6 +4083,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3592,6 +4133,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3636,6 +4183,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_UBSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_UBSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3680,6 +4233,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_DEBUG
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_DEBUG
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3724,6 +4283,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_TSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_TSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3768,6 +4333,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_MSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_MSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
@@ -3812,6 +4383,12 @@ jobs:
EOF
ENV_SETUP_SCRIPT_EOF
+ - name: Download artifact CH_AMD_UBSAN
+ uses: actions/download-artifact@v4
+ with:
+ name: CH_AMD_UBSAN
+ path: ./ci/tmp
+
- name: Run
id: run
run: |
diff --git a/ci/praktika/parser.py b/ci/praktika/parser.py
index 3df2b847dac0..64bb20a795f1 100644
--- a/ci/praktika/parser.py
+++ b/ci/praktika/parser.py
@@ -236,7 +236,8 @@ def parse(self):
assert (
False
), f"Artifact [{artifact_name}] has unsupported type [{artifact.type}]"
- if artifact.type == Artifact.Type.GH:
+ # NOTE (strtgbb): Added a check that provided_by is not empty, which is the case for artifacts that are defined in defs.py but not used in a workflow
+ if artifact.type == Artifact.Type.GH and artifact.provided_by != "":
self.workflow_yaml_config.job_to_config[
artifact.provided_by
].artifacts_gh_provides.append(artifact)
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index 793c0a0f8e5c..eba9d4c773e4 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -1,5 +1,6 @@
import dataclasses
from typing import List
+import os
from . import Artifact, Job, Workflow
from .mangle import _get_workflows
@@ -314,7 +315,8 @@ def generate(self):
for artifact in job.artifacts_gh_provides:
uploads_github.append(
YamlGenerator.Templates.TEMPLATE_GH_UPLOAD.format(
- NAME=artifact.name, PATH=artifact.path
+ NAME=artifact.name,
+ PATH=os.path.relpath(artifact.path, os.getcwd()),
)
)
downloads_github = []
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
index 509a03244c41..edbb8a0d56c2 100644
--- a/ci/workflows/pull_request_external.py
+++ b/ci/workflows/pull_request_external.py
@@ -1,4 +1,5 @@
-from praktika import Workflow
+import copy
+from praktika import Workflow, Artifact
from ci.defs.defs import BASE_BRANCH, DOCKERS, SECRETS, ArtifactConfigs, JobNames
from ci.defs.job_configs import JobConfigs
@@ -107,8 +108,14 @@
post_hooks=[],
)
-for job in workflow.jobs:
- job.enable_commit_status = False
+# NOTE (strtgbb): use deepcopy to avoid modifying workflows generated after this one
+for i, job in enumerate(workflow.jobs):
+ workflow.jobs[i] = copy.deepcopy(job)
+ workflow.jobs[i].enable_commit_status = False
+
+for i, artifact in enumerate(workflow.artifacts):
+ workflow.artifacts[i] = copy.deepcopy(artifact)
+ workflow.artifacts[i].type = Artifact.Type.GH
WORKFLOWS = [
workflow,
From cde24dc92a546e17fb5673ba915a44cf52cec9ba Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 15:44:02 -0500
Subject: [PATCH 08/15] azure storage_account_url needs a sane default
---
tests/config/config.d/azure_storage_conf.xml | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/tests/config/config.d/azure_storage_conf.xml b/tests/config/config.d/azure_storage_conf.xml
index eb7c4d154d34..3104fdd8d040 100644
--- a/tests/config/config.d/azure_storage_conf.xml
+++ b/tests/config/config.d/azure_storage_conf.xml
@@ -6,7 +6,9 @@
azure
false
33554432
-            <storage_account_url></storage_account_url>
+            <storage_account_url>
+                https://127.0.0.1:10000/devstoreaccount1
+            </storage_account_url>
From 44a1b19b3e428c3b545ea0a9a90fdf0c02984c93 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 15:54:49 -0500
Subject: [PATCH 09/15] job skip logic should be present even if cache is not
 enabled, to support PR workflow config
---
.github/workflows/pull_request_external.yml | 85 +++++++++++++++++++++
.github/workflows/release_builds.yml | 21 +++++
ci/praktika/yaml_generator.py | 16 ++--
ci/settings/altinity_overrides.py | 3 +
4 files changed, 117 insertions(+), 8 deletions(-)
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index 3c39318eee08..c15f44d2bbf5 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -86,6 +86,7 @@ jobs:
fast_test:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }}
name: "Fast test"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -130,6 +131,7 @@ jobs:
build_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }}
name: "Build (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -187,6 +189,7 @@ jobs:
build_amd_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
name: "Build (amd_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -265,6 +268,7 @@ jobs:
build_amd_asan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }}
name: "Build (amd_asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -329,6 +333,7 @@ jobs:
build_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }}
name: "Build (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -393,6 +398,7 @@ jobs:
build_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }}
name: "Build (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -457,6 +463,7 @@ jobs:
build_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }}
name: "Build (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -521,6 +528,7 @@ jobs:
build_amd_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }}
name: "Build (amd_binary)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -571,6 +579,7 @@ jobs:
build_arm_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
name: "Build (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -649,6 +658,7 @@ jobs:
build_arm_coverage:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9jb3ZlcmFnZSk=') }}
name: "Build (arm_coverage)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -706,6 +716,7 @@ jobs:
build_arm_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }}
name: "Build (arm_binary)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -756,6 +767,7 @@ jobs:
unit_tests_asan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAoYXNhbik=') }}
name: "Unit tests (asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -806,6 +818,7 @@ jobs:
unit_tests_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAodHNhbik=') }}
name: "Unit tests (tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -856,6 +869,7 @@ jobs:
unit_tests_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAobXNhbik=') }}
name: "Unit tests (msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -906,6 +920,7 @@ jobs:
unit_tests_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, build_amd_ubsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAodWJzYW4p') }}
name: "Unit tests (ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -956,6 +971,7 @@ jobs:
stateless_tests_amd_asan_distributed_plan_parallel_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDEvMik=') }}
name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1006,6 +1022,7 @@ jobs:
stateless_tests_amd_asan_distributed_plan_parallel_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDIvMik=') }}
name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1056,6 +1073,7 @@ jobs:
stateless_tests_amd_asan_distributed_plan_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgc2VxdWVudGlhbCk=') }}
name: "Stateless tests (amd_asan, distributed plan, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1106,6 +1124,7 @@ jobs:
stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }}
name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1156,6 +1175,7 @@ jobs:
stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }}
name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1206,6 +1226,7 @@ jobs:
stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1256,6 +1277,7 @@ jobs:
stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1306,6 +1328,7 @@ jobs:
stateless_tests_amd_debug_asyncinsert_s3_storage_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1356,6 +1379,7 @@ jobs:
stateless_tests_amd_debug_asyncinsert_s3_storage_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1406,6 +1430,7 @@ jobs:
stateless_tests_amd_debug_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHBhcmFsbGVsKQ==') }}
name: "Stateless tests (amd_debug, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1456,6 +1481,7 @@ jobs:
stateless_tests_amd_debug_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHNlcXVlbnRpYWwp') }}
name: "Stateless tests (amd_debug, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1506,6 +1532,7 @@ jobs:
stateless_tests_amd_tsan_parallel_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDEvMik=') }}
name: "Stateless tests (amd_tsan, parallel, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1556,6 +1583,7 @@ jobs:
stateless_tests_amd_tsan_parallel_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDIvMik=') }}
name: "Stateless tests (amd_tsan, parallel, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1606,6 +1634,7 @@ jobs:
stateless_tests_amd_tsan_sequential_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }}
name: "Stateless tests (amd_tsan, sequential, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1656,6 +1685,7 @@ jobs:
stateless_tests_amd_tsan_sequential_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }}
name: "Stateless tests (amd_tsan, sequential, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1706,6 +1736,7 @@ jobs:
stateless_tests_amd_msan_parallel_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDEvMik=') }}
name: "Stateless tests (amd_msan, parallel, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1756,6 +1787,7 @@ jobs:
stateless_tests_amd_msan_parallel_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDIvMik=') }}
name: "Stateless tests (amd_msan, parallel, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1806,6 +1838,7 @@ jobs:
stateless_tests_amd_msan_sequential_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }}
name: "Stateless tests (amd_msan, sequential, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1856,6 +1889,7 @@ jobs:
stateless_tests_amd_msan_sequential_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }}
name: "Stateless tests (amd_msan, sequential, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1906,6 +1940,7 @@ jobs:
stateless_tests_amd_ubsan_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHBhcmFsbGVsKQ==') }}
name: "Stateless tests (amd_ubsan, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1956,6 +1991,7 @@ jobs:
stateless_tests_amd_ubsan_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHNlcXVlbnRpYWwp') }}
name: "Stateless tests (amd_ubsan, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2006,6 +2042,7 @@ jobs:
stateless_tests_amd_debug_distributed_plan_s3_storage_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHBhcmFsbGVsKQ==') }}
name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2056,6 +2093,7 @@ jobs:
stateless_tests_amd_debug_distributed_plan_s3_storage_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHNlcXVlbnRpYWwp') }}
name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2106,6 +2144,7 @@ jobs:
stateless_tests_amd_tsan_s3_storage_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgcGFyYWxsZWwp') }}
name: "Stateless tests (amd_tsan, s3 storage, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2156,6 +2195,7 @@ jobs:
stateless_tests_amd_tsan_s3_storage_sequential_1_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMS8yKQ==') }}
name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2206,6 +2246,7 @@ jobs:
stateless_tests_amd_tsan_s3_storage_sequential_2_2:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMi8yKQ==') }}
name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2256,6 +2297,7 @@ jobs:
stateless_tests_arm_binary_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }}
name: "Stateless tests (arm_binary, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2306,6 +2348,7 @@ jobs:
stateless_tests_arm_binary_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }}
name: "Stateless tests (arm_binary, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2356,6 +2399,7 @@ jobs:
bugfix_validation_integration_tests:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, fast_test]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVnZml4IHZhbGlkYXRpb24gKGludGVncmF0aW9uIHRlc3RzKQ==') }}
name: "Bugfix validation (integration tests)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2400,6 +2444,7 @@ jobs:
bugfix_validation_functional_tests:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVnZml4IHZhbGlkYXRpb24gKGZ1bmN0aW9uYWwgdGVzdHMp') }}
name: "Bugfix validation (functional tests)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2444,6 +2489,7 @@ jobs:
stateless_tests_amd_asan_flaky_check:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZmxha3kgY2hlY2sp') }}
name: "Stateless tests (amd_asan, flaky check)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2494,6 +2540,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_1_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDEvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 1/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2544,6 +2591,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_2_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDIvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 2/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2594,6 +2642,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_3_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDMvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 3/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2644,6 +2693,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_4_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDQvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 4/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2694,6 +2744,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_5_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDUvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 5/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2744,6 +2795,7 @@ jobs:
integration_tests_amd_asan_old_analyzer_6_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDYvNik=') }}
name: "Integration tests (amd_asan, old analyzer, 6/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2794,6 +2846,7 @@ jobs:
integration_tests_amd_binary_1_5:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDEvNSk=') }}
name: "Integration tests (amd_binary, 1/5)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2844,6 +2897,7 @@ jobs:
integration_tests_amd_binary_2_5:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDIvNSk=') }}
name: "Integration tests (amd_binary, 2/5)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2894,6 +2948,7 @@ jobs:
integration_tests_amd_binary_3_5:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDMvNSk=') }}
name: "Integration tests (amd_binary, 3/5)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2944,6 +2999,7 @@ jobs:
integration_tests_amd_binary_4_5:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDQvNSk=') }}
name: "Integration tests (amd_binary, 4/5)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -2994,6 +3050,7 @@ jobs:
integration_tests_amd_binary_5_5:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDUvNSk=') }}
name: "Integration tests (amd_binary, 5/5)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3044,6 +3101,7 @@ jobs:
integration_tests_arm_binary_distributed_plan_1_4:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDEvNCk=') }}
name: "Integration tests (arm_binary, distributed plan, 1/4)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3094,6 +3152,7 @@ jobs:
integration_tests_arm_binary_distributed_plan_2_4:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDIvNCk=') }}
name: "Integration tests (arm_binary, distributed plan, 2/4)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3144,6 +3203,7 @@ jobs:
integration_tests_arm_binary_distributed_plan_3_4:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDMvNCk=') }}
name: "Integration tests (arm_binary, distributed plan, 3/4)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3194,6 +3254,7 @@ jobs:
integration_tests_arm_binary_distributed_plan_4_4:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDQvNCk=') }}
name: "Integration tests (arm_binary, distributed plan, 4/4)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3244,6 +3305,7 @@ jobs:
integration_tests_amd_tsan_1_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAxLzYp') }}
name: "Integration tests (amd_tsan, 1/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3294,6 +3356,7 @@ jobs:
integration_tests_amd_tsan_2_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAyLzYp') }}
name: "Integration tests (amd_tsan, 2/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3344,6 +3407,7 @@ jobs:
integration_tests_amd_tsan_3_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAzLzYp') }}
name: "Integration tests (amd_tsan, 3/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3394,6 +3458,7 @@ jobs:
integration_tests_amd_tsan_4_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA0LzYp') }}
name: "Integration tests (amd_tsan, 4/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3444,6 +3509,7 @@ jobs:
integration_tests_amd_tsan_5_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA1LzYp') }}
name: "Integration tests (amd_tsan, 5/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3494,6 +3560,7 @@ jobs:
integration_tests_amd_tsan_6_6:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA2LzYp') }}
name: "Integration tests (amd_tsan, 6/6)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3544,6 +3611,7 @@ jobs:
integration_tests_amd_asan_flaky_check:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_asan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBmbGFreSBjaGVjayk=') }}
name: "Integration tests (amd_asan, flaky check)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3594,6 +3662,7 @@ jobs:
docker_server_image:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }}
name: "Docker server image"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3638,6 +3707,7 @@ jobs:
docker_keeper_image:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }}
name: "Docker keeper image"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3682,6 +3752,7 @@ jobs:
install_packages_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX2RlYnVnKQ==') }}
name: "Install packages (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3739,6 +3810,7 @@ jobs:
compatibility_check_release:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, build_amd_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAocmVsZWFzZSk=') }}
name: "Compatibility check (release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3783,6 +3855,7 @@ jobs:
compatibility_check_aarch64:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
needs: [config_workflow, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAoYWFyY2g2NCk=') }}
name: "Compatibility check (aarch64)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3827,6 +3900,7 @@ jobs:
stress_test_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_debug, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF9kZWJ1Zyk=') }}
name: "Stress test (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3871,6 +3945,7 @@ jobs:
stress_test_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_tsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF90c2FuKQ==') }}
name: "Stress test (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3915,6 +3990,7 @@ jobs:
stress_test_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_ubsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF91YnNhbik=') }}
name: "Stress test (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -3959,6 +4035,7 @@ jobs:
stress_test_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_msan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF9tc2FuKQ==') }}
name: "Stress test (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4003,6 +4080,7 @@ jobs:
ast_fuzzer_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX2RlYnVnKQ==') }}
name: "AST fuzzer (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4053,6 +4131,7 @@ jobs:
ast_fuzzer_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX3RzYW4p') }}
name: "AST fuzzer (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4103,6 +4182,7 @@ jobs:
ast_fuzzer_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX21zYW4p') }}
name: "AST fuzzer (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4153,6 +4233,7 @@ jobs:
ast_fuzzer_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX3Vic2FuKQ==') }}
name: "AST fuzzer (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4203,6 +4284,7 @@ jobs:
buzzhouse_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfZGVidWcp') }}
name: "BuzzHouse (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4253,6 +4335,7 @@ jobs:
buzzhouse_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfdHNhbik=') }}
name: "BuzzHouse (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4303,6 +4386,7 @@ jobs:
buzzhouse_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfbXNhbik=') }}
name: "BuzzHouse (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -4353,6 +4437,7 @@ jobs:
buzzhouse_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfdWJzYW4p') }}
name: "BuzzHouse (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml
index 5a5e3b495bd1..35aa1922bebd 100644
--- a/.github/workflows/release_builds.yml
+++ b/.github/workflows/release_builds.yml
@@ -89,6 +89,7 @@ jobs:
dockers_build_amd:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYW1kKQ==') }}
name: "Dockers Build (amd)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -136,6 +137,7 @@ jobs:
dockers_build_arm:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
needs: [config_workflow]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYXJtKQ==') }}
name: "Dockers Build (arm)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -183,6 +185,7 @@ jobs:
build_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }}
name: "Build (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -230,6 +233,7 @@ jobs:
build_amd_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
name: "Build (amd_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -277,6 +281,7 @@ jobs:
build_amd_asan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }}
name: "Build (amd_asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -324,6 +329,7 @@ jobs:
build_amd_tsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }}
name: "Build (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -371,6 +377,7 @@ jobs:
build_amd_msan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }}
name: "Build (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -418,6 +425,7 @@ jobs:
build_amd_ubsan:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }}
name: "Build (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -465,6 +473,7 @@ jobs:
build_amd_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }}
name: "Build (amd_binary)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -512,6 +521,7 @@ jobs:
build_arm_release:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
name: "Build (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -559,6 +569,7 @@ jobs:
build_arm_binary:
runs-on: [self-hosted, altinity-on-demand, altinity-builder]
needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }}
name: "Build (arm_binary)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -606,6 +617,7 @@ jobs:
docker_server_image:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }}
name: "Docker server image"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -653,6 +665,7 @@ jobs:
docker_keeper_image:
runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }}
name: "Docker keeper image"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -700,6 +713,7 @@ jobs:
install_packages_amd_release:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX3JlbGVhc2Up') }}
name: "Install packages (amd_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -747,6 +761,7 @@ jobs:
install_packages_arm_release:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_release]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYXJtX3JlbGVhc2Up') }}
name: "Install packages (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -794,6 +809,7 @@ jobs:
stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }}
name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -841,6 +857,7 @@ jobs:
stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }}
name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -888,6 +905,7 @@ jobs:
stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -935,6 +953,7 @@ jobs:
stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -982,6 +1001,7 @@ jobs:
stateless_tests_arm_binary_parallel:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }}
name: "Stateless tests (arm_binary, parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
@@ -1029,6 +1049,7 @@ jobs:
stateless_tests_arm_binary_sequential:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_binary]
+ if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }}
name: "Stateless tests (arm_binary, sequential)"
outputs:
data: ${{ steps.run.outputs.DATA }}
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index eba9d4c773e4..7238676436fa 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -332,25 +332,25 @@ def generate(self):
)
if_expression = ""
+ # NOTE (strtgbb): We still want the cache logic, we use it for skipping based on PR config
if (
- self.workflow_config.config.enable_cache
- and job_name_normalized != config_job_name_normalized
+ # self.workflow_config.config.enable_cache
+ # and
+ job_name_normalized != config_job_name_normalized
):
if_expression = YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION.format(
WORKFLOW_CONFIG_JOB_NAME=config_job_name_normalized,
JOB_NAME_BASE64=Utils.to_base64(job_name),
)
+ elif self.workflow_config.name == "Community PR":
+ # TODO: replace this hack with a proper configuration
+ if_expression = "\n if: ${{ github.actor == 'strtgbb' }}"
+
if job.run_unless_cancelled:
if_expression = (
YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION_NOT_CANCELLED
)
- # TODO: replace this hack with a proper configuration
- if (
- job_name == Settings.CI_CONFIG_JOB_NAME
- and self.workflow_config.name == "Community PR"
- ):
- if_expression = "\n if: ${{ github.actor == 'strtgbb' }}"
secrets_envs = []
# note(strtgbb): This adds github secrets to praktika_setup_env.sh
diff --git a/ci/settings/altinity_overrides.py b/ci/settings/altinity_overrides.py
index dc91e4ada82c..5c2c550d51d5 100644
--- a/ci/settings/altinity_overrides.py
+++ b/ci/settings/altinity_overrides.py
@@ -49,6 +49,9 @@ class RunnerLabels:
DISABLED_WORKFLOWS = [
"new_pull_request.py",
+ "nightly_statistics.py",
+ "nightly_jepsen.py",
+ "VectorSearchStress.py",
]
DEFAULT_LOCAL_TEST_WORKFLOW = "pull_request.py"
From e162911fc5b05dcae35a3cd0131e3bffaf0c38c9 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 3 Dec 2025 17:33:26 -0500
Subject: [PATCH 10/15] disable azure tests for external PRs
---
ci/jobs/functional_tests.py | 10 ++++++++--
ci/praktika/info.py | 4 ++++
tests/config/config.d/azure_storage_conf.xml | 4 +---
3 files changed, 13 insertions(+), 5 deletions(-)
diff --git a/ci/jobs/functional_tests.py b/ci/jobs/functional_tests.py
index 02d148e2bd5f..cd9a7865afa4 100644
--- a/ci/jobs/functional_tests.py
+++ b/ci/jobs/functional_tests.py
@@ -191,13 +191,19 @@ def main():
runner_options += f" --jobs {nproc}"
if not info.is_local_run:
+ # NOTE(strtgbb): We pass azure credentials through the docker command, not SSM.
# TODO: find a way to work with Azure secret so it's ok for local tests as well, for now keep azure disabled
# os.environ["AZURE_CONNECTION_STRING"] = Shell.get_output(
# f"aws ssm get-parameter --region us-east-1 --name azure_connection_string --with-decryption --output text --query Parameter.Value",
# verbose=True,
# )
- # NOTE(strtgbb): We pass azure credentials through the docker command, not SSM.
- pass
+
+ # NOTE(strtgbb): Azure credentials don't exist in community workflow
+ if info.is_community_pr:
+ print(
+ "NOTE: No azure credentials provided for community PR - disable azure storage"
+ )
+ config_installs_args += " --no-azure"
else:
print("Disable azure for a local run")
config_installs_args += " --no-azure"
diff --git a/ci/praktika/info.py b/ci/praktika/info.py
index 9683acb39c5b..a44248da8313 100644
--- a/ci/praktika/info.py
+++ b/ci/praktika/info.py
@@ -93,6 +93,10 @@ def repo_owner(self):
def fork_name(self):
return self.env.FORK_NAME
+ @property
+ def is_community_pr(self):
+ return "Community" in self.env.WORKFLOW_NAME
+
@property
def user_name(self):
return self.env.USER_LOGIN
diff --git a/tests/config/config.d/azure_storage_conf.xml b/tests/config/config.d/azure_storage_conf.xml
index 3104fdd8d040..eb7c4d154d34 100644
--- a/tests/config/config.d/azure_storage_conf.xml
+++ b/tests/config/config.d/azure_storage_conf.xml
@@ -6,9 +6,7 @@
azure
false
33554432
-
- https://127.0.0.1:10000/devstoreaccount1
-
+
From 539e0de211b7e22f92165513c92b818cbae663a0 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Thu, 4 Dec 2025 10:51:08 -0500
Subject: [PATCH 11/15] community workflow: fix integration tests db query and
remove compat check
---
.github/workflows/backport_branches.yml | 1 +
.github/workflows/master.yml | 1 +
.github/workflows/merge_queue.yml | 1 +
.github/workflows/pull_request.yml | 1 +
.github/workflows/pull_request_external.yml | 90 ---------------------
.github/workflows/release_branches.yml | 1 +
.github/workflows/release_builds.yml | 1 +
ci/jobs/scripts/integration_tests_runner.py | 2 +-
ci/praktika/yaml_additional_templates.py | 1 +
ci/workflows/pull_request_external.py | 4 -
10 files changed, 8 insertions(+), 95 deletions(-)
diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml
index f814b5e3c16e..f61c70f198c6 100644
--- a/.github/workflows/backport_branches.yml
+++ b/.github/workflows/backport_branches.yml
@@ -28,6 +28,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 9bae1567fa75..af4708aeaa1c 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -27,6 +27,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/.github/workflows/merge_queue.yml b/.github/workflows/merge_queue.yml
index b1eec174dc98..b1aa4c48e3e0 100644
--- a/.github/workflows/merge_queue.yml
+++ b/.github/workflows/merge_queue.yml
@@ -18,6 +18,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index 28d1c7a6f9c5..c7f2e6ae18b4 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -28,6 +28,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index c15f44d2bbf5..46c019958634 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -3807,96 +3807,6 @@ jobs:
python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
- compatibility_check_release:
- runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
- needs: [config_workflow, build_amd_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAocmVsZWFzZSk=') }}
- name: "Compatibility check (release)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Compatibility check (release)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Compatibility check (release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Compatibility check (release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
- compatibility_check_aarch64:
- runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
- needs: [config_workflow, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAoYWFyY2g2NCk=') }}
- name: "Compatibility check (aarch64)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Compatibility check (aarch64)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Compatibility check (aarch64)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Compatibility check (aarch64)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
stress_test_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, build_amd_debug, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 22f1b3a03a88..97112df6bc4d 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -27,6 +27,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml
index 35aa1922bebd..9cd82ad46042 100644
--- a/.github/workflows/release_builds.yml
+++ b/.github/workflows/release_builds.yml
@@ -17,6 +17,7 @@ env:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
diff --git a/ci/jobs/scripts/integration_tests_runner.py b/ci/jobs/scripts/integration_tests_runner.py
index f3ad2a8d7db6..720897503576 100755
--- a/ci/jobs/scripts/integration_tests_runner.py
+++ b/ci/jobs/scripts/integration_tests_runner.py
@@ -29,7 +29,7 @@
CLICKHOUSE_PLAY_HOST = os.environ.get("CHECKS_DATABASE_HOST", "play.clickhouse.com")
CLICKHOUSE_PLAY_USER = os.environ.get("CLICKHOUSE_TEST_STAT_LOGIN", "play")
CLICKHOUSE_PLAY_PASSWORD = os.environ.get("CLICKHOUSE_TEST_STAT_PASSWORD", "")
-CLICKHOUSE_PLAY_DB = os.environ.get("CLICKHOUSE_PLAY_DB", "gh-data")
+CLICKHOUSE_PLAY_DB = os.environ.get("CLICKHOUSE_PLAY_DB", "default")
CLICKHOUSE_PLAY_URL = f"https://{CLICKHOUSE_PLAY_HOST}:8443/"
diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py
index 6eb522901061..5353703a58ea 100644
--- a/ci/praktika/yaml_additional_templates.py
+++ b/ci/praktika/yaml_additional_templates.py
@@ -3,6 +3,7 @@ class AltinityWorkflowTemplates:
# Braces must be escaped
ADDITIONAL_GLOBAL_ENV = r""" AWS_DEFAULT_REGION: ${{{{ secrets.AWS_DEFAULT_REGION }}}}
CHECKS_DATABASE_HOST: ${{{{ secrets.CHECKS_DATABASE_HOST }}}}
+ CLICKHOUSE_PLAY_DB: gh-data
AZURE_STORAGE_KEY: ${{{{ secrets.AZURE_STORAGE_KEY }}}}
AZURE_ACCOUNT_NAME: ${{{{ secrets.AZURE_ACCOUNT_NAME }}}}
AZURE_CONTAINER_NAME: ${{{{ secrets.AZURE_CONTAINER_NAME }}}}
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
index edbb8a0d56c2..a1b07fa45654 100644
--- a/ci/workflows/pull_request_external.py
+++ b/ci/workflows/pull_request_external.py
@@ -58,10 +58,6 @@
job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
for job in JobConfigs.install_check_jobs
],
- *[
- job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
- for job in JobConfigs.compatibility_test_jobs
- ],
*[
job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
for job in JobConfigs.stress_test_jobs
From b378eccdb9fb077649126768e660f13cd1c14b49 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Thu, 4 Dec 2025 13:56:14 -0500
Subject: [PATCH 12/15] community workflow: remove more jobs that don't work
---
.github/workflows/pull_request_external.yml | 270 --------------------
ci/workflows/pull_request_external.py | 14 +-
2 files changed, 4 insertions(+), 280 deletions(-)
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index 46c019958634..0ba093fb24de 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -3659,96 +3659,6 @@ jobs:
python3 -m praktika run 'Integration tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
- docker_server_image:
- runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
- needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }}
- name: "Docker server image"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Docker server image"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Docker server image' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Docker server image' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
- docker_keeper_image:
- runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
- needs: [config_workflow, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }}
- name: "Docker keeper image"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Docker keeper image"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Docker keeper image' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Docker keeper image' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
install_packages_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
@@ -3807,186 +3717,6 @@ jobs:
python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
fi
- stress_test_amd_debug:
- runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
- needs: [config_workflow, build_amd_debug, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF9kZWJ1Zyk=') }}
- name: "Stress test (amd_debug)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Stress test (amd_debug)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Stress test (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Stress test (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
- stress_test_amd_tsan:
- runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
- needs: [config_workflow, build_amd_tsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF90c2FuKQ==') }}
- name: "Stress test (amd_tsan)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Stress test (amd_tsan)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Stress test (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Stress test (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
- stress_test_amd_ubsan:
- runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
- needs: [config_workflow, build_amd_ubsan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF91YnNhbik=') }}
- name: "Stress test (amd_ubsan)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Stress test (amd_ubsan)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Stress test (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Stress test (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
- stress_test_amd_msan:
- runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
- needs: [config_workflow, build_amd_msan, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
- if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3QgKGFtZF9tc2FuKQ==') }}
- name: "Stress test (amd_msan)"
- outputs:
- data: ${{ steps.run.outputs.DATA }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ env.CHECKOUT_REF }}
-
- - name: Setup
- uses: ./.github/actions/runner_setup
- - name: Docker setup
- uses: ./.github/actions/docker_setup
- with:
- test_name: "Stress test (amd_msan)"
-
- - name: Prepare env script
- run: |
- rm -rf ./ci/tmp ./ci/tmp ./ci/tmp
- mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp
- cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
- export PYTHONPATH=./ci:.:
- cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF'
- ${{ needs.config_workflow.outputs.data }}
- EOF
- cat > ./ci/tmp/workflow_status.json << 'EOF'
- ${{ toJson(needs) }}
- EOF
- ENV_SETUP_SCRIPT_EOF
-
- - name: Run
- id: run
- run: |
- . ./ci/tmp/praktika_setup_env.sh
- set -o pipefail
- if command -v ts &> /dev/null; then
- python3 -m praktika run 'Stress test (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
- else
- python3 -m praktika run 'Stress test (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
- fi
-
ast_fuzzer_amd_debug:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug]
diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py
index a1b07fa45654..c96077a72d6a 100644
--- a/ci/workflows/pull_request_external.py
+++ b/ci/workflows/pull_request_external.py
@@ -48,20 +48,14 @@
for job in JobConfigs.integration_test_jobs_non_required
],
JobConfigs.integration_test_asan_flaky_pr_job,
- JobConfigs.docker_sever.set_dependency(
- FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
- ),
- JobConfigs.docker_keeper.set_dependency(
- FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
- ),
*[
job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
for job in JobConfigs.install_check_jobs
],
- *[
- job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
- for job in JobConfigs.stress_test_jobs
- ],
+ # *[
+ # job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+ # for job in JobConfigs.stress_test_jobs
+ # ], # NOTE (strtgbb): stress test jobs cannot publish via GitHub artifacts, so they are disabled here
# *[
# job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
# for job in JobConfigs.upgrade_test_jobs
From 62a9da181340335f87bc6df93d7cec68446b7290 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Fri, 5 Dec 2025 09:30:35 -0500
Subject: [PATCH 13/15] try to fix tests trying to use azure when not available
---
ci/jobs/functional_tests.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/ci/jobs/functional_tests.py b/ci/jobs/functional_tests.py
index cd9a7865afa4..091ed806c544 100644
--- a/ci/jobs/functional_tests.py
+++ b/ci/jobs/functional_tests.py
@@ -204,6 +204,9 @@ def main():
"NOTE: No azure credentials provided for community PR - disable azure storage"
)
config_installs_args += " --no-azure"
+
+ # NOTE(strtgbb): --no-azure alone is not sufficient — some tests still attempt Azure access, so also disable it via the environment:
+ os.environ["USE_AZURE_STORAGE_FOR_MERGE_TREE"] = "0"
else:
print("Disable azure for a local run")
config_installs_args += " --no-azure"
From a8491fa7a6eb09bfde5289ef18439707a47eb841 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Fri, 5 Dec 2025 09:37:16 -0500
Subject: [PATCH 14/15] community workflow: remove broken report link
---
.github/workflows/backport_branches.yml | 1 +
.github/workflows/master.yml | 1 +
.github/workflows/merge_queue.yml | 1 +
.github/workflows/nightly_fuzzers.yml | 1 +
.github/workflows/pull_request.yml | 1 +
.github/workflows/pull_request_external.yml | 1 +
.github/workflows/release_branches.yml | 1 +
.github/workflows/release_builds.yml | 1 +
ci/praktika/yaml_additional_templates.py | 1 +
9 files changed, 9 insertions(+)
diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml
index f61c70f198c6..2cf948e1ef66 100644
--- a/.github/workflows/backport_branches.yml
+++ b/.github/workflows/backport_branches.yml
@@ -59,6 +59,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index af4708aeaa1c..d4f2b6a648aa 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -58,6 +58,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/merge_queue.yml b/.github/workflows/merge_queue.yml
index b1aa4c48e3e0..b213f147a2bd 100644
--- a/.github/workflows/merge_queue.yml
+++ b/.github/workflows/merge_queue.yml
@@ -48,6 +48,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/nightly_fuzzers.yml b/.github/workflows/nightly_fuzzers.yml
index 9d7a389e7318..1ea499b3b9e1 100644
--- a/.github/workflows/nightly_fuzzers.yml
+++ b/.github/workflows/nightly_fuzzers.yml
@@ -36,6 +36,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index c7f2e6ae18b4..fe28a11ca73a 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -59,6 +59,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml
index 0ba093fb24de..e3471434e5fc 100644
--- a/.github/workflows/pull_request_external.yml
+++ b/.github/workflows/pull_request_external.yml
@@ -46,6 +46,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 97112df6bc4d..03a4001db49c 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -58,6 +58,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml
index 9cd82ad46042..662e642adc19 100644
--- a/.github/workflows/release_builds.yml
+++ b/.github/workflows/release_builds.yml
@@ -47,6 +47,7 @@ jobs:
test_name: "Config Workflow"
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py
index 5353703a58ea..96b0e3e037ef 100644
--- a/ci/praktika/yaml_additional_templates.py
+++ b/ci/praktika/yaml_additional_templates.py
@@ -22,6 +22,7 @@ class AltinityWorkflowTemplates:
# Additional pre steps for config workflow job
ADDITIONAL_CI_CONFIG_STEPS = r"""
- name: Note report location to summary
+ if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
env:
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
From 0b5a37ae6295ab6b99d212e626198b9433e757b4 Mon Sep 17 00:00:00 2001
From: strtgbb <146047128+strtgbb@users.noreply.github.com>
Date: Tue, 9 Dec 2025 14:38:04 -0500
Subject: [PATCH 15/15] disable ch.start/stop_log_exports
---
ci/jobs/scripts/clickhouse_proc.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/ci/jobs/scripts/clickhouse_proc.py b/ci/jobs/scripts/clickhouse_proc.py
index f1983e8a7147..8276abf5e052 100644
--- a/ci/jobs/scripts/clickhouse_proc.py
+++ b/ci/jobs/scripts/clickhouse_proc.py
@@ -1037,8 +1037,10 @@ def set_random_timezone():
# FIXME: the start_time must be preserved globally in ENV or something like that
# to get the same values in different DBs
# As a wild idea, it could be stored in a Info.check_start_timestamp
+ exit(0) # Note (strtgbb): We don't use log exports
res = ch.start_log_exports(check_start_time=Utils.timestamp())
elif command == "logs_export_stop":
+ exit(0) # Note (strtgbb): We don't use log exports
res = ch.stop_log_exports()
elif command == "start_minio":
param = sys.argv[2]