diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 549efdf6bc38..2cf948e1ef66 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} @@ -27,15 +28,14 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -59,6 +59,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 8d0d79f2ee13..d4f2b6a648aa 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: 
"" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} @@ -26,15 +27,14 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -58,6 +58,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/merge_queue.yml b/.github/workflows/merge_queue.yml index aa7066382f26..b213f147a2bd 100644 --- a/.github/workflows/merge_queue.yml +++ b/.github/workflows/merge_queue.yml @@ -8,6 +8,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }} @@ -17,12 +18,12 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME 
}}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} jobs: @@ -47,6 +48,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/nightly_fuzzers.yml b/.github/workflows/nightly_fuzzers.yml index 84cb1e8e02b9..1ea499b3b9e1 100644 --- a/.github/workflows/nightly_fuzzers.yml +++ b/.github/workflows/nightly_fuzzers.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: @@ -35,6 +36,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/nightly_jepsen.yml b/.github/workflows/nightly_jepsen.yml index 1ff46f516f75..4201db8788aa 100644 --- a/.github/workflows/nightly_jepsen.yml +++ b/.github/workflows/nightly_jepsen.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: diff --git a/.github/workflows/nightly_statistics.yml b/.github/workflows/nightly_statistics.yml index 8a0e96858eb9..c9228837b508 100644 --- a/.github/workflows/nightly_statistics.yml +++ b/.github/workflows/nightly_statistics.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index ff2904b1d497..fe28a11ca73a 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -16,6 
+16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} @@ -27,15 +28,14 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -59,6 +59,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/pull_request_external.yml b/.github/workflows/pull_request_external.yml new file mode 100644 index 000000000000..e3471434e5fc --- /dev/null +++ b/.github/workflows/pull_request_external.yml @@ -0,0 +1,4127 @@ +# generated by praktika + +name: Community PR + +on: + workflow_dispatch: + inputs: + no_cache: + description: Run without cache + required: false + type: boolean + default: false + pull_request: + branches: ['antalya', 'releases/*', 'antalya-*'] + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} + 
DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} + DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} + CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} + + + +jobs: + + config_workflow: + runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] + needs: [] + if: ${{ github.actor == 'strtgbb' }} + name: "Config Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Config Workflow" + + - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} + env: + PR_NUMBER: ${{ github.event.pull_request.number || 0 }} + COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} + run: | + if [ "$PR_NUMBER" -eq 0 ]; then + PREFIX="REFs/$GITHUB_REF_NAME/$COMMIT_SHA" + else + PREFIX="PRs/$PR_NUMBER/$COMMIT_SHA" + fi + REPORT_LINK=https://s3.amazonaws.com/altinity-build-artifacts/$PREFIX/$GITHUB_RUN_ID/ci_run_report.html + echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + fast_test: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }} + name: "Fast test" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Fast test" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + build_amd_debug: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} + name: "Build (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_debug)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_AMD_DEBUG + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_DEBUG + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_DEBUG + path: ci/tmp/*.deb + + build_amd_release: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} + name: "Build (amd_release)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_release)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_AMD_RELEASE + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_RELEASE + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact CH_AMD_RELEASE_STRIPPED + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_RELEASE_STRIPPED + path: ci/tmp/build/programs/self-extracting/clickhouse-stripped + + + - name: Upload artifact DEB_AMD_RELEASE + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_RELEASE + path: ci/tmp/*.deb + + + - name: Upload artifact RPM_AMD_RELEASE + uses: actions/upload-artifact@v4 + with: + name: RPM_AMD_RELEASE + path: ci/tmp/*.rpm + + + - name: Upload artifact TGZ_AMD_RELEASE + uses: actions/upload-artifact@v4 + with: + name: TGZ_AMD_RELEASE + path: ci/tmp/*64.tgz* + + build_amd_asan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }} + name: "Build (amd_asan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_asan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ 
needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_ASAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_ASAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_ASAN + path: ci/tmp/*.deb + + build_amd_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }} + name: "Build (amd_tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run 
+ run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_TSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_TSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_TSAN + path: ci/tmp/*.deb + + build_amd_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }} + name: "Build (amd_msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_MSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_MSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_MSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_MSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_MSAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_MSAN + path: ci/tmp/*.deb + + build_amd_ubsan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }} + name: "Build (amd_ubsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_ubsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_UBSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_UBSAN + path: ci/tmp/*.deb + + build_amd_binary: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }} + name: "Build (amd_binary)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_binary)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_AMD_BINARY + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_BINARY + path: ci/tmp/build/programs/self-extracting/clickhouse + + build_arm_release: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} + name: "Build (arm_release)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (arm_release)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_ARM_RELEASE + uses: actions/upload-artifact@v4 + with: + name: CH_ARM_RELEASE + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact CH_ARM_RELEASE_STRIPPED + uses: actions/upload-artifact@v4 + with: + name: CH_ARM_RELEASE_STRIPPED + path: ci/tmp/build/programs/self-extracting/clickhouse-stripped + + + - name: Upload artifact DEB_ARM_RELEASE + uses: actions/upload-artifact@v4 + with: + name: DEB_ARM_RELEASE + path: ci/tmp/*.deb + + + - name: Upload artifact RPM_ARM_RELEASE + uses: actions/upload-artifact@v4 + with: + name: RPM_ARM_RELEASE + path: ci/tmp/*.rpm + + + - name: Upload artifact TGZ_ARM_RELEASE + uses: actions/upload-artifact@v4 + with: + name: TGZ_ARM_RELEASE + path: ci/tmp/*64.tgz* + + build_arm_coverage: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9jb3ZlcmFnZSk=') }} + name: "Build (arm_coverage)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (arm_coverage)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ 
needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (arm_coverage)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (arm_coverage)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_COV_BIN + uses: actions/upload-artifact@v4 + with: + name: CH_COV_BIN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_COV + uses: actions/upload-artifact@v4 + with: + name: DEB_COV + path: ci/tmp/*.deb + + build_arm_binary: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }} + name: "Build (arm_binary)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (arm_binary)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_ARM_BIN + uses: actions/upload-artifact@v4 + with: + name: CH_ARM_BIN + path: ci/tmp/build/programs/self-extracting/clickhouse + + unit_tests_asan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAoYXNhbik=') }} + name: "Unit tests (asan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Unit tests (asan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact UNITTEST_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: UNITTEST_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + unit_tests_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAodHNhbik=') }} + name: "Unit tests (tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Unit tests (tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact UNITTEST_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: UNITTEST_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + unit_tests_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAobXNhbik=') }} + name: "Unit tests (msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Unit tests (msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact UNITTEST_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: UNITTEST_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + unit_tests_ubsan: + runs-on: [self-hosted, altinity-on-demand, altinity-builder] + needs: [config_workflow, build_amd_ubsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'VW5pdCB0ZXN0cyAodWJzYW4p') }} + name: "Unit tests (ubsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Unit tests (ubsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact UNITTEST_AMD_UBSAN + uses: actions/download-artifact@v4 + with: + name: UNITTEST_AMD_UBSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_asan_distributed_plan_parallel_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDEvMik=') }} + name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_asan_distributed_plan_parallel_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDIvMik=') }} + name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_asan_distributed_plan_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgc2VxdWVudGlhbCk=') }} + name: "Stateless tests (amd_asan, distributed plan, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_asan, distributed plan, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }} + name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: 
+ name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }} + name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 
'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }} + name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ 
needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }} + name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > 
./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_asyncinsert_s3_storage_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }} + name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export 
PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_asyncinsert_s3_storage_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }} + name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + 
export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHBhcmFsbGVsKQ==') }} + name: "Stateless tests (amd_debug, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: 
actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHNlcXVlbnRpYWwp') }} + name: "Stateless tests (amd_debug, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_parallel_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDEvMik=') }} + name: "Stateless tests (amd_tsan, parallel, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, parallel, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_parallel_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDIvMik=') }} + name: "Stateless tests (amd_tsan, parallel, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, parallel, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_sequential_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }} + name: "Stateless tests (amd_tsan, sequential, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, sequential, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_sequential_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }} + name: "Stateless tests (amd_tsan, sequential, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, sequential, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_msan_parallel_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDEvMik=') }} + name: "Stateless tests (amd_msan, parallel, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_msan, parallel, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_msan_parallel_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDIvMik=') }} + name: "Stateless tests (amd_msan, parallel, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_msan, parallel, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_msan_sequential_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }} + name: "Stateless tests (amd_msan, sequential, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_msan, sequential, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_msan_sequential_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }} + name: "Stateless tests (amd_msan, sequential, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_msan, sequential, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_ubsan_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHBhcmFsbGVsKQ==') }} + name: "Stateless tests (amd_ubsan, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_ubsan, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_UBSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_ubsan_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHNlcXVlbnRpYWwp') }} + name: "Stateless tests (amd_ubsan, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_ubsan, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_UBSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_distributed_plan_s3_storage_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHBhcmFsbGVsKQ==') }} + name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_debug_distributed_plan_s3_storage_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHNlcXVlbnRpYWwp') }} + name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - 
name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_s3_storage_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgcGFyYWxsZWwp') }} + name: "Stateless tests (amd_tsan, s3 storage, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, s3 storage, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_s3_storage_sequential_1_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMS8yKQ==') }} + name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_s3_storage_sequential_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMi8yKQ==') }} + name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_arm_binary_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }} + name: "Stateless tests (arm_binary, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_binary, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_arm_binary_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }} + name: "Stateless tests (arm_binary, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_binary, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + bugfix_validation_integration_tests: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVnZml4IHZhbGlkYXRpb24gKGludGVncmF0aW9uIHRlc3RzKQ==') }} + name: "Bugfix validation (integration tests)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Bugfix validation (integration tests)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Bugfix validation (integration tests)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Bugfix validation (integration tests)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + bugfix_validation_functional_tests: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVnZml4IHZhbGlkYXRpb24gKGZ1bmN0aW9uYWwgdGVzdHMp') }} + name: "Bugfix validation (functional tests)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Bugfix validation (functional tests)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Bugfix validation (functional tests)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Bugfix validation (functional tests)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_asan_flaky_check: + runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] + needs: [config_workflow, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZmxha3kgY2hlY2sp') }} + name: "Stateless tests (amd_asan, flaky check)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_asan, flaky check)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_1_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDEvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 1/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 1/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_2_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDIvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 2/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 2/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_3_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDMvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 3/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 3/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_4_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDQvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 4/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 4/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_5_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDUvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 5/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 5/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_old_analyzer_6_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBvbGQgYW5hbHl6ZXIsIDYvNik=') }} + name: "Integration tests (amd_asan, old analyzer, 6/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, old analyzer, 6/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, old analyzer, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_1_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDEvNSk=') }} + name: "Integration tests (amd_binary, 1/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 1/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_2_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDIvNSk=') }} + name: "Integration tests (amd_binary, 2/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 2/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_3_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDMvNSk=') }} + name: "Integration tests (amd_binary, 3/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 3/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_4_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDQvNSk=') }} + name: "Integration tests (amd_binary, 4/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 4/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_5_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDUvNSk=') }} + name: "Integration tests (amd_binary, 5/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 5/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_1_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDEvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 1/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 1/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_2_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDIvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 2/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 2/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_3_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDMvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 3/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 3/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_4_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDQvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 4/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 4/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_1_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAxLzYp') }} + name: "Integration tests (amd_tsan, 1/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 1/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_2_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAyLzYp') }} + name: "Integration tests (amd_tsan, 2/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 2/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_3_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAzLzYp') }} + name: "Integration tests (amd_tsan, 3/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 3/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_4_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA0LzYp') }} + name: "Integration tests (amd_tsan, 4/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 4/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_5_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA1LzYp') }} + name: "Integration tests (amd_tsan, 5/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 5/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_6_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA2LzYp') }} + name: "Integration tests (amd_tsan, 6/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 6/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_flaky_check: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, build_amd_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBmbGFreSBjaGVjayk=') }} + name: "Integration tests (amd_asan, flaky check)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, flaky check)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, flaky check)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + install_packages_amd_debug: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX2RlYnVnKQ==') }} + name: "Install packages (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Install packages (amd_debug)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + + - name: Download artifact DEB_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: DEB_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Install packages (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + ast_fuzzer_amd_debug: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX2RlYnVnKQ==') }} + name: "AST fuzzer (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "AST fuzzer (amd_debug)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'AST fuzzer (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'AST fuzzer (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + ast_fuzzer_amd_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX3RzYW4p') }} + name: "AST fuzzer (amd_tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "AST fuzzer (amd_tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'AST fuzzer (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'AST fuzzer (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + ast_fuzzer_amd_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX21zYW4p') }} + name: "AST fuzzer (amd_msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "AST fuzzer (amd_msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'AST fuzzer (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'AST fuzzer (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + ast_fuzzer_amd_ubsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QVNUIGZ1enplciAoYW1kX3Vic2FuKQ==') }} + name: "AST fuzzer (amd_ubsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "AST fuzzer (amd_ubsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_UBSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'AST fuzzer (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'AST fuzzer (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + buzzhouse_amd_debug: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfZGVidWcp') }} + name: "BuzzHouse (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "BuzzHouse (amd_debug)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_DEBUG + uses: actions/download-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'BuzzHouse (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'BuzzHouse (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + buzzhouse_amd_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_tsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfdHNhbik=') }} + name: "BuzzHouse (amd_tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "BuzzHouse (amd_tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'BuzzHouse (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'BuzzHouse (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + buzzhouse_amd_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_msan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfbXNhbik=') }} + name: "BuzzHouse (amd_msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "BuzzHouse (amd_msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_MSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_MSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'BuzzHouse (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'BuzzHouse (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + buzzhouse_amd_ubsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel, build_amd_ubsan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnV6ekhvdXNlIChhbWRfdWJzYW4p') }} + name: "BuzzHouse (amd_ubsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "BuzzHouse (amd_ubsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp ./ci/tmp ./ci/tmp + mkdir -p ./ci/tmp ./ci/tmp ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + cat > ./ci/tmp/workflow_config_community_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_UBSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'BuzzHouse (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'BuzzHouse (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 12e370e00207..03a4001db49c 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: "" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} @@ -26,15 +27,14 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -58,6 +58,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 00bfb1015b15..662e642adc19 100644 --- 
a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -7,6 +7,7 @@ on: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }} @@ -16,12 +17,12 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }} AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }} AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} jobs: @@ -46,6 +47,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -89,6 +91,7 @@ jobs: dockers_build_amd: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYW1kKQ==') }} name: "Dockers Build (amd)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -136,6 +139,7 @@ jobs: dockers_build_arm: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYXJtKQ==') }} name: "Dockers Build (arm)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -183,6 +187,7 @@ jobs: build_amd_debug: runs-on: [self-hosted, 
altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} name: "Build (amd_debug)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -230,6 +235,7 @@ jobs: build_amd_release: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} name: "Build (amd_release)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -277,6 +283,7 @@ jobs: build_amd_asan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }} name: "Build (amd_asan)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -324,6 +331,7 @@ jobs: build_amd_tsan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }} name: "Build (amd_tsan)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -371,6 +379,7 @@ jobs: build_amd_msan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }} name: "Build (amd_msan)" outputs: data: ${{ 
steps.run.outputs.DATA }} @@ -418,6 +427,7 @@ jobs: build_amd_ubsan: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }} name: "Build (amd_ubsan)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -465,6 +475,7 @@ jobs: build_amd_binary: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }} name: "Build (amd_binary)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -512,6 +523,7 @@ jobs: build_arm_release: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} name: "Build (arm_release)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -559,6 +571,7 @@ jobs: build_arm_binary: runs-on: [self-hosted, altinity-on-demand, altinity-builder] needs: [config_workflow, dockers_build_amd, dockers_build_arm] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }} name: "Build (arm_binary)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -606,6 +619,7 @@ jobs: docker_server_image: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }} name: "Docker 
server image" outputs: data: ${{ steps.run.outputs.DATA }} @@ -653,6 +667,7 @@ jobs: docker_keeper_image: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }} name: "Docker keeper image" outputs: data: ${{ steps.run.outputs.DATA }} @@ -700,6 +715,7 @@ jobs: install_packages_amd_release: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX3JlbGVhc2Up') }} name: "Install packages (amd_release)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -747,6 +763,7 @@ jobs: install_packages_arm_release: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYXJtX3JlbGVhc2Up') }} name: "Install packages (arm_release)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -794,6 +811,7 @@ jobs: stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }} name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)" outputs: 
data: ${{ steps.run.outputs.DATA }} @@ -841,6 +859,7 @@ jobs: stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }} name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -888,6 +907,7 @@ jobs: stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }} name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -935,6 +955,7 @@ jobs: stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }} name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -982,6 +1003,7 @@ jobs: stateless_tests_arm_binary_parallel: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] needs: [config_workflow, 
dockers_build_amd, dockers_build_arm, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }} name: "Stateless tests (arm_binary, parallel)" outputs: data: ${{ steps.run.outputs.DATA }} @@ -1029,6 +1051,7 @@ jobs: stateless_tests_arm_binary_sequential: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_binary] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }} name: "Stateless tests (arm_binary, sequential)" outputs: data: ${{ steps.run.outputs.DATA }} diff --git a/.github/workflows/vectorsearchstress.yml b/.github/workflows/vectorsearchstress.yml index e3d123cf315f..b151328d44fb 100644 --- a/.github/workflows/vectorsearchstress.yml +++ b/.github/workflows/vectorsearchstress.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py index 18dc461f5efd..57de3261677f 100644 --- a/ci/jobs/build_clickhouse.py +++ b/ci/jobs/build_clickhouse.py @@ -113,6 +113,13 @@ def main(): os.environ["SCCACHE_IDLE_TIMEOUT"] = "7200" os.environ["SCCACHE_BUCKET"] = Settings.S3_ARTIFACT_PATH os.environ["SCCACHE_S3_KEY_PREFIX"] = "ccache/sccache" + if "Community" in info.workflow_name: + print("NOTE: Community contribution - set sccache to run without AWS credentials") + os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1" + # NOTE (strtgbb): sccache will throw an error if AWS credentials are present with SCCACHE_S3_NO_CREDENTIALS=1 + os.environ.pop("AWS_SECRET_ACCESS_KEY", None) + os.environ.pop("AWS_ACCESS_KEY_ID", None) + os.environ["CTCACHE_LOG_LEVEL"] = "debug" os.environ["CTCACHE_DIR"] = 
f"{build_dir}/ccache/clang-tidy-cache" os.environ["CTCACHE_S3_BUCKET"] = Settings.S3_ARTIFACT_PATH diff --git a/ci/jobs/fast_test.py b/ci/jobs/fast_test.py index 860a14f6ca02..684a169cc353 100644 --- a/ci/jobs/fast_test.py +++ b/ci/jobs/fast_test.py @@ -145,6 +145,13 @@ def main(): os.environ["SCCACHE_IDLE_TIMEOUT"] = "7200" os.environ["SCCACHE_BUCKET"] = Settings.S3_ARTIFACT_PATH os.environ["SCCACHE_S3_KEY_PREFIX"] = "ccache/sccache" + if "Community" in Info().workflow_name: + print("NOTE: Community contribution - set sccache to run without AWS credentials") + os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1" + # NOTE (strtgbb): sccache will throw an error if AWS credentials are present with SCCACHE_S3_NO_CREDENTIALS=1 + os.environ.pop("AWS_SECRET_ACCESS_KEY", None) + os.environ.pop("AWS_ACCESS_KEY_ID", None) + Shell.check("sccache --show-stats", verbose=True) Utils.add_to_PATH(f"{build_dir}/programs:{current_directory}/tests") diff --git a/ci/jobs/functional_tests.py b/ci/jobs/functional_tests.py index 02d148e2bd5f..091ed806c544 100644 --- a/ci/jobs/functional_tests.py +++ b/ci/jobs/functional_tests.py @@ -191,13 +191,22 @@ def main(): runner_options += f" --jobs {nproc}" if not info.is_local_run: + # NOTE(strtgbb): We pass azure credentials through the docker command, not SSM. # TODO: find a way to work with Azure secret so it's ok for local tests as well, for now keep azure disabled # os.environ["AZURE_CONNECTION_STRING"] = Shell.get_output( # f"aws ssm get-parameter --region us-east-1 --name azure_connection_string --with-decryption --output text --query Parameter.Value", # verbose=True, # ) - # NOTE(strtgbb): We pass azure credentials through the docker command, not SSM. 
- pass + + # NOTE(strtgbb): Azure credentials don't exist in community workflow + if info.is_community_pr: + print( + "NOTE: No azure credentials provided for community PR - disable azure storage" + ) + config_installs_args += " --no-azure" + + # NOTE(strtgbb): With the above, some tests are still trying to use azure, try this: + os.environ["USE_AZURE_STORAGE_FOR_MERGE_TREE"] = "0" else: print("Disable azure for a local run") config_installs_args += " --no-azure" diff --git a/ci/jobs/scripts/clickhouse_proc.py b/ci/jobs/scripts/clickhouse_proc.py index f1983e8a7147..8276abf5e052 100644 --- a/ci/jobs/scripts/clickhouse_proc.py +++ b/ci/jobs/scripts/clickhouse_proc.py @@ -1037,8 +1037,10 @@ def set_random_timezone(): # FIXME: the start_time must be preserved globally in ENV or something like that # to get the same values in different DBs # As a wild idea, it could be stored in a Info.check_start_timestamp + exit(0) # Note (strtgbb): We don't use log exports res = ch.start_log_exports(check_start_time=Utils.timestamp()) elif command == "logs_export_stop": + exit(0) # Note (strtgbb): We don't use log exports res = ch.stop_log_exports() elif command == "start_minio": param = sys.argv[2] diff --git a/ci/jobs/scripts/integration_tests_runner.py b/ci/jobs/scripts/integration_tests_runner.py index f3ad2a8d7db6..720897503576 100755 --- a/ci/jobs/scripts/integration_tests_runner.py +++ b/ci/jobs/scripts/integration_tests_runner.py @@ -29,7 +29,7 @@ CLICKHOUSE_PLAY_HOST = os.environ.get("CHECKS_DATABASE_HOST", "play.clickhouse.com") CLICKHOUSE_PLAY_USER = os.environ.get("CLICKHOUSE_TEST_STAT_LOGIN", "play") CLICKHOUSE_PLAY_PASSWORD = os.environ.get("CLICKHOUSE_TEST_STAT_PASSWORD", "") -CLICKHOUSE_PLAY_DB = os.environ.get("CLICKHOUSE_PLAY_DB", "gh-data") +CLICKHOUSE_PLAY_DB = os.environ.get("CLICKHOUSE_PLAY_DB", "default") CLICKHOUSE_PLAY_URL = f"https://{CLICKHOUSE_PLAY_HOST}:8443/" diff --git a/ci/praktika/info.py b/ci/praktika/info.py index 9683acb39c5b..a44248da8313 100644 
--- a/ci/praktika/info.py +++ b/ci/praktika/info.py @@ -93,6 +93,10 @@ def repo_owner(self): def fork_name(self): return self.env.FORK_NAME + @property + def is_community_pr(self): + return "Community" in self.env.WORKFLOW_NAME + @property def user_name(self): return self.env.USER_LOGIN diff --git a/ci/praktika/parser.py b/ci/praktika/parser.py index 3df2b847dac0..64bb20a795f1 100644 --- a/ci/praktika/parser.py +++ b/ci/praktika/parser.py @@ -236,7 +236,8 @@ def parse(self): assert ( False ), f"Artifact [{artifact_name}] has unsupported type [{artifact.type}]" - if artifact.type == Artifact.Type.GH: + # NOTE (strtgbb): Added a check that provided_by is not empty, which is the case for artifacts that are defined in defs.py but not used in a workflow + if artifact.type == Artifact.Type.GH and artifact.provided_by != "": self.workflow_yaml_config.job_to_config[ artifact.provided_by ].artifacts_gh_provides.append(artifact) diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py index a31ba2b7ea75..96b0e3e037ef 100644 --- a/ci/praktika/yaml_additional_templates.py +++ b/ci/praktika/yaml_additional_templates.py @@ -3,12 +3,12 @@ class AltinityWorkflowTemplates: # Braces must be escaped ADDITIONAL_GLOBAL_ENV = r""" AWS_DEFAULT_REGION: ${{{{ secrets.AWS_DEFAULT_REGION }}}} CHECKS_DATABASE_HOST: ${{{{ secrets.CHECKS_DATABASE_HOST }}}} + CLICKHOUSE_PLAY_DB: gh-data AZURE_STORAGE_KEY: ${{{{ secrets.AZURE_STORAGE_KEY }}}} AZURE_ACCOUNT_NAME: ${{{{ secrets.AZURE_ACCOUNT_NAME }}}} AZURE_CONTAINER_NAME: ${{{{ secrets.AZURE_CONTAINER_NAME }}}} AZURE_STORAGE_ACCOUNT_URL: "https://${{{{ secrets.AZURE_ACCOUNT_NAME }}}}.blob.core.windows.net/" ROBOT_TOKEN: ${{{{ secrets.ROBOT_TOKEN }}}} - GH_TOKEN: ${{{{ github.token }}}} """ # Additional pre steps for all jobs JOB_SETUP_STEPS = """ @@ -22,6 +22,7 @@ class AltinityWorkflowTemplates: # Additional pre steps for config workflow job ADDITIONAL_CI_CONFIG_STEPS = r""" - name: Note report location to 
summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py index 3793e6d07250..7238676436fa 100644 --- a/ci/praktika/yaml_generator.py +++ b/ci/praktika/yaml_generator.py @@ -1,5 +1,6 @@ import dataclasses from typing import List +import os from . import Artifact, Job, Workflow from .mangle import _get_workflows @@ -55,20 +56,21 @@ class Templates: jobs: {JOBS}\ """ - TEMPLATE_GH_TOKEN_PERMISSIONS = """\ -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all\ -""" + # NOTE (strtgbb): This is dangerous to set for untrusted workflows, and for trusted workflows it should already be the default + TEMPLATE_GH_TOKEN_PERMISSIONS = "" TEMPLATE_ENV_CHECKOUT_REF_PR = """\ + GH_TOKEN: ${{{{ github.token }}}} DISABLE_CI_MERGE_COMMIT: ${{{{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}}} DISABLE_CI_CACHE: ${{{{ github.event.inputs.no_cache || '0' }}}} CHECKOUT_REF: ${{{{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}}}\ """ TEMPLATE_ENV_CHECKOUT_REF_PUSH = """\ + GH_TOKEN: ${{{{ github.token }}}} DISABLE_CI_CACHE: ${{{{ github.event.inputs.no_cache || '0' }}}} CHECKOUT_REF: ""\ """ TEMPLATE_ENV_CHECKOUT_REF_DEFAULT = """\ + GH_TOKEN: ${{{{ github.token }}}} CHECKOUT_REF: ""\ """ TEMPLATE_ENV_SECRET = """\ @@ -313,7 +315,8 @@ def generate(self): for artifact in job.artifacts_gh_provides: uploads_github.append( YamlGenerator.Templates.TEMPLATE_GH_UPLOAD.format( - NAME=artifact.name, PATH=artifact.path + NAME=artifact.name, + PATH=os.path.relpath(artifact.path, os.getcwd()), ) ) downloads_github = [] @@ -329,19 +332,26 @@ def generate(self): ) if_expression = "" + # NOTE (strtgbb): We still want the cache logic, we use it for 
skipping based on PR config if ( - self.workflow_config.config.enable_cache - and job_name_normalized != config_job_name_normalized + # self.workflow_config.config.enable_cache + # and + job_name_normalized != config_job_name_normalized ): if_expression = YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION.format( WORKFLOW_CONFIG_JOB_NAME=config_job_name_normalized, JOB_NAME_BASE64=Utils.to_base64(job_name), ) + elif self.workflow_config.name == "Community PR": + # TODO: replace this hack with a proper configuration + if_expression = "\n if: ${{ github.actor == 'strtgbb' }}" + if job.run_unless_cancelled: if_expression = ( YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION_NOT_CANCELLED ) + secrets_envs = [] # note(strtgbb): This adds github secrets to praktika_setup_env.sh # This makes the workflow very verbose and we don't need it @@ -486,7 +496,12 @@ def generate(self): VAR_NAME=secret.name ) format_kwargs["ENV_SECRETS"] = GH_VAR_ENVS + SECRET_ENVS - format_kwargs["ENV_SECRETS"] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV + + if self.parser.config.secrets: + # Only add global env if there are secrets in workflow config + format_kwargs[ + "ENV_SECRETS" + ] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV template_1 = base_template.strip().format( NAME=self.workflow_config.name, diff --git a/ci/settings/altinity_overrides.py b/ci/settings/altinity_overrides.py index dc91e4ada82c..5c2c550d51d5 100644 --- a/ci/settings/altinity_overrides.py +++ b/ci/settings/altinity_overrides.py @@ -49,6 +49,9 @@ class RunnerLabels: DISABLED_WORKFLOWS = [ "new_pull_request.py", + "nightly_statistics.py", + "nightly_jepsen.py", + "VectorSearchStress.py", ] DEFAULT_LOCAL_TEST_WORKFLOW = "pull_request.py" diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 6a30b58b651e..2185a889ad6e 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -56,7 +56,7 @@ JobConfigs.bugfix_validation_it_job.set_dependency( [ # JobNames.STYLE_CHECK, # 
NOTE (strtgbb): we don't run style check - # JobNames.FAST_TEST, # NOTE (strtgbb): we don't run fast tests + # JobNames.FAST_TEST, # NOTE (strtgbb): this takes too long, revisit later # JobConfigs.tidy_build_arm_jobs[0].name, # NOTE (strtgbb): we don't run tidy build jobs ] ), diff --git a/ci/workflows/pull_request_external.py b/ci/workflows/pull_request_external.py new file mode 100644 index 000000000000..c96077a72d6a --- /dev/null +++ b/ci/workflows/pull_request_external.py @@ -0,0 +1,112 @@ +import copy +from praktika import Workflow, Artifact + +from ci.defs.defs import BASE_BRANCH, DOCKERS, SECRETS, ArtifactConfigs, JobNames +from ci.defs.job_configs import JobConfigs +from ci.jobs.scripts.workflow_hooks.filter_job import should_skip_job + +FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES = [ + job.name + for job in JobConfigs.functional_tests_jobs + if any( + substr in job.name + for substr in ( + "_debug, parallel", + "_binary, parallel", + "_asan, distributed plan, parallel", + ) + ) +] + +REGULAR_BUILD_NAMES = [job.name for job in JobConfigs.build_jobs] + +workflow = Workflow.Config( + name="Community PR", + event=Workflow.Event.PULL_REQUEST, + base_branches=[BASE_BRANCH, "releases/*", "antalya-*"], + jobs=[ + JobConfigs.fast_test, + *[job.set_dependency([JobNames.FAST_TEST]) for job in JobConfigs.build_jobs], + *JobConfigs.unittest_jobs, + *[ + j.set_dependency( + FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES + if j.name not in FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES + else [] + ) + for j in JobConfigs.functional_tests_jobs + ], + JobConfigs.bugfix_validation_it_job.set_dependency([JobNames.FAST_TEST]), + JobConfigs.bugfix_validation_ft_pr_job, + *JobConfigs.stateless_tests_flaky_pr_jobs, + *[ + job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + for job in JobConfigs.integration_test_jobs_required[:] + ], + *[ + job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + for job in JobConfigs.integration_test_jobs_non_required + ], 
+ JobConfigs.integration_test_asan_flaky_pr_job, + *[ + job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + for job in JobConfigs.install_check_jobs + ], + # *[ + # job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + # for job in JobConfigs.stress_test_jobs + # ], # NOTE (strtgbb): Does not support github artifacts + # *[ + # job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + # for job in JobConfigs.upgrade_test_jobs + # ], # TODO: customize for our repo + *[ + job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + for job in JobConfigs.ast_fuzzer_jobs + ], + *[ + job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES) + for job in JobConfigs.buzz_fuzzer_jobs + ], + ], + artifacts=[ + *ArtifactConfigs.unittests_binaries, + *ArtifactConfigs.clickhouse_binaries, + *ArtifactConfigs.clickhouse_stripped_binaries, + *ArtifactConfigs.clickhouse_debians, + *ArtifactConfigs.clickhouse_rpms, + *ArtifactConfigs.clickhouse_tgzs, + ArtifactConfigs.fuzzers, + ArtifactConfigs.fuzzers_corpus, + ], + dockers=DOCKERS, + disable_dockers_build=True, + enable_dockers_manifest_merge=False, + secrets=[], + enable_job_filtering_by_changes=False, # TODO: Change this back? 
+ enable_cache=False, + enable_report=False, + enable_cidb=False, + enable_merge_ready_status=False, + enable_commit_status_on_failure=False, + pre_hooks=[ + "python3 ./ci/jobs/scripts/workflow_hooks/store_data.py", + "python3 ./ci/jobs/scripts/workflow_hooks/version_log.py", + "python3 ./ci/jobs/scripts/workflow_hooks/parse_ci_tags.py", + ], + workflow_filter_hooks=[should_skip_job], + post_hooks=[], +) + +# NOTE (strtgbb): use deepcopy to avoid modifying workflows generated after this one +for i, job in enumerate(workflow.jobs): + workflow.jobs[i] = copy.deepcopy(job) + workflow.jobs[i].enable_commit_status = False + +for i, artifact in enumerate(workflow.artifacts): + workflow.artifacts[i] = copy.deepcopy(artifact) + workflow.artifacts[i].type = Artifact.Type.GH + +WORKFLOWS = [ + workflow, +]