diff --git a/.github/actions/docker_setup/action.yml b/.github/actions/docker_setup/action.yml new file mode 100644 index 000000000000..2748ce1aecdb --- /dev/null +++ b/.github/actions/docker_setup/action.yml @@ -0,0 +1,38 @@ +name: Docker setup +description: Setup docker +inputs: + nested_job: + description: the fuse for unintended use inside of the reusable callable jobs + default: true + type: boolean + DOCKER_USERNAME: + description: username for the dockerhub login + required: true + type: string + DOCKER_PASSWORD: + description: password for the dockerhub login + required: true + type: string +runs: + using: "composite" + steps: + - name: Docker IPv6 configuration + shell: bash + run: | + # make sure docker uses proper IPv6 config + sudo touch /etc/docker/daemon.json + sudo chown ubuntu:ubuntu /etc/docker/daemon.json + sudo cat < /etc/docker/daemon.json + { + "ipv6": true, + "fixed-cidr-v6": "2001:3984:3989::/64" + } + EOT + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo systemctl status docker + - name: Docker login + shell: bash + run: | + docker login -u ${{ inputs.DOCKER_USERNAME }} -p ${{ inputs.DOCKER_PASSWORD }} + docker info diff --git a/.github/retry.sh b/.github/retry.sh new file mode 100755 index 000000000000..566c2cf11315 --- /dev/null +++ b/.github/retry.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Execute command until exitcode is 0 or +# maximum number of retries is reached +# Example: +# ./retry +retries=$1 +delay=$2 +command="${@:3}" +exitcode=0 +try=0 +until [ "$try" -ge $retries ] +do + echo "$command" + eval "$command" + exitcode=$? + if [ $exitcode -eq 0 ]; then + break + fi + try=$((try+1)) + sleep $2 +done +exit $exitcode diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 000000000000..014d6b9ea238 --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,580 @@ +name: Regression test workflow - Release +'on': + workflow_call: + inputs: + runner_type: + description: the label of runner to use, can be a simple string or a comma-separated list + required: true + type: string + commit: + description: commit hash of the regression tests. + required: true + type: string + arch: + description: arch to run the tests on. + required: true + type: string + timeout_minutes: + description: Maximum number of minutes to let workflow run before GitHub cancels it. + default: 210 + type: number + build_sha: + description: commit sha of the workflow run for artifact upload. + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true + AWS_REPORT_KEY_ID: + description: aws s3 key id used for regression test reports. + required: true + AWS_REPORT_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression test reports. 
+ required: true + AWS_REPORT_REGION: + description: aws s3 region used for regression test reports. + required: true + REGRESSION_AWS_S3_BUCKET: + description: aws s3 bucket used for regression tests. + required: true + REGRESSION_AWS_S3_KEY_ID: + description: aws s3 key id used for regression tests. + required: true + REGRESSION_AWS_S3_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression tests. + required: true + REGRESSION_AWS_S3_REGION: + description: aws s3 region used for regression tests. + required: true + REGRESSION_GCS_KEY_ID: + description: gcs key id used for regression tests. + required: true + REGRESSION_GCS_KEY_SECRET: + description: gcs key secret used for regression tests. + required: true + REGRESSION_GCS_URI: + description: gcs uri used for regression tests. + required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output classic + --parallel 1 + --log raw.log + artifacts: builds + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Common: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, alter, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} 
suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Alter: + strategy: + fail-fast: false + matrix: + ONLY: [replace, attach, move] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=alter + STORAGE=/${{ matrix.ONLY }}_partition + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u alter/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --only "/alter/${{ matrix.ONLY }} partition/*" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.ONLY }}_partition-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ 
inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ClickHouseKeeperSSL: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + path: ${{ env.artifact_paths }} + + LDAP: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: 
actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + Parquet: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ParquetS3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout 
regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + STORAGE=${{ matrix.STORAGE}} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + S3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" 
project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + TieredStorage: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_urls_package_${{ inputs.arch }} + - name: Rename report + run: | + mv ${{ env.REPORTS_PATH }}/build_urls_package_${{ inputs.arch }}.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_binary_path }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0c7db644d025..8ff345fee70e 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,11 +6,6 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows - REGRESSION_COMMON_COMMIT: 1108c52f249af64885c255f39192ba3cc4c145ab - REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 
- REGRESSION_KEY_VALUE_COMMIT: e072060fba19d3f81a96f4c5cbe9c5d0b1dcfa9d - on: # yamllint disable-line rule:truthy pull_request: @@ -28,6 +23,8 @@ on: # yamllint disable-line rule:truthy push: branches: - 'releases/22.8**' + schedule: + - cron: "0 0 * * 6" jobs: # DockerHubPushAarch64: @@ -46,10 +43,10 @@ jobs: # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json # Former DockerHubPushAmd64 DockerHubPush: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-snapshot-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check @@ -62,7 +59,7 @@ jobs: run: | mv ${{ runner.temp }}/docker_images_check/changed_images_amd64.json ${{ runner.temp }}/docker_images_check/changed_images.json - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: changed_images path: ${{ runner.temp }}/docker_images_check/changed_images.json @@ -93,7 +90,7 @@ jobs: # path: ${{ runner.temp }}/changed_images.json CompatibilityCheck: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Set envs run: | @@ -103,12 +100,13 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: CompatibilityCheck run: | @@ -129,7 +127,7 @@ jobs: ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] + runs-on: [self-hosted, altinity-on-demand, altinity-type-ccx53, altinity-in-ash, altinity-snapshot-x86-22.8_ccx53_ubuntu_22.04_builder, altinity-setup-none] steps: - name: Set envs run: | @@ -142,14 +140,14 @@ jobs: CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable EOF - name: Download changed images - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: changed_images path: ${{ env.IMAGES_PATH }} - name: Trust My Directory run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: true @@ -159,11 +157,11 @@ jobs: sudo rm -fr "$TEMP_PATH" mkdir -p "$TEMP_PATH/build_check/package_release" cd .. 
&& tar czf $TEMP_PATH/build_source.src.tar.gz ClickHouse/ - cd $TEMP_PATH && tar xvzf $TEMP_PATH/build_source.src.tar.gz + cd $TEMP_PATH && tar xzf $TEMP_PATH/build_source.src.tar.gz cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json @@ -225,10 +223,10 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself @@ -254,7 +252,7 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-setup-none, altinity-type-cax11, altinity-image-arm-snapshot-22.8_ubuntu_22.04] if: ${{ success() || failure() }} steps: - name: Set envs @@ -267,11 +265,12 @@ jobs: NEEDS_DATA_PATH=${{runner.temp}}/needs.json EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Report Builder @@ -337,10 +336,10 @@ jobs: # - BuilderBinDarwinAarch64 - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-setup-none, altinity-type-cax11, altinity-image-arm-snapshot-22.8_ubuntu_22.04] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Mark Commit Release Ready @@ -352,7 +351,7 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-snapshot-x86-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Set envs run: | @@ -364,11 +363,12 @@ jobs: KILL_TIMEOUT=10800 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Functional test @@ -427,7 +427,7 @@ jobs: ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-snapshot-x86-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Set envs run: | @@ -439,11 +439,12 @@ jobs: KILL_TIMEOUT=3600 EOF - name: Download json reports - uses: actions/download-artifact@v3 + uses: 
actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Functional test @@ -502,7 +503,7 @@ jobs: ############################################################################################# IntegrationTestsRelease0: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-snapshot-x86-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Set envs run: | @@ -514,14 +515,25 @@ jobs: RUN_BY_HASH_NUM=0 RUN_BY_HASH_TOTAL=2 EOF + - name: Download changed images + uses: actions/download-artifact@v4 + with: + name: changed_images + path: ${{ env.REPORTS_PATH }} - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Integration test run: | sudo rm -fr "$TEMP_PATH" @@ -539,7 +551,7 @@ jobs: sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease1: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-snapshot-x86-22.8_cpx51_ubuntu_22.04_tester, altinity-setup-none] steps: - name: Set envs run: | @@ -551,14 +563,25 @@ jobs: RUN_BY_HASH_NUM=1 RUN_BY_HASH_TOTAL=2 EOF + - name: Download changed images + uses: actions/download-artifact@v4 + with: + name: changed_images + path: ${{ env.REPORTS_PATH }} - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - name: Integration test run: | sudo rm -fr "$TEMP_PATH" @@ -577,632 +600,20 @@ jobs: ############################################################################################# ##################################### REGRESSION TESTS ###################################### ############################################################################################# - regression_start: - ## Not depending on the tests above since they can fail at any given moment. 
- needs: [BuilderDebRelease] - runs-on: ubuntu-latest - steps: - - run: true - - regression_common: - strategy: - fail-fast: false - matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - benchmark: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - 
--gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - clickhouse_keeper_ssl: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - key_value: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ 
env.REGRESSION_KEY_VALUE_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=key_value - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - ldap: - strategy: - fail-fast: false - matrix: - SUITE: [authentication, external_user_directory, role_mapping] - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ldap-authentication-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - 
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/no_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet_minio: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/minio - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage minio - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-minio-artifacts - path: | - 
./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - parquet_aws: - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_PARQUET_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=parquet - STORAGE=/aws_s3 - artifacts=public - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage aws_s3 - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-aws_s3-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" 
project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - tiered_storage_s3: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [regression_start] - runs-on: [self-hosted, stress-tester] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - 
./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log + RegressionTestsRelease: + needs: [BuilderReport] + uses: ./.github/workflows/regression.yml + secrets: inherit + with: + runner_type: altinity-on-demand, altinity-type-cpx51, altinity-snapshot-x86-22.8_cpx51_ubuntu_22.04, altinity-setup-none + commit: 6da94b78dc53cb8965ab56c04a89ebf54ed04cbc + arch: release + build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} SignRelease: needs: [BuilderDebRelease] - runs-on: [ self-hosted ] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx11, altinity-in-ash, altinity-image-x86-snapshot-22.8_ubuntu_22.04, altinity-setup-none] + timeout-minutes: 180 steps: - name: Set envs run: | @@ -1214,10 +625,11 @@ jobs: run: | sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - name: Check out repository code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Download json reports - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: + name: build_urls_package_release path: ${{ env.REPORTS_PATH }} - name: Sign release env: @@ -1228,7 +640,7 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg @@ -1256,20 +668,11 @@ jobs: - IntegrationTestsRelease1 - CompatibilityCheck - SignRelease - - regression_common - - benchmark - - clickhouse_keeper_ssl - - key_value - - ldap - - parquet - - parquet_minio - - parquet_aws - - s3 - - tiered_storage_s3 - runs-on: [self-hosted, style-checker] + - RegressionTestsRelease + runs-on: [self-hosted, altinity-on-demand, altinity-setup-none, altinity-type-cax11, altinity-image-arm-snapshot-22.8_ubuntu_22.04] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Finish label diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index de9310732a8b..15958f5dab28 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -15,17 +15,14 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ && rm -rf apple-libtapi # Build and install tools for cross-linking to Darwin (x86-64) -RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ +# Build and install tools for cross-linking to Darwin (aarch64) +RUN git clone https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ + && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ --target=x86_64-apple-darwin \ && make install -j$(nproc) \ - && cd ../.. 
diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile
index 8d6321eb5d68..bddd37a27cb7 100644
--- a/docker/test/integration/base/Dockerfile
+++ b/docker/test/integration/base/Dockerfile
@@ -30,6 +30,7 @@ RUN apt-get update \
         python3-pip \
         libcurl4-openssl-dev \
         libssl-dev \
+        iptables='1.8.*' \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
diff --git a/docker/test/integration/helper_container/Dockerfile b/docker/test/integration/helper_container/Dockerfile
index 6a093081bf2c..0d2252dc6f11 100644
--- a/docker/test/integration/helper_container/Dockerfile
+++ b/docker/test/integration/helper_container/Dockerfile
@@ -1,5 +1,5 @@
 # docker build -t clickhouse/integration-helper .
 # Helper docker container to run iptables without sudo
 
-FROM alpine
-RUN apk add -U iproute2
+FROM alpine:3.18
+RUN apk add -U iproute2 iptables
diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile
index 4ac2df4eac84..79a86e3efeb0 100644
--- a/docker/test/integration/runner/Dockerfile
+++ b/docker/test/integration/runner/Dockerfile
@@ -8,33 +8,33 @@ RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
-        adduser \
+        adduser='3.*' \
         ca-certificates \
-        bash \
-        btrfs-progs \
-        e2fsprogs \
-        iptables \
-        xfsprogs \
+        bash='5.*' \
+        btrfs-progs='5.4.*' \
+        e2fsprogs='1.45.*' \
+        iptables='1.8.*' \
+        xfsprogs='5.3.*' \
         tar \
-        pigz \
+        pigz='2.4*' \
         wget \
         git \
-        iproute2 \
+        iproute2='5.5.*' \
         cgroupfs-mount \
         python3-pip \
         tzdata \
-        libicu-dev \
-        bsdutils \
+        libicu-dev='66.*' \
+        bsdutils='1:2.34*' \
         curl \
-        python3-pika \
+        python3-pika='0.11.*' \
         liblua5.1-dev \
-        luajit \
-        libssl-dev \
-        libcurl4-openssl-dev \
+        luajit='2.1.*' \
+        libssl-dev='1.1.*' \
+        libcurl4-openssl-dev='7.68.*' \
         gdb \
         software-properties-common \
-        libkrb5-dev \
-        krb5-user \
+        libkrb5-dev='1.17*' \
+        krb5-user='1.17*' \
         g++ \
     && rm -rf \
         /var/lib/apt/lists/* \
@@ -61,38 +61,38 @@ RUN dockerd --version; docker --version
 
 RUN python3 -m pip install --no-cache-dir \
     PyMySQL \
-    aerospike==4.0.0 \
-    avro==1.10.2 \
-    asyncio \
-    cassandra-driver \
-    confluent-kafka==1.5.0 \
-    dict2xml \
-    dicttoxml \
-    docker \
-    docker-compose==1.29.2 \
-    grpcio \
-    grpcio-tools \
-    kafka-python \
-    kazoo \
-    lz4 \
-    minio \
-    nats-py \
-    protobuf \
-    psycopg2-binary==2.8.6 \
-    pymongo==3.11.0 \
-    pytest \
-    pytest-order==1.0.0 \
-    pytest-timeout \
-    pytest-xdist \
-    pytest-repeat \
+    aerospike~=4.0 \
+    avro~=1.10 \
+    asyncio~=3.4 \
+    cassandra-driver~=3.28 \
+    confluent-kafka~=1.5 \
+    dict2xml~=1.7 \
+    dicttoxml~=1.7 \
+    docker~=6.1 \
+    docker-compose~=1.29 \
+    grpcio~=1.56 \
+    grpcio-tools~=1.56 \
+    kafka-python~=2.0 \
+    kazoo~=2.9 \
+    lz4~=4.3 \
+    minio~=7.1 \
+    nats-py~=2.3 \
+    protobuf~=4.24 \
+    psycopg2-binary~=2.8 \
+    pymongo~=3.11 \
+    pytest~=7.4 \
+    pytest-order~=1.0 \
+    pytest-timeout~=2.1 \
+    pytest-xdist~=3.3 \
+    pytest-repeat~=0.9 \
     pytz \
-    redis \
-    tzlocal==2.1 \
-    urllib3 \
-    requests-kerberos \
-    pyhdfs \
-    azure-storage-blob \
-    meilisearch==0.18.3
+    redis~=4.6 \
+    tzlocal~=2.1 \
+    urllib3~=1.25 \
+    requests-kerberos~=0.14 \
+    pyhdfs~=0.3 \
+    azure-storage-blob~=12.17 \
+    meilisearch~=0.18
 
 COPY modprobe.sh /usr/local/bin/modprobe
 COPY dockerd-entrypoint.sh /usr/local/bin/
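The pip pins above move from exact `==` versions to the compatible-release operator: `~=4.0` means ">=4.0, ==4.*", so bugfix and minor updates are accepted while the next major release is excluded. A quick check with the `packaging` library (assuming it is installed):

```python
# Compatible-release ("~=") semantics used for the pinned pip packages above.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=4.0")  # equivalent to ">=4.0, ==4.*"
print("4.0.0" in spec)  # True
print("4.9.1" in spec)  # True  - any later 4.x is accepted
print("5.0.0" in spec)  # False - the next major is excluded
```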
diff --git a/docker/test/stateful/Dockerfile b/docker/test/stateful/Dockerfile
index 3076f9fb6d5d..9529ab12ffcf 100644
--- a/docker/test/stateful/Dockerfile
+++ b/docker/test/stateful/Dockerfile
@@ -19,7 +19,8 @@ COPY s3downloader /s3downloader
 ENV S3_URL="https://clickhouse-datasets.s3.amazonaws.com"
 ENV DATASETS="hits visits"
 
-RUN npm install -g azurite
+# The following is already done in clickhouse/stateless-test
+# RUN npm install -g azurite
 
 COPY run.sh /
 CMD ["/bin/bash", "/run.sh"]
diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile
index 58ac030e46c1..64bf9772c95d 100644
--- a/docker/test/stateless/Dockerfile
+++ b/docker/test/stateless/Dockerfile
@@ -9,26 +9,26 @@ ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/down
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \
         apt-get install --yes --no-install-recommends \
-        awscli \
-        brotli \
+        awscli=1.18.* \
+        brotli=1.0.* \
         expect \
-        golang \
+        golang=2:1.13* \
         lsof \
         mysql-client=8.0* \
-        ncdu \
+        ncdu=1.14.* \
         netcat-openbsd \
-        nodejs \
-        npm \
+        nodejs=10.19.* \
+        npm=6.14.* \
         openjdk-11-jre-headless \
-        openssl \
+        openssl=1.1.1* \
         postgresql-client \
-        protobuf-compiler \
+        protobuf-compiler=3.6.* \
         python3 \
-        python3-lxml \
-        python3-pip \
-        python3-requests \
+        python3-lxml=4.5.* \
+        python3-pip=20.0.* \
+        python3-requests=2.22.* \
         python3-termcolor \
-        qemu-user-static \
+        qemu-user-static='1:4.2*' \
         sqlite3 \
         sudo \
         telnet \
@@ -38,12 +38,12 @@ RUN apt-get update -y \
         zstd \
         file \
         pv \
-        rpm2cpio \
-        cpio \
+        rpm2cpio=4.14.* \
+        cpio=2.13* \
     && apt-get clean
 
-RUN pip3 install numpy scipy pandas Jinja2
+RUN pip3 install numpy~=1.24 scipy~=1.10 pandas~=2.0 Jinja2~=3.1
 
 RUN mkdir -p /tmp/clickhouse-odbc-tmp \
     && wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \
@@ -71,7 +71,7 @@ RUN arch=${TARGETARCH:-amd64} \
     && chmod +x ./mc ./minio
 
-RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
+RUN wget 'https://archive.apache.org/dist/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
     && tar -xvf hadoop-3.3.1.tar.gz \
     && rm -rf hadoop-3.3.1.tar.gz
 
@@ -79,7 +79,7 @@ ENV MINIO_ROOT_USER="clickhouse"
 ENV MINIO_ROOT_PASSWORD="clickhouse"
 ENV EXPORT_S3_STORAGE_POLICIES=1
 
-RUN npm install -g azurite
+RUN npm install -g azurite@3.25.1
 
 COPY run.sh /
 COPY setup_minio.sh /
diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py
index 59da13a24fb0..c3d7f663ef7d 100644
--- a/tests/ci/build_check.py
+++ b/tests/ci/build_check.py
@@ -16,8 +16,10 @@
     GITHUB_JOB,
     IMAGES_PATH,
     REPO_COPY,
+    S3_ACCESS_KEY_ID,
     S3_BUILDS_BUCKET,
     S3_DOWNLOAD,
+    S3_SECRET_ACCESS_KEY,
     TEMP_PATH,
     CLICKHOUSE_STABLE_VERSION_SUFFIX,
 )
@@ -59,7 +61,6 @@ def get_packager_cmd(
     output_path: str,
     build_version: str,
     image_version: str,
-    ccache_path: str,
     official: bool,
 ) -> str:
     package_type = build_config["package_type"]
@@ -78,11 +79,11 @@ def get_packager_cmd(
     if build_config["tidy"] == "enable":
         cmd += " --clang-tidy"
 
-    # NOTE(vnemkov): we are going to continue to use ccache for now
-    cmd += " --cache=ccache"
-    cmd += f" --ccache-dir={ccache_path}"
+    cmd += " --cache=sccache"
     cmd += " --s3-rw-access"
     cmd += f" --s3-bucket={S3_BUILDS_BUCKET}"
+    cmd += f" --s3-access-key-id={S3_ACCESS_KEY_ID}"
+    cmd += f" --s3-secret-access-key={S3_SECRET_ACCESS_KEY}"
 
     if "additional_pkgs" in build_config and build_config["additional_pkgs"]:
         cmd += " --additional-pkgs"
@@ -274,7 +275,7 @@ def main():
     # put them as github actions artifact (result)
     check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)
 
-    docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
+    docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME, version=pr_info.docker_image_tag + "-amd64")
     image_version = docker_image.version
 
     logging.info("Got version from repo %s", version.string)
@@ -302,31 +303,12 @@ def main():
     if not os.path.exists(build_output_path):
         os.makedirs(build_output_path)
 
-    # NOTE(vnemkov): since we still want to use CCACHE over SCCACHE, unlike upstream,
-    # So we need to create local directory for that, just as with 22.8
-    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")
-
-    logging.info("Will try to fetch cache for our build")
-    try:
-        get_ccache_if_not_exists(
-            ccache_path, s3_helper, pr_info.number, TEMP_PATH, pr_info.release_pr
-        )
-    except Exception as e:
-        # In case there are issues with ccache, remove the path and do not fail a build
-        logging.info("Failed to get ccache, building without it. Error: %s", e)
-        rmtree(ccache_path, ignore_errors=True)
-
-    if not os.path.exists(ccache_path):
-        logging.info("cache was not fetched, will create empty dir")
-        os.makedirs(ccache_path)
-
     packager_cmd = get_packager_cmd(
         build_config,
         os.path.join(REPO_COPY, "docker/packager"),
         build_output_path,
         version.string,
         image_version,
-        ccache_path,
         official_flag,
     )
 
@@ -342,7 +324,6 @@ def main():
     subprocess.check_call(
         f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True
     )
-    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True)
     logging.info("Build finished with %s, log path %s", success, log_path)
     if not success:
         # We check if docker works, because if it's down, it's infrastructure
@@ -354,10 +335,6 @@ def main():
         )
         sys.exit(1)
 
-    # Upload the ccache first to have the least build time in case of problems
-    logging.info("Will upload cache")
-    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)
-
     # FIXME performance
     performance_urls = []
     performance_path = os.path.join(build_output_path, "performance.tgz")
@@ -395,7 +372,7 @@ def main():
     print(f"::notice ::Log URL: {log_url}")
 
     src_path = os.path.join(TEMP_PATH, "build_source.src.tar.gz")
-    
+
     if os.path.exists(src_path):
         src_url = s3_helper.upload_build_file_to_s3(
             src_path, s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz"
@@ -405,7 +382,7 @@ def main():
         logging.info("Source tar doesn't exist")
 
     print(f"::notice ::Source tar URL: {src_url}")
-    
+
     create_json_artifact(
         TEMP_PATH, build_name, log_url, build_urls, build_config, elapsed, success
     )
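With the ccache fetch/upload path removed, `get_packager_cmd` now only appends the sccache flags and S3 credentials shown above. The relevant fragment isolated as a sketch (not the full function, which also handles package type, compiler, and tidy options):

```python
# Illustrative fragment of the sccache-related part of get_packager_cmd above.
def sccache_args(s3_bucket: str, key_id: str, secret: str) -> str:
    cmd = " --cache=sccache"
    cmd += " --s3-rw-access"
    cmd += f" --s3-bucket={s3_bucket}"
    cmd += f" --s3-access-key-id={key_id}"
    cmd += f" --s3-secret-access-key={secret}"
    return cmd

print(sccache_args("altinity-build-artifacts", "<key-id>", "<secret>"))
```

Since sccache keeps its cache remotely in S3, the per-runner local cache directory, its S3 round-trips, and the associated `chown` calls all become unnecessary, which is what the deletions above reflect.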
diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py
index 2f3859380cf7..b0d75386c1fe 100644
--- a/tests/ci/docker_images_check.py
+++ b/tests/ci/docker_images_check.py
@@ -476,6 +476,7 @@ def main():
         description = format_description(description)
 
     with open(changed_json, "w", encoding="utf-8") as images_file:
+        logging.info(f"Updating {changed_json} with:\n{json.dumps(result_images)}")
         json.dump(result_images, images_file)
 
     s3_helper = S3Helper()
diff --git a/tests/ci/docker_pull_helper.py b/tests/ci/docker_pull_helper.py
index 1154cf9b8dd3..fb89145f2ea3 100644
--- a/tests/ci/docker_pull_helper.py
+++ b/tests/ci/docker_pull_helper.py
@@ -33,7 +33,7 @@ def get_images_with_versions(
             break
 
     if not images_path:
-        logging.info("Images file not found")
+        logging.info(f"Images file not found in {reports_path}")
     else:
         logging.info("Images file path %s", images_path)
 
@@ -44,6 +44,7 @@ def get_images_with_versions(
         logging.info("Got images %s", images)
     else:
         images = {}
+        logging.info("Images file doesn't exist")
 
     docker_images = []
     for image_name in required_image:
diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py
index 13a7a1ffd7e5..03e2ef8e403f 100644
--- a/tests/ci/env_helper.py
+++ b/tests/ci/env_helper.py
@@ -22,7 +22,9 @@
 REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports")))
 REPO_COPY = os.getenv("REPO_COPY", git_root)
 RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
+S3_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
 S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "altinity-build-artifacts")
+S3_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
 S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts")
 S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com")
 CLICKHOUSE_STABLE_VERSION_SUFFIX = os.getenv("CLICKHOUSE_STABLE_VERSION_SUFFIX", "stable")
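One note on the new log lines above: f-strings passed to `logging` are formatted eagerly, even when the level is filtered out, whereas `%`-style arguments are deferred until the record is actually emitted. Both forms now appear in this patch; shown side by side for reference:

```python
import logging

reports_path = "/tmp/reports"
# Eager: the message string is built before logging decides whether to emit it.
logging.info(f"Images file not found in {reports_path}")
# Lazy: formatting happens only if the record passes the level filter.
logging.info("Images file not found in %s", reports_path)
```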
os.getenv("CLICKHOUSE_STABLE_VERSION_SUFFIX", "stable") diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py index bb0b17a59aa0..af1de1482357 100644 --- a/tests/ci/functional_test_check.py +++ b/tests/ci/functional_test_check.py @@ -266,7 +266,7 @@ def parse_args(): sys.exit(0) image_name = get_image_name(check_name) - docker_image = get_image_with_version(reports_path, image_name) + docker_image = get_image_with_version(reports_path, image_name, version=pr_info.docker_image_tag + "-amd64") repo_tests_path = os.path.join(repo_path, "tests") diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index bd1513f87f22..03c734293bcc 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -189,7 +189,7 @@ def parse_args(): # logging.info("Check is already finished according to github status, exiting") # sys.exit(0) - images = get_images_with_versions(reports_path, IMAGES) + images = get_images_with_versions(reports_path, IMAGES, version=pr_info.docker_image_tag + "-amd64") images_with_versions = {i.name: i.version for i in images} result_path = os.path.join(temp_path, "output_dir") if not os.path.exists(result_path): diff --git a/tests/ci/pr_info.py b/tests/ci/pr_info.py index 61c5ad227378..99d29a2dc704 100644 --- a/tests/ci/pr_info.py +++ b/tests/ci/pr_info.py @@ -13,6 +13,10 @@ GITHUB_RUN_URL, GITHUB_EVENT_PATH, ) +from version_helper import ( + Git, + get_version_from_repo, +) FORCE_TESTS_LABEL = "force tests" SKIP_MERGEABLE_CHECK_LABEL = "skip mergeable check" @@ -97,6 +101,7 @@ def __init__( # release_pr and merged_pr are used for docker images additional cache self.release_pr = 0 self.merged_pr = 0 + self.version = get_version_from_repo(git=Git(True)) ref = github_event.get("ref", "refs/heads/master") if ref and ref.startswith("refs/heads/"): ref = ref[11:] @@ -113,7 +118,8 @@ def __init__( github_event["pull_request"] = prs_for_sha[0] if "pull_request" in github_event: # pull request and other similar events - self.number = github_event["pull_request"]["number"] + self.number = github_event["pull_request"]["number"] # type: int + self.docker_image_tag = str(self.number) # type: str if pr_event_from_api: try: response = get_with_retries( @@ -185,6 +191,7 @@ def __init__( if pull_request is None or pull_request["state"] == "closed": # it's merged PR to master self.number = 0 + self.docker_image_tag = str(self.number) + "-" + str(self.sha) self.labels = {} self.pr_html_url = f"{repo_prefix}/commits/{ref}" self.base_ref = ref @@ -197,6 +204,7 @@ def __init__( ) else: self.number = pull_request["number"] + self.docker_image_tag = str(self.number) self.labels = {label["name"] for label in pull_request["labels"]} self.base_ref = pull_request["base"]["ref"] @@ -233,8 +241,11 @@ def __init__( else: print("event.json does not match pull_request or push:") print(json.dumps(github_event, sort_keys=True, indent=4)) - self.sha = os.getenv("GITHUB_SHA") - self.number = 0 + self.sha = os.getenv( + "GITHUB_SHA", "0000000000000000000000000000000000000000" + ) + self.number = f"{self.version.major}.{self.version.minor}.{self.version.patch}" + self.docker_image_tag = str(self.number) + "-" + str(self.sha) self.labels = {} repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}" self.task_url = GITHUB_RUN_URL diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index 9484a905ff63..54a726edfda2 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -119,7 +119,7 @@ def 
diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py
index 9484a905ff63..54a726edfda2 100644
--- a/tests/ci/stress_check.py
+++ b/tests/ci/stress_check.py
@@ -119,7 +119,7 @@ def process_results(result_folder, server_log_path, run_log_path):
         logging.info("Check is already finished according to github status, exiting")
         sys.exit(0)
 
-    docker_image = get_image_with_version(reports_path, "altinityinfra/stress-test")
+    docker_image = get_image_with_version(reports_path, "altinityinfra/stress-test", version=pr_info.docker_image_tag)
 
     packages_path = os.path.join(temp_path, "packages")
     if not os.path.exists(packages_path):
diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py
index 5f9b0619deca..2e532e1909c5 100755
--- a/tests/integration/ci-runner.py
+++ b/tests/integration/ci-runner.py
@@ -14,7 +14,7 @@
 
 MAX_RETRY = 3
-NUM_WORKERS = 5
+NUM_WORKERS = 10
 SLEEP_BETWEEN_RETRIES = 5
 PARALLEL_GROUP_SIZE = 100
 CLICKHOUSE_BINARY_PATH = "usr/bin/clickhouse"
@@ -605,8 +605,10 @@ def run_test_group(
         test_cmd = " ".join([test for test in sorted(test_names)])
         parallel_cmd = (
-            " --parallel {} ".format(num_workers) if num_workers > 0 else ""
+            ## NOTE(vnemkov) Second and consecutive runs non-parallel to improve stability
+            " --parallel {} ".format(num_workers) if (num_workers > 0 and i == 0) else ""
         )
+
         # -r -- show extra test summary:
         # -f -- (f)ailed
         # -E -- (E)rror
diff --git a/tests/integration/helpers/network.py b/tests/integration/helpers/network.py
index b8377acebcfc..248d57dcff2e 100644
--- a/tests/integration/helpers/network.py
+++ b/tests/integration/helpers/network.py
@@ -278,7 +278,7 @@ def _ensure_container(self):
                 detach=True,
                 network_mode="host",
             )
-            logging.debug("[network] Created new container %s", self._container.id)
+            logging.debug("[network] Created new container %s from %s", self._container.id, image_name)
             self._container_expire_time = time.time() + self.container_expire_timeout
 
         return self._container
@@ -298,7 +298,7 @@ def _exec_run(self, cmd, **kwargs):
         exit_code = self._docker_client.api.exec_inspect(handle)["ExitCode"]
 
         logging.debug(
-            "[network] %s: %s (%s): %s", container.id, cmd, exit_code, output.strip()
+            "[network] %s (%s): %s (%s): %s", container.id, container.image, cmd, exit_code, output.strip()
        )
 
         if exit_code != 0:
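Finally, the `ci-runner.py` change above doubles the worker count but only parallelizes the first attempt of a test group; retries drop `--parallel` and run serially to improve stability. The same condition in isolation (a sketch; the loop stands in for the runner's retry logic):

```python
# Sketch of the retry policy in run_test_group above: attempt 0 fans out
# across workers, subsequent attempts run tests one by one.
MAX_RETRY = 3
NUM_WORKERS = 10

def parallel_flag(attempt: int, num_workers: int = NUM_WORKERS) -> str:
    if num_workers > 0 and attempt == 0:
        return " --parallel {} ".format(num_workers)
    return ""

for i in range(MAX_RETRY):
    print(f"attempt {i}: pytest{parallel_flag(i) or ' (serial)'}")
```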