# CI workflow: lints Python/Rust, builds Neon (debug + release) with Postgres
# v14/v15, runs regression tests and benchmarks, builds/promotes container
# images, and triggers deploys for `main` / `release`.
name: Build and Test

on:
  push:
    branches:
      - main
      - release
  pull_request:

defaults:
  run:
    shell: bash -euxo pipefail {0}

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.ref == 'refs/heads/main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  RUST_BACKTRACE: 1
  COPT: '-Werror'
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}

jobs:
  # Computes the image/build tag consumed by the image-building and deploy jobs.
  tag:
    runs-on: [ self-hosted, gen3, small ]
    container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
    outputs:
      build-tag: ${{steps.build-tag.outputs.tag}}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Get build tag
        run: |
          echo run:$GITHUB_RUN_ID
          echo ref:$GITHUB_REF_NAME
          echo rev:$(git rev-list --count HEAD)

          if [[ "$GITHUB_REF_NAME" == "main" ]]; then
            echo "tag=$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
          elif [[ "$GITHUB_REF_NAME" == "release" ]]; then
            echo "tag=release-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
          else
            echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release'"
            echo "tag=$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
          fi
        shell: bash
        id: build-tag

  check-codestyle-python:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: false
          fetch-depth: 1

      - name: Cache poetry deps
        id: cache_poetry
        uses: actions/cache@v3
        with:
          path: ~/.cache/pypoetry/virtualenvs
          key: v1-codestyle-python-deps-${{ hashFiles('poetry.lock') }}

      - name: Install Python deps
        run: ./scripts/pysync

      - name: Run isort to ensure code format
        run: poetry run isort --diff --check .

      - name: Run black to ensure code format
        run: poetry run black --diff --check .

      - name: Run flake8 to ensure code format
        run: poetry run flake8 .

      - name: Run mypy to check types
        run: poetry run mypy .

  check-codestyle-rust:
    runs-on: [ self-hosted, gen3, large ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 1

      # Disabled for now
      # - name: Restore cargo deps cache
      #   id: cache_cargo
      #   uses: actions/cache@v3
      #   with:
      #     path: |
      #       !~/.cargo/registry/src
      #       ~/.cargo/git/
      #       target/
      #     key: v1-${{ runner.os }}-cargo-clippy-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}

      # Some of our rust modules use FFI and need those to be checked
      - name: Get postgres headers
        run: make postgres-headers -j$(nproc)

      - name: Run cargo clippy
        run: ./run_clippy.sh

      # Use `${{ !cancelled() }}` to run quick tests after the longer clippy run
      - name: Check formatting
        if: ${{ !cancelled() }}
        run: cargo fmt --all -- --check

      # https://github.com/facebookincubator/cargo-guppy/tree/bec4e0eb29dcd1faac70b1b5360267fc02bf830e/tools/cargo-hakari#2-keep-the-workspace-hack-up-to-date-in-ci
      - name: Check rust dependencies
        if: ${{ !cancelled() }}
        run: |
          cargo hakari generate --diff  # workspace-hack Cargo.toml is up-to-date
          cargo hakari manage-deps --dry-run  # all workspace crates depend on workspace-hack

      # https://github.com/EmbarkStudios/cargo-deny
      - name: Check rust licenses/bans/advisories/sources
        if: ${{ !cancelled() }}
        run: cargo deny check

  build-neon:
    runs-on: [ self-hosted, gen3, large ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    strategy:
      fail-fast: false
      matrix:
        build_type: [ debug, release ]
    env:
      BUILD_TYPE: ${{ matrix.build_type }}
      GIT_VERSION: ${{ github.sha }}
    steps:
      - name: Fix git ownership
        run: |
          # Workaround for `fatal: detected dubious ownership in repository at ...`
          #
          # Use both ${{ github.workspace }} and ${GITHUB_WORKSPACE} because they're different on host and in containers
          #   Ref https://github.com/actions/checkout/issues/785
          #
          git config --global --add safe.directory ${{ github.workspace }}
          git config --global --add safe.directory ${GITHUB_WORKSPACE}

      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 1

      - name: Set pg 14 revision for caching
        id: pg_v14_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT

      - name: Set pg 15 revision for caching
        id: pg_v15_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v15) >> $GITHUB_OUTPUT

      # Set some environment variables used by all the steps.
      #
      # CARGO_FLAGS is extra options to pass to "cargo build", "cargo test" etc.
      # It also includes --features, if any
      #
      # CARGO_FEATURES is passed to "cargo metadata". It is separate from CARGO_FLAGS,
      # because "cargo metadata" doesn't accept --release or --debug options
      #
      # We run tests with additional features, that are turned off by default (e.g. in release builds), see
      # corresponding Cargo.toml files for their descriptions.
      - name: Set env variables
        run: |
          CARGO_FEATURES="--features testing"
          if [[ $BUILD_TYPE == "debug" ]]; then
            cov_prefix="scripts/coverage --profraw-prefix=$GITHUB_JOB --dir=/tmp/coverage run"
            CARGO_FLAGS="--locked $CARGO_FEATURES"
          elif [[ $BUILD_TYPE == "release" ]]; then
            cov_prefix=""
            CARGO_FLAGS="--locked --release $CARGO_FEATURES"
          fi
          echo "cov_prefix=${cov_prefix}" >> $GITHUB_ENV
          echo "CARGO_FEATURES=${CARGO_FEATURES}" >> $GITHUB_ENV
          echo "CARGO_FLAGS=${CARGO_FLAGS}" >> $GITHUB_ENV
          echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo" >> $GITHUB_ENV

      # Disabled for now
      # Don't include the ~/.cargo/registry/src directory. It contains just
      # uncompressed versions of the crates in ~/.cargo/registry/cache
      # directory, and it's faster to let 'cargo' to rebuild it from the
      # compressed crates.
      # - name: Cache cargo deps
      #   id: cache_cargo
      #   uses: actions/cache@v3
      #   with:
      #     path: |
      #       ~/.cargo/registry/
      #       !~/.cargo/registry/src
      #       ~/.cargo/git/
      #       target/
      #     # Fall back to older versions of the key, if no cache for current Cargo.lock was found
      #     key: |
      #       v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}
      #       v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-

      - name: Cache postgres v14 build
        id: cache_pg_14
        uses: actions/cache@v3
        with:
          path: pg_install/v14
          key: v1-${{ runner.os }}-${{ matrix.build_type }}-pg-${{ steps.pg_v14_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}

      - name: Cache postgres v15 build
        id: cache_pg_15
        uses: actions/cache@v3
        with:
          path: pg_install/v15
          key: v1-${{ runner.os }}-${{ matrix.build_type }}-pg-${{ steps.pg_v15_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}

      - name: Build postgres v14
        if: steps.cache_pg_14.outputs.cache-hit != 'true'
        run: mold -run make postgres-v14 -j$(nproc)

      - name: Build postgres v15
        if: steps.cache_pg_15.outputs.cache-hit != 'true'
        run: mold -run make postgres-v15 -j$(nproc)

      - name: Build neon extensions
        run: mold -run make neon-pg-ext -j$(nproc)

      - name: Run cargo build
        run: |
          ${cov_prefix} mold -run cargo build $CARGO_FLAGS --bins --tests

      - name: Run cargo test
        run: |
          ${cov_prefix} cargo test $CARGO_FLAGS

      - name: Install rust binaries
        run: |
          # Install target binaries
          mkdir -p /tmp/neon/bin/
          binaries=$(
            ${cov_prefix} cargo metadata $CARGO_FEATURES --format-version=1 --no-deps | jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
          )
          for bin in $binaries; do
            SRC=target/$BUILD_TYPE/$bin
            DST=/tmp/neon/bin/$bin
            cp "$SRC" "$DST"
          done

          # Install test executables and write list of all binaries (for code coverage)
          if [[ $BUILD_TYPE == "debug" ]]; then
            # Keep bloated coverage data files away from the rest of the artifact
            mkdir -p /tmp/coverage/

            mkdir -p /tmp/neon/test_bin/
            test_exe_paths=$(
              ${cov_prefix} cargo test $CARGO_FLAGS --message-format=json --no-run | jq -r '.executable | select(. != null)'
            )
            for bin in $test_exe_paths; do
              SRC=$bin
              DST=/tmp/neon/test_bin/$(basename $bin)

              # We don't need debug symbols for code coverage, so strip them out to make
              # the artifact smaller.
              strip "$SRC" -o "$DST"
              echo "$DST" >> /tmp/coverage/binaries.list
            done
            for bin in $binaries; do
              echo "/tmp/neon/bin/$bin" >> /tmp/coverage/binaries.list
            done
          fi

      - name: Install postgres binaries
        run: cp -a pg_install /tmp/neon/pg_install

      - name: Upload Neon artifact
        uses: ./.github/actions/upload
        with:
          name: neon-${{ runner.os }}-${{ matrix.build_type }}-artifact
          path: /tmp/neon

      # XXX: keep this after the binaries.list is formed, so the coverage can properly work later
      - name: Merge and upload coverage data
        if: matrix.build_type == 'debug'
        uses: ./.github/actions/save-coverage-data

  regress-tests:
    runs-on: [ self-hosted, gen3, large ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    needs: [ build-neon ]
    strategy:
      fail-fast: false
      matrix:
        build_type: [ debug, release ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 1

      - name: Pytest regression tests
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ matrix.build_type }}
          test_selection: regress
          needs_postgres_source: true
          run_with_real_s3: true
          real_s3_bucket: ci-tests-s3
          real_s3_region: us-west-2
          real_s3_access_key_id: "${{ secrets.AWS_ACCESS_KEY_ID_CI_TESTS_S3 }}"
          real_s3_secret_access_key: "${{ secrets.AWS_SECRET_ACCESS_KEY_CI_TESTS_S3 }}"

      - name: Merge and upload coverage data
        if: matrix.build_type == 'debug'
        uses: ./.github/actions/save-coverage-data

  benchmarks:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    needs: [ build-neon ]
    if: github.ref_name == 'main' || contains(github.event.pull_request.labels.*.name, 'run-benchmarks')
    strategy:
      fail-fast: false
      matrix:
        build_type: [ release ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 1

      - name: Pytest benchmarks
        uses: ./.github/actions/run-python-test-set
        with:
          build_type: ${{ matrix.build_type }}
          test_selection: performance
          run_in_parallel: false
          save_perf_report: ${{ github.ref == 'refs/heads/main' }}
        env:
          VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
          PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
      # XXX: no coverage data handling here, since benchmarks are run on release builds,
      # while coverage is currently collected for the debug ones

  merge-allure-report:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    needs: [ regress-tests, benchmarks ]
    if: ${{ !cancelled() }}
    strategy:
      fail-fast: false
      matrix:
        build_type: [ debug, release ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: false

      - name: Create Allure report
        id: create-allure-report
        uses: ./.github/actions/allure-report
        with:
          action: generate
          build_type: ${{ matrix.build_type }}

      - name: Store Allure test stat in the DB
        if: ${{ steps.create-allure-report.outputs.report-url }}
        env:
          BUILD_TYPE: ${{ matrix.build_type }}
          SHA: ${{ github.event.pull_request.head.sha || github.sha }}
          REPORT_URL: ${{ steps.create-allure-report.outputs.report-url }}
          TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR }}
        run: |
          curl --fail --output suites.json ${REPORT_URL%/index.html}/data/suites.json

          ./scripts/pysync
          DATABASE_URL="$TEST_RESULT_CONNSTR" poetry run python3 scripts/ingest_regress_test_result.py --revision ${SHA} --reference ${GITHUB_REF} --build-type ${BUILD_TYPE} --ingest suites.json

  coverage-report:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    needs: [ regress-tests ]
    strategy:
      fail-fast: false
      matrix:
        build_type: [ debug ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 1

      # Disabled for now
      # - name: Restore cargo deps cache
      #   id: cache_cargo
      #   uses: actions/cache@v3
      #   with:
      #     path: |
      #       ~/.cargo/registry/
      #       !~/.cargo/registry/src
      #       ~/.cargo/git/
      #       target/
      #     key: v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}

      - name: Get Neon artifact
        uses: ./.github/actions/download
        with:
          name: neon-${{ runner.os }}-${{ matrix.build_type }}-artifact
          path: /tmp/neon

      - name: Get coverage artifact
        uses: ./.github/actions/download
        with:
          name: coverage-data-artifact
          path: /tmp/coverage

      - name: Merge coverage data
        run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage merge

      - name: Build and upload coverage report
        run: |
          COMMIT_SHA=${{ github.event.pull_request.head.sha }}
          COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}
          COMMIT_URL=https://github.com/${{ github.repository }}/commit/$COMMIT_SHA

          scripts/coverage \
            --dir=/tmp/coverage report \
            --input-objects=/tmp/coverage/binaries.list \
            --commit-url=$COMMIT_URL \
            --format=github

          REPORT_URL=https://${{ github.repository_owner }}.github.io/zenith-coverage-data/$COMMIT_SHA

          scripts/git-upload \
            --repo=https://${{ secrets.VIP_VAP_ACCESS_TOKEN }}@github.com/${{ github.repository_owner }}/zenith-coverage-data.git \
            --message="Add code coverage for $COMMIT_URL" \
            copy /tmp/coverage/report $COMMIT_SHA # COPY FROM TO_RELATIVE

          # Add link to the coverage report to the commit
          curl -f -X POST \
          https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
          -H "Accept: application/vnd.github.v3+json" \
          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
          --data \
            "{
              \"state\": \"success\",
              \"context\": \"neon-coverage\",
              \"description\": \"Coverage report is ready\",
              \"target_url\": \"$REPORT_URL\"
            }"

  trigger-e2e-tests:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
      options: --init
    needs: [ push-docker-hub, tag ]
    steps:
      - name: Set PR's status to pending and request a remote CI test
        run: |
          # For pull requests, GH Actions set "github.sha" variable to point at a fake merge commit
          # but we need to use a real sha of a latest commit in the PR's branch for the e2e job,
          # to place a job run status update later.
          COMMIT_SHA=${{ github.event.pull_request.head.sha }}
          # For non-PR kinds of runs, the above will produce an empty variable, pick the original sha value for those
          COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}

          REMOTE_REPO="${{ github.repository_owner }}/cloud"

          curl -f -X POST \
          https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
          -H "Accept: application/vnd.github.v3+json" \
          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
          --data \
            "{
              \"state\": \"pending\",
              \"context\": \"neon-cloud-e2e\",
              \"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
            }"

          curl -f -X POST \
          https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
          -H "Accept: application/vnd.github.v3+json" \
          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
          --data \
            "{
              \"ref\": \"main\",
              \"inputs\": {
                \"ci_job_name\": \"neon-cloud-e2e\",
                \"commit_hash\": \"$COMMIT_SHA\",
                \"remote_repo\": \"${{ github.repository }}\",
                \"storage_image_tag\": \"${{ needs.tag.outputs.build-tag }}\",
                \"compute_image_tag\": \"${{ needs.tag.outputs.build-tag }}\"
              }
            }"

  neon-image:
    runs-on: [ self-hosted, gen3, large ]
    needs: [ tag ]
    # https://github.com/GoogleContainerTools/kaniko/issues/2005
    container: gcr.io/kaniko-project/executor:v1.7.0-debug
    defaults:
      run:
        shell: sh -eu {0}
    steps:
      - name: Checkout
        uses: actions/checkout@v1 # v3 won't work with kaniko
        with:
          submodules: true
          fetch-depth: 0

      - name: Configure ECR login
        run: echo "{\"credsStore\":\"ecr-login\"}" > /kaniko/.docker/config.json

      - name: Kaniko build neon
        run: /kaniko/executor --reproducible --snapshotMode=redo --skip-unused-stages --cache=true --cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache --context . --build-arg GIT_VERSION=${{ github.sha }} --destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}}

      # Cleanup script fails otherwise - rm: cannot remove '/nvme/actions-runner/_work/_temp/_github_home/.ecr': Permission denied
      - name: Cleanup ECR folder
        run: rm -rf ~/.ecr

  compute-tools-image:
    runs-on: [ self-hosted, gen3, large ]
    needs: [ tag ]
    container: gcr.io/kaniko-project/executor:v1.7.0-debug
    defaults:
      run:
        shell: sh -eu {0}
    steps:
      - name: Checkout
        uses: actions/checkout@v1 # v3 won't work with kaniko

      - name: Configure ECR login
        run: echo "{\"credsStore\":\"ecr-login\"}" > /kaniko/.docker/config.json

      - name: Kaniko build compute tools
        run: /kaniko/executor --reproducible --snapshotMode=redo --skip-unused-stages --cache=true --cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache --context . --build-arg GIT_VERSION=${{ github.sha }} --dockerfile Dockerfile.compute-tools --destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}}

      - name: Cleanup ECR folder
        run: rm -rf ~/.ecr

  compute-node-image:
    runs-on: [ self-hosted, gen3, large ]
    container: gcr.io/kaniko-project/executor:v1.7.0-debug
    needs: [ tag ]
    strategy:
      fail-fast: false
      matrix:
        version: [ v14, v15 ]
    defaults:
      run:
        shell: sh -eu {0}
    steps:
      - name: Checkout
        uses: actions/checkout@v1 # v3 won't work with kaniko
        with:
          submodules: true
          fetch-depth: 0

      - name: Configure ECR login
        run: echo "{\"credsStore\":\"ecr-login\"}" > /kaniko/.docker/config.json

      - name: Kaniko build compute node with extensions
        run: /kaniko/executor --reproducible --snapshotMode=redo --skip-unused-stages --cache=true --cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache --context . --build-arg GIT_VERSION=${{ github.sha }} --build-arg PG_VERSION=${{ matrix.version }} --dockerfile Dockerfile.compute-node --destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}

      - name: Cleanup ECR folder
        run: rm -rf ~/.ecr

  vm-compute-node-image:
    runs-on: [ self-hosted, gen3, large ]
    needs: [ tag, compute-node-image ]
    strategy:
      fail-fast: false
      matrix:
        version: [ v14, v15 ]
    defaults:
      run:
        shell: sh -eu {0}
    env:
      VM_INFORMANT_VERSION: 0.1.1
    steps:
      - name: Downloading latest vm-builder
        run: |
          curl -L https://github.com/neondatabase/neonvm/releases/latest/download/vm-builder -o vm-builder
          chmod +x vm-builder

      - name: Pulling compute-node image
        run: |
          docker pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}

      - name: Downloading VM informant version ${{ env.VM_INFORMANT_VERSION }}
        run: |
          curl -fL https://github.com/neondatabase/autoscaling/releases/download/${{ env.VM_INFORMANT_VERSION }}/vm-informant -o vm-informant
          chmod +x vm-informant

      - name: Adding VM informant to compute-node image
        run: |
          ID=$(docker create 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}})
          docker cp vm-informant $ID:/bin/vm-informant
          docker commit $ID temp-vm-compute-node
          docker rm -f $ID

      - name: Build vm image
        run: |
          # note: as of 2023-01-12, vm-builder requires a trailing ":latest" for local images
          ./vm-builder -src=temp-vm-compute-node:latest -dst=369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}

      - name: Pushing vm-compute-node image
        run: |
          docker push 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}

  test-images:
    needs: [ tag, neon-image, compute-node-image, compute-tools-image ]
    runs-on: [ self-hosted, gen3, small ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      # `neondatabase/neon` contains multiple binaries, all of them use the same input for the version into the same version formatting library.
      # Pick pageserver as currently the only binary with extra "version" features printed in the string to verify.
      # Regular pageserver version string looks like
      # Neon page server git-env:32d14403bd6ab4f4520a94cbfd81a6acef7a526c failpoints: true, features: []
      # Bad versions might look like:
      # Neon page server git-env:local failpoints: true, features: ["testing"]
      # Ensure that we don't have bad versions.
      - name: Verify image versions
        shell: bash # ensure no set -e for better error messages
        run: |
          pageserver_version=$(docker run --rm 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} "/bin/sh" "-c" "/usr/local/bin/pageserver --version")

          echo "Pageserver version string: $pageserver_version"

          if ! echo "$pageserver_version" | grep -qv 'git-env:local' ; then
            echo "Pageserver version should not be the default Dockerfile one"
            exit 1
          fi

          if ! echo "$pageserver_version" | grep -qv '"testing"' ; then
            echo "Pageserver version should have no testing feature enabled"
            exit 1
          fi

      - name: Verify docker-compose example
        run: env REPOSITORY=369495373322.dkr.ecr.eu-central-1.amazonaws.com TAG=${{needs.tag.outputs.build-tag}} ./docker-compose/docker_compose_test.sh

      - name: Print logs and clean up
        if: always()
        run: |
          # `|| true` (not `|| 0`): keep the cleanup going even if `logs` fails;
          # `0` is not a command and would itself fail under `set -e`.
          docker compose -f ./docker-compose/docker-compose.yml logs || true
          docker compose -f ./docker-compose/docker-compose.yml down

  promote-images:
    runs-on: [ self-hosted, gen3, small ]
    needs: [ tag, test-images, vm-compute-node-image ]
    container: golang:1.19-bullseye
    if: github.event_name != 'workflow_dispatch'
    steps:
      - name: Install Crane & ECR helper
        if: |
          (github.ref_name == 'main' || github.ref_name == 'release') &&
          github.event_name != 'workflow_dispatch'
        run: |
          go install github.com/google/go-containerregistry/cmd/crane@31786c6cbb82d6ec4fb8eb79cd9387905130534e # v0.11.0
          go install github.com/awslabs/amazon-ecr-credential-helper/ecr-login/cli/docker-credential-ecr-login@69c85dc22db6511932bbf119e1a0cc5c90c69a7f # v0.6.0

      - name: Configure ECR login
        run: |
          mkdir /github/home/.docker/
          echo "{\"credsStore\":\"ecr-login\"}" > /github/home/.docker/config.json

      - name: Add latest tag to images
        if: |
          (github.ref_name == 'main' || github.ref_name == 'release') &&
          github.event_name != 'workflow_dispatch'
        run: |
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} latest
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}} latest
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:${{needs.tag.outputs.build-tag}} latest
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} latest
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:${{needs.tag.outputs.build-tag}} latest
          crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} latest

      - name: Cleanup ECR folder
        run: rm -rf ~/.ecr

  push-docker-hub:
    runs-on: [ self-hosted, dev, x64 ]
    needs: [ promote-images, tag ]
    container: golang:1.19-bullseye
    steps:
      - name: Install Crane & ECR helper
        run: |
          go install github.com/google/go-containerregistry/cmd/crane@31786c6cbb82d6ec4fb8eb79cd9387905130534e # v0.11.0
          go install github.com/awslabs/amazon-ecr-credential-helper/ecr-login/cli/docker-credential-ecr-login@69c85dc22db6511932bbf119e1a0cc5c90c69a7f # v0.6.0

      - name: Configure ECR login
        run: |
          mkdir /github/home/.docker/
          echo "{\"credsStore\":\"ecr-login\"}" > /github/home/.docker/config.json

      - name: Pull neon image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} neon

      - name: Pull compute tools image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}} compute-tools

      - name: Pull compute node v14 image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:${{needs.tag.outputs.build-tag}} compute-node-v14

      - name: Pull vm compute node v14 image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} vm-compute-node-v14

      - name: Pull compute node v15 image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:${{needs.tag.outputs.build-tag}} compute-node-v15

      - name: Pull vm compute node v15 image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} vm-compute-node-v15

      - name: Pull rust image from ECR
        run: crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned rust

      - name: Push images to production ECR
        if: |
          (github.ref_name == 'main' || github.ref_name == 'release') &&
          github.event_name != 'workflow_dispatch'
        run: |
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/neon:latest
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:latest
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:latest
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:latest
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:latest
          crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:latest

      - name: Configure Docker Hub login
        run: |
          # ECR Credential Helper & Docker Hub don't work together in config, hence reset
          echo "" > /github/home/.docker/config.json
          crane auth login -u ${{ secrets.NEON_DOCKERHUB_USERNAME }} -p ${{ secrets.NEON_DOCKERHUB_PASSWORD }} index.docker.io

      - name: Push neon image to Docker Hub
        run: crane push neon neondatabase/neon:${{needs.tag.outputs.build-tag}}

      - name: Push compute tools image to Docker Hub
        run: crane push compute-tools neondatabase/compute-tools:${{needs.tag.outputs.build-tag}}

      - name: Push compute node v14 image to Docker Hub
        run: crane push compute-node-v14 neondatabase/compute-node-v14:${{needs.tag.outputs.build-tag}}

      - name: Push vm compute node v14 image to Docker Hub
        run: crane push vm-compute-node-v14 neondatabase/vm-compute-node-v14:${{needs.tag.outputs.build-tag}}

      - name: Push compute node v15 image to Docker Hub
        run: crane push compute-node-v15 neondatabase/compute-node-v15:${{needs.tag.outputs.build-tag}}

      - name: Push vm compute node v15 image to Docker Hub
        run: crane push vm-compute-node-v15 neondatabase/vm-compute-node-v15:${{needs.tag.outputs.build-tag}}

      - name: Push rust image to Docker Hub
        run: crane push rust neondatabase/rust:pinned

      - name: Add latest tag to images in Docker Hub
        if: |
          (github.ref_name == 'main' || github.ref_name == 'release') &&
          github.event_name != 'workflow_dispatch'
        run: |
          crane tag neondatabase/neon:${{needs.tag.outputs.build-tag}} latest
          crane tag neondatabase/compute-tools:${{needs.tag.outputs.build-tag}} latest
          crane tag neondatabase/compute-node-v14:${{needs.tag.outputs.build-tag}} latest
          crane tag neondatabase/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} latest
          crane tag neondatabase/compute-node-v15:${{needs.tag.outputs.build-tag}} latest
          crane tag neondatabase/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} latest

      - name: Cleanup ECR folder
        run: rm -rf ~/.ecr

  deploy-pr-test-new:
    runs-on: [ self-hosted, gen3, small ]
    container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/ansible:pinned
    # We need both storage **and** compute images for deploy, because control plane picks the compute version based on the storage version.
    # If it notices a fresh storage it may bump the compute version. And if compute image failed to build it may break things badly
    needs: [ push-docker-hub, tag, regress-tests ]
    if: |
      contains(github.event.pull_request.labels.*.name, 'deploy-test-storage') &&
      github.event_name != 'workflow_dispatch'
    defaults:
      run:
        shell: bash
    strategy:
      matrix:
        target_region: [ eu-west-1 ]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 0

      - name: Redeploy
        run: |
          export DOCKER_TAG=${{needs.tag.outputs.build-tag}}
          cd "$(pwd)/.github/ansible"

          ./get_binaries.sh
          ansible-galaxy collection install sivel.toiletwater
          ansible-playbook deploy.yaml -i staging.${{ matrix.target_region }}.hosts.yaml -e @ssm_config -e CONSOLE_API_TOKEN=${{ secrets.NEON_STAGING_API_KEY }} -e SENTRY_URL_PAGESERVER=${{ secrets.SENTRY_URL_PAGESERVER }} -e SENTRY_URL_SAFEKEEPER=${{ secrets.SENTRY_URL_SAFEKEEPER }}
          rm -f neon_install.tar.gz .neon_current_version

      - name: Cleanup ansible folder
        run: rm -rf ~/.ansible

  deploy:
    runs-on: [ self-hosted, gen3, small ]
    container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/ansible:latest
    needs: [ push-docker-hub, tag, regress-tests ]
    if: ( github.ref_name == 'main' || github.ref_name == 'release' ) && github.event_name != 'workflow_dispatch'
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: false
          fetch-depth: 0

      - name: Trigger deploy workflow
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          if [[ "$GITHUB_REF_NAME" == "main" ]]; then
            gh workflow run deploy-dev.yml --ref main -f branch=${{ github.sha }} -f dockerTag=${{needs.tag.outputs.build-tag}}
          elif [[ "$GITHUB_REF_NAME" == "release" ]]; then
            gh workflow run deploy-prod.yml --ref release -f branch=${{ github.sha }} -f dockerTag=${{needs.tag.outputs.build-tag}} -f disclamerAcknowledged=true
          else
            echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release'"
            exit 1
          fi

  promote-compatibility-data:
    runs-on: [ self-hosted, gen3, small ]
    container:
      image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
      options: --init
    needs: [ push-docker-hub, tag, regress-tests ]
    if: github.ref_name == 'release' && github.event_name != 'workflow_dispatch'
    steps:
      - name: Promote compatibility snapshot for the release
        env:
          BUCKET: neon-github-public-dev
          PREFIX: artifacts/latest
        run: |
          # Update compatibility snapshot for the release
          for build_type in debug release; do
            OLD_FILENAME=compatibility-snapshot-${build_type}-pg14-${GITHUB_RUN_ID}.tar.zst
            NEW_FILENAME=compatibility-snapshot-${build_type}-pg14.tar.zst

            time aws s3 mv --only-show-errors s3://${BUCKET}/${PREFIX}/${OLD_FILENAME} s3://${BUCKET}/${PREFIX}/${NEW_FILENAME}
          done

          # Update Neon artifact for the release (reuse already uploaded artifact)
          for build_type in debug release; do
            OLD_PREFIX=artifacts/${GITHUB_RUN_ID}
            FILENAME=neon-${{ runner.os }}-${build_type}-artifact.tar.zst

            S3_KEY=$(aws s3api list-objects-v2 --bucket ${BUCKET} --prefix ${OLD_PREFIX} | jq -r '.Contents[].Key' | grep ${FILENAME} | sort --version-sort | tail -1 || true)
            if [ -z "${S3_KEY}" ]; then
              # `>&2`, not `2>&1`: the message is an error and belongs on stderr.
              echo >&2 "Neither s3://${BUCKET}/${OLD_PREFIX}/${FILENAME} nor its version from previous attempts exist"
              exit 1
            fi

            time aws s3 cp --only-show-errors s3://${BUCKET}/${S3_KEY} s3://${BUCKET}/${PREFIX}/${FILENAME}
          done