mirror of
https://github.com/neondatabase/neon.git
synced 2025-12-22 21:59:59 +00:00
This will catch compiler & clippy warnings in all feature combinations. We should probably use cargo hack for build and test as well, but, that's quite expensive and would add to overall CI wait times. obsoletes https://github.com/neondatabase/neon/pull/4073 refs https://github.com/neondatabase/neon/pull/4070
1043 lines
41 KiB
YAML
name: Build and Test

on:
  push:
    branches:
      - main
      - release
  pull_request:

defaults:
  run:
    shell: bash -euxo pipefail {0}

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  RUST_BACKTRACE: 1
  COPT: '-Werror'
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_DEV }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY_DEV }}
jobs:
  tag:
    runs-on: [ self-hosted, gen3, small ]
    container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
    outputs:
      build-tag: ${{steps.build-tag.outputs.tag}}

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Get build tag
        run: |
          echo run:$GITHUB_RUN_ID
          echo ref:$GITHUB_REF_NAME
          echo rev:$(git rev-list --count HEAD)
          if [[ "$GITHUB_REF_NAME" == "main" ]]; then
            echo "tag=$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
          elif [[ "$GITHUB_REF_NAME" == "release" ]]; then
            echo "tag=release-$(git rev-list --count HEAD)" >> $GITHUB_OUTPUT
          else
            echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release'"
            echo "tag=$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
          fi
        shell: bash
        id: build-tag
check-codestyle-python:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: false
|
|
fetch-depth: 1
|
|
|
|
- name: Cache poetry deps
|
|
id: cache_poetry
|
|
uses: actions/cache@v3
|
|
with:
|
|
path: ~/.cache/pypoetry/virtualenvs
|
|
key: v1-codestyle-python-deps-${{ hashFiles('poetry.lock') }}
|
|
|
|
- name: Install Python deps
|
|
run: ./scripts/pysync
|
|
|
|
- name: Run ruff to ensure code format
|
|
run: poetry run ruff .
|
|
|
|
- name: Run black to ensure code format
|
|
run: poetry run black --diff --check .
|
|
|
|
- name: Run mypy to check types
|
|
run: poetry run mypy .
|
|
|
|
check-codestyle-rust:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 1
|
|
|
|
# Disabled for now
|
|
# - name: Restore cargo deps cache
|
|
# id: cache_cargo
|
|
# uses: actions/cache@v3
|
|
# with:
|
|
# path: |
|
|
# !~/.cargo/registry/src
|
|
# ~/.cargo/git/
|
|
# target/
|
|
# key: v1-${{ runner.os }}-cargo-clippy-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}
|
|
|
|
# Some of our rust modules use FFI and need those to be checked
|
|
- name: Get postgres headers
|
|
run: make postgres-headers -j$(nproc)
|
|
|
|
# cargo hack runs the given cargo subcommand (clippy in this case) for all feature combinations.
|
|
# This will catch compiler & clippy warnings in all feature combinations.
|
|
# TODO: use cargo hack for build and test as well, but, that's quite expensive.
|
|
# NB: keep clippy args in sync with ./run_clippy.sh
|
|
- run: |
|
|
CLIPPY_COMMON_ARGS="$( source .neon_clippy_args; echo "$CLIPPY_COMMON_ARGS")"
|
|
if [ "$CLIPPY_COMMON_ARGS" = "" ]; then
|
|
echo "No clippy args found in .neon_clippy_args"
|
|
exit 1
|
|
fi
|
|
echo "CLIPPY_COMMON_ARGS=${CLIPPY_COMMON_ARGS}" >> $GITHUB_ENV
|
|
- name: Run cargo clippy (debug)
|
|
run: cargo hack --feature-powerset clippy $CLIPPY_COMMON_ARGS
|
|
- name: Run cargo clippy (release)
|
|
run: cargo hack --feature-powerset clippy --release $CLIPPY_COMMON_ARGS
|
|
|
|
# Use `${{ !cancelled() }}` to run quck tests after the longer clippy run
|
|
- name: Check formatting
|
|
if: ${{ !cancelled() }}
|
|
run: cargo fmt --all -- --check
|
|
|
|
# https://github.com/facebookincubator/cargo-guppy/tree/bec4e0eb29dcd1faac70b1b5360267fc02bf830e/tools/cargo-hakari#2-keep-the-workspace-hack-up-to-date-in-ci
|
|
- name: Check rust dependencies
|
|
if: ${{ !cancelled() }}
|
|
run: |
|
|
cargo hakari generate --diff # workspace-hack Cargo.toml is up-to-date
|
|
cargo hakari manage-deps --dry-run # all workspace crates depend on workspace-hack
|
|
|
|
# https://github.com/EmbarkStudios/cargo-deny
|
|
- name: Check rust licenses/bans/advisories/sources
|
|
if: ${{ !cancelled() }}
|
|
run: cargo deny check
|
|
|
|
build-neon:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
build_type: [ debug, release ]
|
|
env:
|
|
BUILD_TYPE: ${{ matrix.build_type }}
|
|
GIT_VERSION: ${{ github.sha }}
|
|
|
|
steps:
|
|
- name: Fix git ownership
|
|
run: |
|
|
# Workaround for `fatal: detected dubious ownership in repository at ...`
|
|
#
|
|
# Use both ${{ github.workspace }} and ${GITHUB_WORKSPACE} because they're different on host and in containers
|
|
# Ref https://github.com/actions/checkout/issues/785
|
|
#
|
|
git config --global --add safe.directory ${{ github.workspace }}
|
|
git config --global --add safe.directory ${GITHUB_WORKSPACE}
|
|
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 1
|
|
|
|
- name: Set pg 14 revision for caching
|
|
id: pg_v14_rev
|
|
run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT
|
|
|
|
- name: Set pg 15 revision for caching
|
|
id: pg_v15_rev
|
|
run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v15) >> $GITHUB_OUTPUT
|
|
|
|
# Set some environment variables used by all the steps.
|
|
#
|
|
# CARGO_FLAGS is extra options to pass to "cargo build", "cargo test" etc.
|
|
# It also includes --features, if any
|
|
#
|
|
# CARGO_FEATURES is passed to "cargo metadata". It is separate from CARGO_FLAGS,
|
|
# because "cargo metadata" doesn't accept --release or --debug options
|
|
#
|
|
# We run tests with addtional features, that are turned off by default (e.g. in release builds), see
|
|
# corresponding Cargo.toml files for their descriptions.
|
|
- name: Set env variables
|
|
run: |
|
|
CARGO_FEATURES="--features testing"
|
|
if [[ $BUILD_TYPE == "debug" ]]; then
|
|
cov_prefix="scripts/coverage --profraw-prefix=$GITHUB_JOB --dir=/tmp/coverage run"
|
|
CARGO_FLAGS="--locked"
|
|
elif [[ $BUILD_TYPE == "release" ]]; then
|
|
cov_prefix=""
|
|
CARGO_FLAGS="--locked --release"
|
|
fi
|
|
echo "cov_prefix=${cov_prefix}" >> $GITHUB_ENV
|
|
echo "CARGO_FEATURES=${CARGO_FEATURES}" >> $GITHUB_ENV
|
|
echo "CARGO_FLAGS=${CARGO_FLAGS}" >> $GITHUB_ENV
|
|
echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo" >> $GITHUB_ENV
|
|
|
|
# Disabled for now
|
|
# Don't include the ~/.cargo/registry/src directory. It contains just
|
|
# uncompressed versions of the crates in ~/.cargo/registry/cache
|
|
# directory, and it's faster to let 'cargo' to rebuild it from the
|
|
# compressed crates.
|
|
# - name: Cache cargo deps
|
|
# id: cache_cargo
|
|
# uses: actions/cache@v3
|
|
# with:
|
|
# path: |
|
|
# ~/.cargo/registry/
|
|
# !~/.cargo/registry/src
|
|
# ~/.cargo/git/
|
|
# target/
|
|
# # Fall back to older versions of the key, if no cache for current Cargo.lock was found
|
|
# key: |
|
|
# v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}
|
|
# v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-
|
|
|
|
- name: Cache postgres v14 build
|
|
id: cache_pg_14
|
|
uses: actions/cache@v3
|
|
with:
|
|
path: pg_install/v14
|
|
key: v1-${{ runner.os }}-${{ matrix.build_type }}-pg-${{ steps.pg_v14_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}
|
|
|
|
- name: Cache postgres v15 build
|
|
id: cache_pg_15
|
|
uses: actions/cache@v3
|
|
with:
|
|
path: pg_install/v15
|
|
key: v1-${{ runner.os }}-${{ matrix.build_type }}-pg-${{ steps.pg_v15_rev.outputs.pg_rev }}-${{ hashFiles('Makefile') }}
|
|
|
|
- name: Build postgres v14
|
|
if: steps.cache_pg_14.outputs.cache-hit != 'true'
|
|
run: mold -run make postgres-v14 -j$(nproc)
|
|
|
|
- name: Build postgres v15
|
|
if: steps.cache_pg_15.outputs.cache-hit != 'true'
|
|
run: mold -run make postgres-v15 -j$(nproc)
|
|
|
|
- name: Build neon extensions
|
|
run: mold -run make neon-pg-ext -j$(nproc)
|
|
|
|
- name: Run cargo build
|
|
run: |
|
|
${cov_prefix} mold -run cargo build $CARGO_FLAGS $CARGO_FEATURES --bins --tests
|
|
|
|
- name: Run cargo test
|
|
run: |
|
|
${cov_prefix} cargo test $CARGO_FLAGS $CARGO_FEATURES
|
|
|
|
# Run separate tests for real S3
|
|
export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
|
|
export REMOTE_STORAGE_S3_BUCKET=neon-github-public-dev
|
|
export REMOTE_STORAGE_S3_REGION=eu-central-1
|
|
# Avoid `$CARGO_FEATURES` since there's no `testing` feature in the e2e tests now
|
|
${cov_prefix} cargo test $CARGO_FLAGS --package remote_storage --test pagination_tests -- s3_pagination_should_work --exact
|
|
|
|
- name: Install rust binaries
|
|
run: |
|
|
# Install target binaries
|
|
mkdir -p /tmp/neon/bin/
|
|
binaries=$(
|
|
${cov_prefix} cargo metadata $CARGO_FEATURES --format-version=1 --no-deps |
|
|
jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
|
|
)
|
|
for bin in $binaries; do
|
|
SRC=target/$BUILD_TYPE/$bin
|
|
DST=/tmp/neon/bin/$bin
|
|
cp "$SRC" "$DST"
|
|
done
|
|
|
|
# Install test executables and write list of all binaries (for code coverage)
|
|
if [[ $BUILD_TYPE == "debug" ]]; then
|
|
# Keep bloated coverage data files away from the rest of the artifact
|
|
mkdir -p /tmp/coverage/
|
|
|
|
mkdir -p /tmp/neon/test_bin/
|
|
|
|
test_exe_paths=$(
|
|
${cov_prefix} cargo test $CARGO_FLAGS $CARGO_FEATURES --message-format=json --no-run |
|
|
jq -r '.executable | select(. != null)'
|
|
)
|
|
for bin in $test_exe_paths; do
|
|
SRC=$bin
|
|
DST=/tmp/neon/test_bin/$(basename $bin)
|
|
|
|
# We don't need debug symbols for code coverage, so strip them out to make
|
|
# the artifact smaller.
|
|
strip "$SRC" -o "$DST"
|
|
echo "$DST" >> /tmp/coverage/binaries.list
|
|
done
|
|
|
|
for bin in $binaries; do
|
|
echo "/tmp/neon/bin/$bin" >> /tmp/coverage/binaries.list
|
|
done
|
|
fi
|
|
|
|
- name: Install postgres binaries
|
|
run: cp -a pg_install /tmp/neon/pg_install
|
|
|
|
- name: Upload Neon artifact
|
|
uses: ./.github/actions/upload
|
|
with:
|
|
name: neon-${{ runner.os }}-${{ matrix.build_type }}-artifact
|
|
path: /tmp/neon
|
|
|
|
# XXX: keep this after the binaries.list is formed, so the coverage can properly work later
|
|
- name: Merge and upload coverage data
|
|
if: matrix.build_type == 'debug'
|
|
uses: ./.github/actions/save-coverage-data
|
|
|
|
regress-tests:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
needs: [ build-neon ]
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
build_type: [ debug, release ]
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 1
|
|
|
|
- name: Pytest regression tests
|
|
uses: ./.github/actions/run-python-test-set
|
|
with:
|
|
build_type: ${{ matrix.build_type }}
|
|
test_selection: regress
|
|
needs_postgres_source: true
|
|
run_with_real_s3: true
|
|
real_s3_bucket: ci-tests-s3
|
|
real_s3_region: us-west-2
|
|
real_s3_access_key_id: "${{ secrets.AWS_ACCESS_KEY_ID_CI_TESTS_S3 }}"
|
|
real_s3_secret_access_key: "${{ secrets.AWS_SECRET_ACCESS_KEY_CI_TESTS_S3 }}"
|
|
rerun_flaky: true
|
|
env:
|
|
TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR }}
|
|
CHECK_ONDISK_DATA_COMPATIBILITY: nonempty
|
|
|
|
- name: Merge and upload coverage data
|
|
if: matrix.build_type == 'debug'
|
|
uses: ./.github/actions/save-coverage-data
|
|
|
|
benchmarks:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
needs: [ build-neon ]
|
|
if: github.ref_name == 'main' || contains(github.event.pull_request.labels.*.name, 'run-benchmarks')
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
build_type: [ release ]
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 1
|
|
|
|
- name: Pytest benchmarks
|
|
uses: ./.github/actions/run-python-test-set
|
|
with:
|
|
build_type: ${{ matrix.build_type }}
|
|
test_selection: performance
|
|
run_in_parallel: false
|
|
save_perf_report: ${{ github.ref_name == 'main' }}
|
|
env:
|
|
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
|
|
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
|
|
# XXX: no coverage data handling here, since benchmarks are run on release builds,
|
|
# while coverage is currently collected for the debug ones
|
|
|
|
create-test-report:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
needs: [ regress-tests, benchmarks ]
|
|
if: ${{ !cancelled() }}
|
|
|
|
steps:
|
|
- uses: actions/checkout@v3
|
|
|
|
- name: Create Allure report (debug)
|
|
if: ${{ !cancelled() }}
|
|
id: create-allure-report-debug
|
|
uses: ./.github/actions/allure-report
|
|
with:
|
|
action: generate
|
|
build_type: debug
|
|
|
|
- name: Create Allure report (release)
|
|
if: ${{ !cancelled() }}
|
|
id: create-allure-report-release
|
|
uses: ./.github/actions/allure-report
|
|
with:
|
|
action: generate
|
|
build_type: release
|
|
|
|
- uses: actions/github-script@v6
|
|
if: >
|
|
!cancelled() &&
|
|
github.event_name == 'pull_request' && (
|
|
steps.create-allure-report-debug.outputs.report-url ||
|
|
steps.create-allure-report-release.outputs.report-url
|
|
)
|
|
with:
|
|
# Retry script for 5XX server errors: https://github.com/actions/github-script#retries
|
|
retries: 5
|
|
script: |
|
|
const reports = [{
|
|
buildType: "debug",
|
|
reportUrl: "${{ steps.create-allure-report-debug.outputs.report-url }}",
|
|
jsonUrl: "${{ steps.create-allure-report-debug.outputs.report-json-url }}",
|
|
}, {
|
|
buildType: "release",
|
|
reportUrl: "${{ steps.create-allure-report-release.outputs.report-url }}",
|
|
jsonUrl: "${{ steps.create-allure-report-release.outputs.report-json-url }}",
|
|
}]
|
|
|
|
const script = require("./scripts/pr-comment-test-report.js")
|
|
await script({
|
|
github,
|
|
context,
|
|
fetch,
|
|
reports,
|
|
})
|
|
|
|
- name: Store Allure test stat in the DB
|
|
if: >
|
|
!cancelled() && (
|
|
steps.create-allure-report-debug.outputs.report-url ||
|
|
steps.create-allure-report-release.outputs.report-url
|
|
)
|
|
env:
|
|
SHA: ${{ github.event.pull_request.head.sha || github.sha }}
|
|
REPORT_JSON_URL_DEBUG: ${{ steps.create-allure-report-debug.outputs.report-json-url }}
|
|
REPORT_JSON_URL_RELEASE: ${{ steps.create-allure-report-release.outputs.report-json-url }}
|
|
TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR }}
|
|
run: |
|
|
./scripts/pysync
|
|
|
|
for report_url in $REPORT_JSON_URL_DEBUG $REPORT_JSON_URL_RELEASE; do
|
|
if [ -z "$report_url" ]; then
|
|
continue
|
|
fi
|
|
|
|
if [[ "$report_url" == "$REPORT_JSON_URL_DEBUG" ]]; then
|
|
BUILD_TYPE=debug
|
|
else
|
|
BUILD_TYPE=release
|
|
fi
|
|
|
|
curl --fail --output suites.json "${report_url}"
|
|
DATABASE_URL="$TEST_RESULT_CONNSTR" poetry run python3 scripts/ingest_regress_test_result.py --revision ${SHA} --reference ${GITHUB_REF} --build-type ${BUILD_TYPE} --ingest suites.json
|
|
done
|
|
|
|
coverage-report:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
needs: [ regress-tests ]
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
build_type: [ debug ]
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 1
|
|
|
|
# Disabled for now
|
|
# - name: Restore cargo deps cache
|
|
# id: cache_cargo
|
|
# uses: actions/cache@v3
|
|
# with:
|
|
# path: |
|
|
# ~/.cargo/registry/
|
|
# !~/.cargo/registry/src
|
|
# ~/.cargo/git/
|
|
# target/
|
|
# key: v1-${{ runner.os }}-${{ matrix.build_type }}-cargo-${{ hashFiles('rust-toolchain.toml') }}-${{ hashFiles('Cargo.lock') }}
|
|
|
|
- name: Get Neon artifact
|
|
uses: ./.github/actions/download
|
|
with:
|
|
name: neon-${{ runner.os }}-${{ matrix.build_type }}-artifact
|
|
path: /tmp/neon
|
|
|
|
- name: Get coverage artifact
|
|
uses: ./.github/actions/download
|
|
with:
|
|
name: coverage-data-artifact
|
|
path: /tmp/coverage
|
|
|
|
- name: Merge coverage data
|
|
run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage merge
|
|
|
|
- name: Build and upload coverage report
|
|
run: |
|
|
COMMIT_SHA=${{ github.event.pull_request.head.sha }}
|
|
COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}
|
|
COMMIT_URL=https://github.com/${{ github.repository }}/commit/$COMMIT_SHA
|
|
|
|
scripts/coverage \
|
|
--dir=/tmp/coverage report \
|
|
--input-objects=/tmp/coverage/binaries.list \
|
|
--commit-url=$COMMIT_URL \
|
|
--format=github
|
|
|
|
REPORT_URL=https://${{ github.repository_owner }}.github.io/zenith-coverage-data/$COMMIT_SHA
|
|
|
|
scripts/git-upload \
|
|
--repo=https://${{ secrets.VIP_VAP_ACCESS_TOKEN }}@github.com/${{ github.repository_owner }}/zenith-coverage-data.git \
|
|
--message="Add code coverage for $COMMIT_URL" \
|
|
copy /tmp/coverage/report $COMMIT_SHA # COPY FROM TO_RELATIVE
|
|
|
|
# Add link to the coverage report to the commit
|
|
curl -f -X POST \
|
|
https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
|
|
-H "Accept: application/vnd.github.v3+json" \
|
|
--user "${{ secrets.CI_ACCESS_TOKEN }}" \
|
|
--data \
|
|
"{
|
|
\"state\": \"success\",
|
|
\"context\": \"neon-coverage\",
|
|
\"description\": \"Coverage report is ready\",
|
|
\"target_url\": \"$REPORT_URL\"
|
|
}"
|
|
|
|
trigger-e2e-tests:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
|
|
options: --init
|
|
needs: [ promote-images, tag ]
|
|
steps:
|
|
- name: Set PR's status to pending and request a remote CI test
|
|
run: |
|
|
# For pull requests, GH Actions set "github.sha" variable to point at a fake merge commit
|
|
# but we need to use a real sha of a latest commit in the PR's branch for the e2e job,
|
|
# to place a job run status update later.
|
|
COMMIT_SHA=${{ github.event.pull_request.head.sha }}
|
|
# For non-PR kinds of runs, the above will produce an empty variable, pick the original sha value for those
|
|
COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}
|
|
|
|
REMOTE_REPO="${{ github.repository_owner }}/cloud"
|
|
|
|
curl -f -X POST \
|
|
https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
|
|
-H "Accept: application/vnd.github.v3+json" \
|
|
--user "${{ secrets.CI_ACCESS_TOKEN }}" \
|
|
--data \
|
|
"{
|
|
\"state\": \"pending\",
|
|
\"context\": \"neon-cloud-e2e\",
|
|
\"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
|
|
}"
|
|
|
|
curl -f -X POST \
|
|
https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
|
|
-H "Accept: application/vnd.github.v3+json" \
|
|
--user "${{ secrets.CI_ACCESS_TOKEN }}" \
|
|
--data \
|
|
"{
|
|
\"ref\": \"main\",
|
|
\"inputs\": {
|
|
\"ci_job_name\": \"neon-cloud-e2e\",
|
|
\"commit_hash\": \"$COMMIT_SHA\",
|
|
\"remote_repo\": \"${{ github.repository }}\",
|
|
\"storage_image_tag\": \"${{ needs.tag.outputs.build-tag }}\",
|
|
\"compute_image_tag\": \"${{ needs.tag.outputs.build-tag }}\"
|
|
}
|
|
}"
|
|
|
|
neon-image:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
needs: [ tag ]
|
|
container: gcr.io/kaniko-project/executor:v1.9.2-debug
|
|
defaults:
|
|
run:
|
|
shell: sh -eu {0}
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v1 # v3 won't work with kaniko
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 0
|
|
|
|
- name: Configure ECR and Docker Hub login
|
|
run: |
|
|
DOCKERHUB_AUTH=$(echo -n "${{ secrets.NEON_DOCKERHUB_USERNAME }}:${{ secrets.NEON_DOCKERHUB_PASSWORD }}" | base64)
|
|
echo "::add-mask::${DOCKERHUB_AUTH}"
|
|
|
|
cat <<-EOF > /kaniko/.docker/config.json
|
|
{
|
|
"auths": {
|
|
"https://index.docker.io/v1/": {
|
|
"auth": "${DOCKERHUB_AUTH}"
|
|
}
|
|
},
|
|
"credHelpers": {
|
|
"369495373322.dkr.ecr.eu-central-1.amazonaws.com": "ecr-login"
|
|
}
|
|
}
|
|
EOF
|
|
|
|
- name: Kaniko build neon
|
|
run:
|
|
/kaniko/executor --reproducible --snapshot-mode=redo --skip-unused-stages --cache=true
|
|
--cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache
|
|
--context .
|
|
--build-arg GIT_VERSION=${{ github.sha }}
|
|
--destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}}
|
|
--destination neondatabase/neon:${{needs.tag.outputs.build-tag}}
|
|
|
|
# Cleanup script fails otherwise - rm: cannot remove '/nvme/actions-runner/_work/_temp/_github_home/.ecr': Permission denied
|
|
- name: Cleanup ECR folder
|
|
run: rm -rf ~/.ecr
|
|
|
|
|
|
neon-image-depot:
|
|
# For testing this will run side-by-side for a few merges.
|
|
# This action is not really optimized yet, but gets the job done
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
needs: [ tag ]
|
|
container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/base:pinned
|
|
permissions:
|
|
contents: read
|
|
id-token: write
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 0
|
|
|
|
- name: Setup go
|
|
uses: actions/setup-go@v3
|
|
with:
|
|
go-version: '1.19'
|
|
|
|
- name: Set up Depot CLI
|
|
uses: depot/setup-action@v1
|
|
|
|
- name: Install Crane & ECR helper
|
|
run: go install github.com/awslabs/amazon-ecr-credential-helper/ecr-login/cli/docker-credential-ecr-login@69c85dc22db6511932bbf119e1a0cc5c90c69a7f # v0.6.0
|
|
|
|
- name: Configure ECR login
|
|
run: |
|
|
mkdir /github/home/.docker/
|
|
echo "{\"credsStore\":\"ecr-login\"}" > /github/home/.docker/config.json
|
|
|
|
- name: Build and push
|
|
uses: depot/build-push-action@v1
|
|
with:
|
|
# if no depot.json file is at the root of your repo, you must specify the project id
|
|
project: nrdv0s4kcs
|
|
push: true
|
|
tags: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:depot-${{needs.tag.outputs.build-tag}}
|
|
|
|
compute-tools-image:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
needs: [ tag ]
|
|
container: gcr.io/kaniko-project/executor:v1.9.2-debug
|
|
defaults:
|
|
run:
|
|
shell: sh -eu {0}
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v1 # v3 won't work with kaniko
|
|
|
|
- name: Configure ECR and Docker Hub login
|
|
run: |
|
|
DOCKERHUB_AUTH=$(echo -n "${{ secrets.NEON_DOCKERHUB_USERNAME }}:${{ secrets.NEON_DOCKERHUB_PASSWORD }}" | base64)
|
|
echo "::add-mask::${DOCKERHUB_AUTH}"
|
|
|
|
cat <<-EOF > /kaniko/.docker/config.json
|
|
{
|
|
"auths": {
|
|
"https://index.docker.io/v1/": {
|
|
"auth": "${DOCKERHUB_AUTH}"
|
|
}
|
|
},
|
|
"credHelpers": {
|
|
"369495373322.dkr.ecr.eu-central-1.amazonaws.com": "ecr-login"
|
|
}
|
|
}
|
|
EOF
|
|
|
|
- name: Kaniko build compute tools
|
|
run:
|
|
/kaniko/executor --reproducible --snapshot-mode=redo --skip-unused-stages --cache=true
|
|
--cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache
|
|
--context .
|
|
--build-arg GIT_VERSION=${{ github.sha }}
|
|
--dockerfile Dockerfile.compute-tools
|
|
--destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}}
|
|
--destination neondatabase/compute-tools:${{needs.tag.outputs.build-tag}}
|
|
|
|
# Cleanup script fails otherwise - rm: cannot remove '/nvme/actions-runner/_work/_temp/_github_home/.ecr': Permission denied
|
|
- name: Cleanup ECR folder
|
|
run: rm -rf ~/.ecr
|
|
|
|
compute-node-image:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
container: gcr.io/kaniko-project/executor:v1.9.2-debug
|
|
needs: [ tag ]
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
version: [ v14, v15 ]
|
|
defaults:
|
|
run:
|
|
shell: sh -eu {0}
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v1 # v3 won't work with kaniko
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 0
|
|
|
|
- name: Configure ECR and Docker Hub login
|
|
run: |
|
|
DOCKERHUB_AUTH=$(echo -n "${{ secrets.NEON_DOCKERHUB_USERNAME }}:${{ secrets.NEON_DOCKERHUB_PASSWORD }}" | base64)
|
|
echo "::add-mask::${DOCKERHUB_AUTH}"
|
|
|
|
cat <<-EOF > /kaniko/.docker/config.json
|
|
{
|
|
"auths": {
|
|
"https://index.docker.io/v1/": {
|
|
"auth": "${DOCKERHUB_AUTH}"
|
|
}
|
|
},
|
|
"credHelpers": {
|
|
"369495373322.dkr.ecr.eu-central-1.amazonaws.com": "ecr-login"
|
|
}
|
|
}
|
|
EOF
|
|
|
|
- name: Kaniko build compute node with extensions
|
|
run:
|
|
/kaniko/executor --reproducible --snapshot-mode=redo --skip-unused-stages --cache=true
|
|
--cache-repo 369495373322.dkr.ecr.eu-central-1.amazonaws.com/cache
|
|
--context .
|
|
--build-arg GIT_VERSION=${{ github.sha }}
|
|
--build-arg PG_VERSION=${{ matrix.version }}
|
|
--dockerfile Dockerfile.compute-node
|
|
--destination 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}
|
|
--destination neondatabase/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}
|
|
|
|
# Cleanup script fails otherwise - rm: cannot remove '/nvme/actions-runner/_work/_temp/_github_home/.ecr': Permission denied
|
|
- name: Cleanup ECR folder
|
|
run: rm -rf ~/.ecr
|
|
|
|
vm-compute-node-image:
|
|
runs-on: [ self-hosted, gen3, large ]
|
|
needs: [ tag, compute-node-image ]
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
version: [ v14, v15 ]
|
|
defaults:
|
|
run:
|
|
shell: sh -eu {0}
|
|
env:
|
|
VM_BUILDER_VERSION: v0.4.6
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v1
|
|
with:
|
|
fetch-depth: 0
|
|
|
|
- name: Downloading vm-builder
|
|
run: |
|
|
curl -L https://github.com/neondatabase/neonvm/releases/download/$VM_BUILDER_VERSION/vm-builder -o vm-builder
|
|
chmod +x vm-builder
|
|
|
|
- name: Pulling compute-node image
|
|
run: |
|
|
docker pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}
|
|
|
|
- name: Building VM compute-node rootfs
|
|
run: |
|
|
docker build -t temp-vm-compute-node --build-arg SRC_IMAGE=369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}} -f Dockerfile.vm-compute-node .
|
|
|
|
- name: Build vm image
|
|
run: |
|
|
# note: as of 2023-01-12, vm-builder requires a trailing ":latest" for local images
|
|
./vm-builder -use-inittab -src=temp-vm-compute-node:latest -dst=369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}
|
|
|
|
- name: Pushing vm-compute-node image
|
|
run: |
|
|
docker push 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-${{ matrix.version }}:${{needs.tag.outputs.build-tag}}
|
|
|
|
test-images:
|
|
needs: [ tag, neon-image, compute-node-image, compute-tools-image ]
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
fetch-depth: 0
|
|
|
|
# `neondatabase/neon` contains multiple binaries, all of them use the same input for the version into the same version formatting library.
|
|
# Pick pageserver as currently the only binary with extra "version" features printed in the string to verify.
|
|
# Regular pageserver version string looks like
|
|
# Neon page server git-env:32d14403bd6ab4f4520a94cbfd81a6acef7a526c failpoints: true, features: []
|
|
# Bad versions might loop like:
|
|
# Neon page server git-env:local failpoints: true, features: ["testing"]
|
|
# Ensure that we don't have bad versions.
|
|
- name: Verify image versions
|
|
shell: bash # ensure no set -e for better error messages
|
|
run: |
|
|
pageserver_version=$(docker run --rm 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} "/bin/sh" "-c" "/usr/local/bin/pageserver --version")
|
|
|
|
echo "Pageserver version string: $pageserver_version"
|
|
|
|
if ! echo "$pageserver_version" | grep -qv 'git-env:local' ; then
|
|
echo "Pageserver version should not be the default Dockerfile one"
|
|
exit 1
|
|
fi
|
|
|
|
if ! echo "$pageserver_version" | grep -qv '"testing"' ; then
|
|
echo "Pageserver version should have no testing feature enabled"
|
|
exit 1
|
|
fi
|
|
|
|
- name: Verify docker-compose example
|
|
run: env REPOSITORY=369495373322.dkr.ecr.eu-central-1.amazonaws.com TAG=${{needs.tag.outputs.build-tag}} ./docker-compose/docker_compose_test.sh
|
|
|
|
- name: Print logs and clean up
|
|
if: always()
|
|
run: |
|
|
docker compose -f ./docker-compose/docker-compose.yml logs || 0
|
|
docker compose -f ./docker-compose/docker-compose.yml down
|
|
|
|
promote-images:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
needs: [ tag, test-images, vm-compute-node-image ]
|
|
container: golang:1.19-bullseye
|
|
# Don't add if-condition here.
|
|
# The job should always be run because we have dependant other jobs that shouldn't be skipped
|
|
|
|
steps:
|
|
- name: Install Crane & ECR helper
|
|
run: |
|
|
go install github.com/google/go-containerregistry/cmd/crane@31786c6cbb82d6ec4fb8eb79cd9387905130534e # v0.11.0
|
|
go install github.com/awslabs/amazon-ecr-credential-helper/ecr-login/cli/docker-credential-ecr-login@69c85dc22db6511932bbf119e1a0cc5c90c69a7f # v0.6.0
|
|
|
|
- name: Configure ECR login
|
|
run: |
|
|
mkdir /github/home/.docker/
|
|
echo "{\"credsStore\":\"ecr-login\"}" > /github/home/.docker/config.json
|
|
|
|
- name: Copy vm-compute-node images to Docker Hub
|
|
run: |
|
|
crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} vm-compute-node-v14
|
|
crane pull 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} vm-compute-node-v15
|
|
|
|
- name: Add latest tag to images
|
|
if: |
|
|
(github.ref_name == 'main' || github.ref_name == 'release') &&
|
|
github.event_name != 'workflow_dispatch'
|
|
run: |
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} latest
|
|
|
|
- name: Push images to production ECR
|
|
if: |
|
|
(github.ref_name == 'main' || github.ref_name == 'release') &&
|
|
github.event_name != 'workflow_dispatch'
|
|
run: |
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/neon:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/neon:latest
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-tools:latest
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v14:latest
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v14:latest
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/compute-node-v15:latest
|
|
crane copy 369495373322.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} 093970136003.dkr.ecr.eu-central-1.amazonaws.com/vm-compute-node-v15:latest
|
|
|
|
- name: Configure Docker Hub login
|
|
run: |
|
|
# ECR Credential Helper & Docker Hub don't work together in config, hence reset
|
|
echo "" > /github/home/.docker/config.json
|
|
crane auth login -u ${{ secrets.NEON_DOCKERHUB_USERNAME }} -p ${{ secrets.NEON_DOCKERHUB_PASSWORD }} index.docker.io
|
|
|
|
- name: Push vm-compute-node to Docker Hub
|
|
run: |
|
|
crane push vm-compute-node-v14 neondatabase/vm-compute-node-v14:${{needs.tag.outputs.build-tag}}
|
|
crane push vm-compute-node-v15 neondatabase/vm-compute-node-v15:${{needs.tag.outputs.build-tag}}
|
|
|
|
- name: Push latest tags to Docker Hub
|
|
if: |
|
|
(github.ref_name == 'main' || github.ref_name == 'release') &&
|
|
github.event_name != 'workflow_dispatch'
|
|
run: |
|
|
crane tag neondatabase/neon:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag neondatabase/compute-tools:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag neondatabase/compute-node-v14:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag neondatabase/vm-compute-node-v14:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag neondatabase/compute-node-v15:${{needs.tag.outputs.build-tag}} latest
|
|
crane tag neondatabase/vm-compute-node-v15:${{needs.tag.outputs.build-tag}} latest
|
|
|
|
- name: Cleanup ECR folder
|
|
run: rm -rf ~/.ecr
|
|
|
|
deploy-pr-test-new:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/ansible:pinned
|
|
# We need both storage **and** compute images for deploy, because control plane picks the compute version based on the storage version.
|
|
# If it notices a fresh storage it may bump the compute version. And if compute image failed to build it may break things badly
|
|
needs: [ promote-images, tag, regress-tests ]
|
|
if: |
|
|
contains(github.event.pull_request.labels.*.name, 'deploy-test-storage') &&
|
|
github.event_name != 'workflow_dispatch'
|
|
defaults:
|
|
run:
|
|
shell: bash
|
|
strategy:
|
|
matrix:
|
|
target_region: [ eu-west-1 ]
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: true
|
|
fetch-depth: 0
|
|
|
|
- name: Redeploy
|
|
run: |
|
|
export DOCKER_TAG=${{needs.tag.outputs.build-tag}}
|
|
cd "$(pwd)/.github/ansible"
|
|
|
|
./get_binaries.sh
|
|
|
|
ansible-galaxy collection install sivel.toiletwater
|
|
ansible-playbook deploy.yaml -i staging.${{ matrix.target_region }}.hosts.yaml -e @ssm_config -e CONSOLE_API_TOKEN=${{ secrets.NEON_STAGING_API_KEY }} -e SENTRY_URL_PAGESERVER=${{ secrets.SENTRY_URL_PAGESERVER }} -e SENTRY_URL_SAFEKEEPER=${{ secrets.SENTRY_URL_SAFEKEEPER }}
|
|
rm -f neon_install.tar.gz .neon_current_version
|
|
|
|
- name: Cleanup ansible folder
|
|
run: rm -rf ~/.ansible
|
|
|
|
deploy:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/ansible:latest
|
|
needs: [ promote-images, tag, regress-tests ]
|
|
if: ( github.ref_name == 'main' || github.ref_name == 'release' ) && github.event_name != 'workflow_dispatch'
|
|
steps:
|
|
- name: Fix git ownership
|
|
run: |
|
|
# Workaround for `fatal: detected dubious ownership in repository at ...`
|
|
#
|
|
# Use both ${{ github.workspace }} and ${GITHUB_WORKSPACE} because they're different on host and in containers
|
|
# Ref https://github.com/actions/checkout/issues/785
|
|
#
|
|
git config --global --add safe.directory ${{ github.workspace }}
|
|
git config --global --add safe.directory ${GITHUB_WORKSPACE}
|
|
|
|
- name: Checkout
|
|
uses: actions/checkout@v3
|
|
with:
|
|
submodules: false
|
|
fetch-depth: 0
|
|
|
|
- name: Trigger deploy workflow
|
|
env:
|
|
GH_TOKEN: ${{ github.token }}
|
|
run: |
|
|
if [[ "$GITHUB_REF_NAME" == "main" ]]; then
|
|
gh workflow run deploy-dev.yml --ref main -f branch=${{ github.sha }} -f dockerTag=${{needs.tag.outputs.build-tag}}
|
|
elif [[ "$GITHUB_REF_NAME" == "release" ]]; then
|
|
gh workflow run deploy-prod.yml --ref release -f branch=${{ github.sha }} -f dockerTag=${{needs.tag.outputs.build-tag}} -f disclamerAcknowledged=true
|
|
else
|
|
echo "GITHUB_REF_NAME (value '$GITHUB_REF_NAME') is not set to either 'main' or 'release'"
|
|
exit 1
|
|
fi
|
|
|
|
promote-compatibility-data:
|
|
runs-on: [ self-hosted, gen3, small ]
|
|
container:
|
|
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
|
|
options: --init
|
|
needs: [ promote-images, tag, regress-tests ]
|
|
if: github.ref_name == 'release' && github.event_name != 'workflow_dispatch'
|
|
steps:
|
|
- name: Promote compatibility snapshot for the release
|
|
env:
|
|
BUCKET: neon-github-public-dev
|
|
PREFIX: artifacts/latest
|
|
run: |
|
|
# Update compatibility snapshot for the release
|
|
for build_type in debug release; do
|
|
OLD_FILENAME=compatibility-snapshot-${build_type}-pg14-${GITHUB_RUN_ID}.tar.zst
|
|
NEW_FILENAME=compatibility-snapshot-${build_type}-pg14.tar.zst
|
|
|
|
time aws s3 mv --only-show-errors s3://${BUCKET}/${PREFIX}/${OLD_FILENAME} s3://${BUCKET}/${PREFIX}/${NEW_FILENAME}
|
|
done
|
|
|
|
# Update Neon artifact for the release (reuse already uploaded artifact)
|
|
for build_type in debug release; do
|
|
OLD_PREFIX=artifacts/${GITHUB_RUN_ID}
|
|
FILENAME=neon-${{ runner.os }}-${build_type}-artifact.tar.zst
|
|
|
|
S3_KEY=$(aws s3api list-objects-v2 --bucket ${BUCKET} --prefix ${OLD_PREFIX} | jq -r '.Contents[].Key' | grep ${FILENAME} | sort --version-sort | tail -1 || true)
|
|
if [ -z "${S3_KEY}" ]; then
|
|
echo >&2 "Neither s3://${BUCKET}/${OLD_PREFIX}/${FILENAME} nor its version from previous attempts exist"
|
|
exit 1
|
|
fi
|
|
|
|
time aws s3 cp --only-show-errors s3://${BUCKET}/${S3_KEY} s3://${BUCKET}/${PREFIX}/${FILENAME}
|
|
done
|