## Problem

CI currently uses static credentials in some places. These are less secure and harder to maintain, so we are deprecating them in favor of OIDC auth.

## Summary of changes

- ci(fix): Use OIDC auth to upload artifacts to S3
- ci(fix): Use OIDC auth to log in to ECR
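As a sketch of the pattern (mirroring the `Configure AWS credentials` step used later in this workflow), a job grants `id-token: write` and assumes a role via `aws-actions/configure-aws-credentials` instead of reading static keys:

```yaml
permissions:
  id-token: write # required so the runner can request an OIDC token

steps:
  - name: Configure AWS credentials
    uses: aws-actions/configure-aws-credentials@v4
    with:
      aws-region: eu-central-1
      role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
      role-duration-seconds: 18000 # 5 hours
```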
name: Build and Test Locally

on:
  workflow_call:
    inputs:
      arch:
        description: 'x64 or arm64'
        required: true
        type: string
      build-tag:
        description: 'build tag'
        required: true
        type: string
      build-tools-image:
        description: 'build-tools image'
        required: true
        type: string
      build-type:
        description: 'debug or release'
        required: true
        type: string
      test-cfg:
        description: 'a json object of postgres versions and lfc states to run regression tests on'
        required: true
        type: string

defaults:
  run:
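    # bash -euxo pipefail: exit on errors and unset variables, trace commands,
    # and fail if any stage of a pipeline fails.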
    shell: bash -euxo pipefail {0}

env:
  RUST_BACKTRACE: 1
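  # COPT passes extra flags to the C compiler when building postgres; -Werror turns warnings into errors.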
  COPT: '-Werror'

jobs:
  build-neon:
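    # Pick a large self-hosted runner matching the requested architecture
    # (large-arm64 for arm64, otherwise large).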
    runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', inputs.arch == 'arm64' && 'large-arm64' || 'large')) }}
    permissions:
      id-token: write # aws-actions/configure-aws-credentials
      contents: read
    container:
      image: ${{ inputs.build-tools-image }}
      credentials:
        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
      # Raise locked memory limit for tokio-epoll-uring.
      # On 5.10 LTS kernels < 5.10.162 (and generally mainline kernels < 5.12),
      # io_uring will account the memory of the CQ and SQ as locked.
      # More details: https://github.com/neondatabase/neon/issues/6373#issuecomment-1905814391
      options: --init --shm-size=512mb --ulimit memlock=67108864:67108864
    env:
      BUILD_TYPE: ${{ inputs.build-type }}
      GIT_VERSION: ${{ github.event.pull_request.head.sha || github.sha }}
      BUILD_TAG: ${{ inputs.build-tag }}

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - name: Set pg 14 revision for caching
        id: pg_v14_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v14) >> $GITHUB_OUTPUT

      - name: Set pg 15 revision for caching
        id: pg_v15_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v15) >> $GITHUB_OUTPUT

      - name: Set pg 16 revision for caching
        id: pg_v16_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v16) >> $GITHUB_OUTPUT

      - name: Set pg 17 revision for caching
        id: pg_v17_rev
        run: echo pg_rev=$(git rev-parse HEAD:vendor/postgres-v17) >> $GITHUB_OUTPUT

      # Set some environment variables used by all the steps.
      #
      # CARGO_FLAGS is extra options to pass to "cargo build", "cargo test" etc.
      # It also includes --features, if any.
      #
      # CARGO_FEATURES is passed to "cargo metadata". It is separate from CARGO_FLAGS,
      # because "cargo metadata" doesn't accept --release or --debug options.
      #
      # We run tests with additional features that are turned off by default (e.g. in release builds);
      # see the corresponding Cargo.toml files for their descriptions.
      - name: Set env variables
        env:
          ARCH: ${{ inputs.arch }}
        run: |
          CARGO_FEATURES="--features testing"
          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
            cov_prefix="scripts/coverage --profraw-prefix=$GITHUB_JOB --dir=/tmp/coverage run"
            CARGO_FLAGS="--locked"
          elif [[ $BUILD_TYPE == "debug" ]]; then
            cov_prefix=""
            CARGO_FLAGS="--locked"
          elif [[ $BUILD_TYPE == "release" ]]; then
            cov_prefix=""
            CARGO_FLAGS="--locked --release"
          fi
          {
            echo "cov_prefix=${cov_prefix}"
            echo "CARGO_FEATURES=${CARGO_FEATURES}"
            echo "CARGO_FLAGS=${CARGO_FLAGS}"
            echo "CARGO_HOME=${GITHUB_WORKSPACE}/.cargo"
          } >> $GITHUB_ENV
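      # Cache keys pin the vendored postgres submodule revision plus a hash of the
      # build inputs, so a hit is only reused for an identical source and toolchain.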
      - name: Cache postgres v14 build
        id: cache_pg_14
        uses: actions/cache@v4
        with:
          path: pg_install/v14
          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v14_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}

      - name: Cache postgres v15 build
        id: cache_pg_15
        uses: actions/cache@v4
        with:
          path: pg_install/v15
          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v15_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}

      - name: Cache postgres v16 build
        id: cache_pg_16
        uses: actions/cache@v4
        with:
          path: pg_install/v16
          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v16_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}

      - name: Cache postgres v17 build
        id: cache_pg_17
        uses: actions/cache@v4
        with:
          path: pg_install/v17
          key: v1-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-pg-${{ steps.pg_v17_rev.outputs.pg_rev }}-bookworm-${{ hashFiles('Makefile', 'build-tools.Dockerfile') }}
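      # 'mold -run' makes the build use the mold linker, which speeds up the link step considerably.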
      - name: Build postgres v14
        if: steps.cache_pg_14.outputs.cache-hit != 'true'
        run: mold -run make postgres-v14 -j$(nproc)

      - name: Build postgres v15
        if: steps.cache_pg_15.outputs.cache-hit != 'true'
        run: mold -run make postgres-v15 -j$(nproc)

      - name: Build postgres v16
        if: steps.cache_pg_16.outputs.cache-hit != 'true'
        run: mold -run make postgres-v16 -j$(nproc)

      - name: Build postgres v17
        if: steps.cache_pg_17.outputs.cache-hit != 'true'
        run: mold -run make postgres-v17 -j$(nproc)

      - name: Build neon extensions
        run: mold -run make neon-pg-ext -j$(nproc)

      - name: Build walproposer-lib
        run: mold -run make walproposer-lib -j$(nproc)
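      # PQ_LIB_DIR points the libpq bindings at the freshly built v16 libpq instead of a system copy.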
      - name: Run cargo build
        run: |
          PQ_LIB_DIR=$(pwd)/pg_install/v16/lib
          export PQ_LIB_DIR
          ${cov_prefix} mold -run cargo build $CARGO_FLAGS $CARGO_FEATURES --bins --tests

      # Do install *before* running rust tests because they might recompile the
      # binaries with different features/flags.
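      # 'cargo metadata' emits the workspace's targets as JSON; the jq filter below keeps only the "bin" targets.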
      - name: Install rust binaries
        env:
          ARCH: ${{ inputs.arch }}
        run: |
          # Install target binaries
          mkdir -p /tmp/neon/bin/
          binaries=$(
            ${cov_prefix} cargo metadata $CARGO_FEATURES --format-version=1 --no-deps |
            jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
          )
          for bin in $binaries; do
            SRC=target/$BUILD_TYPE/$bin
            DST=/tmp/neon/bin/$bin
            cp "$SRC" "$DST"
          done

          # Install test executables and write list of all binaries (for code coverage)
          if [[ $BUILD_TYPE == "debug" && $ARCH == 'x64' ]]; then
            # Keep bloated coverage data files away from the rest of the artifact
            mkdir -p /tmp/coverage/

            mkdir -p /tmp/neon/test_bin/

            test_exe_paths=$(
              ${cov_prefix} cargo test $CARGO_FLAGS $CARGO_FEATURES --message-format=json --no-run |
              jq -r '.executable | select(. != null)'
            )
            for bin in $test_exe_paths; do
              SRC=$bin
              DST=/tmp/neon/test_bin/$(basename $bin)

              # We don't need debug symbols for code coverage, so strip them out to make
              # the artifact smaller.
              strip "$SRC" -o "$DST"
              echo "$DST" >> /tmp/coverage/binaries.list
            done

            for bin in $binaries; do
              echo "/tmp/neon/bin/$bin" >> /tmp/coverage/binaries.list
            done
          fi
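      # Exchange the job's GitHub OIDC token for short-lived AWS credentials;
      # this is what the id-token: write permission above is for.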
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: eu-central-1
          role-to-assume: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
          role-duration-seconds: 18000 # 5 hours

      - name: Run rust tests
        env:
          NEXTEST_RETRIES: 3
        run: |
          PQ_LIB_DIR=$(pwd)/pg_install/v16/lib
          export PQ_LIB_DIR
          LD_LIBRARY_PATH=$(pwd)/pg_install/v17/lib
          export LD_LIBRARY_PATH

          # nextest does not yet support running doctests
          ${cov_prefix} cargo test --doc $CARGO_FLAGS $CARGO_FEATURES

          # run all non-pageserver tests
          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E '!package(pageserver)'

          # run pageserver tests with different settings
          for io_engine in std-fs tokio-epoll-uring ; do
            NEON_PAGESERVER_UNIT_TEST_VIRTUAL_FILE_IOENGINE=$io_engine ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(pageserver)'
          done

          # Run separate tests for real S3
          export ENABLE_REAL_S3_REMOTE_STORAGE=nonempty
          export REMOTE_STORAGE_S3_BUCKET=neon-github-ci-tests
          export REMOTE_STORAGE_S3_REGION=eu-central-1
          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(remote_storage)' -E 'test(test_real_s3)'

          # Run separate tests for real Azure Blob Storage
          # XXX: replace region with `eu-central-1`-like region
          export ENABLE_REAL_AZURE_REMOTE_STORAGE=y
          export AZURE_STORAGE_ACCOUNT="${{ secrets.AZURE_STORAGE_ACCOUNT_DEV }}"
          export AZURE_STORAGE_ACCESS_KEY="${{ secrets.AZURE_STORAGE_ACCESS_KEY_DEV }}"
          export REMOTE_STORAGE_AZURE_CONTAINER="${{ vars.REMOTE_STORAGE_AZURE_CONTAINER }}"
          export REMOTE_STORAGE_AZURE_REGION="${{ vars.REMOTE_STORAGE_AZURE_REGION }}"
          ${cov_prefix} cargo nextest run $CARGO_FLAGS $CARGO_FEATURES -E 'package(remote_storage)' -E 'test(test_real_azure)'

      - name: Install postgres binaries
        run: |
          # Use tar to copy files matching the pattern, preserving their paths in the destination
          tar c \
            pg_install/v* \
            pg_install/build/*/src/test/regress/*.so \
            pg_install/build/*/src/test/regress/pg_regress \
            pg_install/build/*/src/test/isolation/isolationtester \
            pg_install/build/*/src/test/isolation/pg_isolation_regress \
            | tar x -C /tmp/neon
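      # The repo-local upload action authenticates to S3 via the OIDC role rather than static keys.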
      - name: Upload Neon artifact
        uses: ./.github/actions/upload
        with:
          name: neon-${{ runner.os }}-${{ runner.arch }}-${{ inputs.build-type }}-artifact
          path: /tmp/neon
          aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}

      # XXX: keep this after the binaries.list is formed, so the coverage can properly work later
      - name: Merge and upload coverage data
        if: inputs.build-type == 'debug'
        uses: ./.github/actions/save-coverage-data

  regress-tests:
    # Don't run regression tests on debug arm64 builds
    if: inputs.build-type != 'debug' || inputs.arch != 'arm64'
    permissions:
      id-token: write # aws-actions/configure-aws-credentials
      contents: read
      statuses: write
    needs: [ build-neon ]
    runs-on: ${{ fromJson(format('["self-hosted", "{0}"]', inputs.arch == 'arm64' && 'large-arm64' || 'large')) }}
    container:
      image: ${{ inputs.build-tools-image }}
      credentials:
        username: ${{ secrets.NEON_DOCKERHUB_USERNAME }}
        password: ${{ secrets.NEON_DOCKERHUB_PASSWORD }}
      # for changed limits, see comments on `options:` earlier in this file
      options: --init --shm-size=512mb --ulimit memlock=67108864:67108864
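    # Expand the test-cfg input into a matrix of {pg_version, lfc_state} combinations.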
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(format('{{"include":{0}}}', inputs.test-cfg)) }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
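      # Debug builds with LFC enabled may fail without failing the job (continue-on-error below).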
      - name: Pytest regression tests
        continue-on-error: ${{ matrix.lfc_state == 'with-lfc' && inputs.build-type == 'debug' }}
        uses: ./.github/actions/run-python-test-set
        timeout-minutes: 60
        with:
          build_type: ${{ inputs.build-type }}
          test_selection: regress
          needs_postgres_source: true
          run_with_real_s3: true
          real_s3_bucket: neon-github-ci-tests
          real_s3_region: eu-central-1
          rerun_failed: true
          pg_version: ${{ matrix.pg_version }}
          aws_oicd_role_arn: ${{ vars.DEV_AWS_OIDC_ROLE_ARN }}
        env:
          TEST_RESULT_CONNSTR: ${{ secrets.REGRESS_TEST_RESULT_CONNSTR_NEW }}
          CHECK_ONDISK_DATA_COMPATIBILITY: nonempty
          BUILD_TAG: ${{ inputs.build-tag }}
          PAGESERVER_VIRTUAL_FILE_IO_ENGINE: tokio-epoll-uring
          USE_LFC: ${{ matrix.lfc_state == 'with-lfc' && 'true' || 'false' }}

      # Temporarily disabled until we figure out why this step is so flaky
      # Ref https://github.com/neondatabase/neon/issues/4540
      - name: Merge and upload coverage data
        if: |
          false &&
          inputs.build-type == 'debug' && matrix.pg_version == 'v16'
        uses: ./.github/actions/save-coverage-data