mirror of
https://github.com/neondatabase/neon.git
synced 2025-12-22 21:59:59 +00:00
Currently it's included with minimal changes and lives alongside the main workspace. Later we may reuse and combine common parts with the zenith control_plane. This change is mostly needed to unify the cloud deployment pipeline: 1.1. build the compute-tools image 1.2. build the compute-node image based on the freshly built compute-tools 2. build the zenith image. This lets us roll out a new compute image together with the new storage it requires to operate properly. It also becomes easier to test the console against a specific version of compute-node/-tools.
724 lines
26 KiB
YAML
724 lines
26 KiB
YAML
version: 2.1

# Shared executor definitions used by the jobs below.
executors:
  # Rust build environment; xlarge because cargo builds are CPU-hungry.
  zenith-build-executor:
    resource_class: xlarge
    docker:
      - image: cimg/rust:1.56.1
  # Python test environment.
  zenith-python-executor:
    docker:
      - image: cimg/python:3.7.10 # Oldest available 3.7 with Ubuntu 20.04 (for GLIBC and Rust) at CircleCI
jobs:
  # Verify Rust source formatting with rustfmt (no compilation needed).
  check-codestyle-rust:
    executor: zenith-build-executor
    steps:
      - checkout
      - run:
          name: rustfmt
          when: always
          command: |
            cargo fmt --all -- --check
# A job to build postgres
  build-postgres:
    executor: zenith-build-executor
    parameters:
      build_type:
        type: enum
        enum: ["debug", "release"]
    environment:
      BUILD_TYPE: << parameters.build_type >>
    steps:
      # Checkout the git repo (circleci doesn't have a flag to enable submodules here)
      - checkout

      # Grab the postgres git revision to build a cache key.
      # Note this works even though the submodule hasn't been checked out yet.
      - run:
          name: Get postgres cache key
          command: |
            git rev-parse HEAD:vendor/postgres > /tmp/cache-key-postgres

      - restore_cache:
          name: Restore postgres cache
          keys:
            # Restore ONLY if the rev key matches exactly
            - v04-postgres-cache-<< parameters.build_type >>-{{ checksum "/tmp/cache-key-postgres" }}

      # FIXME We could cache our own docker container, instead of installing packages every time.
      - run:
          name: apt install dependencies
          command: |
            if [ ! -e tmp_install/bin/postgres ]; then
              sudo apt update
              # -y: apt has no TTY in CI and would abort at the confirmation prompt
              sudo apt install -y build-essential libreadline-dev zlib1g-dev flex bison libseccomp-dev
            fi

      # Build postgres if the restore_cache didn't find a build.
      # `make` can't figure out whether the cache is valid, since
      # it only compares file timestamps.
      - run:
          name: build postgres
          command: |
            if [ ! -e tmp_install/bin/postgres ]; then
              # "depth 1" saves some time by not cloning the whole repo
              git submodule update --init --depth 1
              make postgres -j8
            fi

      - save_cache:
          name: Save postgres cache
          key: v04-postgres-cache-<< parameters.build_type >>-{{ checksum "/tmp/cache-key-postgres" }}
          paths:
            - tmp_install
# A job to build zenith rust code
  build-zenith:
    executor: zenith-build-executor
    parameters:
      build_type:
        type: enum
        enum: ["debug", "release"]
    environment:
      BUILD_TYPE: << parameters.build_type >>
    steps:
      - run:
          name: apt install dependencies
          command: |
            sudo apt update
            # -y: apt has no TTY in CI and would abort at the confirmation prompt
            sudo apt install -y libssl-dev clang

      # Checkout the git repo (without submodules)
      - checkout

      # Grab the postgres git revision to build a cache key.
      # Note this works even though the submodule hasn't been checked out yet.
      - run:
          name: Get postgres cache key
          command: |
            git rev-parse HEAD:vendor/postgres > /tmp/cache-key-postgres

      - restore_cache:
          name: Restore postgres cache
          keys:
            # Restore ONLY if the rev key matches exactly
            - v04-postgres-cache-<< parameters.build_type >>-{{ checksum "/tmp/cache-key-postgres" }}

      - restore_cache:
          name: Restore rust cache
          keys:
            # Require an exact match. While an out of date cache might speed up the build,
            # there's no way to clean out old packages, so the cache grows every time something
            # changes.
            - v04-rust-cache-deps-<< parameters.build_type >>-{{ checksum "Cargo.lock" }}

      # Build the rust code, including test binaries
      - run:
          name: Rust build << parameters.build_type >>
          command: |
            if [[ $BUILD_TYPE == "debug" ]]; then
              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
              CARGO_FLAGS=
            elif [[ $BUILD_TYPE == "release" ]]; then
              cov_prefix=()
              CARGO_FLAGS=--release
            fi

            export CARGO_INCREMENTAL=0
            "${cov_prefix[@]}" cargo build $CARGO_FLAGS --bins --tests

      - save_cache:
          name: Save rust cache
          key: v04-rust-cache-deps-<< parameters.build_type >>-{{ checksum "Cargo.lock" }}
          paths:
            - ~/.cargo/registry
            - ~/.cargo/git
            - target

      # Run style checks
      # has to run separately from cargo fmt section
      # since needs to run with dependencies
      - run:
          name: cargo clippy
          command: |
            if [[ $BUILD_TYPE == "debug" ]]; then
              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
            elif [[ $BUILD_TYPE == "release" ]]; then
              cov_prefix=()
            fi

            "${cov_prefix[@]}" ./run_clippy.sh

      # Run rust unit tests
      - run:
          name: cargo test
          command: |
            if [[ $BUILD_TYPE == "debug" ]]; then
              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
            elif [[ $BUILD_TYPE == "release" ]]; then
              cov_prefix=()
            fi

            "${cov_prefix[@]}" cargo test

      # Install the rust binaries, for use by test jobs
      - run:
          name: Install rust binaries
          command: |
            if [[ $BUILD_TYPE == "debug" ]]; then
              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
            elif [[ $BUILD_TYPE == "release" ]]; then
              cov_prefix=()
            fi

            binaries=$(
              "${cov_prefix[@]}" cargo metadata --format-version=1 --no-deps |
              jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
            )

            test_exe_paths=$(
              "${cov_prefix[@]}" cargo test --message-format=json --no-run |
              jq -r '.executable | select(. != null)'
            )

            mkdir -p /tmp/zenith/bin
            mkdir -p /tmp/zenith/test_bin
            mkdir -p /tmp/zenith/etc

            # Install target binaries
            for bin in $binaries; do
              SRC=target/$BUILD_TYPE/$bin
              DST=/tmp/zenith/bin/$bin
              cp $SRC $DST
              echo $DST >> /tmp/zenith/etc/binaries.list
            done

            # Install test executables (for code coverage)
            if [[ $BUILD_TYPE == "debug" ]]; then
              for bin in $test_exe_paths; do
                SRC=$bin
                DST=/tmp/zenith/test_bin/$(basename $bin)
                cp $SRC $DST
                echo $DST >> /tmp/zenith/etc/binaries.list
              done
            fi

      # Install the postgres binaries, for use by test jobs
      - run:
          name: Install postgres binaries
          command: |
            cp -a tmp_install /tmp/zenith/pg_install

      # Save the rust binaries and coverage data for other jobs in this workflow.
      - persist_to_workspace:
          root: /tmp/zenith
          paths:
            - "*"
# Check Python test-runner code: formatting (yapf) and types (mypy).
  check-codestyle-python:
    executor: zenith-python-executor
    steps:
      - checkout
      - run:
          name: Install deps
          command: pipenv --python 3.7 install --dev
      - run:
          name: Run yapf to ensure code format
          when: always
          command: pipenv run yapf --recursive --diff .
      - run:
          name: Run mypy to check types
          when: always
          command: pipenv run mypy .
# Run a batch of pytest-based integration tests against the binaries
# produced by build-zenith / build-postgres (attached via workspace).
  run-pytest:
    executor: zenith-python-executor
    parameters:
      # Select the type of Rust build. Must be "release" or "debug".
      build_type:
        type: string
        default: "debug"
      # pytest args to specify the tests to run.
      #
      # This can be a test file name, e.g. 'test_pgbench.py', or a subdirectory,
      # or '-k foobar' to run tests containing string 'foobar'. See pytest man page
      # section SPECIFYING TESTS / SELECTING TESTS for details.
      #
      # This parameter is required, to prevent the mistake of running all tests in one job.
      test_selection:
        type: string
        default: ""
      # Arbitrary parameters to pytest. For example "-s" to prevent capturing stdout/stderr
      extra_params:
        type: string
        default: ""
      needs_postgres_source:
        type: boolean
        default: false
      run_in_parallel:
        type: boolean
        default: true
      save_perf_report:
        type: boolean
        default: false
    environment:
      BUILD_TYPE: << parameters.build_type >>
    steps:
      - attach_workspace:
          at: /tmp/zenith
      - checkout
      - when:
          condition: << parameters.needs_postgres_source >>
          steps:
            - run: git submodule update --init --depth 1
      - run:
          name: Install deps
          command: pipenv --python 3.7 install
      - run:
          name: Run pytest
          # pytest doesn't output test logs in real time, so CI job may fail with
          # `Too long with no output` error, if a test is running for a long time.
          # In that case, tests should have internal timeouts that are less than
          # no_output_timeout, specified here.
          no_output_timeout: 10m
          environment:
            ZENITH_BIN: /tmp/zenith/bin
            POSTGRES_DISTRIB_DIR: /tmp/zenith/pg_install
            TEST_OUTPUT: /tmp/test_output
            # this variable will be embedded in perf test report
            # and is needed to distinguish different environments
            PLATFORM: zenith-local-ci
          command: |
            PERF_REPORT_DIR="$(realpath test_runner/perf-report-local)"

            # Check the raw parameter: TEST_SELECTION below always contains the
            # "test_runner/" prefix, so testing it for emptiness can never fail.
            if [ -z "<< parameters.test_selection >>" ]; then
              echo "test_selection must be set"
              exit 1
            fi
            TEST_SELECTION="test_runner/<< parameters.test_selection >>"
            EXTRA_PARAMS="<< parameters.extra_params >>"
            if << parameters.run_in_parallel >>; then
              EXTRA_PARAMS="-n4 $EXTRA_PARAMS"
            fi
            if << parameters.save_perf_report >>; then
              if [[ $CIRCLE_BRANCH == "main" ]]; then
                mkdir -p "$PERF_REPORT_DIR"
                EXTRA_PARAMS="--out-dir $PERF_REPORT_DIR $EXTRA_PARAMS"
              fi
            fi

            export GITHUB_SHA=$CIRCLE_SHA1

            if [[ $BUILD_TYPE == "debug" ]]; then
              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
            elif [[ $BUILD_TYPE == "release" ]]; then
              cov_prefix=()
            fi

            # Run the tests.
            #
            # The junit.xml file allows CircleCI to display more fine-grained test information
            # in its "Tests" tab in the results page.
            # --verbose prints name of each test (helpful when there are
            # multiple tests in one file)
            # -rA prints summary in the end
            # -n4 uses four processes to run tests via pytest-xdist
            # -s is not used to prevent pytest from capturing output, because tests are running
            # in parallel and logs are mixed between different tests
            "${cov_prefix[@]}" pipenv run pytest \
              --junitxml=$TEST_OUTPUT/junit.xml \
              --tb=short \
              --verbose \
              -m "not remote_cluster" \
              -rA $TEST_SELECTION $EXTRA_PARAMS

            if << parameters.save_perf_report >>; then
              if [[ $CIRCLE_BRANCH == "main" ]]; then
                # TODO: reuse scripts/git-upload
                export REPORT_FROM="$PERF_REPORT_DIR"
                export REPORT_TO=local
                scripts/generate_and_push_perf_report.sh
              fi
            fi
      - run:
          # CircleCI artifacts are preserved one file at a time, so skipping
          # this step isn't a good idea. If you want to extract the
          # pageserver state, perhaps a tarball would be a better idea.
          name: Delete all data but logs
          when: always
          command: |
            du -sh /tmp/test_output/*
            find /tmp/test_output -type f ! -name "pg.log" ! -name "pageserver.log" ! -name "safekeeper.log" ! -name "regression.diffs" ! -name "junit.xml" ! -name "*.filediff" ! -name "*.stdout" ! -name "*.stderr" -delete
            du -sh /tmp/test_output/*
      - store_artifacts:
          path: /tmp/test_output
      # The store_test_results step tells CircleCI where to find the junit.xml file.
      - store_test_results:
          path: /tmp/test_output
      # Save coverage data (if any)
      - persist_to_workspace:
          root: /tmp/zenith
          paths:
            - "*"
# Assemble the coverage data persisted by earlier jobs into a report,
# publish it to the zenith-coverage-data repo, and attach a commit status.
  coverage-report:
    executor: zenith-build-executor
    steps:
      - attach_workspace:
          at: /tmp/zenith
      - checkout
      - restore_cache:
          name: Restore rust cache
          keys:
            # Require an exact match. While an out of date cache might speed up the build,
            # there's no way to clean out old packages, so the cache grows every time something
            # changes.
            - v04-rust-cache-deps-debug-{{ checksum "Cargo.lock" }}
      - run:
          name: Install llvm-tools
          command: |
            # TODO: install a proper symbol demangler, e.g. rustfilt
            # TODO: we should embed this into a docker image
            rustup component add llvm-tools-preview
      - run:
          name: Build coverage report
          command: |
            COMMIT_URL=https://github.com/zenithdb/zenith/commit/$CIRCLE_SHA1

            scripts/coverage \
              --dir=/tmp/zenith/coverage report \
              --input-objects=/tmp/zenith/etc/binaries.list \
              --commit-url=$COMMIT_URL \
              --format=github
      - run:
          name: Upload coverage report
          command: |
            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME
            REPORT_URL=https://zenithdb.github.io/zenith-coverage-data/$CIRCLE_SHA1
            COMMIT_URL=https://github.com/zenithdb/zenith/commit/$CIRCLE_SHA1

            scripts/git-upload \
              --repo=https://$VIP_VAP_ACCESS_TOKEN@github.com/zenithdb/zenith-coverage-data.git \
              --message="Add code coverage for $COMMIT_URL" \
              copy /tmp/zenith/coverage/report $CIRCLE_SHA1 # COPY FROM TO_RELATIVE

            # Add link to the coverage report to the commit
            curl -f -X POST \
              https://api.github.com/repos/$LOCAL_REPO/statuses/$CIRCLE_SHA1 \
              -H "Accept: application/vnd.github.v3+json" \
              --user "$CI_ACCESS_TOKEN" \
              --data \
              "{
                \"state\": \"success\",
                \"context\": \"zenith-coverage\",
                \"description\": \"Coverage report is ready\",
                \"target_url\": \"$REPORT_URL\"
              }"
# Build zenithdb/zenith:latest image and push it to Docker hub
  docker-image:
    docker:
      - image: cimg/base:2021.04
    steps:
      - checkout
      - setup_remote_docker:
          docker_layer_caching: true
      - run:
          name: Init postgres submodule
          command: git submodule update --init --depth 1
      - run:
          name: Build and push Docker image
          command: |
            echo $DOCKER_PWD | docker login -u $DOCKER_LOGIN --password-stdin
            DOCKER_TAG=$(git log --oneline|wc -l)
            docker build --build-arg GIT_VERSION=$CIRCLE_SHA1 -t zenithdb/zenith:latest . && docker push zenithdb/zenith:latest
            docker tag zenithdb/zenith:latest zenithdb/zenith:${DOCKER_TAG} && docker push zenithdb/zenith:${DOCKER_TAG}
# Build zenithdb/compute-node:latest image and push it to Docker hub
  docker-image-compute:
    docker:
      - image: cimg/base:2021.04
    steps:
      - checkout
      - setup_remote_docker:
          docker_layer_caching: true
      # Build zenithdb/compute-tools:latest image and push it to Docker hub
      # TODO: this should probably also use versioned tag, not just :latest.
      # XXX: but should it? We build and use it only locally now.
      - run:
          name: Build and push compute-tools Docker image
          command: |
            echo $DOCKER_PWD | docker login -u $DOCKER_LOGIN --password-stdin
            docker build -t zenithdb/compute-tools:latest ./compute_tools/
            docker push zenithdb/compute-tools:latest
      - run:
          name: Init postgres submodule
          command: git submodule update --init --depth 1
      - run:
          name: Build and push compute-node Docker image
          command: |
            echo $DOCKER_PWD | docker login -u $DOCKER_LOGIN --password-stdin
            DOCKER_TAG=$(git log --oneline|wc -l)
            docker build -t zenithdb/compute-node:latest vendor/postgres && docker push zenithdb/compute-node:latest
            docker tag zenithdb/compute-node:latest zenithdb/compute-node:${DOCKER_TAG} && docker push zenithdb/compute-node:${DOCKER_TAG}
# Extract storage binaries from the freshly pushed image and re-deploy
# them to the staging hosts with ansible.
  deploy-staging:
    docker:
      - image: cimg/python:3.10
    steps:
      - checkout
      - setup_remote_docker
      - run:
          name: Get Zenith binaries
          command: |
            rm -rf zenith_install postgres_install.tar.gz zenith_install.tar.gz
            mkdir zenith_install
            DOCKER_TAG=$(git log --oneline|wc -l)
            docker pull --quiet zenithdb/zenith:${DOCKER_TAG}
            ID=$(docker create zenithdb/zenith:${DOCKER_TAG})
            docker cp $ID:/data/postgres_install.tar.gz .
            tar -xzf postgres_install.tar.gz -C zenith_install && rm postgres_install.tar.gz
            docker cp $ID:/usr/local/bin/pageserver zenith_install/bin/
            docker cp $ID:/usr/local/bin/safekeeper zenith_install/bin/
            docker cp $ID:/usr/local/bin/proxy zenith_install/bin/
            docker cp $ID:/usr/local/bin/postgres zenith_install/bin/
            docker rm -v $ID
            echo ${DOCKER_TAG} | tee zenith_install/.zenith_current_version
            tar -czf zenith_install.tar.gz -C zenith_install .
            ls -la zenith_install.tar.gz
      - run:
          name: Setup ansible
          command: |
            pip install --progress-bar off --user ansible boto3
            ansible-galaxy collection install amazon.aws
      - run:
          name: Apply re-deploy playbook
          environment:
            # Quoted: env var values are strings; a bare `false` is a YAML boolean.
            ANSIBLE_HOST_KEY_CHECKING: "false"
          command: |
            echo "${STAGING_SSH_KEY}" | base64 --decode | ssh-add -
            export AWS_REGION=${STAGING_AWS_REGION}
            export AWS_ACCESS_KEY_ID=${STAGING_AWS_ACCESS_KEY_ID}
            export AWS_SECRET_ACCESS_KEY=${STAGING_AWS_SECRET_ACCESS_KEY}
            ansible-playbook .circleci/storage-redeploy.playbook.yml
            rm -f zenith_install.tar.gz
# Re-deploy the proxy to the staging Kubernetes cluster via helm.
  deploy-staging-proxy:
    docker:
      - image: cimg/base:2021.04
    environment:
      KUBECONFIG: .kubeconfig
    steps:
      - checkout
      - run:
          name: Store kubeconfig file
          command: |
            echo "${STAGING_KUBECONFIG_DATA}" | base64 --decode > ${KUBECONFIG}
            chmod 0600 ${KUBECONFIG}
      - run:
          name: Setup helm v3
          command: |
            curl -s https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
            helm repo add zenithdb https://zenithdb.github.io/helm-charts
      - run:
          name: Re-deploy proxy
          command: |
            DOCKER_TAG=$(git log --oneline|wc -l)
            helm upgrade zenith-proxy zenithdb/zenith-proxy --install -f .circleci/proxy.staging.yaml --set image.tag=${DOCKER_TAG} --wait
# Trigger a new remote CI job
  remote-ci-trigger:
    docker:
      - image: cimg/base:2021.04
    parameters:
      remote_repo:
        type: string
    environment:
      REMOTE_REPO: << parameters.remote_repo >>
    steps:
      - run:
          name: Set PR's status to pending
          command: |
            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME

            curl -f -X POST \
              https://api.github.com/repos/$LOCAL_REPO/statuses/$CIRCLE_SHA1 \
              -H "Accept: application/vnd.github.v3+json" \
              --user "$CI_ACCESS_TOKEN" \
              --data \
              "{
                \"state\": \"pending\",
                \"context\": \"zenith-remote-ci\",
                \"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
              }"
      - run:
          name: Request a remote CI test
          command: |
            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME

            curl -f -X POST \
              https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
              -H "Accept: application/vnd.github.v3+json" \
              --user "$CI_ACCESS_TOKEN" \
              --data \
              "{
                \"ref\": \"main\",
                \"inputs\": {
                  \"ci_job_name\": \"zenith-remote-ci\",
                  \"commit_hash\": \"$CIRCLE_SHA1\",
                  \"remote_repo\": \"$LOCAL_REPO\"
                }
              }"
#
#
# compute-tools jobs
# TODO: unify with main build_and_test pipeline
#
#
  compute-tools-test:
    executor: zenith-build-executor
    working_directory: ~/repo/compute_tools
    steps:
      - checkout:
          path: ~/repo

      - restore_cache:
          name: Restore rust cache
          keys:
            # Require an exact match. While an out of date cache might speed up the build,
            # there's no way to clean out old packages, so the cache grows every time something
            # changes.
            # NOTE(review): this uses a v03- prefix while the main jobs use v04- —
            # presumably a separate cache namespace for this workspace; confirm.
            - v03-rust-cache-deps-debug-{{ checksum "Cargo.lock" }}

      # Build the rust code, including test binaries
      - run:
          name: Rust build
          environment:
            CARGO_INCREMENTAL: 0
          command: cargo build --bins --tests

      - save_cache:
          name: Save rust cache
          key: v03-rust-cache-deps-debug-{{ checksum "Cargo.lock" }}
          paths:
            - ~/.cargo/registry
            - ~/.cargo/git
            - target

      # Run Rust formatting checks
      - run:
          name: cargo fmt check
          command: cargo fmt --all -- --check

      # Run Rust linter (clippy)
      - run:
          name: cargo clippy check
          command: cargo clippy --all --all-targets -- -Dwarnings -Drust-2018-idioms

      # Run Rust integration and unittests
      - run: cargo test
workflows:
  build_and_test:
    jobs:
      - check-codestyle-rust
      - check-codestyle-python
      # Scheduled here so that coverage-report and docker-image-compute can
      # `require` it — CircleCI rejects requiring a job absent from the workflow.
      - compute-tools-test
      - build-postgres:
          name: build-postgres-<< matrix.build_type >>
          matrix:
            parameters:
              build_type: ["debug", "release"]
      - build-zenith:
          name: build-zenith-<< matrix.build_type >>
          matrix:
            parameters:
              build_type: ["debug", "release"]
          requires:
            - build-postgres-<< matrix.build_type >>
      - run-pytest:
          name: pg_regress-tests-<< matrix.build_type >>
          matrix:
            parameters:
              build_type: ["debug", "release"]
          test_selection: batch_pg_regress
          needs_postgres_source: true
          requires:
            - build-zenith-<< matrix.build_type >>
      - run-pytest:
          name: other-tests-<< matrix.build_type >>
          matrix:
            parameters:
              build_type: ["debug", "release"]
          test_selection: batch_others
          requires:
            - build-zenith-<< matrix.build_type >>
      - run-pytest:
          name: benchmarks
          build_type: release
          test_selection: performance
          run_in_parallel: false
          save_perf_report: true
          requires:
            - build-zenith-release
      - coverage-report:
          # Context passes credentials for gh api
          context: CI_ACCESS_TOKEN
          requires:
            # TODO: consider adding more
            - other-tests-debug
            - compute-tools-test
      - docker-image:
          # Context gives an ability to login
          context: Docker Hub
          # Build image only for commits to main
          filters:
            branches:
              only:
                - main
          requires:
            - pg_regress-tests-release
            - other-tests-release
      - docker-image-compute:
          # Context gives an ability to login
          context: Docker Hub
          # Build image only for commits to main
          filters:
            branches:
              only:
                - main
          requires:
            - pg_regress-tests-release
            - other-tests-release
            - compute-tools-test
      - deploy-staging:
          # Context gives an ability to login
          context: Docker Hub
          # deploy only for commits to main
          filters:
            branches:
              only:
                - main
          requires:
            - docker-image
      - deploy-staging-proxy:
          # deploy only for commits to main
          filters:
            branches:
              only:
                - main
          requires:
            - docker-image
      - remote-ci-trigger:
          # Context passes credentials for gh api
          context: CI_ACCESS_TOKEN
          remote_repo: "zenithdb/console"
          requires:
            # XXX: Successful build doesn't mean everything is OK, but
            # the job to be triggered takes so much time to complete (~22 min)
            # that it's better not to wait for the commented-out steps
            - build-zenith-debug
            # - pg_regress-tests-release
            # - other-tests-release