diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9aca415dc8..61f551cd03 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -286,7 +286,7 @@ jobs:
         # no_output_timeout, specified here.
         no_output_timeout: 10m
         environment:
-          - ZENITH_BIN: /tmp/zenith/bin
+          - NEON_BIN: /tmp/zenith/bin
           - POSTGRES_DISTRIB_DIR: /tmp/zenith/pg_install
           - TEST_OUTPUT: /tmp/test_output
           # this variable will be embedded in perf test report
@@ -688,50 +688,6 @@ jobs:
             helm upgrade neon-proxy neondatabase/neon-proxy --install -f .circleci/helm-values/production.proxy.yaml --set image.tag=${DOCKER_TAG} --wait
             helm upgrade neon-proxy-scram neondatabase/neon-proxy --install -f .circleci/helm-values/production.proxy-scram.yaml --set image.tag=${DOCKER_TAG} --wait
 
-  # Trigger a new remote CI job
-  remote-ci-trigger:
-    docker:
-      - image: cimg/base:2021.04
-    parameters:
-      remote_repo:
-        type: string
-    environment:
-      REMOTE_REPO: << parameters.remote_repo >>
-    steps:
-      - run:
-          name: Set PR's status to pending
-          command: |
-            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME
-
-            curl -f -X POST \
-            https://api.github.com/repos/$LOCAL_REPO/statuses/$CIRCLE_SHA1 \
-            -H "Accept: application/vnd.github.v3+json" \
-            --user "$CI_ACCESS_TOKEN" \
-            --data \
-            "{
-              \"state\": \"pending\",
-              \"context\": \"neon-cloud-e2e\",
-              \"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
-            }"
-      - run:
-          name: Request a remote CI test
-          command: |
-            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME
-
-            curl -f -X POST \
-            https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
-            -H "Accept: application/vnd.github.v3+json" \
-            --user "$CI_ACCESS_TOKEN" \
-            --data \
-            "{
-              \"ref\": \"main\",
-              \"inputs\": {
-                \"ci_job_name\": \"neon-cloud-e2e\",
-                \"commit_hash\": \"$CIRCLE_SHA1\",
-                \"remote_repo\": \"$LOCAL_REPO\"
-              }
-            }"
-
 workflows:
   build_and_test:
     jobs:
@@ -880,14 +836,3 @@ workflows:
               - release
             requires:
               - docker-image-release
-      - remote-ci-trigger:
-          # Context passes credentials for gh api
-          context: CI_ACCESS_TOKEN
-          remote_repo: "neondatabase/cloud"
-          requires:
-            # XXX: Successful build doesn't mean everything is OK, but
-            # the job to be triggered takes so much time to complete (~22 min)
-            # that it's better not to wait for the commented-out steps
-            - build-neon-release
-            # - pg_regress-tests-release
-            # - other-tests-release
diff --git a/.github/actions/run-python-test-set/action.yml b/.github/actions/run-python-test-set/action.yml
index 94fac2ee99..4831cdaed1 100644
--- a/.github/actions/run-python-test-set/action.yml
+++ b/.github/actions/run-python-test-set/action.yml
@@ -2,25 +2,29 @@ name: 'Run python test'
 description: 'Runs a Neon python test set, performing all the required preparations before'
 
 inputs:
-  # Select the type of Rust build. Must be "release" or "debug".
   build_type:
+    description: 'Type of Rust (neon) and C (postgres) builds. Must be "release" or "debug".'
     required: true
   rust_toolchain:
+    description: 'Rust toolchain version to fetch the caches'
    required: true
-  # This parameter is required, to prevent the mistake of running all tests in one job.
   test_selection:
+    description: 'A python test suite to run'
     required: true
-  # Arbitrary parameters to pytest. For example "-s" to prevent capturing stdout/stderr
   extra_params:
+    description: 'Arbitrary parameters to pytest. For example "-s" to prevent capturing stdout/stderr'
     required: false
     default: ''
   needs_postgres_source:
+    description: 'Set to true if the test suite requires the postgres source to be checked out'
     required: false
     default: 'false'
   run_in_parallel:
+    description: 'Whether to run tests in parallel'
     required: false
     default: 'true'
   save_perf_report:
+    description: 'Whether to upload the performance report'
     required: false
     default: 'false'
@@ -60,7 +64,7 @@ runs:
 
     - name: Run pytest
       env:
-        ZENITH_BIN: /tmp/neon/bin
+        NEON_BIN: /tmp/neon/bin
         POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
         TEST_OUTPUT: /tmp/test_output
         # this variable will be embedded in perf test report
@@ -117,3 +121,20 @@ runs:
           scripts/generate_and_push_perf_report.sh
         fi
       fi
+
+    - name: Delete all data but logs
+      shell: bash -ex {0}
+      if: always()
+      run: |
+        du -sh /tmp/test_output/*
+        find /tmp/test_output -type f ! -name "*.log" ! -name "regression.diffs" ! -name "junit.xml" ! -name "*.filediff" ! -name "*.stdout" ! -name "*.stderr" ! -name "flamegraph.svg" ! -name "*.metrics" -delete
+        du -sh /tmp/test_output/*
+
+    - name: Upload python test logs
+      if: always()
+      uses: actions/upload-artifact@v3
+      with:
+        retention-days: 7
+        if-no-files-found: error
+        name: python-test-${{ inputs.test_selection }}-${{ runner.os }}-${{ inputs.build_type }}-${{ inputs.rust_toolchain }}-logs
+        path: /tmp/test_output/
diff --git a/.github/actions/save-coverage-data/action.yml b/.github/actions/save-coverage-data/action.yml
new file mode 100644
index 0000000000..7b228f636f
--- /dev/null
+++ b/.github/actions/save-coverage-data/action.yml
@@ -0,0 +1,17 @@
+name: 'Merge and upload coverage data'
+description: 'Compresses and uploads the coverage data as an artifact'
+
+runs:
+  using: "composite"
+  steps:
+    - name: Merge coverage data
+      shell: bash -ex {0}
+      run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage/ merge
+
+    - name: Upload coverage data
+      uses: actions/upload-artifact@v3
+      with:
+        retention-days: 7
+        if-no-files-found: error
+        name: coverage-data-artifact
+        path: /tmp/neon/coverage/
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 5f4dd754d2..7cbd1103c8 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -1,13 +1,33 @@
-name: build_and_test
-on: [ push ]
+name: Test
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
 defaults:
   run:
     shell: bash -ex {0}
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+env:
+  RUST_BACKTRACE: 1
+  COPT: '-Werror'
+  AWS_ACCESS_KEY_ID: ${{ secrets.CACHEPOT_AWS_ACCESS_KEY_ID }}
+  AWS_SECRET_ACCESS_KEY: ${{ secrets.CACHEPOT_AWS_SECRET_ACCESS_KEY }}
+  CACHEPOT_BUCKET: zenith-rust-cachepot
+  RUSTC_WRAPPER: cachepot
+
+
 jobs:
   build-postgres:
     runs-on: [ self-hosted, Linux, k8s-runner ]
     strategy:
+      fail-fast: false
       matrix:
         build_type: [ debug, release ]
         rust_toolchain: [ 1.58 ]
@@ -52,6 +72,7 @@ jobs:
     runs-on: [ self-hosted, Linux, k8s-runner ]
     needs: [ build-postgres ]
     strategy:
+      fail-fast: false
       matrix:
         build_type: [ debug, release ]
         rust_toolchain: [ 1.58 ]
@@ -97,17 +118,11 @@ jobs:
            CARGO_FLAGS="--release --features profiling"
          fi
 
-          export CACHEPOT_BUCKET=zenith-rust-cachepot
-          export RUSTC_WRAPPER=cachepot
-          export AWS_ACCESS_KEY_ID="${{ secrets.AWS_ACCESS_KEY_ID }}"
-          export AWS_SECRET_ACCESS_KEY="${{ secrets.AWS_SECRET_ACCESS_KEY }}"
-          export HOME=/home/runner
           "${cov_prefix[@]}" mold -run cargo build $CARGO_FLAGS --features failpoints --bins --tests
           cachepot -s
 
       - name: Run cargo test
         run: |
-          export HOME=/home/runner
           if [[ $BUILD_TYPE == "debug" ]]; then
             cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
             CARGO_FLAGS=
@@ -115,12 +130,11 @@ jobs:
             cov_prefix=()
             CARGO_FLAGS=--release
           fi
-
+
           "${cov_prefix[@]}" cargo test $CARGO_FLAGS
 
       - name: Install rust binaries
         run: |
-          export HOME=/home/runner
           if [[ $BUILD_TYPE == "debug" ]]; then
             cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
           elif [[ $BUILD_TYPE == "release" ]]; then
@@ -137,39 +151,34 @@ jobs:
             jq -r '.executable | select(. != null)'
           )
 
-          mkdir -p /tmp/neon/bin
-          mkdir -p /tmp/neon/test_bin
-          mkdir -p /tmp/neon/etc
+          mkdir -p /tmp/neon/bin/
+          mkdir -p /tmp/neon/test_bin/
+          mkdir -p /tmp/neon/etc/
+          mkdir -p /tmp/neon/coverage/
 
           # Install target binaries
           for bin in $binaries; do
             SRC=target/$BUILD_TYPE/$bin
             DST=/tmp/neon/bin/$bin
-            cp $SRC $DST
-            echo $DST >> /tmp/neon/etc/binaries.list
+            cp "$SRC" "$DST"
           done
 
-          # Install test executables (for code coverage)
+          # Install test executables and write list of all binaries (for code coverage)
           if [[ $BUILD_TYPE == "debug" ]]; then
+            for bin in $binaries; do
+              echo "/tmp/neon/bin/$bin" >> /tmp/neon/coverage/binaries.list
+            done
             for bin in $test_exe_paths; do
               SRC=$bin
               DST=/tmp/neon/test_bin/$(basename $bin)
-              cp $SRC $DST
-              echo $DST >> /tmp/neon/etc/binaries.list
+              cp "$SRC" "$DST"
+              echo "$DST" >> /tmp/neon/coverage/binaries.list
             done
           fi
 
       - name: Install postgres binaries
         run: cp -a tmp_install /tmp/neon/pg_install
 
-      - name: Merge coverage data
-        run: |
-          export HOME=/home/runner
-          # This will speed up workspace uploads
-          if [[ $BUILD_TYPE == "debug" ]]; then
-            scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage merge
-          fi
-
       - name: Prepare neon artifact
         run: tar -C /tmp/neon/ -czf ./neon.tgz .
 
@@ -181,38 +190,17 @@ jobs:
           name: neon-${{ runner.os }}-${{ matrix.build_type }}-${{ matrix.rust_toolchain }}-artifact
           path: ./neon.tgz
 
-  check-codestyle-python:
-    runs-on: [ self-hosted, Linux, k8s-runner ]
-    strategy:
-      matrix:
-        rust_toolchain: [ 1.58 ]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          submodules: true
-          fetch-depth: 1
+      # XXX: keep this after binaries.list is formed, so that coverage can work properly later
+      - name: Merge and upload coverage data
+        if: matrix.build_type == 'debug'
+        uses: ./.github/actions/save-coverage-data
 
-      - name: Cache poetry deps
-        id: cache_poetry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cache/pypoetry/virtualenvs
-          key: v1-${{ runner.os }}-python-deps-${{ hashFiles('poetry.lock') }}
-
-      - name: Install Python deps
-        run: ./scripts/pysync
-
-      - name: Run yapf to ensure code format
-        run: poetry run yapf --recursive --diff .
-
-      - name: Run mypy to check types
-        run: poetry run mypy .
 
   pg_regress-tests:
     runs-on: [ self-hosted, Linux, k8s-runner ]
     needs: [ build-neon ]
     strategy:
+      fail-fast: false
       matrix:
         build_type: [ debug, release ]
         rust_toolchain: [ 1.58 ]
@@ -231,10 +219,15 @@ jobs:
         test_selection: batch_pg_regress
         needs_postgres_source: true
 
+      - name: Merge and upload coverage data
+        if: matrix.build_type == 'debug'
+        uses: ./.github/actions/save-coverage-data
+
   other-tests:
     runs-on: [ self-hosted, Linux, k8s-runner ]
     needs: [ build-neon ]
     strategy:
+      fail-fast: false
       matrix:
         build_type: [ debug, release ]
         rust_toolchain: [ 1.58 ]
@@ -252,10 +245,15 @@ jobs:
         rust_toolchain: ${{ matrix.rust_toolchain }}
         test_selection: batch_others
 
+      - name: Merge and upload coverage data
+        if: matrix.build_type == 'debug'
+        uses: ./.github/actions/save-coverage-data
+
   benchmarks:
     runs-on: [ self-hosted, Linux, k8s-runner ]
     needs: [ build-neon ]
     strategy:
+      fail-fast: false
       matrix:
         build_type: [ release ]
         rust_toolchain: [ 1.58 ]
@@ -273,4 +271,107 @@ jobs:
         rust_toolchain: ${{ matrix.rust_toolchain }}
         test_selection: performance
         run_in_parallel: false
-        # save_perf_report: true
+        save_perf_report: true
+  # XXX: no coverage data handling here, since benchmarks are run on release builds,
+  # while coverage is currently collected for the debug ones
+
+  coverage-report:
+    runs-on: [ self-hosted, Linux, k8s-runner ]
+    needs: [ other-tests, pg_regress-tests ]
+    strategy:
+      fail-fast: false
+      matrix:
+        build_type: [ debug ]
+        rust_toolchain: [ 1.58 ]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          submodules: true
+          fetch-depth: 1
+
+      - name: Get Neon artifact for restoration
+        uses: actions/download-artifact@v3
+        with:
+          name: neon-${{ runner.os }}-${{ matrix.build_type }}-${{ matrix.rust_toolchain }}-artifact
+          path: ./neon-artifact/
+
+      - name: Extract Neon artifact
+        run: |
+          mkdir -p /tmp/neon/
+          tar -xf ./neon-artifact/neon.tgz -C /tmp/neon/
+          rm -rf ./neon-artifact/
+
+      - name: Restore coverage data
+        uses: actions/download-artifact@v3
+        with:
+          name: coverage-data-artifact
+          path: /tmp/neon/coverage/
+
+      - name: Build and upload coverage report
+        run: |
+          COMMIT_SHA=${{ github.event.pull_request.head.sha }}
+          COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}
+          COMMIT_URL=https://github.com/${{ github.repository }}/commit/$COMMIT_SHA
+
+          scripts/coverage \
+            --dir=/tmp/neon/coverage report \
+            --input-objects=/tmp/neon/coverage/binaries.list \
+            --commit-url=$COMMIT_URL \
+            --format=github
+
+          REPORT_URL=https://${{ github.repository_owner }}.github.io/neon-coverage-data/$COMMIT_SHA
+
+          scripts/git-upload \
+            --repo=https://$VIP_VAP_ACCESS_TOKEN@github.com/${{ github.repository_owner }}/neon-coverage-data.git \
+            --message="Add code coverage for $COMMIT_URL" \
+            copy /tmp/neon/coverage/report $COMMIT_SHA # COPY FROM TO_RELATIVE
+
+          # Add link to the coverage report to the commit
+          curl -f -X POST \
+          https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
+          -H "Accept: application/vnd.github.v3+json" \
+          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
+          --data \
+          "{
+            \"state\": \"success\",
+            \"context\": \"neon-coverage\",
+            \"description\": \"Coverage report is ready\",
+            \"target_url\": \"$REPORT_URL\"
+          }"
+
+  trigger-e2e-tests:
+    runs-on: [ self-hosted, Linux, k8s-runner ]
+    needs: [ build-neon ]
+    steps:
+      - name: Set PR's status to pending and request a remote CI test
+        run: |
+          COMMIT_SHA=${{ github.event.pull_request.head.sha }}
+          COMMIT_SHA=${COMMIT_SHA:-${{ github.sha }}}
+
+          REMOTE_REPO="${{ github.repository_owner }}/cloud"
+
+          curl -f -X POST \
+          https://api.github.com/repos/${{ github.repository }}/statuses/$COMMIT_SHA \
+          -H "Accept: application/vnd.github.v3+json" \
+          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
+          --data \
+          "{
+            \"state\": \"pending\",
+            \"context\": \"neon-cloud-e2e\",
+            \"description\": \"[$REMOTE_REPO] Remote CI job is about to start\"
+          }"
+
+          curl -f -X POST \
+          https://api.github.com/repos/$REMOTE_REPO/actions/workflows/testing.yml/dispatches \
+          -H "Accept: application/vnd.github.v3+json" \
+          --user "${{ secrets.CI_ACCESS_TOKEN }}" \
+          --data \
+          "{
+            \"ref\": \"main\",
+            \"inputs\": {
+              \"ci_job_name\": \"neon-cloud-e2e\",
+              \"commit_hash\": \"$COMMIT_SHA\",
+              \"remote_repo\": \"${{ github.repository }}\"
+            }
+          }"
diff --git a/.github/workflows/testing.yml b/.github/workflows/codestyle.yml
similarity index 73%
rename from .github/workflows/testing.yml
rename to .github/workflows/codestyle.yml
index aa1e152fb2..292c2c903b 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/codestyle.yml
@@ -1,4 +1,4 @@
-name: Build and Test
+name: Check code style and build
 
 on:
   push:
@@ -6,9 +6,21 @@ on:
       - main
   pull_request:
 
+defaults:
+  run:
+    shell: bash -ex {0}
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+env:
+  RUST_BACKTRACE: 1
+
 jobs:
-  regression-check:
+  check-codestyle-rust:
     strategy:
+      fail-fast: false
       matrix:
         # If we want to duplicate this job for different
         # Rust toolchains (e.g. nightly or 1.37.0), add them here.
@@ -92,5 +104,30 @@ jobs:
     - name: Run cargo clippy
       run: ./run_clippy.sh
 
-    - name: Run cargo test
-      run: cargo test --all --all-targets
+    - name: Ensure the whole project builds
+      run: cargo build --all --all-targets
+
+  check-codestyle-python:
+    runs-on: [ self-hosted, Linux, k8s-runner ]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          submodules: false
+          fetch-depth: 1
+
+      - name: Cache poetry deps
+        id: cache_poetry
+        uses: actions/cache@v3
+        with:
+          path: ~/.cache/pypoetry/virtualenvs
+          key: v1-codestyle-python-deps-${{ hashFiles('poetry.lock') }}
+
+      - name: Install Python deps
+        run: ./scripts/pysync
+
+      - name: Run yapf to ensure code format
+        run: poetry run yapf --recursive --diff .
+
+      - name: Run mypy to check types
+        run: poetry run mypy .
diff --git a/Dockerfile b/Dockerfile index 34f5282c2c..ad85638af3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -46,9 +46,9 @@ RUN set -e \ && useradd -d /data zenith \ && chown -R zenith:zenith /data -COPY --from=build --chown=zenith:zenith /home/runner/project/target/release/pageserver /usr/local/bin -COPY --from=build --chown=zenith:zenith /home/runner/project/target/release/safekeeper /usr/local/bin -COPY --from=build --chown=zenith:zenith /home/runner/project/target/release/proxy /usr/local/bin +COPY --from=build --chown=zenith:zenith /home/runner/target/release/pageserver /usr/local/bin +COPY --from=build --chown=zenith:zenith /home/runner/target/release/safekeeper /usr/local/bin +COPY --from=build --chown=zenith:zenith /home/runner/target/release/proxy /usr/local/bin COPY --from=pg-build /pg/tmp_install/ /usr/local/ COPY --from=pg-build /postgres_install.tar.gz /data/ diff --git a/Dockerfile.compute-tools b/Dockerfile.compute-tools index 1e7e20eae0..71770ae9ed 100644 --- a/Dockerfile.compute-tools +++ b/Dockerfile.compute-tools @@ -15,4 +15,4 @@ RUN set -e \ # Final image that only has one binary FROM debian:buster-slim -COPY --from=rust-build /home/runner/project/target/release/compute_ctl /usr/local/bin/compute_ctl +COPY --from=rust-build /home/runner/target/release/compute_ctl /usr/local/bin/compute_ctl diff --git a/test_runner/README.md b/test_runner/README.md index f95588462b..4b54c45175 100644 --- a/test_runner/README.md +++ b/test_runner/README.md @@ -45,7 +45,7 @@ If you want to run all tests that have the string "bench" in their names: Useful environment variables: -`ZENITH_BIN`: The directory where zenith binaries can be found. +`NEON_BIN`: The directory where neon binaries can be found. `POSTGRES_DISTRIB_DIR`: The directory where postgres distribution can be found. `TEST_OUTPUT`: Set the directory where test state and test output files should go. diff --git a/test_runner/batch_others/test_remote_storage.py b/test_runner/batch_others/test_remote_storage.py index 8a2748b880..b0ba8758cc 100644 --- a/test_runner/batch_others/test_remote_storage.py +++ b/test_runner/batch_others/test_remote_storage.py @@ -1,5 +1,5 @@ # It's possible to run any regular test with the local fs remote storage via -# env ZENITH_PAGESERVER_OVERRIDES="remote_storage={local_path='/tmp/zenith_zzz/'}" poetry ...... +# env ZENITH_PAGESERVER_OVERRIDES="remote_storage={local_path='/tmp/neon_zzz/'}" poetry ...... import shutil, os from contextlib import closing diff --git a/test_runner/fixtures/neon_fixtures.py b/test_runner/fixtures/neon_fixtures.py index 12edcb8792..7506641fcb 100644 --- a/test_runner/fixtures/neon_fixtures.py +++ b/test_runner/fixtures/neon_fixtures.py @@ -50,7 +50,7 @@ A fixture is created with the decorator @pytest.fixture decorator. See docs: https://docs.pytest.org/en/6.2.x/fixture.html There are several environment variables that can control the running of tests: -ZENITH_BIN, POSTGRES_DISTRIB_DIR, etc. See README.md for more information. +NEON_BIN, POSTGRES_DISTRIB_DIR, etc. See README.md for more information. There's no need to import this file to use it. It should be declared as a plugin inside conftest.py, and that makes it available to all tests. @@ -151,7 +151,7 @@ def pytest_configure(config): return # Find the neon binaries. global neon_binpath - env_neon_bin = os.environ.get('ZENITH_BIN') + env_neon_bin = os.environ.get('NEON_BIN') if env_neon_bin: neon_binpath = env_neon_bin else: