From 5ee19b075855b1fb0aecbabbd0fe8d1385bcfbb3 Mon Sep 17 00:00:00 2001
From: Dmitry Ivanov
Date: Wed, 29 Jun 2022 17:59:19 +0300
Subject: [PATCH] Fix bloated coverage uploads (#2005)

Move coverage data to a better directory, merge it better and don't
publish it from CircleCI pipeline
---
 .circleci/config.yml                          | 110 ++----------------
 .../actions/run-python-test-set/action.yml    |   2 +-
 .github/actions/save-coverage-data/action.yml |   4 +-
 .github/workflows/build_and_test.yml          |  27 +++--
 4 files changed, 26 insertions(+), 117 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 61f551cd03..f64ba94cb4 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -100,10 +100,8 @@ jobs:
           name: Rust build << parameters.build_type >>
           command: |
             if [[ $BUILD_TYPE == "debug" ]]; then
-              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
               CARGO_FLAGS=
             elif [[ $BUILD_TYPE == "release" ]]; then
-              cov_prefix=()
               CARGO_FLAGS="--release --features profiling"
             fi
@@ -112,7 +110,7 @@ jobs:
             export RUSTC_WRAPPER=cachepot
             export AWS_ACCESS_KEY_ID="${CACHEPOT_AWS_ACCESS_KEY_ID}"
             export AWS_SECRET_ACCESS_KEY="${CACHEPOT_AWS_SECRET_ACCESS_KEY}"
-            "${cov_prefix[@]}" mold -run cargo build $CARGO_FLAGS --features failpoints --bins --tests
+            mold -run cargo build $CARGO_FLAGS --features failpoints --bins --tests
             cachepot -s

       - save_cache:
@@ -128,32 +126,24 @@ jobs:
           name: cargo test
           command: |
             if [[ $BUILD_TYPE == "debug" ]]; then
-              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
               CARGO_FLAGS=
             elif [[ $BUILD_TYPE == "release" ]]; then
-              cov_prefix=()
               CARGO_FLAGS=--release
             fi
-            "${cov_prefix[@]}" cargo test $CARGO_FLAGS
+            cargo test $CARGO_FLAGS

       # Install the rust binaries, for use by test jobs
       - run:
           name: Install rust binaries
           command: |
-            if [[ $BUILD_TYPE == "debug" ]]; then
-              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
-            elif [[ $BUILD_TYPE == "release" ]]; then
-              cov_prefix=()
-            fi
-
             binaries=$(
-              "${cov_prefix[@]}" cargo metadata --format-version=1 --no-deps |
+              cargo metadata --format-version=1 --no-deps |
               jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'
             )

             test_exe_paths=$(
-              "${cov_prefix[@]}" cargo test --message-format=json --no-run |
+              cargo test --message-format=json --no-run |
               jq -r '.executable | select(. != null)'
             )
@@ -166,34 +156,15 @@ jobs:
               SRC=target/$BUILD_TYPE/$bin
               DST=/tmp/zenith/bin/$bin
               cp $SRC $DST
-              echo $DST >> /tmp/zenith/etc/binaries.list
             done

-            # Install test executables (for code coverage)
-            if [[ $BUILD_TYPE == "debug" ]]; then
-              for bin in $test_exe_paths; do
-                SRC=$bin
-                DST=/tmp/zenith/test_bin/$(basename $bin)
-                cp $SRC $DST
-                echo $DST >> /tmp/zenith/etc/binaries.list
-              done
-            fi
-
       # Install the postgres binaries, for use by test jobs
       - run:
           name: Install postgres binaries
           command: |
             cp -a tmp_install /tmp/zenith/pg_install

-      - run:
-          name: Merge coverage data
-          command: |
-            # This will speed up workspace uploads
-            if [[ $BUILD_TYPE == "debug" ]]; then
-              scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage merge
-            fi
-
-      # Save the rust binaries and coverage data for other jobs in this workflow.
+      # Save rust binaries for other jobs in the workflow
       - persist_to_workspace:
           root: /tmp/zenith
           paths:
             - "*"
@@ -314,12 +285,6 @@ jobs:
            export GITHUB_SHA=$CIRCLE_SHA1

-            if [[ $BUILD_TYPE == "debug" ]]; then
-              cov_prefix=(scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage run)
-            elif [[ $BUILD_TYPE == "release" ]]; then
-              cov_prefix=()
-            fi
-
            # Run the tests.
            #
            # The junit.xml file allows CircleCI to display more fine-grained test information
@@ -330,7 +295,7 @@ jobs:
            # in its "Tests" tab in the results page.
            #
            # --verbose prints name of each test (helpful when there are
            # multiple tests in one file)
            # -n4 uses four processes to run tests via pytest-xdist
            # -s is not used to prevent pytest from capturing output, because tests are running
            # in parallel and logs are mixed between different tests
-            "${cov_prefix[@]}" ./scripts/pytest \
+            ./scripts/pytest \
              --junitxml=$TEST_OUTPUT/junit.xml \
              --tb=short \
              --verbose \
@@ -359,67 +324,12 @@ jobs:
      # The store_test_results step tells CircleCI where to find the junit.xml file.
      - store_test_results:
          path: /tmp/test_output
-      - run:
-          name: Merge coverage data
-          command: |
-            # This will speed up workspace uploads
-            if [[ $BUILD_TYPE == "debug" ]]; then
-              scripts/coverage "--profraw-prefix=$CIRCLE_JOB" --dir=/tmp/zenith/coverage merge
-            fi
-
-      # Save coverage data (if any)
+      # Save data (if any)
      - persist_to_workspace:
          root: /tmp/zenith
          paths:
            - "*"

-  coverage-report:
-    executor: neon-xlarge-executor
-    steps:
-      - attach_workspace:
-          at: /tmp/zenith
-      - checkout
-      - restore_cache:
-          name: Restore rust cache
-          keys:
-            # Require an exact match. While an out of date cache might speed up the build,
-            # there's no way to clean out old packages, so the cache grows every time something
-            # changes.
-            - v04-rust-cache-deps-debug-{{ checksum "Cargo.lock" }}
-      - run:
-          name: Build coverage report
-          command: |
-            COMMIT_URL=https://github.com/neondatabase/neon/commit/$CIRCLE_SHA1
-
-            scripts/coverage \
-              --dir=/tmp/zenith/coverage report \
-              --input-objects=/tmp/zenith/etc/binaries.list \
-              --commit-url=$COMMIT_URL \
-              --format=github
-      - run:
-          name: Upload coverage report
-          command: |
-            LOCAL_REPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME
-            REPORT_URL=https://neondatabase.github.io/zenith-coverage-data/$CIRCLE_SHA1
-            COMMIT_URL=https://github.com/neondatabase/neon/commit/$CIRCLE_SHA1
-
-            scripts/git-upload \
-              --repo=https://$VIP_VAP_ACCESS_TOKEN@github.com/neondatabase/zenith-coverage-data.git \
-              --message="Add code coverage for $COMMIT_URL" \
-              copy /tmp/zenith/coverage/report $CIRCLE_SHA1 # COPY FROM TO_RELATIVE
-
-            # Add link to the coverage report to the commit
-            curl -f -X POST \
-              https://api.github.com/repos/$LOCAL_REPO/statuses/$CIRCLE_SHA1 \
-              -H "Accept: application/vnd.github.v3+json" \
-              --user "$CI_ACCESS_TOKEN" \
-              --data \
-              "{
-                \"state\": \"success\",
-                \"context\": \"zenith-coverage\",
-                \"description\": \"Coverage report is ready\",
-                \"target_url\": \"$REPORT_URL\"
-              }"
-
   # Build neondatabase/neon:latest image and push it to Docker hub
   docker-image:
     docker:
@@ -730,12 +640,6 @@ workflows:
           save_perf_report: true
           requires:
             - build-neon-release
-      - coverage-report:
-          # Context passes credentials for gh api
-          context: CI_ACCESS_TOKEN
-          requires:
-            # TODO: consider adding more
-            - other-tests-debug
       - docker-image:
           # Context gives an ability to login
           context: Docker Hub
diff --git a/.github/actions/run-python-test-set/action.yml b/.github/actions/run-python-test-set/action.yml
index 4831cdaed1..48c0c2b925 100644
--- a/.github/actions/run-python-test-set/action.yml
+++ b/.github/actions/run-python-test-set/action.yml
@@ -92,7 +92,7 @@ runs:
        fi

        if [[ "${{ inputs.build_type }}" == "debug" ]]; then
-          cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
+          cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
        elif [[ "${{ inputs.build_type }}" == "release" ]]; then
          cov_prefix=()
        fi
diff --git a/.github/actions/save-coverage-data/action.yml b/.github/actions/save-coverage-data/action.yml
index 7b228f636f..7ad04cf1fe 100644
--- a/.github/actions/save-coverage-data/action.yml
+++ b/.github/actions/save-coverage-data/action.yml
@@ -6,7 +6,7 @@ runs:
   steps:
     - name: Merge coverage data
       shell: bash -ex {0}
-      run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage/ merge
+      run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage merge

     - name: Upload coverage data
       uses: actions/upload-artifact@v3
@@ -14,4 +14,4 @@ runs:
         retention-days: 7
         if-no-files-found: error
         name: coverage-data-artifact
-        path: /tmp/neon/coverage/
+        path: /tmp/coverage/
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 40a305a468..81b4585714 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -49,7 +49,7 @@ jobs:
      - name: Build postgres
        if: steps.cache_pg.outputs.cache-hit != 'true'
-        run: COPT='-Werror' mold -run make postgres -j$(nproc)
+        run: mold -run make postgres -j$(nproc)

      # actions/cache@v3 does not allow concurrently using the same cache across job steps, so use a separate cache
      - name: Prepare postgres artifact
@@ -109,7 +109,7 @@ jobs:
      - name: Run cargo build
        run: |
          if [[ $BUILD_TYPE == "debug" ]]; then
-            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
+            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
            CARGO_FLAGS=
          elif [[ $BUILD_TYPE == "release" ]]; then
            cov_prefix=()
@@ -121,7 +121,7 @@ jobs:
      - name: Run cargo test
        run: |
          if [[ $BUILD_TYPE == "debug" ]]; then
-            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
+            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
            CARGO_FLAGS=
          elif [[ $BUILD_TYPE == "release" ]]; then
            cov_prefix=()
@@ -133,7 +133,7 @@ jobs:
      - name: Install rust binaries
        run: |
          if [[ $BUILD_TYPE == "debug" ]]; then
-            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/neon/coverage run)
+            cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
          elif [[ $BUILD_TYPE == "release" ]]; then
            cov_prefix=()
          fi
@@ -151,7 +151,9 @@ jobs:
          mkdir -p /tmp/neon/bin/
          mkdir -p /tmp/neon/test_bin/
          mkdir -p /tmp/neon/etc/
-          mkdir -p /tmp/neon/coverage/
+
+          # Keep bloated coverage data files away from the rest of the artifact
+          mkdir -p /tmp/coverage/

          # Install target binaries
          for bin in $binaries; do
@@ -163,13 +165,13 @@ jobs:
          # Install test executables and write list of all binaries (for code coverage)
          if [[ $BUILD_TYPE == "debug" ]]; then
            for bin in $binaries; do
-              echo "/tmp/neon/bin/$bin" >> /tmp/neon/coverage/binaries.list
+              echo "/tmp/neon/bin/$bin" >> /tmp/coverage/binaries.list
            done
            for bin in $test_exe_paths; do
              SRC=$bin
              DST=/tmp/neon/test_bin/$(basename $bin)
              cp "$SRC" "$DST"
-              echo "$DST" >> /tmp/neon/coverage/binaries.list
+              echo "$DST" >> /tmp/coverage/binaries.list
            done
          fi

@@ -313,7 +315,10 @@ jobs:
        uses: actions/download-artifact@v3
        with:
          name: coverage-data-artifact
-          path: /tmp/neon/coverage/
+          path: /tmp/coverage/
+
+      - name: Merge coverage data
+        run: scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage merge
"--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage merge - name: Build and upload coverage report run: | @@ -322,8 +327,8 @@ jobs: COMMIT_URL=https://github.com/${{ github.repository }}/commit/$COMMIT_SHA scripts/coverage \ - --dir=/tmp/neon/coverage report \ - --input-objects=/tmp/neon/coverage/binaries.list \ + --dir=/tmp/coverage report \ + --input-objects=/tmp/coverage/binaries.list \ --commit-url=$COMMIT_URL \ --format=github @@ -332,7 +337,7 @@ jobs: scripts/git-upload \ --repo=https://${{ secrets.VIP_VAP_ACCESS_TOKEN }}@github.com/${{ github.repository_owner }}/zenith-coverage-data.git \ --message="Add code coverage for $COMMIT_URL" \ - copy /tmp/neon/coverage/report $COMMIT_SHA # COPY FROM TO_RELATIVE + copy /tmp/coverage/report $COMMIT_SHA # COPY FROM TO_RELATIVE # Add link to the coverage report to the commit curl -f -X POST \