mirror of
https://github.com/neondatabase/neon.git
synced 2026-01-08 05:52:55 +00:00
## Problem
The current test history format is a bit inconvenient:
- It stores all test results in one row, so all queries should include
subqueries which expand the test
- It includes duplicated test results if the rerun is triggered manually
for one of the test jobs (for example, if we rerun `debug-pg14`, then
the report will include duplicates for other build types/postgres
versions)
- It doesn't have a reference to run_id, which we use to create a link
to allure report
Here's the proposed new format:
```
id BIGSERIAL PRIMARY KEY,
parent_suite TEXT NOT NULL,
suite TEXT NOT NULL,
name TEXT NOT NULL,
status TEXT NOT NULL,
started_at TIMESTAMPTZ NOT NULL,
stopped_at TIMESTAMPTZ NOT NULL,
duration INT NOT NULL,
flaky BOOLEAN NOT NULL,
build_type TEXT NOT NULL,
pg_version INT NOT NULL,
run_id BIGINT NOT NULL,
run_attempt INT NOT NULL,
reference TEXT NOT NULL,
revision CHAR(40) NOT NULL,
raw JSONB COMPRESSION lz4 NOT NULL,
```
## Summary of changes
- Misc allure changes:
- Update allure to 2.23.1
- Delete files from previous runs in HTML report (by using `sync
--delete` instead of `mv`)
- Use `test-cases/*.json` instead of `suites.json`, using this directory
allows us to catch all reruns.
- Until we migrate `scripts/flaky_tests.py` and
`scripts/benchmark_durations.py`, store test results in 2 formats (in 2
different databases).
60 lines
2.0 KiB
YAML
# Composite action: download a previously uploaded artifact archive
# (s3://neon-github-public-dev/artifacts/<run_id>/<run_attempt>/<name>.tar.zst)
# and extract it into `path`. Falls back to archives uploaded by earlier
# attempts of the same workflow run.
name: "Download an artifact"
description: "Custom download action"

inputs:
  name:
    description: "Artifact name"
    required: true
  path:
    description: "A directory to put artifact into"
    default: "."
    required: false
  skip-if-does-not-exist:
    description: "Allow to skip if file doesn't exist, fail otherwise"
    # Quoted: an unquoted `false` is a YAML boolean; GitHub Actions coerces
    # input defaults to strings anyway, and the shell compares against "true".
    default: "false"
    required: false
  prefix:
    description: "S3 prefix. Default is '${GITHUB_RUN_ID}/${GITHUB_RUN_ATTEMPT}'"
    required: false

runs:
  using: "composite"
  steps:
    - name: Download artifact
      id: download-artifact
      shell: bash -euxo pipefail {0}
      env:
        TARGET: ${{ inputs.path }}
        ARCHIVE: /tmp/downloads/${{ inputs.name }}.tar.zst
        SKIP_IF_DOES_NOT_EXIST: ${{ inputs.skip-if-does-not-exist }}
        PREFIX: artifacts/${{ inputs.prefix || format('{0}/{1}', github.run_id, github.run_attempt) }}
      run: |
        BUCKET=neon-github-public-dev
        FILENAME=$(basename "$ARCHIVE")

        # `${PREFIX%$GITHUB_RUN_ATTEMPT}` strips the run-attempt suffix so
        # objects uploaded by earlier attempts of this run are also listed;
        # version-sort + tail picks the newest attempt's key.
        # `grep -F` matches the filename literally — its dots must not act
        # as regex wildcards, or a similarly named artifact could match.
        S3_KEY=$(aws s3api list-objects-v2 --bucket ${BUCKET} --prefix ${PREFIX%$GITHUB_RUN_ATTEMPT} | jq -r '.Contents[]?.Key' | grep -F "${FILENAME}" | sort --version-sort | tail -1 || true)
        if [ -z "${S3_KEY}" ]; then
          if [ "${SKIP_IF_DOES_NOT_EXIST}" = "true" ]; then
            echo 'SKIPPED=true' >> $GITHUB_OUTPUT
            exit 0
          else
            echo >&2 "Neither s3://${BUCKET}/${PREFIX}/${FILENAME} nor its version from previous attempts exist"
            exit 1
          fi
        fi

        echo 'SKIPPED=false' >> $GITHUB_OUTPUT

        # Quote all path expansions: the artifact name is user-controlled
        # and may contain spaces.
        mkdir -p "$(dirname "$ARCHIVE")"
        time aws s3 cp --only-show-errors "s3://${BUCKET}/${S3_KEY}" "${ARCHIVE}"

    - name: Extract artifact
      # Skip extraction when the download step bailed out via SKIPPED=true.
      if: ${{ steps.download-artifact.outputs.SKIPPED == 'false' }}
      shell: bash -euxo pipefail {0}
      env:
        TARGET: ${{ inputs.path }}
        ARCHIVE: /tmp/downloads/${{ inputs.name }}.tar.zst
      run: |
        mkdir -p "${TARGET}"
        time tar -xf "${ARCHIVE}" -C "${TARGET}"
        # Free up runner disk space once extracted.
        rm -f "${ARCHIVE}"