Create Allure report for perf tests (#2326)

This commit is contained in:
Alexander Bayandin
2022-08-31 16:15:26 +01:00
committed by GitHub
parent 5745dbdd33
commit d7c9cfe7bb
8 changed files with 195 additions and 189 deletions

View File

@@ -18,7 +18,7 @@ runs:
- name: Validate input parameters
shell: bash -euxo pipefail {0}
run: |
if [ "${{ inputs.action }}" != "store"] && [ "${{ inputs.action }}" != "generate" ]; then
if [ "${{ inputs.action }}" != "store" ] && [ "${{ inputs.action }}" != "generate" ]; then
echo 2>&1 "Unknown inputs.action type '${{ inputs.action }}'; allowed 'generate' or 'store' only"
exit 1
fi
@@ -41,7 +41,7 @@ runs:
# Shortcut for a special branch
key=main
else
key=branch-$(echo ${GITHUB_REF#refs/heads/} | tr -cd "[:alnum:]._-")
key=branch-$(echo ${GITHUB_REF#refs/heads/} | tr -c "[:alnum:]._-" "-")
fi
echo "::set-output name=KEY::${key}"
@@ -94,7 +94,7 @@ runs:
BUILD_TYPE=${{ inputs.build_type }}
EOF
ARCHIVE="${GITHUB_RUN_ID}-${{ inputs.test_selection }}-${GITHUB_RUN_ATTEMPT}.tar.zst"
ARCHIVE="${GITHUB_RUN_ID}-${{ inputs.test_selection }}-${GITHUB_RUN_ATTEMPT}-$(date +%s).tar.zst"
ZSTD_NBTHREADS=0
tar -C ${TEST_OUTPUT}/allure/results -cf ${ARCHIVE} --zstd .
@@ -207,7 +207,7 @@ runs:
script: |
const { REPORT_URL, BUILD_TYPE, SHA } = process.env
result = await github.rest.repos.createCommitStatus({
await github.rest.repos.createCommitStatus({
owner: context.repo.owner,
repo: context.repo.repo,
sha: `${SHA}`,
@@ -215,5 +215,3 @@ runs:
target_url: `${REPORT_URL}`,
context: `Allure report / ${BUILD_TYPE}`,
})
console.log(result);

View File

@@ -3,11 +3,11 @@ description: 'Runs a Neon python test set, performing all the required preparati
inputs:
build_type:
description: 'Type of Rust (neon) and C (postgres) builds. Must be "release" or "debug".'
description: 'Type of Rust (neon) and C (postgres) builds. Must be "release" or "debug", or "remote" for the remote cluster'
required: true
rust_toolchain:
description: 'Rust toolchain version to fetch the caches'
required: true
required: false
test_selection:
description: 'A python test suite to run'
required: true
@@ -52,6 +52,7 @@ runs:
using: "composite"
steps:
- name: Get Neon artifact
if: inputs.build_type != 'remote'
uses: ./.github/actions/download
with:
name: neon-${{ runner.os }}-${{ inputs.build_type }}-${{ inputs.rust_toolchain }}-artifact
@@ -78,7 +79,6 @@ runs:
- name: Run pytest
env:
NEON_BIN: /tmp/neon/bin
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
TEST_OUTPUT: /tmp/test_output
# this variable will be embedded in perf test report
# and is needed to distinguish different environments
@@ -88,6 +88,12 @@ runs:
AWS_SECRET_ACCESS_KEY: ${{ inputs.real_s3_secret_access_key }}
shell: bash -euxo pipefail {0}
run: |
export POSTGRES_DISTRIB_DIR=${POSTGRES_DISTRIB_DIR:-/tmp/neon/pg_install}
if [ "${BUILD_TYPE}" = "remote" ]; then
export REMOTE_ENV=1
fi
PERF_REPORT_DIR="$(realpath test_runner/perf-report-local)"
rm -rf $PERF_REPORT_DIR
@@ -119,6 +125,13 @@ runs:
cov_prefix=(scripts/coverage "--profraw-prefix=$GITHUB_JOB" --dir=/tmp/coverage run)
elif [[ "${{ inputs.build_type }}" == "release" ]]; then
cov_prefix=()
else
cov_prefix=()
fi
# Wake up the cluster if we use remote neon instance
if [ "${{ inputs.build_type }}" = "remote" ] && [ -n "${BENCHMARK_CONNSTR}" ]; then
${POSTGRES_DISTRIB_DIR}/bin/psql ${BENCHMARK_CONNSTR} -c "SELECT version();"
fi
# Run the tests.
@@ -137,7 +150,6 @@ runs:
--alluredir=$TEST_OUTPUT/allure/results \
--tb=short \
--verbose \
-m "not remote_cluster" \
-rA $TEST_SELECTION $EXTRA_PARAMS
if [[ "${{ inputs.save_perf_report }}" == "true" ]]; then
@@ -148,25 +160,10 @@ runs:
fi
fi
- name: Upload Allure results
if: ${{ always() && (inputs.test_selection == 'regress') }}
- name: Create Allure report
if: always()
uses: ./.github/actions/allure-report
with:
action: store
build_type: ${{ inputs.build_type }}
test_selection: ${{ inputs.test_selection }}
- name: Delete all data but logs
shell: bash -euxo pipefail {0}
if: always()
run: |
du -sh /tmp/test_output/*
find /tmp/test_output -type f ! -name "*.log" ! -name "regression.diffs" ! -name "junit.xml" ! -name "*.filediff" ! -name "*.stdout" ! -name "*.stderr" ! -name "flamegraph.svg" ! -name "*.metrics" -delete
du -sh /tmp/test_output/*
- name: Upload python test logs
if: always()
uses: ./.github/actions/upload
with:
name: python-test-${{ inputs.test_selection }}-${{ runner.os }}-${{ inputs.build_type }}-${{ inputs.rust_toolchain }}-logs
path: /tmp/test_output/

View File

@@ -128,9 +128,9 @@ jobs:
env:
TEST_PG_BENCH_DURATIONS_MATRIX: "60m"
TEST_PG_BENCH_SCALES_MATRIX: "10gb"
REMOTE_ENV: "1"
POSTGRES_DISTRIB_DIR: /usr
TEST_OUTPUT: /tmp/test_output
BUILD_TYPE: remote
strategy:
fail-fast: false
@@ -138,23 +138,15 @@ jobs:
connstr: [ BENCHMARK_CAPTEST_CONNSTR, BENCHMARK_RDS_CONNSTR ]
runs-on: dev
container: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rustlegacy:2817580636
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rustlegacy:pinned
options: --init
timeout-minutes: 360 # 6h
steps:
- uses: actions/checkout@v3
- name: Cache poetry deps
id: cache_poetry
uses: actions/cache@v3
with:
path: ~/.cache/pypoetry/virtualenvs
key: v2-${{ runner.os }}-python-deps-${{ hashFiles('poetry.lock') }}
- name: Install Python deps
run: ./scripts/pysync
- name: Calculate platform
id: calculate-platform
env:
@@ -173,50 +165,54 @@ jobs:
- name: Install Deps
run: |
echo "deb http://apt.postgresql.org/pub/repos/apt focal-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt -y update
sudo apt install -y postgresql-14 postgresql-client-14
sudo apt install -y postgresql-14
- name: Benchmark init
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: true
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_init
env:
PLATFORM: ${{ steps.calculate-platform.outputs.PLATFORM }}
BENCHMARK_CONNSTR: ${{ secrets[matrix.connstr] }}
run: |
mkdir -p perf-report-captest
psql $BENCHMARK_CONNSTR -c "SELECT 1;"
./scripts/pytest test_runner/performance/test_perf_pgbench.py::test_pgbench_remote_init -v -m "remote_cluster" --out-dir perf-report-captest --timeout 21600
- name: Benchmark simple-update
uses: ./.github/actions/run-python-test-set
with:
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: true
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_simple_update
env:
PLATFORM: ${{ steps.calculate-platform.outputs.PLATFORM }}
BENCHMARK_CONNSTR: ${{ secrets[matrix.connstr] }}
run: |
psql $BENCHMARK_CONNSTR -c "SELECT 1;"
./scripts/pytest test_runner/performance/test_perf_pgbench.py::test_pgbench_remote_simple_update -v -m "remote_cluster" --out-dir perf-report-captest --timeout 21600
- name: Benchmark select-only
env:
PLATFORM: ${{ steps.calculate-platform.outputs.PLATFORM }}
BENCHMARK_CONNSTR: ${{ secrets[matrix.connstr] }}
run: |
psql $BENCHMARK_CONNSTR -c "SELECT 1;"
./scripts/pytest test_runner/performance/test_perf_pgbench.py::test_pgbench_remote_select_only -v -m "remote_cluster" --out-dir perf-report-captest --timeout 21600
- name: Submit result
env:
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
run: |
REPORT_FROM=$(realpath perf-report-captest) REPORT_TO=staging scripts/generate_and_push_perf_report.sh
- name: Upload logs
if: always()
uses: ./.github/actions/upload
- name: Benchmark simple-update
uses: ./.github/actions/run-python-test-set
with:
name: bench-captest-${{ steps.calculate-platform.outputs.PLATFORM }}
path: /tmp/test_output/
build_type: ${{ env.BUILD_TYPE }}
test_selection: performance
run_in_parallel: false
save_perf_report: true
extra_params: -m remote_cluster --timeout 21600 -k test_pgbench_remote_select_only
env:
PLATFORM: ${{ steps.calculate-platform.outputs.PLATFORM }}
BENCHMARK_CONNSTR: ${{ secrets[matrix.connstr] }}
VIP_VAP_ACCESS_TOKEN: "${{ secrets.VIP_VAP_ACCESS_TOKEN }}"
PERF_TEST_RESULT_CONNSTR: "${{ secrets.PERF_TEST_RESULT_CONNSTR }}"
- name: Create Allure report
uses: ./.github/actions/allure-report
with:
action: generate
build_type: ${{ env.BUILD_TYPE }}
- name: Post to a Slack channel
if: ${{ github.event.schedule && failure() }}

View File

@@ -278,7 +278,7 @@ jobs:
container:
image: 369495373322.dkr.ecr.eu-central-1.amazonaws.com/rust:pinned
options: --init
needs: [ regress-tests ]
needs: [ regress-tests, benchmarks ]
if: always()
strategy:
fail-fast: false
@@ -290,7 +290,7 @@ jobs:
with:
submodules: false
- name: Merge and Allure results
- name: Create Allure report
uses: ./.github/actions/allure-report
with:
action: generate

149
poetry.lock generated

File diff suppressed because one or more lines are too long

View File

@@ -27,7 +27,7 @@ prometheus-client = "^0.14.1"
pytest-timeout = "^2.1.0"
Werkzeug = "2.1.2"
pytest-order = "^1.0.1"
allure-pytest = "^2.9.45"
allure-pytest = "^2.10.0"
pytest-asyncio = "^0.19.0"
[tool.poetry.dev-dependencies]

View File

@@ -6,12 +6,10 @@ import enum
import filecmp
import json
import os
import pathlib
import re
import shutil
import socket
import subprocess
import tarfile
import tempfile
import textwrap
import time
@@ -22,7 +20,6 @@ from enum import Flag, auto
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, TypeVar, Union, cast
import allure # type: ignore
import asyncpg
import backoff # type: ignore
import boto3
@@ -38,7 +35,14 @@ from psycopg2.extensions import connection as PgConnection
from psycopg2.extensions import make_dsn, parse_dsn
from typing_extensions import Literal
from .utils import etcd_path, get_self_dir, lsn_from_hex, lsn_to_hex, subprocess_capture
from .utils import (
allure_attach_from_dir,
etcd_path,
get_self_dir,
lsn_from_hex,
lsn_to_hex,
subprocess_capture,
)
"""
This file contains pytest fixtures. A fixture is a test resource that can be
@@ -99,7 +103,7 @@ def pytest_configure(config):
top_output_dir = env_test_output
else:
top_output_dir = os.path.join(base_dir, DEFAULT_OUTPUT_DIR)
pathlib.Path(top_output_dir).mkdir(exist_ok=True)
Path(top_output_dir).mkdir(exist_ok=True)
# Find the postgres installation.
global pg_distrib_dir
@@ -234,11 +238,12 @@ def default_broker(request: Any, port_distributor: PortDistributor):
client_port = port_distributor.get_port()
# multiple pytest sessions could get launched in parallel, get them different datadirs
etcd_datadir = os.path.join(get_test_output_dir(request), f"etcd_datadir_{client_port}")
pathlib.Path(etcd_datadir).mkdir(exist_ok=True, parents=True)
Path(etcd_datadir).mkdir(exist_ok=True, parents=True)
broker = Etcd(datadir=etcd_datadir, port=client_port, peer_port=port_distributor.get_port())
yield broker
broker.stop()
allure_attach_from_dir(Path(etcd_datadir))
@pytest.fixture(scope="session")
@@ -1882,7 +1887,7 @@ class Postgres(PgProtocol):
self.env.neon_cli.pg_create(
branch_name, node_name=self.node_name, tenant_id=self.tenant_id, lsn=lsn, port=self.port
)
path = pathlib.Path("pgdatadirs") / "tenants" / self.tenant_id.hex / self.node_name
path = Path("pgdatadirs") / "tenants" / self.tenant_id.hex / self.node_name
self.pgdata_dir = os.path.join(self.env.repo_dir, path)
if config_lines is None:
@@ -1913,7 +1918,7 @@ class Postgres(PgProtocol):
def pg_data_dir_path(self) -> str:
"""Path to data directory"""
assert self.node_name
path = pathlib.Path("pgdatadirs") / "tenants" / self.tenant_id.hex / self.node_name
path = Path("pgdatadirs") / "tenants" / self.tenant_id.hex / self.node_name
return os.path.join(self.env.repo_dir, path)
def pg_xact_dir_path(self) -> str:
@@ -2289,7 +2294,7 @@ class Etcd:
log.debug(f"etcd is already running on port {self.port}")
return
pathlib.Path(self.datadir).mkdir(exist_ok=True)
Path(self.datadir).mkdir(exist_ok=True)
if not self.binary_path.is_file():
raise RuntimeError(f"etcd broker binary '{self.binary_path}' is not a file")
@@ -2329,26 +2334,16 @@ class Etcd:
self.handle.wait()
def get_test_output_dir(request: Any) -> pathlib.Path:
def get_test_output_dir(request: Any) -> Path:
"""Compute the working directory for an individual test."""
test_name = request.node.name
test_dir = pathlib.Path(top_output_dir) / test_name.replace("/", "-")
test_dir = Path(top_output_dir) / test_name.replace("/", "-")
log.info(f"get_test_output_dir is {test_dir}")
# make mypy happy
assert isinstance(test_dir, pathlib.Path)
assert isinstance(test_dir, Path)
return test_dir
ATTACHMENT_SUFFIXES = frozenset(
(
".log",
".stderr",
".stdout",
".diffs",
)
)
# This is autouse, so the test output directory always gets created, even
# if a test doesn't put anything there. It also solves a problem with the
# neon_simple_env fixture: if TEST_SHARED_FIXTURES is not set, it
@@ -2359,7 +2354,7 @@ ATTACHMENT_SUFFIXES = frozenset(
# this fixture ensures that the directory exists. That works because
# 'autouse' fixtures are run before other fixtures.
@pytest.fixture(scope="function", autouse=True)
def test_output_dir(request: Any) -> Iterator[pathlib.Path]:
def test_output_dir(request: Any) -> Iterator[Path]:
"""Create the working directory for an individual test."""
# one directory per test
@@ -2370,23 +2365,7 @@ def test_output_dir(request: Any) -> Iterator[pathlib.Path]:
yield test_dir
for attachment in test_dir.glob("**/*"):
if attachment.suffix in ATTACHMENT_SUFFIXES:
source = str(attachment)
name = str(attachment.relative_to(test_dir))
attachment_type = "text/plain"
extension = attachment.suffix.removeprefix(".")
# compress files larger than 1Mb, they're hardly readable in a browser
if attachment.stat().st_size > 1024 * 1024:
source = f"{attachment}.tar.gz"
with tarfile.open(source, "w:gz") as tar:
tar.add(attachment, arcname=attachment.name)
name = f"{name}.tar.gz"
attachment_type = "application/gzip"
extension = "tar.gz"
allure.attach.file(source, name, attachment_type, extension)
allure_attach_from_dir(test_dir)
SKIP_DIRS = frozenset(
@@ -2439,7 +2418,7 @@ def should_skip_file(filename: str) -> bool:
#
# Test helpers
#
def list_files_to_compare(pgdata_dir: pathlib.Path):
def list_files_to_compare(pgdata_dir: Path):
pgdata_files = []
for root, _file, filenames in os.walk(pgdata_dir):
for filename in filenames:
@@ -2492,7 +2471,7 @@ def check_restored_datadir_content(test_output_dir: Path, env: NeonEnv, pg: Post
# list files we're going to compare
assert pg.pgdata_dir
pgdata_files = list_files_to_compare(pathlib.Path(pg.pgdata_dir))
pgdata_files = list_files_to_compare(Path(pg.pgdata_dir))
restored_files = list_files_to_compare(restored_dir_path)
# check that file sets are equal

View File

@@ -1,11 +1,13 @@
import contextlib
import os
import pathlib
import re
import shutil
import subprocess
import tarfile
from pathlib import Path
from typing import Any, List, Tuple
import allure # type: ignore
from fixtures.log_helper import log
from psycopg2.extensions import cursor
@@ -116,7 +118,7 @@ def get_dir_size(path: str) -> int:
return totalbytes
def get_timeline_dir_size(path: pathlib.Path) -> int:
def get_timeline_dir_size(path: Path) -> int:
"""Get the timeline directory's total size, which only counts the layer files' size."""
sz = 0
for dir_entry in path.iterdir():
@@ -161,3 +163,36 @@ def get_scale_for_db(size_mb: int) -> int:
"""
return round(0.06689 * size_mb - 0.5)
# Names of files worth surfacing in the Allure report: test/process logs,
# captured stdio, diffs, metrics, and flamegraphs.
ATTACHMENT_NAME_REGEX = re.compile(
    r".+\.log|.+\.stderr|.+\.stdout|.+\.filediff|.+\.metrics|flamegraph\.svg|regression\.diffs"
)


def allure_attach_from_dir(dir: Path):
    """Walk `dir` recursively and attach to the Allure report every
    non-empty file whose name matches `ATTACHMENT_NAME_REGEX`."""
    for candidate in Path(dir).glob("**/*"):
        if not ATTACHMENT_NAME_REGEX.fullmatch(candidate.name):
            continue
        size = candidate.stat().st_size
        if size == 0:
            continue

        source = str(candidate)
        name = str(candidate.relative_to(dir))

        # Files over 1 MiB are hardly readable in a browser, so wrap
        # them in a gzipped tarball before attaching.
        if size > 1024 * 1024:
            source = f"{candidate}.tar.gz"
            with tarfile.open(source, "w:gz") as tar:
                tar.add(candidate, arcname=candidate.name)
            name = f"{name}.tar.gz"

        if source.endswith(".tar.gz"):
            attachment_type, extension = "application/gzip", "tar.gz"
        elif source.endswith(".svg"):
            attachment_type, extension = "image/svg+xml", "svg"
        else:
            attachment_type, extension = "text/plain", candidate.suffix.removeprefix(".")

        allure.attach.file(source, name, attachment_type, extension)