From c72cb44213e1ffeccaa321d2d43a90c7fa9c8881 Mon Sep 17 00:00:00 2001
From: Alexander Bayandin
Date: Thu, 15 Feb 2024 15:53:58 +0000
Subject: [PATCH] test_runner/performance: parametrize benchmarks (#6744)

## Problem

Currently, we don't store `PLATFORM` for Nightly Benchmarks. This causes
them to be merged as reruns in the Allure report (because they have the
same test name).

## Summary of changes

- Parametrize benchmarks by:
  - Postgres Version (14/15/16)
  - Build Type (debug/release/remote)
  - PLATFORM (neon-staging/github-actions-selfhosted/...)

---------

Co-authored-by: Bodobolero
---
 test_runner/fixtures/parametrize.py | 51 +++++++++++++++--------------
 1 file changed, 26 insertions(+), 25 deletions(-)

diff --git a/test_runner/fixtures/parametrize.py b/test_runner/fixtures/parametrize.py
index d8ac92abb6..57ca1932b0 100644
--- a/test_runner/fixtures/parametrize.py
+++ b/test_runner/fixtures/parametrize.py
@@ -2,57 +2,58 @@ import os
 from typing import Optional
 
 import pytest
-from _pytest.fixtures import FixtureRequest
 from _pytest.python import Metafunc
 
 from fixtures.pg_version import PgVersion
 
 """
-Dynamically parametrize tests by Postgres version, build type (debug/release/remote), and possibly by other parameters
+Dynamically parametrize tests by different parameters
 """
 
 
 @pytest.fixture(scope="function", autouse=True)
-def pg_version(request: FixtureRequest) -> Optional[PgVersion]:
-    # Do not parametrize performance tests yet, we need to prepare grafana charts first
-    if "test_runner/performance" in str(request.node.path):
-        v = os.environ.get("DEFAULT_PG_VERSION")
-        return PgVersion(v)
-
+def pg_version() -> Optional[PgVersion]:
     return None
 
 
 @pytest.fixture(scope="function", autouse=True)
-def build_type(request: FixtureRequest) -> Optional[str]:
-    # Do not parametrize performance tests yet, we need to prepare grafana charts first
-    if "test_runner/performance" in str(request.node.path):
-        return os.environ.get("BUILD_TYPE", "").lower()
-
+def build_type() -> Optional[str]:
     return None
 
 
 @pytest.fixture(scope="function", autouse=True)
-def pageserver_virtual_file_io_engine(request: FixtureRequest) -> Optional[str]:
+def platform() -> Optional[str]:
+    return None
+
+
+@pytest.fixture(scope="function", autouse=True)
+def pageserver_virtual_file_io_engine() -> Optional[str]:
     return None
 
 
 def pytest_generate_tests(metafunc: Metafunc):
-    if (v := os.environ.get("DEFAULT_PG_VERSION")) is None:
-        pg_versions = [version for version in PgVersion if version != PgVersion.NOT_SET]
-    else:
-        pg_versions = [PgVersion(v)]
-
-    if (bt := os.environ.get("BUILD_TYPE")) is None:
+    if (bt := os.getenv("BUILD_TYPE")) is None:
         build_types = ["debug", "release"]
     else:
         build_types = [bt.lower()]
 
-    # Do not parametrize performance tests yet by Postgres version or build type, we need to prepare grafana charts first
-    if "test_runner/performance" not in metafunc.definition._nodeid:
-        metafunc.parametrize("build_type", build_types)
-        metafunc.parametrize("pg_version", pg_versions, ids=map(lambda v: f"pg{v}", pg_versions))
+    metafunc.parametrize("build_type", build_types)
+
+    if (v := os.getenv("DEFAULT_PG_VERSION")) is None:
+        pg_versions = [version for version in PgVersion if version != PgVersion.NOT_SET]
+    else:
+        pg_versions = [PgVersion(v)]
+
+    metafunc.parametrize("pg_version", pg_versions, ids=map(lambda v: f"pg{v}", pg_versions))
 
     # A hacky way to parametrize tests only for `pageserver_virtual_file_io_engine=tokio-epoll-uring`
     # And do not change test name for default `pageserver_virtual_file_io_engine=std-fs` to keep tests statistics
-    if (io_engine := os.environ.get("PAGESERVER_VIRTUAL_FILE_IO_ENGINE", "")) not in ("", "std-fs"):
+    if (io_engine := os.getenv("PAGESERVER_VIRTUAL_FILE_IO_ENGINE", "")) not in ("", "std-fs"):
         metafunc.parametrize("pageserver_virtual_file_io_engine", [io_engine])
+
+    # For performance tests, parametrize also by platform
+    if (
+        "test_runner/performance" in metafunc.definition._nodeid
+        and (platform := os.getenv("PLATFORM")) is not None
+    ):
+        metafunc.parametrize("platform", [platform.lower()])
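
Note (not part of the patch): the change relies on pytest's behavior that a direct `metafunc.parametrize()` on a name overrides an autouse fixture of the same name, so runs without the environment variable fall back to the fixture's `None` default, while parametrized runs get the value baked into the test ID. A minimal, self-contained sketch of that mechanism, assuming a hypothetical test module with an illustrative `test_example`:

```python
# Illustrative sketch only: demonstrates the fixture-override pattern used in
# parametrize.py. The default (None) comes from the autouse fixture; when
# PLATFORM is set, pytest_generate_tests overrides it and the value becomes
# part of the test ID, e.g. test_example[neon-staging].
import os
from typing import Optional

import pytest
from _pytest.python import Metafunc


@pytest.fixture(scope="function", autouse=True)
def platform() -> Optional[str]:
    # Default used when pytest_generate_tests does not parametrize "platform".
    return None


def pytest_generate_tests(metafunc: Metafunc):
    # If PLATFORM is set (e.g. PLATFORM=neon-staging), every test in this
    # module gains a platform parameter, making its reported name unique.
    if (p := os.getenv("PLATFORM")) is not None:
        metafunc.parametrize("platform", [p.lower()])


def test_example(platform: Optional[str]):
    # With PLATFORM unset this collects as test_example; with
    # PLATFORM=Neon-Staging it collects as test_example[neon-staging].
    assert platform is None or platform == platform.lower()
```

Running `PLATFORM=neon-staging pytest --collect-only` against such a module should show the parametrized ID, which is exactly what keeps Allure from merging benchmark runs from different platforms as reruns.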