Add NeonBenchmarker#record_pagebench_results method

Alexander Bayandin
2023-12-20 13:28:57 +00:00
parent 24a911d3f8
commit f41b50f72f
3 changed files with 65 additions and 1 deletion


@@ -10,7 +10,7 @@ from datetime import datetime
 from pathlib import Path

 # Type-related stuff
-from typing import Callable, ClassVar, Dict, Iterator, Optional
+from typing import Any, Callable, ClassVar, Dict, Iterator, Optional

 import pytest
 from _pytest.config import Config
@@ -20,6 +20,7 @@ from _pytest.terminal import TerminalReporter
 from fixtures.log_helper import log
 from fixtures.neon_fixtures import NeonPageserver
 from fixtures.types import TenantId, TimelineId
+from fixtures.utils import humantime_to_ms

 """
 This file contains fixtures for micro-benchmarks.
@@ -409,6 +410,34 @@ class NeonBenchmarker:
             report=MetricReport.LOWER_IS_BETTER,
         )
+
+    def record_pagebench_results(self, name: str, results: Dict[str, Any]):
+        total = results["total"]
+
+        metric = "request_count"
+        self.record(
+            f"{name}.{metric}",
+            total[metric],
+            "",
+            report=MetricReport.HIGHER_IS_BETTER,
+        )
+
+        metric = "latency_mean"
+        self.record(
+            f"{name}.{metric}",
+            humantime_to_ms(total[metric]),
+            "ms",
+            report=MetricReport.LOWER_IS_BETTER,
+        )
+
+        metric = "latency_percentiles"
+        for k, v in total[metric].items():
+            self.record(
+                f"{name}.{metric}.{k}",
+                humantime_to_ms(v),
+                "ms",
+                report=MetricReport.LOWER_IS_BETTER,
+            )


 @pytest.fixture(scope="function")
 def zenbenchmark(record_property: Callable[[str, object], None]) -> Iterator[NeonBenchmarker]:
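The exact JSON layout that pagebench emits is not shown in this diff; the sketch below (not part of the commit) is a minimal guess at the `results` shape that record_pagebench_results expects, inferred from the method body above. The percentile labels and all numbers are made up.

# Hypothetical input for record_pagebench_results, inferred from the method
# above: a "total" section with a request count plus latencies formatted as
# Rust humantime strings (converted to milliseconds via humantime_to_ms).
from typing import Any, Dict

results: Dict[str, Any] = {
    "total": {
        "request_count": 100_000,     # recorded as-is, HIGHER_IS_BETTER
        "latency_mean": "1ms 406us",  # recorded as 1.406 ms, LOWER_IS_BETTER
        "latency_percentiles": {      # one metric per entry, LOWER_IS_BETTER
            "p90": "2ms",
            "p99": "5ms 250us",
        },
    }
}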


@@ -391,3 +391,36 @@ def run_pg_bench_small(pg_bin: "PgBin", connstr: str):
     }
     """
     pg_bin.run(["pgbench", "-i", "-I dtGvp", "-s1", connstr])
+
+
+def humantime_to_ms(humantime: str) -> float:
+    """
+    Converts Rust humantime's output string to milliseconds.
+
+    humantime_to_ms("1h 1ms 406us") -> 3600001.406
+    """
+
+    unit_multiplier_map = {
+        "ns": 1e-6,
+        "us": 1e-3,
+        "ms": 1,
+        "s": 1e3,
+        "m": 1e3 * 60,
+        "h": 1e3 * 60 * 60,
+    }
+    matcher = re.compile(rf"^(\d+)({'|'.join(unit_multiplier_map.keys())})$")
+    total_ms = 0.0
+
+    if humantime == "0":
+        return total_ms
+
+    for item in humantime.split():
+        if (match := matcher.search(item)) is not None:
+            n, unit = match.groups()
+            total_ms += int(n) * unit_multiplier_map[unit]
+        else:
+            raise ValueError(
+                f"can't parse '{item}' (from string '{humantime}'), known units are {', '.join(unit_multiplier_map.keys())}."
+            )
+
+    return round(total_ms, 3)
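A few spot checks for humantime_to_ms (not part of the commit), derived from the unit table above and the docstring example; they assume the test_runner fixtures package is importable as `fixtures.utils`, matching the import added to the benchmark fixture file.

# Spot checks derived from unit_multiplier_map; all results are milliseconds.
from fixtures.utils import humantime_to_ms

assert humantime_to_ms("0") == 0.0
assert humantime_to_ms("30s") == 30000.0
assert humantime_to_ms("2m") == 120000.0               # "m" is minutes, "ms" is milliseconds
assert humantime_to_ms("1ms 406us") == 1.406
assert humantime_to_ms("1h 1ms 406us") == 3600001.406  # the docstring example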


@@ -120,3 +120,5 @@ def test_getpage_throughput(
         results = json.load(f)

     log.info(f"Results:\n{json.dumps(results, sort_keys=True, indent=2)}")
+
+    zenbenchmark.record_pagebench_results("get-page-latest-lsn", results)
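For reference (not part of the diff), the call above records the following metrics. The names come from the f-strings in record_pagebench_results; the percentile suffixes depend on whatever keys pagebench puts under "latency_percentiles", so the <pXX> placeholder is hypothetical.

# Metric names produced by record_pagebench_results("get-page-latest-lsn", ...);
# <pXX> stands for each key under results["total"]["latency_percentiles"].
expected_metrics = [
    "get-page-latest-lsn.request_count",              # unit "", HIGHER_IS_BETTER
    "get-page-latest-lsn.latency_mean",               # unit "ms", LOWER_IS_BETTER
    "get-page-latest-lsn.latency_percentiles.<pXX>",  # unit "ms", LOWER_IS_BETTER
]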