From 2ce975e40538132860163fd8d9ae4cca5b189915 Mon Sep 17 00:00:00 2001
From: John Spray
Date: Sat, 20 Jul 2024 20:03:14 +0100
Subject: [PATCH] better benchmark

---
 test_runner/fixtures/compare_fixtures.py    | 12 ++++++++++++
 test_runner/performance/test_bulk_insert.py | 21 ++++++++++++++++-----
 2 files changed, 28 insertions(+), 5 deletions(-)

diff --git a/test_runner/fixtures/compare_fixtures.py b/test_runner/fixtures/compare_fixtures.py
index 08215438e1..34dcc6ee39 100644
--- a/test_runner/fixtures/compare_fixtures.py
+++ b/test_runner/fixtures/compare_fixtures.py
@@ -45,6 +45,12 @@ class PgCompare(ABC):
     def flush(self):
         pass
 
+    def flush1(self):
+        _x = 1 + 2
+
+    def flush2(self):
+        _x = 1 + 2
+
     @abstractmethod
     def report_peak_memory_use(self):
         pass
@@ -130,7 +136,13 @@ class NeonCompare(PgCompare):
         return self._pg_bin
 
     def flush(self):
+        self.flush1()
+        self.flush2()
+
+    def flush1(self):
         wait_for_last_flush_lsn(self.env, self._pg, self.tenant, self.timeline)
+
+    def flush2(self):
         self.pageserver_http_client.timeline_checkpoint(self.tenant, self.timeline)
         self.pageserver_http_client.timeline_gc(self.tenant, self.timeline, 0)
 
diff --git a/test_runner/performance/test_bulk_insert.py b/test_runner/performance/test_bulk_insert.py
index ab33ed33fe..53e58113a1 100644
--- a/test_runner/performance/test_bulk_insert.py
+++ b/test_runner/performance/test_bulk_insert.py
@@ -1,8 +1,10 @@
+import time
 from contextlib import closing
 
 from fixtures.benchmark_fixture import MetricReport
 from fixtures.common_types import Lsn
 from fixtures.compare_fixtures import NeonCompare, PgCompare
+from fixtures.log_helper import log
 from fixtures.pg_version import PgVersion
 
 
@@ -26,10 +28,15 @@ def test_bulk_insert(neon_with_baseline: PgCompare):
             cur.execute("create table huge (i int, j int);")
 
             # Run INSERT, recording the time and I/O it takes
+            log.info("Writing...")
             with env.record_pageserver_writes("pageserver_writes"):
                 with env.record_duration("insert"):
-                    cur.execute("insert into huge values (generate_series(1, 5000000), 0);")
-                env.flush()
+                    cur.execute("insert into huge values (generate_series(1, 20000000), 0);")
+                env.flush1()
+
+    log.info("Finished writing")
+
+    env.flush2()
 
     env.report_peak_memory_use()
     env.report_size()
@@ -68,8 +75,12 @@ def measure_recovery_time(env: NeonCompare):
     env.env.pageserver.tenant_create(tenant_id=env.tenant, generation=attach_gen)
 
     # Measure recovery time
-    with env.record_duration("wal_recovery"):
-        client.timeline_create(pg_version, env.tenant, env.timeline)
+    client.timeline_create(pg_version, env.tenant, env.timeline)
+    log.info("Recovering...")
+    with env.record_duration("wal_recovery"):
 
         # Flush, which will also wait for lsn to catch up
-        env.flush()
+        env.flush1()
+
+    log.info("Finished recovering")
+    time.sleep(5)