diff --git a/test_runner/performance/test_branch_creation.py b/test_runner/performance/test_branch_creation.py
index 9cb346de47..4b109c150f 100644
--- a/test_runner/performance/test_branch_creation.py
+++ b/test_runner/performance/test_branch_creation.py
@@ -3,12 +3,15 @@ import statistics
 import threading
 import time
 import timeit
+from contextlib import closing
 from typing import List
 
 import pytest
 from fixtures.benchmark_fixture import MetricReport
 from fixtures.compare_fixtures import NeonCompare
 from fixtures.log_helper import log
+from fixtures.neon_fixtures import wait_for_last_record_lsn
+from fixtures.types import Lsn
 
 
 def _record_branch_creation_durations(neon_compare: NeonCompare, durs: List[float]):
@@ -107,3 +110,43 @@ def test_branch_creation_many(neon_compare: NeonCompare, n_branches: int):
         branch_creation_durations.append(dur)
 
     _record_branch_creation_durations(neon_compare, branch_creation_durations)
+
+
+# Test branch creation time when branching from a timeline with a lot of relations.
+#
+# The test measures the latency of branch creation under two scenarios:
+# 1. The ancestor branch is not under any workload
+# 2. The ancestor branch is under a workload (busy)
+#
+# To simulate the workload, the test runs a concurrent insertion on the ancestor branch right before branching.
+def test_branch_creation_many_relations(neon_compare: NeonCompare):
+    env = neon_compare.env
+
+    timeline_id = env.neon_cli.create_branch("root")
+
+    pg = env.postgres.create_start("root")
+    with closing(pg.connect()) as conn:
+        with conn.cursor() as cur:
+            for i in range(10000):
+                cur.execute(f"CREATE TABLE t{i} as SELECT g FROM generate_series(1, 1000) g")
+
+    # Wait for the pageserver to finish processing all the pending WAL,
+    # so that the LSN wait time is not included in the measured branch creation time
+    flush_lsn = Lsn(pg.safe_psql("SELECT pg_current_wal_flush_lsn()")[0][0])
+    wait_for_last_record_lsn(
+        env.pageserver.http_client(), env.initial_tenant, timeline_id, flush_lsn
+    )
+
+    with neon_compare.record_duration("create_branch_time_not_busy_root"):
+        env.neon_cli.create_branch("child_not_busy", "root")
+
+    # Run a concurrent insertion to make the ancestor "busy" during the branch creation
+    thread = threading.Thread(
+        target=pg.safe_psql, args=("INSERT INTO t0 VALUES (generate_series(1, 100000))",)
+    )
+    thread.start()
+
+    with neon_compare.record_duration("create_branch_time_busy_root"):
+        env.neon_cli.create_branch("child_busy", "root")
+
+    thread.join()
diff --git a/test_runner/performance/test_branching.py b/test_runner/performance/test_branching.py
new file mode 100644
index 0000000000..562e751458
--- /dev/null
+++ b/test_runner/performance/test_branching.py
@@ -0,0 +1,94 @@
+import timeit
+from pathlib import Path
+from typing import List
+
+from fixtures.benchmark_fixture import PgBenchRunResult
+from fixtures.compare_fixtures import NeonCompare
+from performance.test_perf_pgbench import utc_now_timestamp
+
+# -----------------------------------------------------------------------
+# Start of `test_compare_child_and_root_*` tests
+# -----------------------------------------------------------------------
+
+# The `test_compare_child_and_root_*` tests compare the performance of a root branch and its child branch(es).
+# A common pattern in these tests is to initialize a root branch and then create child branch(es) from the root.
+# Each test then runs the same workload on both the root branch and the child branch, and measures and reports
+# some latencies/metrics during the workload to compare the performance of a branch and its ancestor.
+
+
+def test_compare_child_and_root_pgbench_perf(neon_compare: NeonCompare):
+    env = neon_compare.env
+    pg_bin = neon_compare.pg_bin
+
+    def run_pgbench_on_branch(branch: str, cmd: List[str]):
+        run_start_timestamp = utc_now_timestamp()
+        t0 = timeit.default_timer()
+        out = pg_bin.run_capture(
+            cmd,
+        )
+        run_duration = timeit.default_timer() - t0
+        run_end_timestamp = utc_now_timestamp()
+
+        stdout = Path(f"{out}.stdout").read_text()
+
+        res = PgBenchRunResult.parse_from_stdout(
+            stdout=stdout,
+            run_duration=run_duration,
+            run_start_timestamp=run_start_timestamp,
+            run_end_timestamp=run_end_timestamp,
+        )
+        neon_compare.zenbenchmark.record_pg_bench_result(branch, res)
+
+    env.neon_cli.create_branch("root")
+    pg_root = env.postgres.create_start("root")
+    pg_bin.run_capture(["pgbench", "-i", "-s10", pg_root.connstr()])
+
+    env.neon_cli.create_branch("child", "root")
+    pg_child = env.postgres.create_start("child")
+
+    run_pgbench_on_branch("root", ["pgbench", "-c10", "-T10", pg_root.connstr()])
+    run_pgbench_on_branch("child", ["pgbench", "-c10", "-T10", pg_child.connstr()])
+
+
+def test_compare_child_and_root_write_perf(neon_compare: NeonCompare):
+    env = neon_compare.env
+    env.neon_cli.create_branch("root")
+    pg_root = env.postgres.create_start("root")
+
+    pg_root.safe_psql(
+        "CREATE TABLE foo(key serial primary key, t text default 'foooooooooooooooooooooooooooooooooooooooooooooooooooo')",
+    )
+
+    env.neon_cli.create_branch("child", "root")
+    pg_child = env.postgres.create_start("child")
+
+    with neon_compare.record_duration("root_run_duration"):
+        pg_root.safe_psql("INSERT INTO foo SELECT FROM generate_series(1,1000000)")
+    with neon_compare.record_duration("child_run_duration"):
+        pg_child.safe_psql("INSERT INTO foo SELECT FROM generate_series(1,1000000)")
+
+
+def test_compare_child_and_root_read_perf(neon_compare: NeonCompare):
+    env = neon_compare.env
+    env.neon_cli.create_branch("root")
+    pg_root = env.postgres.create_start("root")
+
+    pg_root.safe_psql_many(
+        [
+            "CREATE TABLE foo(key serial primary key, t text default 'foooooooooooooooooooooooooooooooooooooooooooooooooooo')",
+            "INSERT INTO foo SELECT FROM generate_series(1,1000000)",
+        ]
+    )
+
+    env.neon_cli.create_branch("child", "root")
+    pg_child = env.postgres.create_start("child")
+
+    with neon_compare.record_duration("root_run_duration"):
+        pg_root.safe_psql("SELECT count(*) from foo")
+    with neon_compare.record_duration("child_run_duration"):
+        pg_child.safe_psql("SELECT count(*) from foo")
+
+
+# -----------------------------------------------------------------------
+# End of `test_compare_child_and_root_*` tests
+# -----------------------------------------------------------------------
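
Note (outside the diff above): both files time their measured sections with `neon_compare.record_duration(...)`. As a rough sketch of what such a helper is assumed to do — the real fixture lives in `fixtures.compare_fixtures` and presumably reports into the benchmark results rather than just logging — a timing context manager of that shape could look like this; `record_duration_sketch` is a hypothetical name:

import logging
import timeit
from contextlib import contextmanager


@contextmanager
def record_duration_sketch(metric_name: str):
    # Time the enclosed block; the real fixture presumably reports the elapsed
    # seconds to the benchmark framework, whereas this sketch only logs them.
    start = timeit.default_timer()
    try:
        yield
    finally:
        elapsed = timeit.default_timer() - start
        logging.getLogger(__name__).info("%s: %.3f s", metric_name, elapsed)


# Usage mirrors the tests above:
#     with record_duration_sketch("root_run_duration"):
#         pg_root.safe_psql("SELECT count(*) from foo")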
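
Also outside the diff: the `wait_for_last_record_lsn(...)` call in `test_branch_creation_many_relations` exists so that the time the pageserver spends catching up on ingesting WAL is not charged to branch creation. A minimal sketch of that catch-up wait, assuming only a hypothetical `get_last_record_lsn` callable in place of the pageserver HTTP client query the real helper performs:

import time
from typing import Callable


def wait_for_catchup_sketch(
    get_last_record_lsn: Callable[[], int],
    target_lsn: int,
    timeout_s: float = 120.0,
    poll_interval_s: float = 0.5,
) -> None:
    # Poll until the pageserver reports it has ingested WAL up to target_lsn,
    # or give up after timeout_s seconds.
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if get_last_record_lsn() >= target_lsn:
            return
        time.sleep(poll_interval_s)
    raise TimeoutError(f"pageserver did not reach LSN {target_lsn} within {timeout_s}s")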