Bring periodic perf tests on GitHub back (#2037)

* test/fixtures: fix DeprecationWarning
* workflows/benchmarking: increase timeout
* test: switch pgbench to the default (simple) query mode
* test/performance: make sure tables we create don't already exist
* workflows/pg_clients: remove unused env var
* workflows/benchmarking: change platform name
Authored by Alexander Bayandin on 2022-07-07 19:53:23 +01:00, committed by GitHub
parent ec0faf3ac6
commit 00c26ff3a3
8 changed files with 15 additions and 31 deletions

View File

@@ -324,7 +324,7 @@ class PgProtocol:
         # Convert options='-c<key>=<val>' to server_settings
         if 'options' in conn_options:
             options = conn_options.pop('options')
-            for match in re.finditer('-c(\w*)=(\w*)', options):
+            for match in re.finditer(r'-c(\w*)=(\w*)', options):
                 key = match.group(1)
                 val = match.group(2)
                 if 'server_options' in conn_options:
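The only functional change here is the raw-string prefix: in a plain string literal, Python 3 treats \w as an invalid escape sequence and emits the DeprecationWarning mentioned in the commit message, while the compiled regex is identical. A self-contained sketch of the same conversion, with made-up input and variable names (opts, server_settings) rather than the fixture's actual locals:

import re

# Example libpq-style options string, like the one passed via
# env.pg.connstr(options="-csynchronous_commit=off") later in this commit.
opts = "-csynchronous_commit=off -cwork_mem=64MB"

server_settings = {}
# The raw-string literal keeps \w as a regex word-character class instead of
# an invalid Python string escape (the source of the DeprecationWarning).
for match in re.finditer(r'-c(\w*)=(\w*)', opts):
    server_settings[match.group(1)] = match.group(2)

print(server_settings)  # {'synchronous_commit': 'off', 'work_mem': '64MB'}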

View File

@@ -28,7 +28,7 @@ def test_compare_pg_stats_rw_with_pgbench_default(neon_with_baseline: PgCompare,
     with env.record_pg_stats(pg_stats_rw):
         env.pg_bin.run_capture(
-            ['pgbench', f'-T{duration}', f'--random-seed={seed}', '-Mprepared', env.pg.connstr()])
+            ['pgbench', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
         env.flush()
@@ -46,14 +46,8 @@ def test_compare_pg_stats_wo_with_pgbench_simple_update(neon_with_baseline: PgCo
     env.flush()

     with env.record_pg_stats(pg_stats_wo):
-        env.pg_bin.run_capture([
-            'pgbench',
-            '-N',
-            f'-T{duration}',
-            f'--random-seed={seed}',
-            '-Mprepared',
-            env.pg.connstr()
-        ])
+        env.pg_bin.run_capture(
+            ['pgbench', '-N', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
         env.flush()
@@ -71,14 +65,8 @@ def test_compare_pg_stats_ro_with_pgbench_select_only(neon_with_baseline: PgComp
     env.flush()

     with env.record_pg_stats(pg_stats_ro):
-        env.pg_bin.run_capture([
-            'pgbench',
-            '-S',
-            f'-T{duration}',
-            f'--random-seed={seed}',
-            '-Mprepared',
-            env.pg.connstr()
-        ])
+        env.pg_bin.run_capture(
+            ['pgbench', '-S', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
         env.flush()
@@ -97,5 +85,5 @@ def test_compare_pg_stats_wal_with_pgbench_default(neon_with_baseline: PgCompare
     with env.record_pg_stats(pg_stats_wal):
         env.pg_bin.run_capture(
-            ['pgbench', f'-T{duration}', f'--random-seed={seed}', '-Mprepared', env.pg.connstr()])
+            ['pgbench', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
         env.flush()
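All four hunks above make the same change: dropping '-Mprepared' so pgbench falls back to its default simple query protocol, per the "switch pgbench to the default (simple) query mode" bullet. If the query mode ever needs to be configurable again, a small helper along these lines would keep the call sites uniform; the helper name and signature below are purely illustrative, not part of the fixtures:

def pgbench_cmd(connstr, duration, seed, extra_flags=(), query_mode=None):
    # Hypothetical helper (not in the fixtures): assemble a pgbench invocation.
    # Omitting -M entirely leaves pgbench on its default "simple" query protocol.
    cmd = ['pgbench', *extra_flags, f'-T{duration}', f'--random-seed={seed}']
    if query_mode is not None:  # e.g. 'prepared' or 'extended'
        cmd.append(f'-M{query_mode}')
    cmd.append(connstr)
    return cmd

# Mirrors the simple-update case above, in the default (simple) query mode:
# env.pg_bin.run_capture(pgbench_cmd(env.pg.connstr(), duration, seed, extra_flags=['-N']))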

View File

@@ -18,6 +18,7 @@ def test_hot_page(env: PgCompare):
     with closing(env.pg.connect()) as conn:
         with conn.cursor() as cur:
+            cur.execute('drop table if exists t, f;')

             # Write many updates to the same row
             with env.record_duration('write'):

View File

@@ -20,6 +20,7 @@ def test_hot_table(env: PgCompare):
     with closing(env.pg.connect()) as conn:
         with conn.cursor() as cur:
+            cur.execute('drop table if exists t;')

             # Write many updates to a small table
             with env.record_duration('write'):
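These two hunks implement the "tables we create" bullet: when the target Postgres persists between runs, as it presumably does for the periodic perf tests this commit revives, a table left over from an earlier run would make the subsequent create table fail, so the tests drop it first. A minimal sketch of the same rerunnable-setup pattern, assuming an env object with pg.connect() as in the tests above (the table definition itself is illustrative):

from contextlib import closing

def recreate_table(env):
    # Rerunnable setup: remove any leftover table from a previous run,
    # then create it fresh before the measured workload starts.
    with closing(env.pg.connect()) as conn:
        with conn.cursor() as cur:
            cur.execute('drop table if exists t;')
            cur.execute('create table t (i integer);')
            cur.execute('insert into t values (0);')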

View File

@@ -78,13 +78,11 @@ def run_test_pgbench(env: PgCompare, scale: int, duration: int):
     # Run simple-update workload
     run_pgbench(env,
-                "simple-update",
-                ['pgbench', '-N', '-c4', f'-T{duration}', '-P2', '-Mprepared', env.pg.connstr()])
+                "simple-update", ['pgbench', '-N', '-c4', f'-T{duration}', '-P2', env.pg.connstr()])

     # Run SELECT workload
     run_pgbench(env,
-                "select-only",
-                ['pgbench', '-S', '-c4', f'-T{duration}', '-P2', '-Mprepared', env.pg.connstr()])
+                "select-only", ['pgbench', '-S', '-c4', f'-T{duration}', '-P2', env.pg.connstr()])

     env.report_size()

View File

@@ -116,7 +116,6 @@ def start_pgbench_simple_update_workload(env: PgCompare, duration: int):
         '-c10',
         '-N',
         f'-T{duration}',
-        '-Mprepared',
         env.pg.connstr(options="-csynchronous_commit=off")
     ])
     env.flush()