Bring periodic perf tests on GitHub back (#2037)

* test/fixtures: fix DeprecationWarning
* workflows/benchmarking: increase timeout
* test: switch pgbench to default(simple) query mode
* test/performance: ensure tables we're about to create don't already exist (drop them first)
* workflows/pg_clients: remove unused env var
* workflows/benchmarking: change platform name
This commit is contained in:
Alexander Bayandin
2022-07-07 19:53:23 +01:00
committed by GitHub
parent ec0faf3ac6
commit 00c26ff3a3
8 changed files with 15 additions and 31 deletions

View File

@@ -26,11 +26,11 @@ jobs:
runs-on: [self-hosted, zenith-benchmarker]
env:
POSTGRES_DISTRIB_DIR: "/usr/pgsql-13"
POSTGRES_DISTRIB_DIR: "/usr/pgsql-14"
steps:
- name: Checkout zenith repo
uses: actions/checkout@v2
uses: actions/checkout@v3
# actions/setup-python@v2 is not working correctly on self-hosted runners
# see https://github.com/actions/setup-python/issues/162
@@ -88,7 +88,7 @@ jobs:
# Plus time needed to initialize the test databases.
TEST_PG_BENCH_DURATIONS_MATRIX: "300"
TEST_PG_BENCH_SCALES_MATRIX: "10,100"
PLATFORM: "zenith-staging"
PLATFORM: "neon-staging"
BENCHMARK_CONNSTR: "${{ secrets.BENCHMARK_STAGING_CONNSTR }}"
REMOTE_ENV: "1" # indicate to test harness that we do not have zenith binaries locally
run: |
@@ -96,7 +96,7 @@ jobs:
# since it might generate duplicates when calling ingest_perf_test_result.py
rm -rf perf-report-staging
mkdir -p perf-report-staging
./scripts/pytest test_runner/performance/ -v -m "remote_cluster" --skip-interfering-proc-check --out-dir perf-report-staging
./scripts/pytest test_runner/performance/ -v -m "remote_cluster" --skip-interfering-proc-check --out-dir perf-report-staging --timeout 3600
- name: Submit result
env:

View File

@@ -48,9 +48,6 @@ jobs:
BENCHMARK_CONNSTR: "${{ secrets.BENCHMARK_STAGING_CONNSTR }}"
TEST_OUTPUT: /tmp/test_output
POSTGRES_DISTRIB_DIR: /tmp/neon/pg_install
# this variable will be embedded in perf test report
# and is needed to distinguish different environments
PLATFORM: github-actions-selfhosted
shell: bash -ex {0}
run: |
# Test framework expects we have psql binary;

View File

@@ -324,7 +324,7 @@ class PgProtocol:
# Convert options='-c<key>=<val>' to server_settings
if 'options' in conn_options:
options = conn_options.pop('options')
for match in re.finditer('-c(\w*)=(\w*)', options):
for match in re.finditer(r'-c(\w*)=(\w*)', options):
key = match.group(1)
val = match.group(2)
if 'server_options' in conn_options:

View File

@@ -28,7 +28,7 @@ def test_compare_pg_stats_rw_with_pgbench_default(neon_with_baseline: PgCompare,
with env.record_pg_stats(pg_stats_rw):
env.pg_bin.run_capture(
['pgbench', f'-T{duration}', f'--random-seed={seed}', '-Mprepared', env.pg.connstr()])
['pgbench', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
env.flush()
@@ -46,14 +46,8 @@ def test_compare_pg_stats_wo_with_pgbench_simple_update(neon_with_baseline: PgCo
env.flush()
with env.record_pg_stats(pg_stats_wo):
env.pg_bin.run_capture([
'pgbench',
'-N',
f'-T{duration}',
f'--random-seed={seed}',
'-Mprepared',
env.pg.connstr()
])
env.pg_bin.run_capture(
['pgbench', '-N', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
env.flush()
@@ -71,14 +65,8 @@ def test_compare_pg_stats_ro_with_pgbench_select_only(neon_with_baseline: PgComp
env.flush()
with env.record_pg_stats(pg_stats_ro):
env.pg_bin.run_capture([
'pgbench',
'-S',
f'-T{duration}',
f'--random-seed={seed}',
'-Mprepared',
env.pg.connstr()
])
env.pg_bin.run_capture(
['pgbench', '-S', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
env.flush()
@@ -97,5 +85,5 @@ def test_compare_pg_stats_wal_with_pgbench_default(neon_with_baseline: PgCompare
with env.record_pg_stats(pg_stats_wal):
env.pg_bin.run_capture(
['pgbench', f'-T{duration}', f'--random-seed={seed}', '-Mprepared', env.pg.connstr()])
['pgbench', f'-T{duration}', f'--random-seed={seed}', env.pg.connstr()])
env.flush()

View File

@@ -18,6 +18,7 @@ def test_hot_page(env: PgCompare):
with closing(env.pg.connect()) as conn:
with conn.cursor() as cur:
cur.execute('drop table if exists t, f;')
# Write many updates to the same row
with env.record_duration('write'):

View File

@@ -20,6 +20,7 @@ def test_hot_table(env: PgCompare):
with closing(env.pg.connect()) as conn:
with conn.cursor() as cur:
cur.execute('drop table if exists t;')
# Write many updates to a small table
with env.record_duration('write'):

View File

@@ -78,13 +78,11 @@ def run_test_pgbench(env: PgCompare, scale: int, duration: int):
# Run simple-update workload
run_pgbench(env,
"simple-update",
['pgbench', '-N', '-c4', f'-T{duration}', '-P2', '-Mprepared', env.pg.connstr()])
"simple-update", ['pgbench', '-N', '-c4', f'-T{duration}', '-P2', env.pg.connstr()])
# Run SELECT workload
run_pgbench(env,
"select-only",
['pgbench', '-S', '-c4', f'-T{duration}', '-P2', '-Mprepared', env.pg.connstr()])
"select-only", ['pgbench', '-S', '-c4', f'-T{duration}', '-P2', env.pg.connstr()])
env.report_size()

View File

@@ -116,7 +116,6 @@ def start_pgbench_simple_update_workload(env: PgCompare, duration: int):
'-c10',
'-N',
f'-T{duration}',
'-Mprepared',
env.pg.connstr(options="-csynchronous_commit=off")
])
env.flush()