From 5e04dad3604ddc6da58558425f44c9e6b3f05def Mon Sep 17 00:00:00 2001 From: Heikki Linnakangas Date: Fri, 25 Mar 2022 23:42:13 +0200 Subject: [PATCH] Add more variants of the sequential scan performance tests. More rows, and test with serial and parallel plans. But fewer iterations, so that the tests run in < 1 minute, and we don't need to mark them as "slow". --- ...est_small_seqscans.py => test_seqscans.py} | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) rename test_runner/performance/{test_small_seqscans.py => test_seqscans.py} (65%) diff --git a/test_runner/performance/test_small_seqscans.py b/test_runner/performance/test_seqscans.py similarity index 65% rename from test_runner/performance/test_small_seqscans.py rename to test_runner/performance/test_seqscans.py index b98018ad97..85d0a24510 100644 --- a/test_runner/performance/test_small_seqscans.py +++ b/test_runner/performance/test_seqscans.py @@ -1,8 +1,5 @@ # Test sequential scan speed # -# The test table is large enough (3-4 MB) that it doesn't fit in the compute node -# cache, so the seqscans go to the page server. But small enough that it fits -# into memory in the page server. from contextlib import closing from dataclasses import dataclass from fixtures.zenith_fixtures import ZenithEnv @@ -12,11 +9,18 @@ from fixtures.compare_fixtures import PgCompare import pytest -@pytest.mark.parametrize('rows', [ - pytest.param(100000), - pytest.param(1000000, marks=pytest.mark.slow), -]) -def test_small_seqscans(zenith_with_baseline: PgCompare, rows: int): +@pytest.mark.parametrize( + 'rows,iters,workers', + [ + # The test table is large enough (3-4 MB) that it doesn't fit in the compute node + # cache, so the seqscans go to the page server. But small enough that it fits + # into memory in the page server. 
+ pytest.param(100000, 100, 0), + # Also test with a larger table, with and without parallelism + pytest.param(10000000, 1, 0), + pytest.param(10000000, 1, 4) + ]) +def test_seqscans(zenith_with_baseline: PgCompare, rows: int, iters: int, workers: int): env = zenith_with_baseline with closing(env.pg.connect()) as conn: @@ -36,6 +40,8 @@ def test_small_seqscans(zenith_with_baseline: PgCompare, rows: int): assert int(shared_buffers) < int(table_size) env.zenbenchmark.record("table_size", table_size, 'bytes', MetricReport.TEST_PARAM) + cur.execute(f"set max_parallel_workers_per_gather = {workers}") + with env.record_duration('run'): - for i in range(1000): + for i in range(iters): cur.execute('select count(*) from t;')