From 6fe443e239531ca1fef4dbf5258c892b1baac6ef Mon Sep 17 00:00:00 2001
From: bojanserafimov
Date: Wed, 6 Apr 2022 18:32:10 -0400
Subject: [PATCH] Improve random_writes test (#1469)

If you want to test with a 3GB database by tweaking some constants,
you'll hit a query timeout. I fix that by batching the inserts.
---
 test_runner/performance/test_random_writes.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/test_runner/performance/test_random_writes.py b/test_runner/performance/test_random_writes.py
index b41f2f72a8..ba9eabcd97 100644
--- a/test_runner/performance/test_random_writes.py
+++ b/test_runner/performance/test_random_writes.py
@@ -49,7 +49,15 @@ def test_random_writes(zenith_with_baseline: PgCompare):
             count integer default 0
         );
     """)
-    cur.execute(f"INSERT INTO Big (pk) values (generate_series(1,{n_rows}))")
+
+    # Insert n_rows in batches to avoid query timeouts
+    rows_inserted = 0
+    while rows_inserted < n_rows:
+        rows_to_insert = min(1000 * 1000, n_rows - rows_inserted)
+        low = rows_inserted + 1
+        high = rows_inserted + rows_to_insert
+        cur.execute(f"INSERT INTO Big (pk) values (generate_series({low},{high}))")
+        rows_inserted += rows_to_insert
 
     # Get table size (can't be predicted because padding and alignment)
     cur.execute("SELECT pg_relation_size('Big');")
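
For reference, the batching pattern in the diff can be reproduced outside the
test harness. Below is a minimal standalone sketch, assuming psycopg2 and a
reachable Postgres instance; the DSN, the standalone script structure, and the
row count are illustrative additions, not part of the patch.

    # Standalone sketch (not part of the patch): batched inserts via generate_series.
    # Assumes psycopg2 is installed and Postgres is reachable; the DSN is hypothetical.
    import psycopg2

    BATCH_SIZE = 1000 * 1000  # same one-million-row batch size as the patch


    def batched_insert(cur, n_rows):
        # Insert pk values 1..n_rows in batches so no single statement runs
        # long enough to hit a query timeout.
        rows_inserted = 0
        while rows_inserted < n_rows:
            rows_to_insert = min(BATCH_SIZE, n_rows - rows_inserted)
            low = rows_inserted + 1
            high = rows_inserted + rows_to_insert
            cur.execute(f"INSERT INTO Big (pk) values (generate_series({low},{high}))")
            rows_inserted += rows_to_insert


    if __name__ == "__main__":
        conn = psycopg2.connect("dbname=test")  # hypothetical connection string
        with conn:  # commits on success, rolls back on error
            with conn.cursor() as cur:
                cur.execute("""
                    CREATE TABLE IF NOT EXISTS Big(
                        pk integer primary key,
                        count integer default 0
                    );
                """)
                batched_insert(cur, 10 * 1000 * 1000)
        conn.close()

Batching bounds the runtime of each individual statement, which is what a
query timeout measures, while leaving the total number of inserted rows
unchanged.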