Improve random_writes test (#1469)

If you want to test with a 3GB database by tweaking some constants, you'll hit a query timeout. This is fixed by batching the inserts.
This commit is contained in:
bojanserafimov
2022-04-06 18:32:10 -04:00
committed by GitHub
parent d0c246ac3c
commit 6fe443e239

View File

@@ -49,7 +49,15 @@ def test_random_writes(zenith_with_baseline: PgCompare):
count integer default 0
);
""")
cur.execute(f"INSERT INTO Big (pk) values (generate_series(1,{n_rows}))")
# Insert n_rows in batches to avoid query timeouts.
# A single generate_series INSERT over the full range can exceed the
# statement timeout for large n_rows (e.g. a ~3GB table), so the rows
# are inserted in chunks of at most 1,000,000.
rows_inserted = 0
while rows_inserted < n_rows:
    # Final batch may be smaller than 1M if n_rows is not a multiple of it.
    rows_to_insert = min(1000 * 1000, n_rows - rows_inserted)
    # generate_series bounds are inclusive, so this batch covers pks
    # low..high; low and high are ints computed locally (no injection risk).
    low = rows_inserted + 1
    high = rows_inserted + rows_to_insert
    cur.execute(f"INSERT INTO Big (pk) values (generate_series({low},{high}))")
    rows_inserted += rows_to_insert
# Get table size (can't be predicted because padding and alignment)
# NOTE(review): pg_relation_size reports the main-fork size in bytes;
# the fetch of this result presumably happens just past this hunk — confirm.
cur.execute("SELECT pg_relation_size('Big');")