Add max_wal_rate test (#12621)

## Problem
Add a test for max_wal_rate

## Summary of changes
Test max_wal_rate

## How is this tested?
Via a new Python regression test, `test_max_wal_rate`, which checks throttling behavior at unlimited, 0 MB/s, and 1 MB/s settings.

Co-authored-by: Haoyu Huang <haoyu.huang@databricks.com>
This commit is contained in:
Tristan Partin
2025-07-21 12:58:03 -05:00
committed by GitHub
parent 30e1213141
commit 187170be47

View File

@@ -3,6 +3,7 @@
#
from __future__ import annotations
import time
from concurrent.futures import ThreadPoolExecutor
from typing import TYPE_CHECKING, Any, cast
@@ -356,6 +357,74 @@ def test_sql_regress(
post_checks(env, test_output_dir, DBNAME, endpoint)
def _insert_kb_rows(endpoint, dbname: str, count: int, setup_sql: str | None = None) -> None:
    """Insert `count` rows of ~1 KB each into usertable, one statement at a time.

    `setup_sql`, when given, is executed first on the same session (used to set
    a session-level GUC before the writes).
    """
    with endpoint.cursor(dbname=dbname) as cur:
        if setup_sql is not None:
            cur.execute(setup_sql)
        for _ in range(count):
            cur.execute("INSERT INTO usertable SELECT random(), repeat('a', 1000);")


def test_max_wal_rate(neon_simple_env: NeonEnv):
    """
    Test the databricks.max_wal_mb_per_second GUC and how it affects WAL rate
    limiting.

    Scenario: with no limit configured there is no backpressure throttling;
    with a 0 MB/s limit even a small write is throttled; with a 1 MB/s limit
    a 10 MB write takes at least 10 seconds.
    """
    env = neon_simple_env
    DBNAME = "regression"
    superuser_name = "databricks_superuser"

    # Connect to postgres and create a database called "regression".
    endpoint = env.endpoints.create_start("main")
    endpoint.safe_psql_many(
        [
            f"CREATE ROLE {superuser_name}",
            f"CREATE DATABASE {DBNAME}",
            "CREATE EXTENSION neon",
        ]
    )
    endpoint.safe_psql("CREATE TABLE usertable (YCSB_KEY INT, FIELD0 TEXT);", dbname=DBNAME)

    # Write ~1 MB data. No rate limit is configured yet, so no backpressure.
    _insert_kb_rows(endpoint, DBNAME, 1000)
    tuples = endpoint.safe_psql("SELECT backpressure_throttling_time();")
    assert tuples[0][0] == 0, "Backpressure throttling detected"

    # 0 MB/s max_wal_rate. WAL proposer can still push some WALs but will be super slow.
    endpoint.safe_psql_many(
        [
            "ALTER SYSTEM SET databricks.max_wal_mb_per_second = 0;",
            "SELECT pg_reload_conf();",
        ]
    )
    # Write ~10 KB data should hit backpressure.
    _insert_kb_rows(
        endpoint, DBNAME, 10, setup_sql="SET databricks.max_wal_mb_per_second = 0;"
    )
    tuples = endpoint.safe_psql("SELECT backpressure_throttling_time();")
    assert tuples[0][0] > 0, "No backpressure throttling detected"

    # 1 MB/s max_wal_rate.
    endpoint.safe_psql_many(
        [
            "ALTER SYSTEM SET databricks.max_wal_mb_per_second = 1;",
            "SELECT pg_reload_conf();",
        ]
    )
    # Write 10 MB data. Use a monotonic clock for the elapsed-time check:
    # int(time.time()) truncates to whole seconds and wall clock can step,
    # either of which makes the >= 10s assertion flaky.
    start = time.monotonic()
    _insert_kb_rows(endpoint, DBNAME, 10000)
    elapsed = time.monotonic() - start
    assert elapsed >= 10, (
        "Throttling should cause the previous inserts to take greater than or equal to 10 seconds"
    )
@skip_in_debug_build("only run with release build")
@pytest.mark.parametrize("reldir_type", ["v1", "v2"])
def test_tx_abort_with_many_relations(