From d1b398702dfa6308a0f8289da6345f02aaa34af4 Mon Sep 17 00:00:00 2001 From: Christian Schwarz Date: Thu, 30 Mar 2023 11:13:46 +0200 Subject: [PATCH] random_init_delay: remove the minimum of 10 seconds Before this patch, the range from which the random delay is picked is at least 10 seconds. The disk usage eviction tests are apparently the first to wait for a background loop to do its job. With this patch, they only need to wait for the 1 second that they configure. --- pageserver/src/tenant/tasks.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pageserver/src/tenant/tasks.rs b/pageserver/src/tenant/tasks.rs index 20d1d2bfb6..8aeacc12f5 100644 --- a/pageserver/src/tenant/tasks.rs +++ b/pageserver/src/tenant/tasks.rs @@ -244,14 +244,12 @@ pub(crate) async fn random_init_delay( ) -> Result<(), Cancelled> { use rand::Rng; + if period == Duration::ZERO { + return Ok(()); + } + let d = { let mut rng = rand::thread_rng(); - - // gen_range asserts that the range cannot be empty, which it could be because period can - // be set to zero to disable gc or compaction, so lets set it to be at least 10s. - let period = std::cmp::max(period, Duration::from_secs(10)); - - // semi-ok default as the source of jitter rng.gen_range(Duration::ZERO..=period) };