From fa54a57ca29f5d04af06b64f5532b38c5430675b Mon Sep 17 00:00:00 2001
From: Christian Schwarz
Date: Thu, 30 Mar 2023 18:38:45 +0200
Subject: [PATCH] random_init_delay: remove the minimum of 10 seconds (#3914)

Before this patch, the upper bound of the range from which the random
delay is picked is at least 10 seconds. With this patch, the delay is
bounded by the given `period`, and is zero if `period` is
`Duration::ZERO`.

Motivation for this: the disk usage eviction tests that we'll add in
https://github.com/neondatabase/neon/pull/3905 need to wait for the
disk usage eviction background loop to do its job. They set a period
of 1s. It seems wasteful to wait 10 seconds in the tests.

Co-authored-by: Joonas Koivunen
---
 pageserver/src/tenant/tasks.rs | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/pageserver/src/tenant/tasks.rs b/pageserver/src/tenant/tasks.rs
index 20d1d2bfb6..8aeacc12f5 100644
--- a/pageserver/src/tenant/tasks.rs
+++ b/pageserver/src/tenant/tasks.rs
@@ -244,14 +244,12 @@ pub(crate) async fn random_init_delay(
 ) -> Result<(), Cancelled> {
     use rand::Rng;
 
+    if period == Duration::ZERO {
+        return Ok(());
+    }
+
     let d = {
         let mut rng = rand::thread_rng();
-
-        // gen_range asserts that the range cannot be empty, which it could be because period can
-        // be set to zero to disable gc or compaction, so lets set it to be at least 10s.
-        let period = std::cmp::max(period, Duration::from_secs(10));
-
-        // semi-ok default as the source of jitter
         rng.gen_range(Duration::ZERO..=period)
     };
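
For illustration only, not part of the patch: a minimal, self-contained Rust sketch of the post-patch behavior, assuming the same rand-0.8-style `gen_range` over `Duration` that the diff itself uses. The `jitter` helper and the `main` driver are hypothetical names that merely mirror the zero-period short circuit and the uniform sampling over `ZERO..=period`.

    use rand::Rng;
    use std::time::Duration;

    // Hypothetical helper mirroring the patched logic: a zero period means no
    // delay; otherwise the delay is sampled uniformly from ZERO..=period.
    fn jitter(period: Duration) -> Duration {
        if period == Duration::ZERO {
            return Duration::ZERO;
        }
        rand::thread_rng().gen_range(Duration::ZERO..=period)
    }

    fn main() {
        // Zero period: no delay at all (previously the range was clamped to
        // at least 10 seconds).
        assert_eq!(jitter(Duration::ZERO), Duration::ZERO);

        // Non-zero period: the delay never exceeds the period itself.
        let d = jitter(Duration::from_secs(1));
        assert!(d <= Duration::from_secs(1));
        println!("would sleep {d:?} before the first iteration");
    }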