mirror of
https://github.com/neondatabase/neon.git
synced 2026-05-13 03:00:37 +00:00
random_init_delay: remove the minimum of 10 seconds
Before this patch, the range from which the random delay was picked was at least 10 seconds. The disk-usage eviction tests are apparently the first to wait for a background loop to do its job. With this patch, they only need to wait for the 1 second that they configure.
This commit is contained in:
@@ -244,14 +244,12 @@ pub(crate) async fn random_init_delay(
|
||||
) -> Result<(), Cancelled> {
|
||||
use rand::Rng;
|
||||
|
||||
if period == Duration::ZERO {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let d = {
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
// gen_range asserts that the range cannot be empty, which it could be because period can
|
||||
// be set to zero to disable gc or compaction, so lets set it to be at least 10s.
|
||||
let period = std::cmp::max(period, Duration::from_secs(10));
|
||||
|
||||
// semi-ok default as the source of jitter
|
||||
rng.gen_range(Duration::ZERO..=period)
|
||||
};
|
||||
|
||||
|
||||
Reference in New Issue
Block a user