diff --git a/pageserver/src/tenant.rs b/pageserver/src/tenant.rs
index cb3e36efb3..05ceff2b59 100644
--- a/pageserver/src/tenant.rs
+++ b/pageserver/src/tenant.rs
@@ -3873,6 +3873,7 @@ mod tests {
     use hex_literal::hex;
     use pageserver_api::key::NON_INHERITED_RANGE;
     use pageserver_api::keyspace::KeySpace;
+    use pageserver_api::models::CompactionAlgorithm;
     use rand::{thread_rng, Rng};
     use tests::storage_layer::ValuesReconstructState;
     use tests::timeline::{GetVectoredError, ShutdownMode};
@@ -4512,11 +4513,23 @@ mod tests {
     }
 
     async fn bulk_insert_compact_gc(
+        timeline: Arc<Timeline>,
+        ctx: &RequestContext,
+        lsn: Lsn,
+        repeat: usize,
+        key_count: usize,
+    ) -> anyhow::Result<()> {
+        let compact = true;
+        bulk_insert_maybe_compact_gc(timeline, ctx, lsn, repeat, key_count, compact).await
+    }
+
+    async fn bulk_insert_maybe_compact_gc(
         timeline: Arc<Timeline>,
         ctx: &RequestContext,
         mut lsn: Lsn,
         repeat: usize,
         key_count: usize,
+        compact: bool,
     ) -> anyhow::Result<()> {
         let mut test_key = Key::from_hex("010000000033333333444444445500000000").unwrap();
         let mut blknum = 0;
@@ -4557,9 +4570,11 @@ mod tests {
                 )
                 .await?;
             timeline.freeze_and_flush().await?;
-            timeline
-                .compact(&CancellationToken::new(), EnumSet::empty(), ctx)
-                .await?;
+            if compact {
+                timeline
+                    .compact(&CancellationToken::new(), EnumSet::empty(), ctx)
+                    .await?;
+            }
             timeline.gc().await?;
         }
 
@@ -5042,7 +5057,22 @@ mod tests {
     #[tokio::test]
     async fn test_random_updates() -> anyhow::Result<()> {
-        let harness = TenantHarness::create("test_random_updates")?;
+        let names_algorithms = [
+            ("test_random_updates_legacy", CompactionAlgorithm::Legacy),
+            ("test_random_updates_tiered", CompactionAlgorithm::Tiered),
+        ];
+        for (name, algorithm) in names_algorithms {
+            test_random_updates_algorithm(name, algorithm).await?;
+        }
+        Ok(())
+    }
+
+    async fn test_random_updates_algorithm(
+        name: &'static str,
+        compaction_algorithm: CompactionAlgorithm,
+    ) -> anyhow::Result<()> {
+        let mut harness = TenantHarness::create(name)?;
+        harness.tenant_conf.compaction_algorithm = compaction_algorithm;
         let (tenant, ctx) = harness.load().await;
         let tline = tenant
             .create_test_timeline(TIMELINE_ID, Lsn(0x10), DEFAULT_PG_VERSION, &ctx)
             .await?;
@@ -5107,7 +5137,7 @@ mod tests {
                 );
             }
 
-            // Perform a cycle of flush, compact, and GC
+            // Perform a cycle of flush and GC
             let cutoff = tline.get_last_record_lsn();
             tline
                 .update_gc_info(
@@ -5119,9 +5149,6 @@ mod tests {
                 )
                 .await?;
             tline.freeze_and_flush().await?;
-            tline
-                .compact(&CancellationToken::new(), EnumSet::empty(), &ctx)
-                .await?;
             tline.gc().await?;
         }
 
@@ -5402,19 +5429,36 @@ mod tests {
 
     #[tokio::test]
     async fn test_read_at_max_lsn() -> anyhow::Result<()> {
-        let harness = TenantHarness::create("test_read_at_max_lsn")?;
+        let names_algorithms = [
+            ("test_read_at_max_lsn_legacy", CompactionAlgorithm::Legacy),
+            ("test_read_at_max_lsn_tiered", CompactionAlgorithm::Tiered),
+        ];
+        for (name, algorithm) in names_algorithms {
+            test_read_at_max_lsn_algorithm(name, algorithm).await?;
+        }
+        Ok(())
+    }
+
+    async fn test_read_at_max_lsn_algorithm(
+        name: &'static str,
+        compaction_algorithm: CompactionAlgorithm,
+    ) -> anyhow::Result<()> {
+        let mut harness = TenantHarness::create(name)?;
+        harness.tenant_conf.compaction_algorithm = compaction_algorithm;
         let (tenant, ctx) = harness.load().await;
         let tline = tenant
             .create_test_timeline(TIMELINE_ID, Lsn(0x08), DEFAULT_PG_VERSION, &ctx)
             .await?;
 
         let lsn = Lsn(0x10);
-        bulk_insert_compact_gc(tline.clone(), &ctx, lsn, 50, 10000).await?;
+        let compact = false;
+        bulk_insert_maybe_compact_gc(tline.clone(), &ctx, lsn, 50, 10000, compact).await?;
 
         let test_key = Key::from_hex("010000000033333333444444445500000000").unwrap();
         let read_lsn = Lsn(u64::MAX - 1);
 
-        assert!(tline.get(test_key, read_lsn, &ctx).await.is_ok());
+        let result = tline.get(test_key, read_lsn, &ctx).await;
+        assert!(result.is_ok(), "result is not Ok: {}", result.unwrap_err());
 
         Ok(())
     }
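
Note on the pattern above: each per-algorithm variant needs its own `&'static str` harness name, because `TenantHarness::create` takes the name as a static string and uses it for the test's on-disk directory, and the compaction algorithm must be set on `harness.tenant_conf` before `load()` constructs the tenant. If more tests grow per-algorithm variants, the fan-out could be factored into a shared helper. A minimal sketch, not part of this diff: it assumes the types used above (`TenantHarness`, `RequestContext`, `CompactionAlgorithm`), assumes `harness.load()` yields an `(Arc<Tenant>, RequestContext)` pair as the tests suggest, and the helper and test names are hypothetical:

    // Hypothetical helper: run one test body per compaction algorithm.
    // Each (name, algorithm) pair gets a fresh harness so the on-disk
    // test directories do not collide between runs.
    async fn for_each_compaction_algorithm<F, Fut>(
        names_algorithms: [(&'static str, CompactionAlgorithm); 2],
        body: F,
    ) -> anyhow::Result<()>
    where
        F: Fn(Arc<Tenant>, RequestContext) -> Fut,
        Fut: std::future::Future<Output = anyhow::Result<()>>,
    {
        for (name, algorithm) in names_algorithms {
            let mut harness = TenantHarness::create(name)?;
            // Must be set before `load()` builds the tenant.
            harness.tenant_conf.compaction_algorithm = algorithm;
            let (tenant, ctx) = harness.load().await;
            body(tenant, ctx).await?;
        }
        Ok(())
    }

    // Usage sketch: the body receives a freshly loaded tenant per algorithm.
    #[tokio::test]
    async fn test_example() -> anyhow::Result<()> {
        for_each_compaction_algorithm(
            [
                ("test_example_legacy", CompactionAlgorithm::Legacy),
                ("test_example_tiered", CompactionAlgorithm::Tiered),
            ],
            |tenant, ctx| async move {
                // ... test body using `tenant` and `ctx` ...
                let _ = (tenant, ctx);
                Ok(())
            },
        )
        .await
    }

The diff instead spells the fan-out out per test, which keeps each test body self-contained and each directory name greppable; a helper like this only pays off if the pattern spreads to more tests.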