refactor(pageserver(load_remote_timeline)) remove dead code handling absence of IndexPart (#9408)

The code is dead at runtime: nowadays we always run with remote storage and
treat it as the source of truth during attach.

Clean it up as a preliminary to
https://github.com/neondatabase/neon/pull/9218.

Related: https://github.com/neondatabase/neon/pull/9366
Author: Christian Schwarz
Date: 2024-10-24 10:00:22 +02:00
Committed by: GitHub
Parent: b86432c29e
Commit: 6f34f97573
3 changed files with 6 additions and 34 deletions
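
The shape of the change, as a minimal self-contained Rust sketch with stand-in types (not the pageserver's actual definitions): once every caller is known to pass `Some(..)`, tightening the parameter from `Option<IndexPart>` to `IndexPart` deletes the unreachable `None` branch and turns "the remote index is always present" into a compile-time invariant.

    // Stand-in types; the field and function names below are illustrative only.
    struct IndexPart {
        layer_count: usize, // placeholder field, not the real struct layout
    }

    // Before: the callee carries a `None` branch that no caller can reach anymore.
    fn init_with_optional_index(index_part: Option<&IndexPart>) -> Result<(), String> {
        match index_part {
            Some(part) => {
                println!("initializing upload queue from index with {} layers", part.layer_count);
                Ok(())
            }
            None => Err("unreachable in practice: remote storage is always configured".to_string()),
        }
    }

    // After: the signature encodes the invariant; the dead branch is gone.
    fn init_with_required_index(index_part: &IndexPart) -> Result<(), String> {
        println!("initializing upload queue from index with {} layers", index_part.layer_count);
        Ok(())
    }

    fn main() -> Result<(), String> {
        let index_part = IndexPart { layer_count: 3 };
        init_with_optional_index(Some(&index_part))?;
        init_with_required_index(&index_part)
    }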

@@ -869,7 +869,7 @@ impl Tenant {
         &self,
         timeline_id: TimelineId,
         resources: TimelineResources,
-        index_part: Option<IndexPart>,
+        index_part: IndexPart,
         metadata: TimelineMetadata,
         ancestor: Option<Arc<Timeline>>,
         _ctx: &RequestContext,
@@ -894,24 +894,7 @@ impl Tenant {
             "these are used interchangeably"
         );
-        if let Some(index_part) = index_part.as_ref() {
-            timeline.remote_client.init_upload_queue(index_part)?;
-        } else {
-            // No data on the remote storage, but we have local metadata file. We can end up
-            // here with timeline_create being interrupted before finishing index part upload.
-            // By doing what we do here, the index part upload is retried.
-            // If control plane retries timeline creation in the meantime, the mgmt API handler
-            // for timeline creation will coalesce on the upload we queue here.
-            // FIXME: this branch should be dead code as we no longer write local metadata.
-            timeline
-                .remote_client
-                .init_upload_queue_for_empty_remote(&metadata)?;
-            timeline
-                .remote_client
-                .schedule_index_upload_for_full_metadata_update(&metadata)?;
-        }
+        timeline.remote_client.init_upload_queue(&index_part)?;
         timeline
             .load_layer_map(disk_consistent_lsn, index_part)
@@ -1541,7 +1524,7 @@ impl Tenant {
         self.timeline_init_and_sync(
             timeline_id,
             resources,
-            Some(index_part),
+            index_part,
             remote_metadata,
             ancestor,
             ctx,
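
For orientation, a hedged sketch of the resulting attach flow with simplified stand-in types: the function names mirror the diff (`init_upload_queue`, `load_layer_map`, `timeline_init_and_sync`), but the signatures and error types are placeholders (the real code returns `anyhow::Result` and takes many more parameters). The point is the ownership order: the upload queue borrows the index, then the layer-map load consumes it, with no `Option` unwrapping anywhere on the path.

    struct IndexPart {
        layer_count: usize,
    }

    struct RemoteClient;

    impl RemoteClient {
        // Borrows the index: the upload queue only reads it to seed its state.
        fn init_upload_queue(&self, index_part: &IndexPart) -> Result<(), String> {
            println!("upload queue seeded from index with {} layers", index_part.layer_count);
            Ok(())
        }
    }

    struct Timeline {
        remote_client: RemoteClient,
    }

    impl Timeline {
        // Takes the index by value: the layer-map load is its final consumer.
        fn load_layer_map(&self, index_part: IndexPart) -> Result<(), String> {
            println!("reconciling {} remote layers with local files", index_part.layer_count);
            Ok(())
        }
    }

    fn timeline_init_and_sync(timeline: &Timeline, index_part: IndexPart) -> Result<(), String> {
        // Borrow first, then move; the compiler enforces the order.
        timeline.remote_client.init_upload_queue(&index_part)?;
        timeline.load_layer_map(index_part)
    }

    fn main() -> Result<(), String> {
        let timeline = Timeline {
            remote_client: RemoteClient,
        };
        timeline_init_and_sync(&timeline, IndexPart { layer_count: 3 })
    }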

@@ -2404,7 +2404,7 @@ impl Timeline {
     pub(super) async fn load_layer_map(
         &self,
         disk_consistent_lsn: Lsn,
-        index_part: Option<IndexPart>,
+        index_part: IndexPart,
     ) -> anyhow::Result<()> {
         use init::{Decision::*, Discovered, DismissedLayer};
         use LayerName::*;
@@ -2468,8 +2468,7 @@ impl Timeline {
             );
         }
-        let decided =
-            init::reconcile(discovered_layers, index_part.as_ref(), disk_consistent_lsn);
+        let decided = init::reconcile(discovered_layers, &index_part, disk_consistent_lsn);
         let mut loaded_layers = Vec::new();
         let mut needs_cleanup = Vec::new();

@@ -125,19 +125,9 @@ pub(super) enum DismissedLayer {
 /// Merges local discoveries and remote [`IndexPart`] to a collection of decisions.
 pub(super) fn reconcile(
     local_layers: Vec<(LayerName, LocalLayerFileMetadata)>,
-    index_part: Option<&IndexPart>,
+    index_part: &IndexPart,
     disk_consistent_lsn: Lsn,
 ) -> Vec<(LayerName, Result<Decision, DismissedLayer>)> {
-    let Some(index_part) = index_part else {
-        // If we have no remote metadata, no local layer files are considered valid to load
-        return local_layers
-            .into_iter()
-            .map(|(layer_name, local_metadata)| {
-                (layer_name, Err(DismissedLayer::LocalOnly(local_metadata)))
-            })
-            .collect();
-    };
     let mut result = Vec::new();
     let mut remote_layers = HashMap::new();
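
To make the now-unconditional reconciliation easier to picture, here is a hedged, self-contained sketch with simplified stand-in types; the real `Decision`/`DismissedLayer` variants and layer metadata are richer, and the names below (`UseLocal`, `NeedsDownload`, `LocalOnly`) are illustrative only. The index is a required reference, mirroring `index_part: &IndexPart`: layers known to both sides are kept, remote-only layers are left for on-demand download, and local-only files are dismissed because they are not part of the durable remote state.

    use std::collections::HashMap;

    // Stand-in aliases; the real code uses LayerName, LocalLayerFileMetadata, IndexPart, Lsn, ...
    type LayerName = String;
    type LocalMeta = u64;  // e.g. local file size
    type RemoteMeta = u64; // e.g. metadata recorded in the remote index

    #[derive(Debug)]
    enum Decision {
        UseLocal(LocalMeta),
        NeedsDownload(RemoteMeta),
    }

    #[derive(Debug)]
    enum Dismissed {
        LocalOnly(LocalMeta),
    }

    fn reconcile(
        local_layers: Vec<(LayerName, LocalMeta)>,
        remote_layers: &HashMap<LayerName, RemoteMeta>,
    ) -> Vec<(LayerName, Result<Decision, Dismissed>)> {
        let mut result = Vec::new();
        // Start from the full remote set and strike out layers we also have locally.
        let mut remote_only: HashMap<&LayerName, &RemoteMeta> = remote_layers.iter().collect();

        for (name, local_meta) in local_layers {
            let decision = match remote_only.remove(&name) {
                // Known to the remote index: keep the local copy.
                Some(_remote_meta) => Ok(Decision::UseLocal(local_meta)),
                // Not in the index: not part of the durable state, dismiss it.
                None => Err(Dismissed::LocalOnly(local_meta)),
            };
            result.push((name, decision));
        }
        // Layers present only remotely can still be fetched on demand.
        for (name, remote_meta) in remote_only {
            result.push((name.clone(), Ok(Decision::NeedsDownload(*remote_meta))));
        }
        result
    }

    fn main() {
        let local = vec![("layer-a".to_string(), 100), ("layer-b".to_string(), 200)];
        let remote: HashMap<LayerName, RemoteMeta> =
            [("layer-a".to_string(), 100), ("layer-c".to_string(), 300)]
                .into_iter()
                .collect();
        for (name, decision) in reconcile(local, &remote) {
            println!("{name}: {decision:?}");
        }
    }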