Mirror of https://github.com/neondatabase/neon.git
Make TimelineInfo.local field mandatory.
It was only None when you queried the status of a timeline with the 'timeline_detail' management API call while the timeline was still being downloaded. You can check for that status with the 'tenant_status' API call instead, by looking at its 'has_in_progress_downloads' field.

The other case was when an error happened while trying to get the current logical size in a 'timeline_detail' request. It might make sense to tolerate such errors and leave the fields we cannot fill in as empty, None, 0 or similar, but it doesn't make sense to me to leave the whole 'local' struct empty in that case.
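For callers that used to treat a missing 'local' section as "this timeline is still being downloaded", that check now belongs to the tenant rather than the timeline. The sketch below is not part of this commit; it only illustrates the intended usage with the same Python test-client helpers ('tenant_status', 'timeline_detail') that the regression test at the end of this diff uses. The function name and its parameters are illustrative only.

    # Illustrative sketch only (not part of this commit).
    def get_local_timeline_info(client, tenant_id, timeline_id):
        # Download progress is reported per tenant via the 'tenant_status' call,
        # as in the regression test below.
        if client.tenant_status(tenant_id)["has_in_progress_downloads"]:
            return None  # still downloading; try again later
        detail = client.timeline_detail(tenant_id, timeline_id)
        # After this commit, 'local' is always present in the response,
        # so no None check is needed on it.
        return detail["local"]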
Committed by: Heikki Linnakangas
Parent: ee64a6b80b
Commit: 500239176c
@@ -358,9 +358,7 @@ fn print_timelines_tree(

     // Memorize all direct children of each timeline.
     for timeline in timelines.iter() {
-        if let Some(ancestor_timeline_id) =
-            timeline.local.as_ref().and_then(|l| l.ancestor_timeline_id)
-        {
+        if let Some(ancestor_timeline_id) = timeline.local.ancestor_timeline_id {
             timelines_hash
                 .get_mut(&ancestor_timeline_id)
                 .context("missing timeline info in the HashMap")?
@@ -371,13 +369,7 @@ fn print_timelines_tree(

     for timeline in timelines_hash.values() {
         // Start with root local timelines (no ancestors) first.
-        if timeline
-            .info
-            .local
-            .as_ref()
-            .and_then(|l| l.ancestor_timeline_id)
-            .is_none()
-        {
+        if timeline.info.local.ancestor_timeline_id.is_none() {
             print_timeline(0, &Vec::from([true]), timeline, &timelines_hash)?;
         }
     }
@@ -394,17 +386,17 @@ fn print_timeline(
     timeline: &TimelineTreeEl,
     timelines: &HashMap<TimelineId, TimelineTreeEl>,
 ) -> Result<()> {
-    let local_remote = match (timeline.info.local.as_ref(), timeline.info.remote.as_ref()) {
-        (None, None) => unreachable!("in this case no info for a timeline is found"),
-        (None, Some(_)) => "(R)",
-        (Some(_), None) => "(L)",
-        (Some(_), Some(_)) => "(L+R)",
+    let local_remote = if timeline.info.remote.is_some() {
+        "(L+R)"
+    } else {
+        "(L)"
     };

     // Draw main padding
     print!("{} ", local_remote);

     if nesting_level > 0 {
-        let ancestor_lsn = match timeline.info.local.as_ref().and_then(|i| i.ancestor_lsn) {
+        let ancestor_lsn = match timeline.info.local.ancestor_lsn {
             Some(lsn) => lsn.to_string(),
             None => "Unknown Lsn".to_string(),
         };
@@ -597,10 +589,7 @@ fn handle_tenant(tenant_match: &ArgMatches, env: &mut local_env::LocalEnv) -> an
                 Some(pg_version),
             )?;
             let new_timeline_id = timeline_info.timeline_id;
-            let last_record_lsn = timeline_info
-                .local
-                .context(format!("Failed to get last record LSN: no local timeline info for timeline {new_timeline_id}"))?
-                .last_record_lsn;
+            let last_record_lsn = timeline_info.local.last_record_lsn;

             env.register_branch_mapping(
                 DEFAULT_BRANCH_NAME.to_string(),
@@ -655,10 +644,7 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
                 pageserver.timeline_create(tenant_id, None, None, None, Some(pg_version))?;
             let new_timeline_id = timeline_info.timeline_id;

-            let last_record_lsn = timeline_info
-                .local
-                .expect("no local timeline info")
-                .last_record_lsn;
+            let last_record_lsn = timeline_info.local.last_record_lsn;
             env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id)?;

             println!(
@@ -738,10 +724,7 @@ fn handle_timeline(timeline_match: &ArgMatches, env: &mut local_env::LocalEnv) -
             )?;
             let new_timeline_id = timeline_info.timeline_id;

-            let last_record_lsn = timeline_info
-                .local
-                .expect("no local timeline info")
-                .last_record_lsn;
+            let last_record_lsn = timeline_info.local.last_record_lsn;

             env.register_branch_mapping(new_branch_name.to_string(), tenant_id, new_timeline_id)?;

@@ -801,7 +784,7 @@ fn handle_pg(pg_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
                         // Use the LSN at the end of the timeline.
                         timeline_infos
                             .get(&node.timeline_id)
-                            .and_then(|bi| bi.local.as_ref().map(|l| l.last_record_lsn.to_string()))
+                            .map(|bi| bi.local.last_record_lsn.to_string())
                             .unwrap_or_else(|| "?".to_string())
                     }
                     Some(lsn) => {
@@ -169,7 +169,7 @@ pub struct TimelineInfo {
     pub tenant_id: TenantId,
     #[serde_as(as = "DisplayFromStr")]
     pub timeline_id: TimelineId,
-    pub local: Option<LocalTimelineInfo>,
+    pub local: LocalTimelineInfo,
     pub remote: Option<RemoteTimelineInfo>,
 }

@@ -184,7 +184,7 @@ async fn timeline_create_handler(mut request: Request<Body>) -> Result<Response<
             Ok(Some(TimelineInfo {
                 tenant_id,
                 timeline_id: new_timeline.timeline_id,
-                local: Some(local_info),
+                local: local_info,
                 remote: None,
             }))
         }
@@ -220,17 +220,13 @@ async fn timeline_list_handler(request: Request<Body>) -> Result<Response<Body>,
             let mut response_data = Vec::with_capacity(timelines.len());
             for timeline in timelines {
                 let timeline_id = timeline.timeline_id;
-                let local = match local_timeline_info_from_timeline(
+                let local = local_timeline_info_from_timeline(
                     &timeline,
                     include_non_incremental_logical_size,
                     include_non_incremental_physical_size,
-                ) {
-                    Ok(local) => Some(local),
-                    Err(e) => {
-                        error!("Failed to convert tenant timeline {timeline_id} into the local one: {e:?}");
-                        None
-                    }
-                };
+                )
+                .context("Failed to convert tenant timeline {timeline_id} into the local one: {e:?}")
+                .map_err(ApiError::InternalServerError)?;

                 response_data.push(TimelineInfo {
                     tenant_id,
@@ -300,19 +296,15 @@ async fn timeline_detail_handler(request: Request<Body>) -> Result<Response<Body
     .await
     .map_err(|e: JoinError| ApiError::InternalServerError(e.into()))?;

-    let local_timeline_info = match timeline.and_then(|timeline| {
-        local_timeline_info_from_timeline(
-            &timeline,
-            include_non_incremental_logical_size,
-            include_non_incremental_physical_size,
-        )
-    }) {
-        Ok(local_info) => Some(local_info),
-        Err(e) => {
-            error!("Failed to get local timeline info: {e:#}");
-            None
-        }
-    };
+    let timeline = timeline.map_err(ApiError::NotFound)?;
+
+    let local_timeline_info = local_timeline_info_from_timeline(
+        &timeline,
+        include_non_incremental_logical_size,
+        include_non_incremental_physical_size,
+    )
+    .context("Failed to get local timeline info: {e:#}")
+    .map_err(ApiError::InternalServerError)?;

     let remote_timeline_info = {
         let remote_index_read = get_state(&request).remote_index.read().await;
@@ -331,21 +323,15 @@ async fn timeline_detail_handler(request: Request<Body>) -> Result<Response<Body
     .instrument(info_span!("timeline_detail", tenant = %tenant_id, timeline = %timeline_id))
     .await?;

-    if local_timeline_info.is_none() && remote_timeline_info.is_none() {
-        Err(ApiError::NotFound(anyhow!(
-            "Timeline {tenant_id}/{timeline_id} is not found neither locally nor remotely"
-        )))
-    } else {
-        json_response(
-            StatusCode::OK,
-            TimelineInfo {
-                tenant_id,
-                timeline_id,
-                local: local_timeline_info,
-                remote: remote_timeline_info,
-            },
-        )
-    }
+    json_response(
+        StatusCode::OK,
+        TimelineInfo {
+            tenant_id,
+            timeline_id,
+            local: local_timeline_info,
+            remote: remote_timeline_info,
+        },
+    )
 }

 async fn get_lsn_by_timestamp_handler(request: Request<Body>) -> Result<Response<Body>, ApiError> {
@@ -111,10 +111,9 @@ def test_remote_storage_backup_and_restore(
     with pytest.raises(Exception, match="Conflict: Tenant download is already in progress"):
         client.tenant_attach(tenant_id)

-    detail = client.timeline_detail(tenant_id, timeline_id)
-    log.info("Timeline detail with active failpoint: %s", detail)
-    assert detail["local"] is None
-    assert detail["remote"]["awaits_download"]
+    tenant_status = client.tenant_status(tenant_id)
+    log.info("Tenant status with active failpoint: %s", tenant_status)
+    assert tenant_status["has_in_progress_downloads"] is True

     # trigger temporary download files removal
     env.pageserver.stop()