diff --git a/pageserver/src/http/routes.rs b/pageserver/src/http/routes.rs index 0e12cc2b1e..08d3fcf8df 100644 --- a/pageserver/src/http/routes.rs +++ b/pageserver/src/http/routes.rs @@ -23,7 +23,6 @@ use super::models::{ TimelineCreateRequest, TimelineGcRequest, TimelineInfo, }; use crate::context::{DownloadBehavior, RequestContext}; -use crate::disk_usage_eviction_task; use crate::metrics::{StorageTimeOperation, STORAGE_TIME_GLOBAL}; use crate::pgdatadir_mapping::LsnForTimestamp; use crate::task_mgr::TaskKind; @@ -35,6 +34,7 @@ use crate::tenant::size::ModelInputs; use crate::tenant::storage_layer::LayerAccessStatsReset; use crate::tenant::{LogicalSizeCalculationCause, PageReconstructError, Timeline}; use crate::{config::PageServerConf, tenant::mgr}; +use crate::{disk_usage_eviction_task, tenant}; use utils::{ auth::JwtAuth, http::{ @@ -328,15 +328,17 @@ async fn timeline_create_handler( &ctx, ) .await { - Ok(Some(new_timeline)) => { + Ok(new_timeline) => { // Created. Construct a TimelineInfo for it. 
let timeline_info = build_timeline_info_common(&new_timeline, &ctx) .await .map_err(ApiError::InternalServerError)?; json_response(StatusCode::CREATED, timeline_info) } - Ok(None) => json_response(StatusCode::CONFLICT, ()), // timeline already exists - Err(err) => Err(ApiError::InternalServerError(err)), + Err(tenant::CreateTimelineError::AlreadyExists) => { + json_response(StatusCode::CONFLICT, ()) + } + Err(tenant::CreateTimelineError::Other(err)) => Err(ApiError::InternalServerError(err)), } } .instrument(info_span!("timeline_create", tenant = %tenant_id, timeline_id = %new_timeline_id, lsn=?request_data.ancestor_start_lsn, pg_version=?request_data.pg_version)) diff --git a/pageserver/src/tenant.rs b/pageserver/src/tenant.rs index 47e9b5b4ec..339291149f 100644 --- a/pageserver/src/tenant.rs +++ b/pageserver/src/tenant.rs @@ -501,6 +501,14 @@ impl DeletionGuard { } } +#[derive(thiserror::Error, Debug)] +pub enum CreateTimelineError { + #[error("a timeline with the given ID already exists")] + AlreadyExists, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + impl Tenant { /// Yet another helper for timeline initialization. /// Contains the common part of `load_local_timeline` and `load_remote_timeline`. @@ -1375,8 +1383,7 @@ impl Tenant { /// Returns the new timeline ID and reference to its Timeline object. /// /// If the caller specified the timeline ID to use (`new_timeline_id`), and timeline with - /// the same timeline ID already exists, returns None. If `new_timeline_id` is not given, - /// a new unique ID is generated. + /// the same timeline ID already exists, returns CreateTimelineError::AlreadyExists. 
pub async fn create_timeline( &self, new_timeline_id: TimelineId, @@ -1385,11 +1392,12 @@ impl Tenant { pg_version: u32, broker_client: storage_broker::BrokerClientChannel, ctx: &RequestContext, - ) -> anyhow::Result<Option<Arc<Timeline>>> { - anyhow::ensure!( - self.is_active(), - "Cannot create timelines on inactive tenant" - ); + ) -> Result<Arc<Timeline>, CreateTimelineError> { + if !self.is_active() { + return Err(CreateTimelineError::Other(anyhow::anyhow!( + "Cannot create timelines on inactive tenant" + ))); + } if let Ok(existing) = self.get_timeline(new_timeline_id, false) { debug!("timeline {new_timeline_id} already exists"); @@ -1409,7 +1417,7 @@ impl Tenant { .context("wait for timeline uploads to complete")?; } - return Ok(None); + return Err(CreateTimelineError::AlreadyExists); } let loaded_timeline = match ancestor_timeline_id { @@ -1424,12 +1432,12 @@ impl Tenant { let ancestor_ancestor_lsn = ancestor_timeline.get_ancestor_lsn(); if ancestor_ancestor_lsn > *lsn { // can we safely just branch from the ancestor instead? - bail!( + return Err(CreateTimelineError::Other(anyhow::anyhow!( "invalid start lsn {} for ancestor timeline {}: less than timeline ancestor lsn {}", lsn, ancestor_timeline_id, ancestor_ancestor_lsn, - ); + ))); } // Wait for the WAL to arrive and be processed on the parent branch up @@ -1463,7 +1471,7 @@ impl Tenant { })?; } - Ok(Some(loaded_timeline)) + Ok(loaded_timeline) } /// perform one garbage collection iteration, removing old data files from disk.