diff --git a/docs/consumption_metrics.md b/docs/consumption_metrics.md
index 6bcd28ab10..eb211af646 100644
--- a/docs/consumption_metrics.md
+++ b/docs/consumption_metrics.md
@@ -38,11 +38,6 @@ Currently, the following metrics are collected:
 Amount of WAL produced , by a timeline, i.e. last_record_lsn
 This is an absolute, per-timeline metric.
 
-- `resident_size`
-
-Size of all the layer files in the tenant's directory on disk on the pageserver.
-This is an absolute, per-tenant metric.
-
 - `remote_storage_size`
 
 Size of the remote storage (S3) directory.
diff --git a/pageserver/src/consumption_metrics/metrics.rs b/pageserver/src/consumption_metrics/metrics.rs
index 08ab69f349..acdf514101 100644
--- a/pageserver/src/consumption_metrics/metrics.rs
+++ b/pageserver/src/consumption_metrics/metrics.rs
@@ -30,9 +30,6 @@ pub(super) enum Name {
     /// Tenant remote size
     #[serde(rename = "remote_storage_size")]
     RemoteSize,
-    /// Tenant resident size
-    #[serde(rename = "resident_size")]
-    ResidentSize,
     /// Tenant synthetic size
     #[serde(rename = "synthetic_storage_size")]
     SyntheticSize,
@@ -187,18 +184,6 @@ impl MetricsKey {
         .absolute_values()
     }
 
-    /// Sum of [`Timeline::resident_physical_size`] for each `Tenant`.
-    ///
-    /// [`Timeline::resident_physical_size`]: crate::tenant::Timeline::resident_physical_size
-    const fn resident_size(tenant_id: TenantId) -> AbsoluteValueFactory {
-        MetricsKey {
-            tenant_id,
-            timeline_id: None,
-            metric: Name::ResidentSize,
-        }
-        .absolute_values()
-    }
-
     /// [`TenantShard::cached_synthetic_size`] as refreshed by [`calculate_synthetic_size_worker`].
     ///
     /// [`TenantShard::cached_synthetic_size`]: crate::tenant::TenantShard::cached_synthetic_size
@@ -261,10 +246,7 @@ where
     let mut tenants = std::pin::pin!(tenants);
 
     while let Some((tenant_id, tenant)) = tenants.next().await {
-        let mut tenant_resident_size = 0;
-
         let timelines = tenant.list_timelines();
-        let timelines_len = timelines.len();
         for timeline in timelines {
             let timeline_id = timeline.timeline_id;
 
@@ -287,16 +269,9 @@ where
                     continue;
                 }
             }
-
-            tenant_resident_size += timeline.resident_physical_size();
         }
 
-        if timelines_len == 0 {
-            // Force set it to 1 byte to avoid not being reported -- all timelines are offloaded.
-            tenant_resident_size = 1;
-        }
-
-        let snap = TenantSnapshot::collect(&tenant, tenant_resident_size);
+        let snap = TenantSnapshot::collect(&tenant);
         snap.to_metrics(tenant_id, Utc::now(), cache, &mut current_metrics);
     }
 
@@ -305,19 +280,14 @@
 
 /// In-between abstraction to allow testing metrics without actual Tenants.
 struct TenantSnapshot {
-    resident_size: u64,
     remote_size: u64,
     synthetic_size: u64,
 }
 
 impl TenantSnapshot {
     /// Collect tenant status to have metrics created out of it.
-    ///
-    /// `resident_size` is calculated of the timelines we had access to for other metrics, so we
-    /// cannot just list timelines here.
-    fn collect(t: &Arc<TenantShard>, resident_size: u64) -> Self {
+    fn collect(t: &Arc<TenantShard>) -> Self {
         TenantSnapshot {
-            resident_size,
             remote_size: t.remote_size(),
             // Note that this metric is calculated in a separate bgworker
             // Here we only use cached value, which may lag behind the real latest one
@@ -334,8 +304,6 @@ impl TenantSnapshot {
     ) {
         let remote_size = MetricsKey::remote_storage_size(tenant_id).at(now, self.remote_size);
 
-        let resident_size = MetricsKey::resident_size(tenant_id).at(now, self.resident_size);
-
         let synthetic_size = {
             let factory = MetricsKey::synthetic_size(tenant_id);
             let mut synthetic_size = self.synthetic_size;
@@ -355,11 +323,7 @@ impl TenantSnapshot {
             }
         };
 
-        metrics.extend(
-            [Some(remote_size), Some(resident_size), synthetic_size]
-                .into_iter()
-                .flatten(),
-        );
+        metrics.extend([Some(remote_size), synthetic_size].into_iter().flatten());
     }
 }
 
diff --git a/pageserver/src/consumption_metrics/metrics/tests.rs b/pageserver/src/consumption_metrics/metrics/tests.rs
index 52b4fb8680..5cfb361e40 100644
--- a/pageserver/src/consumption_metrics/metrics/tests.rs
+++ b/pageserver/src/consumption_metrics/metrics/tests.rs
@@ -224,7 +224,6 @@ fn post_restart_synthetic_size_uses_cached_if_available() {
     let tenant_id = TenantId::generate();
 
     let ts = TenantSnapshot {
-        resident_size: 1000,
         remote_size: 1000,
         // not yet calculated
         synthetic_size: 0,
@@ -245,7 +244,6 @@ fn post_restart_synthetic_size_uses_cached_if_available() {
         metrics,
         &[
             MetricsKey::remote_storage_size(tenant_id).at(now, 1000),
-            MetricsKey::resident_size(tenant_id).at(now, 1000),
             MetricsKey::synthetic_size(tenant_id).at(now, 1000),
         ]
     );
@@ -256,7 +254,6 @@ fn post_restart_synthetic_size_is_not_sent_when_not_cached() {
     let tenant_id = TenantId::generate();
 
     let ts = TenantSnapshot {
-        resident_size: 1000,
         remote_size: 1000,
         // not yet calculated
         synthetic_size: 0,
@@ -274,7 +271,6 @@ fn post_restart_synthetic_size_is_not_sent_when_not_cached() {
         metrics,
         &[
             MetricsKey::remote_storage_size(tenant_id).at(now, 1000),
-            MetricsKey::resident_size(tenant_id).at(now, 1000),
             // no synthetic size here
         ]
     );
@@ -295,14 +291,13 @@ pub(crate) const fn metric_examples_old(
     timeline_id: TimelineId,
     now: DateTime<Utc>,
     before: DateTime<Utc>,
-) -> [RawMetric; 6] {
+) -> [RawMetric; 5] {
     [
         MetricsKey::written_size(tenant_id, timeline_id).at_old_format(now, 0),
         MetricsKey::written_size_delta(tenant_id, timeline_id)
            .from_until_old_format(before, now, 0),
         MetricsKey::timeline_logical_size(tenant_id, timeline_id).at_old_format(now, 0),
         MetricsKey::remote_storage_size(tenant_id).at_old_format(now, 0),
-        MetricsKey::resident_size(tenant_id).at_old_format(now, 0),
         MetricsKey::synthetic_size(tenant_id).at_old_format(now, 1),
     ]
 }
@@ -312,13 +307,12 @@ pub(crate) const fn metric_examples(
     timeline_id: TimelineId,
     now: DateTime<Utc>,
     before: DateTime<Utc>,
-) -> [NewRawMetric; 6] {
+) -> [NewRawMetric; 5] {
     [
         MetricsKey::written_size(tenant_id, timeline_id).at(now, 0),
         MetricsKey::written_size_delta(tenant_id, timeline_id).from_until(before, now, 0),
         MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0),
         MetricsKey::remote_storage_size(tenant_id).at(now, 0),
-        MetricsKey::resident_size(tenant_id).at(now, 0),
         MetricsKey::synthetic_size(tenant_id).at(now, 1),
     ]
 }
diff --git a/pageserver/src/consumption_metrics/upload.rs b/pageserver/src/consumption_metrics/upload.rs
index 59e0145a5b..19c5aec5b3 100644
--- a/pageserver/src/consumption_metrics/upload.rs
+++ b/pageserver/src/consumption_metrics/upload.rs
@@ -521,10 +521,6 @@ mod tests {
                 line!(),
r#"{"type":"absolute","time":"2023-09-15T00:00:00.123456789Z","metric":"remote_storage_size","idempotency_key":"2023-09-15 00:00:00.123456789 UTC-1-0000","value":0,"tenant_id":"00000000000000000000000000000000"}"#, ), - ( - line!(), - r#"{"type":"absolute","time":"2023-09-15T00:00:00.123456789Z","metric":"resident_size","idempotency_key":"2023-09-15 00:00:00.123456789 UTC-1-0000","value":0,"tenant_id":"00000000000000000000000000000000"}"#, - ), ( line!(), r#"{"type":"absolute","time":"2023-09-15T00:00:00.123456789Z","metric":"synthetic_storage_size","idempotency_key":"2023-09-15 00:00:00.123456789 UTC-1-0000","value":1,"tenant_id":"00000000000000000000000000000000"}"#, @@ -564,7 +560,7 @@ mod tests { assert_eq!(upgraded_samples, new_samples); } - fn metric_samples_old() -> [RawMetric; 6] { + fn metric_samples_old() -> [RawMetric; 5] { let tenant_id = TenantId::from_array([0; 16]); let timeline_id = TimelineId::from_array([0xff; 16]); @@ -576,7 +572,7 @@ mod tests { super::super::metrics::metric_examples_old(tenant_id, timeline_id, now, before) } - fn metric_samples() -> [NewRawMetric; 6] { + fn metric_samples() -> [NewRawMetric; 5] { let tenant_id = TenantId::from_array([0; 16]); let timeline_id = TimelineId::from_array([0xff; 16]); diff --git a/test_runner/regress/test_pageserver_metric_collection.py b/test_runner/regress/test_pageserver_metric_collection.py index acec0ba44a..ffde08a73f 100644 --- a/test_runner/regress/test_pageserver_metric_collection.py +++ b/test_runner/regress/test_pageserver_metric_collection.py @@ -506,7 +506,6 @@ class SyntheticSizeVerifier: PER_METRIC_VERIFIERS = { "remote_storage_size": CannotVerifyAnything, - "resident_size": CannotVerifyAnything, "written_size": WrittenDataVerifier, "written_data_bytes_delta": WrittenDataDeltaVerifier, "timeline_logical_size": CannotVerifyAnything,