Mirror of https://github.com/neondatabase/neon.git, synced 2026-01-14 00:42:54 +00:00
pageserver: add changed_bytes_from_parent consumption metric
Committed by Erik Grinaker
Parent: 1a29f5672a
Commit: 600dca9e2e
@@ -27,6 +27,9 @@ pub(super) enum Name {
     /// Timeline logical size
     #[serde(rename = "timeline_logical_size")]
     LogicalSize,
+    /// Timeline delta from parent (WAL bytes clamped to logical size)
+    #[serde(rename = "timeline_changed_bytes_from_parent")]
+    ChangedBytesFromParent,
     /// Tenant remote size
     #[serde(rename = "remote_storage_size")]
     RemoteSize,
@@ -175,6 +178,24 @@ impl MetricsKey {
         .absolute_values()
     }
 
+    /// [`Timeline::get_current_lsn`] - [`Timeline::get_ancestor_lsn`], clamped to
+    /// [`Timeline::get_current_logical_size`].
+    ///
+    /// [`Timeline::get_current_lsn`]: crate::tenant::Timeline::get_current_lsn
+    /// [`Timeline::get_ancestor_lsn`]: crate::tenant::Timeline::get_ancestor_lsn
+    /// [`Timeline::get_current_logical_size`]: crate::tenant::Timeline::get_current_logical_size
+    const fn timeline_changed_bytes_from_parent(
+        tenant_id: TenantId,
+        timeline_id: TimelineId,
+    ) -> AbsoluteValueFactory {
+        MetricsKey {
+            tenant_id,
+            timeline_id: Some(timeline_id),
+            metric: Name::ChangedBytesFromParent,
+        }
+        .absolute_values()
+    }
+
     /// [`TenantShard::remote_size`]
     ///
     /// [`TenantShard::remote_size`]: crate::tenant::TenantShard::remote_size
@@ -371,6 +392,7 @@ struct TimelineSnapshot {
     loaded_at: (Lsn, SystemTime),
     last_record_lsn: Lsn,
     current_exact_logical_size: Option<u64>,
+    changed_bytes_from_parent: Option<u64>,
 }
 
 impl TimelineSnapshot {
@@ -406,10 +428,22 @@ impl TimelineSnapshot {
                 }
             };
 
+            // This is an approximation of how much data has changed on this branch vs. its
+            // ancestor: the number of bytes written to the WAL, clamped to the size of the branch.
+            let changed_bytes_from_parent = current_exact_logical_size.and_then(|size| {
+                if t.get_ancestor_lsn() == Lsn::MAX {
+                    return None;
+                }
+                t.get_last_record_lsn()
+                    .checked_sub(t.get_ancestor_lsn())
+                    .map(|wal_bytes| wal_bytes.0.min(size))
+            });
+
             Ok(Some(TimelineSnapshot {
                 loaded_at,
                 last_record_lsn,
                 current_exact_logical_size,
+                changed_bytes_from_parent,
             }))
         }
     }
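Taken on its own, the computation above amounts to min(last_record_lsn - ancestor_lsn, logical_size) for a timeline that has an ancestor. The following self-contained sketch is not part of the commit: it uses plain u64s in place of Lsn, omits the no-ancestor check, and uses made-up numbers purely to illustrate the clamping.

// Standalone illustration of the clamping rule used by the snapshot code above.
fn changed_bytes_from_parent(last_record_lsn: u64, ancestor_lsn: u64, logical_size: u64) -> Option<u64> {
    last_record_lsn
        .checked_sub(ancestor_lsn)
        .map(|wal_bytes| wal_bytes.min(logical_size))
}

fn main() {
    // Branch that rewrote most of its data: the WAL delta exceeds the logical size,
    // so the reported value is clamped to the branch's size.
    assert_eq!(changed_bytes_from_parent(0x50000, 0x1000, 0x42000), Some(0x42000));
    // Branch that only appended a little on top of its parent: the WAL delta is reported as-is.
    assert_eq!(changed_bytes_from_parent(0x2000, 0x1000, 0x42000), Some(0x1000));
    // A last-record LSN behind the ancestor LSN yields None rather than underflowing.
    assert_eq!(changed_bytes_from_parent(0x1000, 0x2000, 0x42000), None);
}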
@@ -487,6 +521,17 @@ impl TimelineSnapshot {
                 metrics.push(factory.at(now, size));
             }
         }
+
+        {
+            let factory = MetricsKey::timeline_changed_bytes_from_parent(tenant_id, timeline_id);
+            let current_or_previous = self
+                .changed_bytes_from_parent
+                .or_else(|| cache.get(factory.key()).map(|item| item.value));
+
+            if let Some(size) = current_or_previous {
+                metrics.push(factory.at(now, size));
+            }
+        }
     }
 }
 
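Like the logical-size block it mirrors, the new block prefers the freshly computed value and only falls back to the last value reported for this key from the cache, so reporting can continue across restarts before the logical size (and therefore this metric) has been recomputed. A trivial sketch of that fallback rule, using a hypothetical helper that is not part of the commit:

// Prefer the current snapshot value; otherwise repeat what was last reported for this key.
fn value_to_report(current: Option<u64>, previously_reported: Option<u64>) -> Option<u64> {
    current.or(previously_reported)
}

fn main() {
    assert_eq!(value_to_report(Some(0x2000), Some(0x1000)), Some(0x2000)); // fresh value wins
    assert_eq!(value_to_report(None, Some(0x1000)), Some(0x1000)); // cached value after restart
    assert_eq!(value_to_report(None, None), None); // nothing to report yet
}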
@@ -18,6 +18,7 @@ fn startup_collected_timeline_metrics_before_advancing() {
         loaded_at: (disk_consistent_lsn, SystemTime::now()),
         last_record_lsn: disk_consistent_lsn,
         current_exact_logical_size: Some(0x42000),
+        changed_bytes_from_parent: Some(0x1000),
     };
 
     let now = DateTime::<Utc>::from(SystemTime::now());
@@ -33,7 +34,8 @@ fn startup_collected_timeline_metrics_before_advancing() {
                 0
             ),
             MetricsKey::written_size(tenant_id, timeline_id).at(now, disk_consistent_lsn.0),
-            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000)
+            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000),
+            MetricsKey::timeline_changed_bytes_from_parent(tenant_id, timeline_id).at(now, 0x1000)
         ]
     );
 }
@@ -60,6 +62,7 @@ fn startup_collected_timeline_metrics_second_round() {
         loaded_at: (disk_consistent_lsn, init),
         last_record_lsn: disk_consistent_lsn,
         current_exact_logical_size: Some(0x42000),
+        changed_bytes_from_parent: Some(0x1000),
     };
 
     snap.to_metrics(tenant_id, timeline_id, now, &mut metrics, &cache);
@@ -69,7 +72,8 @@ fn startup_collected_timeline_metrics_second_round() {
         &[
             MetricsKey::written_size_delta(tenant_id, timeline_id).from_until(before, now, 0),
             MetricsKey::written_size(tenant_id, timeline_id).at(now, disk_consistent_lsn.0),
-            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000)
+            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000),
+            MetricsKey::timeline_changed_bytes_from_parent(tenant_id, timeline_id).at(now, 0x1000)
         ]
     );
 }
@@ -104,6 +108,7 @@ fn startup_collected_timeline_metrics_nth_round_at_same_lsn() {
         loaded_at: (disk_consistent_lsn, init),
         last_record_lsn: disk_consistent_lsn,
         current_exact_logical_size: Some(0x42000),
+        changed_bytes_from_parent: Some(0x1000),
     };
 
     snap.to_metrics(tenant_id, timeline_id, now, &mut metrics, &cache);
@@ -113,7 +118,8 @@ fn startup_collected_timeline_metrics_nth_round_at_same_lsn() {
         &[
             MetricsKey::written_size_delta(tenant_id, timeline_id).from_until(just_before, now, 0),
             MetricsKey::written_size(tenant_id, timeline_id).at(now, disk_consistent_lsn.0),
-            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000)
+            MetricsKey::timeline_logical_size(tenant_id, timeline_id).at(now, 0x42000),
+            MetricsKey::timeline_changed_bytes_from_parent(tenant_id, timeline_id).at(now, 0x1000)
         ]
     );
 }
@@ -141,6 +147,7 @@ fn post_restart_written_sizes_with_rolled_back_last_record_lsn() {
         loaded_at: (Lsn(50), at_restart),
         last_record_lsn: Lsn(50),
         current_exact_logical_size: None,
+        changed_bytes_from_parent: None,
     };
 
     let mut cache = HashMap::from([
@@ -202,6 +209,7 @@ fn post_restart_current_exact_logical_size_uses_cached() {
         loaded_at: (Lsn(50), at_restart),
         last_record_lsn: Lsn(50),
         current_exact_logical_size: None,
+        changed_bytes_from_parent: Some(0x1000),
     };
 
     let cache = HashMap::from([MetricsKey::timeline_logical_size(tenant_id, timeline_id)
@@ -511,4 +511,5 @@ PER_METRIC_VERIFIERS = {
     "written_data_bytes_delta": WrittenDataDeltaVerifier,
     "timeline_logical_size": CannotVerifyAnything,
     "synthetic_storage_size": SyntheticSizeVerifier,
+    "timeline_changed_bytes_from_parent": CannotVerifyAnything,
 }