perf: avoid duplicate computation in promql (#5863)

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>
Author: Ruihang Xia
Date: 2025-04-09 19:53:25 +08:00
Committed by: GitHub
Parent: dda7496265
Commit: 08d0f31865
10 changed files with 42 additions and 8 deletions
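
Every call-site change below flips a PromQL UDF or UDAF from Volatility::Immutable to Volatility::Volatile. For the scalar range functions, the planner emits a Projection that computes the function, with a Filter (prom_rate(...) IS NOT NULL) on top. With an Immutable function, DataFusion's filter pushdown is allowed to rewrite that predicate in terms of the projection's inputs, which would evaluate the expensive range expression once for the filter and again for the projection. Declaring the functions Volatile blocks that rewrite, so the filter stays above the projection and only reads the already-computed column; the updated TQL ANALYZE plan at the end of this diff shows that shape, with FilterExec: prom_rate(j_range,i,j)@1 IS NOT NULL sitting directly over the ProjectionExec that computes prom_rate. The following is a self-contained toy sketch of the cost difference, with a hypothetical expensive_rate standing in for the real UDF; it is not GreptimeDB or DataFusion code.

use std::cell::Cell;

// Toy stand-in for an expensive PromQL range function such as prom_rate,
// instrumented to count how many times it runs. (Hypothetical helper, not
// GreptimeDB code.)
fn expensive_rate(calls: &Cell<usize>, v: f64) -> f64 {
    calls.set(calls.get() + 1);
    v * 0.1
}

fn main() {
    let rows = [1.0_f64, 2.0, f64::NAN, 4.0];

    // Shape this commit preserves: Filter above Projection. The projection
    // evaluates the expression once per row; the filter only inspects the
    // already-computed column.
    let calls = Cell::new(0);
    let kept: Vec<f64> = rows
        .iter()
        .map(|&v| expensive_rate(&calls, v)) // "ProjectionExec"
        .filter(|r| !r.is_nan())             // "FilterExec" on the projected column
        .collect();
    println!("filter over projection: {} evaluations, {} rows", calls.get(), kept.len());

    // Shape the optimizer may produce for an Immutable function: the predicate
    // is rewritten against the original input, so the expression is evaluated
    // once for the filter and once more for the projection.
    let calls = Cell::new(0);
    let kept: Vec<f64> = rows
        .iter()
        .filter(|&&v| !expensive_rate(&calls, v).is_nan()) // pushed-down predicate
        .map(|&v| expensive_rate(&calls, v))               // projection recomputes
        .collect();
    println!("pushed-down filter:     {} evaluations, {} rows", calls.get(), kept.len());
}

On this four-row input the first shape prints 4 evaluations and the second prints 7, which is the duplicate work the commit title refers to.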


@@ -139,7 +139,7 @@ fn build_struct(
Self::name(),
Self::input_type(),
Self::return_type(),
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(Self::calc) as _,
)
}


@@ -77,7 +77,7 @@ impl<const IS_COUNTER: bool, const IS_RATE: bool> ExtrapolatedRate<IS_COUNTER, IS_RATE>
name,
input_types,
DataType::Float64,
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(move |input: &_| Self::new(range_length).calc(input)) as _,
)
}


@@ -74,7 +74,7 @@ impl HoltWinters {
Self::name(),
Self::input_type(),
Self::return_type(),
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(move |input: &_| Self::new(level, trend).calc(input)) as _,
)
}


@@ -47,7 +47,7 @@ impl<const IS_RATE: bool> IDelta<IS_RATE> {
Self::name(),
Self::input_type(),
Self::return_type(),
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(Self::calc) as _,
)
}


@@ -55,7 +55,7 @@ impl PredictLinear {
Self::name(),
input_types,
DataType::Float64,
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(move |input: &_| Self::new(t).predict_linear(input)) as _,
)
}


@@ -51,7 +51,7 @@ impl QuantileOverTime {
Self::name(),
input_types,
DataType::Float64,
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(move |input: &_| Self::new(quantile).quantile_over_time(input)) as _,
)
}


@@ -45,7 +45,7 @@ pub fn quantile_udaf(q: f64) -> Arc<AggregateUDF> {
vec![DataType::Float64],
// Output type: the φ-quantile
Arc::new(DataType::Float64),
-Volatility::Immutable,
+Volatility::Volatile,
// Create the accumulator
Arc::new(move |_| Ok(Box::new(QuantileAccumulator::new(q)))),
// Intermediate state types


@@ -48,7 +48,7 @@ impl Round {
Self::name(),
Self::input_type(),
Self::return_type(),
-Volatility::Immutable,
+Volatility::Volatile,
Arc::new(move |input: &_| Self::new(nearest).calc(input)) as _,
)
}


@@ -125,6 +125,32 @@ TQL ANALYZE (0, 10, '5s') test;
|_|_| Total rows: 0_|
+-+-+-+
+-- SQLNESS REPLACE (metrics.*) REDACTED
+-- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED
+-- SQLNESS REPLACE (-+) -
+-- SQLNESS REPLACE (\s\s+) _
+-- SQLNESS REPLACE (peers.*) REDACTED
+-- SQLNESS REPLACE region=\d+\(\d+,\s+\d+\) region=REDACTED
+TQL ANALYZE (0, 10, '5s') rate(test[10s]);
++-+-+-+
+| stage | node | plan_|
++-+-+-+
+| 0_| 0_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED
+|_|_|_FilterExec: prom_rate(j_range,i,j)@1 IS NOT NULL REDACTED
+|_|_|_ProjectionExec: expr=[j@1 as j, prom_rate(j_range@4, i@0, j@1) as prom_rate(j_range,i,j), k@2 as k, l@3 as l] REDACTED
+|_|_|_PromRangeManipulateExec: req range=[0..10000], interval=[5000], eval range=[10000], time index=[j] REDACTED
+|_|_|_PromSeriesNormalizeExec: offset=[0], time index=[j], filter NaN: [true] REDACTED
+|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED
+|_|_|_MergeScanExec: REDACTED
+|_|_|_|
+| 1_| 0_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED
+|_|_|_|
+| 1_| 1_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED
+|_|_|_|
+|_|_| Total rows: 0_|
++-+-+-+
drop table test;
Affected Rows: 0


@@ -55,4 +55,12 @@ CREATE TABLE test(i DOUBLE, j TIMESTAMP TIME INDEX, k STRING, l STRING, PRIMARY
-- SQLNESS REPLACE region=\d+\(\d+,\s+\d+\) region=REDACTED
TQL ANALYZE (0, 10, '5s') test;
+-- SQLNESS REPLACE (metrics.*) REDACTED
+-- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED
+-- SQLNESS REPLACE (-+) -
+-- SQLNESS REPLACE (\s\s+) _
+-- SQLNESS REPLACE (peers.*) REDACTED
+-- SQLNESS REPLACE region=\d+\(\d+,\s+\d+\) region=REDACTED
+TQL ANALYZE (0, 10, '5s') rate(test[10s]);
drop table test;