feat: support PromQL operations over the same metric (#3124)

* update sqlness result

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>

* update ut cases

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>

* remove dead code

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>

---------

Signed-off-by: Ruihang Xia <waynestxia@gmail.com>
Author: Ruihang Xia
Date: 2024-01-12 07:07:17 +08:00
Committed by: GitHub
Parent: 8ec1e42754
Commit: 0882da4d01
5 changed files with 143 additions and 69 deletions


@@ -291,13 +291,13 @@ impl PromPlanner {
            (None, None) => {
                let left_input = self.prom_expr_to_plan(*lhs.clone()).await?;
                let left_field_columns = self.ctx.field_columns.clone();
-               let left_table_ref: OwnedTableReference =
+               let mut left_table_ref: OwnedTableReference =
                    self.ctx.table_name.clone().unwrap_or_default().into();
                let left_context = self.ctx.clone();
                let right_input = self.prom_expr_to_plan(*rhs.clone()).await?;
                let right_field_columns = self.ctx.field_columns.clone();
-               let right_table_ref: OwnedTableReference =
+               let mut right_table_ref: OwnedTableReference =
                    self.ctx.table_name.clone().unwrap_or_default().into();
                let right_context = self.ctx.clone();
@@ -316,6 +316,12 @@ impl PromPlanner {
                }
                // normal join
+               if left_table_ref == right_table_ref {
+                   // rename table references to avoid ambiguity
+                   left_table_ref = OwnedTableReference::bare("lhs");
+                   right_table_ref = OwnedTableReference::bare("rhs");
+                   self.ctx.table_name = Some("lhs".to_string());
+               }
                let mut field_columns =
                    left_field_columns.iter().zip(right_field_columns.iter());
                let join_plan = self.join_on_non_field_columns(
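
The six added lines above are the crux of the feature: when both operands of a PromQL binary expression resolve to the same table, the resulting self-join would carry ambiguous qualified columns (some_metric.tag_0 on both sides). A minimal stand-alone sketch of the rename rule, with plain strings standing in for OwnedTableReference (illustrative types, not the planner's own):

fn disambiguate(left_table_ref: &mut String, right_table_ref: &mut String) {
    // Same source table on both sides: rename so the join keys stay unambiguous.
    if left_table_ref == right_table_ref {
        *left_table_ref = "lhs".to_string();
        *right_table_ref = "rhs".to_string();
    }
}

fn main() {
    let (mut l, mut r) = ("some_metric".to_string(), "some_metric".to_string());
    disambiguate(&mut l, &mut r);
    assert_eq!((l.as_str(), r.as_str()), ("lhs", "rhs"));
}

In the planner the rename surfaces as a SubqueryAlias over each join input, which is why the expected plans further down read "SubqueryAlias: lhs" and "SubqueryAlias: rhs".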
@@ -1847,7 +1853,7 @@ impl PromPlanner {
            .zip(self.ctx.field_columns.iter())
            .map(|(expr, name)| Ok(DfExpr::Alias(Alias::new(expr, name.to_string()))));
-       // chain non-value columns (unchanged) and value columns (applied computation then alias)
+       // chain non-field columns (unchanged) and field columns (applied computation then alias)
        let project_fields = non_field_columns_iter
            .chain(field_columns_iter)
            .collect::<Result<Vec<_>>>()?;
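
The reworded comment describes how the projection is assembled: tag and time-index columns pass through untouched, while each computed field expression is aliased back to its display name. A toy mirror of that chaining pattern, with strings standing in for DfExpr (column names illustrative):

fn main() {
    let non_field = vec!["tag_0".to_string(), "timestamp".to_string()];
    let field = vec![("lhs.field_0 + rhs.field_0", "lhs.field_0 + rhs.field_0")];
    // Pass-through columns first, then computed columns aliased to their names.
    let projection: Vec<String> = non_field
        .into_iter()
        .chain(field.into_iter().map(|(expr, name)| format!("{expr} AS {name}")))
        .collect();
    assert_eq!(projection.len(), 3);
}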
@@ -2379,16 +2385,16 @@ mod test {
        .unwrap();
    let expected = String::from(
-       "Projection: some_metric.tag_0, some_metric.timestamp, some_metric.field_0 + some_metric.field_0 AS some_metric.field_0 + some_metric.field_0 [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), some_metric.field_0 + some_metric.field_0:Float64;N]\
-       \n  Inner Join: some_metric.tag_0 = some_metric.tag_0, some_metric.timestamp = some_metric.timestamp [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N, tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
-       \n    SubqueryAlias: some_metric [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
+       "Projection: lhs.tag_0, lhs.timestamp, lhs.field_0 + rhs.field_0 AS lhs.field_0 + rhs.field_0 [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), lhs.field_0 + rhs.field_0:Float64;N]\
+       \n  Inner Join: lhs.tag_0 = rhs.tag_0, lhs.timestamp = rhs.timestamp [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N, tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
+       \n    SubqueryAlias: lhs [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n      PromInstantManipulate: range=[0..100000000], lookback=[1000], interval=[5000], time index=[timestamp] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n        PromSeriesNormalize: offset=[0], time index=[timestamp], filter NaN: [false] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n          PromSeriesDivide: tags=[\"tag_0\"] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n            Sort: some_metric.tag_0 DESC NULLS LAST, some_metric.timestamp DESC NULLS LAST [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n              Filter: some_metric.tag_0 = Utf8(\"foo\") [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n                TableScan: some_metric, unsupported_filters=[tag_0 = Utf8(\"foo\"), timestamp >= TimestampMillisecond(-1000, None), timestamp <= TimestampMillisecond(100001000, None)] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
-       \n    SubqueryAlias: some_metric [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
+       \n    SubqueryAlias: rhs [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n      PromInstantManipulate: range=[0..100000000], lookback=[1000], interval=[5000], time index=[timestamp] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n        PromSeriesNormalize: offset=[0], time index=[timestamp], filter NaN: [false] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\
        \n          PromSeriesDivide: tags=[\"tag_0\"] [tag_0:Utf8, timestamp:Timestamp(Millisecond, None), field_0:Float64;N]\


@@ -441,12 +441,12 @@ async fn aggregators_complex_combined_aggrs(instance: Arc<dyn MockInstance>) {
unix_epoch_plus_100s(),
Duration::from_secs(60),
Duration::from_secs(0),
"+------------+---------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+\
\n| job | ts | http_requests.http_requests.http_requests.SUM(http_requests.value) + http_requests.MIN(http_requests.value) + http_requests.MAX(http_requests.value) + http_requests.AVG(http_requests.value) |\
\n+------------+---------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+\
\n| api-server | 1970-01-01T00:00:00 | 1750.0 |\
\n| app-server | 1970-01-01T00:00:00 | 4550.0 |\
\n+------------+---------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+",
"+------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------------+\
\n| job | ts | lhs.lhs.lhs.SUM(http_requests.value) + rhs.MIN(http_requests.value) + http_requests.MAX(http_requests.value) + rhs.AVG(http_requests.value) |\
\n+------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------------+\
\n| api-server | 1970-01-01T00:00:00 | 1750.0 |\
\n| app-server | 1970-01-01T00:00:00 | 4550.0 |\
\n+------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------------+",
)
.await;
}
@@ -466,12 +466,12 @@ async fn two_aggregators_combined_aggrs(instance: Arc<dyn MockInstance>) {
unix_epoch_plus_100s(),
Duration::from_secs(60),
Duration::from_secs(0),
"+------------+---------------------+---------------------------------------------------------------------------------+\
\n| job | ts | http_requests.SUM(http_requests.value) + http_requests.MIN(http_requests.value) |\
\n+------------+---------------------+---------------------------------------------------------------------------------+\
\n| api-server | 1970-01-01T00:00:00 | 1100.0 |\
\n| app-server | 1970-01-01T00:00:00 | 3100.0 |\
\n+------------+---------------------+---------------------------------------------------------------------------------+",
"+------------+---------------------+-------------------------------------------------------------+\
\n| job | ts | lhs.SUM(http_requests.value) + rhs.MIN(http_requests.value) |\
\n+------------+---------------------+-------------------------------------------------------------+\
\n| api-server | 1970-01-01T00:00:00 | 1100.0 |\
\n| app-server | 1970-01-01T00:00:00 | 3100.0 |\
\n+------------+---------------------+-------------------------------------------------------------+",
)
.await;
}
@@ -519,18 +519,18 @@ async fn binary_op_plain_columns(instance: Arc<dyn MockInstance>) {
unix_epoch_plus_100s(),
Duration::from_secs(60),
Duration::from_secs(0),
"+------------+----------+------------+---------------------+-------------------------------------------+\
\n| job | instance | group | ts | http_requests.value - http_requests.value |\
\n+------------+----------+------------+---------------------+-------------------------------------------+\
\n| api-server | 0 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 0 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 1 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 1 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 0 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 0 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 1 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 1 | production | 1970-01-01T00:00:00 | 0.0 |\
\n+------------+----------+------------+---------------------+-------------------------------------------+",
"+------------+----------+------------+---------------------+-----------------------+\
\n| job | instance | group | ts | lhs.value - rhs.value |\
\n+------------+----------+------------+---------------------+-----------------------+\
\n| api-server | 0 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 0 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 1 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| api-server | 1 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 0 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 0 | production | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 1 | canary | 1970-01-01T00:00:00 | 0.0 |\
\n| app-server | 1 | production | 1970-01-01T00:00:00 | 0.0 |\
\n+------------+----------+------------+---------------------+-----------------------+",
)
.await;
}


@@ -49,40 +49,81 @@ Affected Rows: 3
-- SQLNESS SORT_RESULT 3 1
tql eval (0, 30, '10s'), data / data;
-+---------------------+-----------------------+-----------------------+-----------------------+
-| ts | data.val1 / data.val1 | data.val2 / data.val2 | data.val3 / data.val3 |
-+---------------------+-----------------------+-----------------------+-----------------------+
-| 1970-01-01T00:00:00 | 1.0 | 1.0 | 1.0 |
-| 1970-01-01T00:00:10 | 1.0 | 1.0 | 1.0 |
-| 1970-01-01T00:00:20 | 1.0 | 1.0 | 1.0 |
-| 1970-01-01T00:00:30 | 1.0 | 1.0 | 1.0 |
-+---------------------+-----------------------+-----------------------+-----------------------+
++---------------------+---------------------+---------------------+---------------------+
+| ts | lhs.val1 / rhs.val1 | lhs.val2 / rhs.val2 | lhs.val3 / rhs.val3 |
++---------------------+---------------------+---------------------+---------------------+
+| 1970-01-01T00:00:00 | 1.0 | 1.0 | 1.0 |
+| 1970-01-01T00:00:10 | 1.0 | 1.0 | 1.0 |
+| 1970-01-01T00:00:20 | 1.0 | 1.0 | 1.0 |
+| 1970-01-01T00:00:30 | 1.0 | 1.0 | 1.0 |
++---------------------+---------------------+---------------------+---------------------+
-- SQLNESS SORT_RESULT 3 1
tql eval (0, 30, '10s'), data{__field__="val1"} + data{__field__="val2"};
-+---------------------+-----------------------+
-| ts | data.val1 + data.val2 |
-+---------------------+-----------------------+
-| 1970-01-01T00:00:00 | 101.0 |
-| 1970-01-01T00:00:10 | 202.0 |
-| 1970-01-01T00:00:20 | 303.0 |
-| 1970-01-01T00:00:30 | 303.0 |
-+---------------------+-----------------------+
++---------------------+---------------------+
+| ts | lhs.val1 + rhs.val2 |
++---------------------+---------------------+
+| 1970-01-01T00:00:00 | 101.0 |
+| 1970-01-01T00:00:10 | 202.0 |
+| 1970-01-01T00:00:20 | 303.0 |
+| 1970-01-01T00:00:30 | 303.0 |
++---------------------+---------------------+
-- SQLNESS SORT_RESULT 3 1
tql eval (0, 30, '10s'), data{__field__="val1", __field__="val2"} + data{__field__="val2", __field__="val3"};
-+---------------------+-----------------------+-----------------------+
-| ts | data.val1 + data.val2 | data.val2 + data.val3 |
-+---------------------+-----------------------+-----------------------+
-| 1970-01-01T00:00:00 | 101.0 | 10100.0 |
-| 1970-01-01T00:00:10 | 202.0 | 20200.0 |
-| 1970-01-01T00:00:20 | 303.0 | 30300.0 |
-| 1970-01-01T00:00:30 | 303.0 | 30300.0 |
-+---------------------+-----------------------+-----------------------+
++---------------------+---------------------+---------------------+
+| ts | lhs.val1 + rhs.val2 | lhs.val2 + rhs.val3 |
++---------------------+---------------------+---------------------+
+| 1970-01-01T00:00:00 | 101.0 | 10100.0 |
+| 1970-01-01T00:00:10 | 202.0 | 20200.0 |
+| 1970-01-01T00:00:20 | 303.0 | 30300.0 |
+| 1970-01-01T00:00:30 | 303.0 | 30300.0 |
++---------------------+---------------------+---------------------+
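
A sanity check on the three result sets above. The INSERT is not part of this hunk, but the output implies seed fields val1 = 1, 2, 3 with val2 = 100 * val1 and val3 = 100 * val2, the last step repeated via lookback; treating them as plain arrays under that inference:

fn main() {
    let val1 = [1.0_f64, 2.0, 3.0, 3.0];
    let val2 = [100.0_f64, 200.0, 300.0, 300.0];
    // data / data divides every field by itself, hence the columns of 1.0.
    assert!(val1.iter().chain(&val2).all(|v| v / v == 1.0));
    // data{__field__="val1"} + data{__field__="val2"} self-joins the metric as
    // lhs/rhs and adds the selected fields row-wise: 101, 202, 303, 303.
    let sums: Vec<f64> = val1.iter().zip(&val2).map(|(a, b)| a + b).collect();
    assert_eq!(sums, vec![101.0, 202.0, 303.0, 303.0]);
}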
drop table data;
Affected Rows: 0
+create table host_cpu_seconds_total (
+    ts timestamp time index,
+    val double,
+    host string,
+    `mode` string,
+    primary key (host, `mode`)
+);
+Affected Rows: 0
+insert into host_cpu_seconds_total values
+    (0, 0.1, 'host1', 'idle'),
+    (0, 0.2, 'host1', 'user'),
+    (0, 0.3, 'host1', 'system'),
+    (10000, 0.4, 'host1', 'idle'),
+    (10000, 0.5, 'host1', 'user'),
+    (10000, 0.6, 'host1', 'system'),
+    (20000, 0.2, 'host1', 'idle'),
+    (20000, 0.3, 'host1', 'user'),
+    (20000, 0.4, 'host1', 'system'),
+    (30000, 0.5, 'host1', 'idle'),
+    (30000, 0.6, 'host1', 'user'),
+    (30000, 0.7, 'host1', 'system');
+Affected Rows: 12
+-- SQLNESS SORT_RESULT 3 1
+tql eval (0, 30, '10s') (sum by(host) (irate(host_cpu_seconds_total{mode!="idle"}[1m0s])) / sum by (host)((irate(host_cpu_seconds_total[1m0s])))) * 100;
++-------+---------------------+--------------------------------------------------------------------------------------+
+| host | ts | lhs.SUM(prom_irate(ts_range,val)) / rhs.SUM(prom_irate(ts_range,val)) * Float64(100) |
++-------+---------------------+--------------------------------------------------------------------------------------+
+| host1 | 1970-01-01T00:00:10 | 66.66666666666666 |
+| host1 | 1970-01-01T00:00:20 | 77.77777777777779 |
+| host1 | 1970-01-01T00:00:30 | 66.66666666666666 |
++-------+---------------------+--------------------------------------------------------------------------------------+
+drop table host_cpu_seconds_total;
+Affected Rows: 0
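
The percentages fall straight out of the inserted samples. A worked check, assuming irate reduces to Δval/Δt over the two most recent samples with PromQL's counter-reset rule (whether prom_irate applies exactly that rule is an assumption here, but the numbers line up):

fn main() {
    // irate: rate between the last two samples; if the counter went down,
    // assume a reset and count the current value as the whole increase.
    fn irate(prev: f64, cur: f64, dt: f64) -> f64 {
        if cur >= prev { (cur - prev) / dt } else { cur / dt }
    }
    // t = 10s: every mode rose by 0.3 over 10s, so non-idle / total = 2 / 3.
    let r = [irate(0.1, 0.4, 10.0), irate(0.2, 0.5, 10.0), irate(0.3, 0.6, 10.0)];
    assert!(((r[1] + r[2]) / r.iter().sum::<f64>() * 100.0 - 66.66666666666666).abs() < 1e-6);
    // t = 20s: all three counters dropped, so the reset branch applies.
    let r = [irate(0.4, 0.2, 10.0), irate(0.5, 0.3, 10.0), irate(0.6, 0.4, 10.0)];
    assert!(((r[1] + r[2]) / r.iter().sum::<f64>() * 100.0 - 77.77777777777779).abs() < 1e-6);
}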


@@ -26,3 +26,30 @@ tql eval (0, 30, '10s'), data{__field__="val1"} + data{__field__="val2"};
tql eval (0, 30, '10s'), data{__field__="val1", __field__="val2"} + data{__field__="val2", __field__="val3"};
drop table data;
+create table host_cpu_seconds_total (
+    ts timestamp time index,
+    val double,
+    host string,
+    `mode` string,
+    primary key (host, `mode`)
+);
+insert into host_cpu_seconds_total values
+    (0, 0.1, 'host1', 'idle'),
+    (0, 0.2, 'host1', 'user'),
+    (0, 0.3, 'host1', 'system'),
+    (10000, 0.4, 'host1', 'idle'),
+    (10000, 0.5, 'host1', 'user'),
+    (10000, 0.6, 'host1', 'system'),
+    (20000, 0.2, 'host1', 'idle'),
+    (20000, 0.3, 'host1', 'user'),
+    (20000, 0.4, 'host1', 'system'),
+    (30000, 0.5, 'host1', 'idle'),
+    (30000, 0.6, 'host1', 'user'),
+    (30000, 0.7, 'host1', 'system');
+-- SQLNESS SORT_RESULT 3 1
+tql eval (0, 30, '10s') (sum by(host) (irate(host_cpu_seconds_total{mode!="idle"}[1m0s])) / sum by (host)((irate(host_cpu_seconds_total[1m0s])))) * 100;
+drop table host_cpu_seconds_total;


@@ -66,24 +66,24 @@ tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) by (model) + sum(prompt *
-- SQLNESS SORT_RESULT 3 1
tql eval(0, 10, '5s') sum(completion / 1000) + max(completion / 1000);
-+---------------------+---------------------------------------------------------------------------+
-| ts | completion.SUM(val / Float64(1000)) + completion.MAX(val / Float64(1000)) |
-+---------------------+---------------------------------------------------------------------------+
-| 1970-01-01T00:00:00 | 0.02 |
-| 1970-01-01T00:00:05 | 0.05 |
-| 1970-01-01T00:00:10 | 0.08 |
-+---------------------+---------------------------------------------------------------------------+
++---------------------+-------------------------------------------------------------+
+| ts | lhs.SUM(val / Float64(1000)) + rhs.MAX(val / Float64(1000)) |
++---------------------+-------------------------------------------------------------+
+| 1970-01-01T00:00:00 | 0.02 |
+| 1970-01-01T00:00:05 | 0.05 |
+| 1970-01-01T00:00:10 | 0.08 |
++---------------------+-------------------------------------------------------------+
-- SQLNESS SORT_RESULT 3 1
tql eval(0, 10, '5s') sum(completion / 1000) + sum(completion / 1000);
-+---------------------+---------------------------------------------------------------------------+
-| ts | completion.SUM(val / Float64(1000)) + completion.SUM(val / Float64(1000)) |
-+---------------------+---------------------------------------------------------------------------+
-| 1970-01-01T00:00:00 | 0.02 |
-| 1970-01-01T00:00:05 | 0.06 |
-| 1970-01-01T00:00:10 | 0.1 |
-+---------------------+---------------------------------------------------------------------------+
++---------------------+-------------------------------------------------------------+
+| ts | lhs.SUM(val / Float64(1000)) + rhs.SUM(val / Float64(1000)) |
++---------------------+-------------------------------------------------------------+
+| 1970-01-01T00:00:00 | 0.02 |
+| 1970-01-01T00:00:05 | 0.06 |
+| 1970-01-01T00:00:10 | 0.1 |
++---------------------+-------------------------------------------------------------+
drop table completion;
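
As a closing consistency note, the two expectations above have to agree with each other: with S = sum(val / 1000) and M = max(val / 1000) per step, the second table is 2S and the first is S + M, so S = [0.01, 0.03, 0.05] and M = [0.01, 0.02, 0.03] can be solved back out (derived from the tables themselves; this hunk does not show the completion seed data):

fn main() {
    let s = [0.01_f64, 0.03, 0.05]; // sum(val / 1000), from the 2S column
    let m = [0.01_f64, 0.02, 0.03]; // max(val / 1000), from the S + M column
    let close = |a: f64, b: f64| (a - b).abs() < 1e-9;
    let sum_plus_max = [0.02_f64, 0.05, 0.08];
    let sum_plus_sum = [0.02_f64, 0.06, 0.1];
    for i in 0..3 {
        assert!(close(s[i] + m[i], sum_plus_max[i]));
        assert!(close(s[i] + s[i], sum_plus_sum[i]));
    }
}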