From d97a76c312db3ddcf038666edde0dd555414fd46 Mon Sep 17 00:00:00 2001 From: Ruihang Xia Date: Fri, 11 Apr 2025 17:07:16 +0800 Subject: [PATCH] blocklist in commutativity rule Signed-off-by: Ruihang Xia --- Cargo.lock | 48 +++--- Cargo.toml | 18 +-- .../src/extension_plan/instant_manipulate.rs | 4 +- src/promql/src/functions.rs | 2 +- src/promql/src/functions/extrapolate_rate.rs | 6 +- src/promql/src/functions/quantile_aggr.rs | 2 +- src/query/src/dist_plan/commutativity.rs | 40 +++-- .../src/query_engine/default_serializer.rs | 22 +++ .../common/select/tql_filter.result | 33 +++-- .../common/tql-explain-analyze/analyze.result | 84 +++++++---- .../common/tql-explain-analyze/explain.result | 137 ++++++++---------- 11 files changed, 231 insertions(+), 165 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a9ec65de9c..e9a41b4d52 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2895,7 +2895,7 @@ checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" [[package]] name = "datafusion" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-array", @@ -2946,7 +2946,7 @@ dependencies = [ [[package]] name = "datafusion-catalog" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "async-trait", @@ -2966,7 +2966,7 @@ dependencies = [ [[package]] name = "datafusion-catalog-listing" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-schema", @@ -2989,7 +2989,7 @@ dependencies = [ [[package]] name = "datafusion-common" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3014,7 +3014,7 @@ dependencies = [ [[package]] name = "datafusion-common-runtime" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "log", "tokio", @@ -3023,12 +3023,12 @@ dependencies = [ [[package]] name = "datafusion-doc" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" 
[[package]] name = "datafusion-execution" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "dashmap", @@ -3046,7 +3046,7 @@ dependencies = [ [[package]] name = "datafusion-expr" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "chrono", @@ -3066,7 +3066,7 @@ dependencies = [ [[package]] name = "datafusion-expr-common" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "datafusion-common", @@ -3077,7 +3077,7 @@ dependencies = [ [[package]] name = "datafusion-functions" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-buffer", @@ -3106,7 +3106,7 @@ dependencies = [ [[package]] name = "datafusion-functions-aggregate" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3127,7 +3127,7 @@ dependencies = [ [[package]] name = "datafusion-functions-aggregate-common" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3139,7 +3139,7 @@ dependencies = [ [[package]] name = "datafusion-functions-nested" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-array", @@ -3161,7 +3161,7 @@ dependencies = [ [[package]] name = "datafusion-functions-table" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "async-trait", @@ -3176,7 
+3176,7 @@ dependencies = [ [[package]] name = "datafusion-functions-window" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "datafusion-common", "datafusion-doc", @@ -3192,7 +3192,7 @@ dependencies = [ [[package]] name = "datafusion-functions-window-common" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "datafusion-common", "datafusion-physical-expr-common", @@ -3201,7 +3201,7 @@ dependencies = [ [[package]] name = "datafusion-macros" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "datafusion-expr", "quote", @@ -3211,7 +3211,7 @@ dependencies = [ [[package]] name = "datafusion-optimizer" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "chrono", @@ -3229,7 +3229,7 @@ dependencies = [ [[package]] name = "datafusion-physical-expr" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3252,7 +3252,7 @@ dependencies = [ [[package]] name = "datafusion-physical-expr-common" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3265,7 +3265,7 @@ dependencies = [ [[package]] name = "datafusion-physical-optimizer" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-schema", @@ -3286,7 +3286,7 @@ dependencies = [ [[package]] name = "datafusion-physical-plan" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = 
"git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "ahash 0.8.11", "arrow", @@ -3316,7 +3316,7 @@ dependencies = [ [[package]] name = "datafusion-sql" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "arrow", "arrow-array", @@ -3334,7 +3334,7 @@ dependencies = [ [[package]] name = "datafusion-substrait" version = "45.0.0" -source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=90dba8f3ff278df3a6b1de64e145c275e7e84663#90dba8f3ff278df3a6b1de64e145c275e7e84663" +source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=07dd0bee9e524d83228847c15af6c12f438349ab#07dd0bee9e524d83228847c15af6c12f438349ab" dependencies = [ "async-recursion", "async-trait", diff --git a/Cargo.toml b/Cargo.toml index ba0fb9a889..78d45f9bf5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -113,15 +113,15 @@ clap = { version = "4.4", features = ["derive"] } config = "0.13.0" crossbeam-utils = "0.8" dashmap = "6.1" -datafusion = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-common = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-functions = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-optimizer = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-physical-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-physical-plan = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-sql = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } -datafusion-substrait = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "90dba8f3ff278df3a6b1de64e145c275e7e84663" } +datafusion = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-common = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-functions = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-optimizer = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-physical-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-physical-plan = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-sql = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = 
"07dd0bee9e524d83228847c15af6c12f438349ab" } +datafusion-substrait = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "07dd0bee9e524d83228847c15af6c12f438349ab" } deadpool = "0.12" deadpool-postgres = "0.14" derive_builder = "0.20" diff --git a/src/promql/src/extension_plan/instant_manipulate.rs b/src/promql/src/extension_plan/instant_manipulate.rs index 1071e94f9a..a70bafc168 100644 --- a/src/promql/src/extension_plan/instant_manipulate.rs +++ b/src/promql/src/extension_plan/instant_manipulate.rs @@ -91,9 +91,9 @@ impl UserDefinedLogicalNodeCore for InstantManipulate { _exprs: Vec, inputs: Vec, ) -> DataFusionResult { - if inputs.is_empty() { + if inputs.len() != 1 { return Err(DataFusionError::Internal( - "InstantManipulate should have at least one input".to_string(), + "InstantManipulate should have exact one input".to_string(), )); } diff --git a/src/promql/src/functions.rs b/src/promql/src/functions.rs index dade00ea7b..0294214842 100644 --- a/src/promql/src/functions.rs +++ b/src/promql/src/functions.rs @@ -40,7 +40,7 @@ pub use holt_winters::HoltWinters; pub use idelta::IDelta; pub use predict_linear::PredictLinear; pub use quantile::QuantileOverTime; -pub use quantile_aggr::quantile_udaf; +pub use quantile_aggr::{quantile_udaf, QUANTILE_NAME}; pub use resets::Resets; pub use round::Round; diff --git a/src/promql/src/functions/extrapolate_rate.rs b/src/promql/src/functions/extrapolate_rate.rs index 8977eaf083..56a8029958 100644 --- a/src/promql/src/functions/extrapolate_rate.rs +++ b/src/promql/src/functions/extrapolate_rate.rs @@ -204,7 +204,7 @@ impl ExtrapolatedRate { - pub fn name() -> &'static str { + pub const fn name() -> &'static str { "prom_delta" } @@ -215,7 +215,7 @@ impl ExtrapolatedRate { // rate impl ExtrapolatedRate { - pub fn name() -> &'static str { + pub const fn name() -> &'static str { "prom_rate" } @@ -226,7 +226,7 @@ impl ExtrapolatedRate { // increase impl ExtrapolatedRate { - pub fn name() -> &'static str { + pub const fn name() -> &'static str { "prom_increase" } diff --git a/src/promql/src/functions/quantile_aggr.rs b/src/promql/src/functions/quantile_aggr.rs index 2f8d9edd9d..6af43eda52 100644 --- a/src/promql/src/functions/quantile_aggr.rs +++ b/src/promql/src/functions/quantile_aggr.rs @@ -25,7 +25,7 @@ use datatypes::arrow::datatypes::{DataType, Field, Float64Type}; use crate::functions::quantile::quantile_impl; -const QUANTILE_NAME: &str = "quantile"; +pub const QUANTILE_NAME: &str = "quantile"; const VALUES_FIELD_NAME: &str = "values"; const DEFAULT_LIST_FIELD_NAME: &str = "item"; diff --git a/src/query/src/dist_plan/commutativity.rs b/src/query/src/dist_plan/commutativity.rs index 19ef70c3d5..7f8fca5af9 100644 --- a/src/query/src/dist_plan/commutativity.rs +++ b/src/query/src/dist_plan/commutativity.rs @@ -19,6 +19,9 @@ use datafusion_expr::{Expr, LogicalPlan, UserDefinedLogicalNode}; use promql::extension_plan::{ EmptyMetric, InstantManipulate, RangeManipulate, SeriesDivide, SeriesNormalize, }; +use promql::functions::{ + Delta, HoltWinters, Increase, PredictLinear, QuantileOverTime, Rate, Round, QUANTILE_NAME, +}; use crate::dist_plan::merge_sort::{merge_sort_transformer, MergeSortLogicalPlan}; use crate::dist_plan::MergeScanLogicalPlan; @@ -55,12 +58,16 @@ impl Categorizer { LogicalPlan::Filter(filter) => Self::check_expr(&filter.predicate), LogicalPlan::Window(_) => Commutativity::Unimplemented, LogicalPlan::Aggregate(aggr) => { - if Self::check_partition(&aggr.group_expr, &partition_cols) { - return Commutativity::Commutative; + 
if !Self::check_partition(&aggr.group_expr, &partition_cols) { + return Commutativity::NonCommutative; } - - // check all children exprs and uses the strictest level - Commutativity::Unimplemented + for expr in &aggr.aggr_expr { + let commutativity = Self::check_expr(expr); + if !matches!(commutativity, Commutativity::Commutative) { + return commutativity; + } + } + Commutativity::Commutative } LogicalPlan::Sort(_) => { if partition_cols.is_empty() { @@ -136,7 +143,7 @@ impl Categorizer { || name == RangeManipulate::name() => { // They should always follow Series Divide. - // Either commutative or non-commutative (which will be blocked by SeriesDivide). + // Either all commutative or all non-commutative (which will be blocked by SeriesDivide). Commutativity::Commutative } name if name == EmptyMetric::name() @@ -165,8 +172,24 @@ impl Categorizer { | Expr::Negative(_) | Expr::Between(_) | Expr::Exists(_) - | Expr::InList(_) - | Expr::ScalarFunction(_) => Commutativity::Commutative, + | Expr::InList(_) => Commutativity::Commutative, + Expr::ScalarFunction(udf) => match udf.name() { + name if name == Delta::name() + || name == Rate::name() + || name == Increase::name() + || name == QuantileOverTime::name() + || name == PredictLinear::name() + || name == HoltWinters::name() + || name == Round::name() => + { + Commutativity::Unimplemented + } + _ => Commutativity::Commutative, + }, + Expr::AggregateFunction(udaf) => match udaf.func.name() { + name if name == QUANTILE_NAME => Commutativity::Unimplemented, + _ => Commutativity::Commutative, + }, Expr::Like(_) | Expr::SimilarTo(_) @@ -175,7 +198,6 @@ impl Categorizer { | Expr::Case(_) | Expr::Cast(_) | Expr::TryCast(_) - | Expr::AggregateFunction(_) | Expr::WindowFunction(_) | Expr::InSubquery(_) | Expr::ScalarSubquery(_) diff --git a/src/query/src/query_engine/default_serializer.rs b/src/query/src/query_engine/default_serializer.rs index 23d6789866..24e672b385 100644 --- a/src/query/src/query_engine/default_serializer.rs +++ b/src/query/src/query_engine/default_serializer.rs @@ -29,6 +29,10 @@ use datafusion::execution::{FunctionRegistry, SessionStateBuilder}; use datafusion::logical_expr::LogicalPlan; use datafusion_expr::UserDefinedLogicalNode; use greptime_proto::substrait_extension::MergeScan as PbMergeScan; +use promql::functions::{ + AbsentOverTime, AvgOverTime, Changes, CountOverTime, Deriv, IDelta, LastOverTime, MaxOverTime, + MinOverTime, PresentOverTime, Resets, StddevOverTime, StdvarOverTime, SumOverTime, +}; use prost::Message; use session::context::QueryContextRef; use snafu::ResultExt; @@ -132,6 +136,24 @@ impl SubstraitPlanDecoder for DefaultPlanDecoder { let _ = session_state.register_udaf(Arc::new(HllState::state_udf_impl())); let _ = session_state.register_udaf(Arc::new(HllState::merge_udf_impl())); let _ = session_state.register_udaf(Arc::new(GeoPathAccumulator::udf_impl())); + + // TODO(ruihang): add increase, rate, delta + let _ = session_state.register_udf(Arc::new(IDelta::<false>::scalar_udf())); + let _ = session_state.register_udf(Arc::new(IDelta::<true>::scalar_udf())); + let _ = session_state.register_udf(Arc::new(Resets::scalar_udf())); + let _ = session_state.register_udf(Arc::new(Changes::scalar_udf())); + let _ = session_state.register_udf(Arc::new(Deriv::scalar_udf())); + let _ = session_state.register_udf(Arc::new(AvgOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(MinOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(MaxOverTime::scalar_udf())); + let _ = 
session_state.register_udf(Arc::new(SumOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(CountOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(LastOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(AbsentOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(PresentOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(StddevOverTime::scalar_udf())); + let _ = session_state.register_udf(Arc::new(StdvarOverTime::scalar_udf())); + // TODO(ruihang): add quantile_over_time, predict_linear, holt_winters, round } let logical_plan = DFLogicalSubstraitConvertor .decode(message, session_state) diff --git a/tests/cases/standalone/common/select/tql_filter.result b/tests/cases/standalone/common/select/tql_filter.result index 9e41d5ed9b..56a1ac8bcb 100644 --- a/tests/cases/standalone/common/select/tql_filter.result +++ b/tests/cases/standalone/common/select/tql_filter.result @@ -17,11 +17,14 @@ tql analyze (1, 3, '1s') t1{ a = "a" }; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time index=[b] REDACTED -|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time index=[b] REDACTED +|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED +|_|_|_SortExec: expr=[a@0 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([a@0], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 3_| +-+-+-+ @@ -37,11 +40,14 @@ tql analyze (1, 3, '1s') t1{ a =~ ".*" }; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time index=[b] REDACTED -|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time index=[b] REDACTED +|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED +|_|_|_SortExec: expr=[a@0 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([a@0], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 6_| +-+-+-+ @@ -57,11 +63,14 @@ tql analyze (1, 3, '1s') t1{ a =~ "a.*" }; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time index=[b] REDACTED -|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[1000..3000], lookback=[300000], interval=[1000], time 
index=[b] REDACTED +|_|_|_PromSeriesDivideExec: tags=["a"] REDACTED +|_|_|_SortExec: expr=[a@0 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([a@0], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 3_| +-+-+-+ diff --git a/tests/cases/standalone/common/tql-explain-analyze/analyze.result b/tests/cases/standalone/common/tql-explain-analyze/analyze.result index af1a3d4fce..b3f8d55a39 100644 --- a/tests/cases/standalone/common/tql-explain-analyze/analyze.result +++ b/tests/cases/standalone/common/tql-explain-analyze/analyze.result @@ -19,11 +19,14 @@ TQL ANALYZE (0, 10, '5s') test; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 4_| +-+-+-+ @@ -41,11 +44,14 @@ TQL ANALYZE (0, 10, '1s', '2s') test; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[2000], interval=[1000], time index=[j] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[2000], interval=[1000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 4_| +-+-+-+ @@ -62,11 +68,14 @@ TQL ANALYZE ('1970-01-01T00:00:00'::timestamp, '1970-01-01T00:00:00'::timestamp +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED 
+|_|_|_RepartitionExec: partitioning=Hash([k@2], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 4_| +-+-+-+ @@ -85,11 +94,14 @@ TQL ANALYZE VERBOSE (0, 10, '5s') test; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED -|_|_|_MergeScanExec: REDACTED +| 0_| 0_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries, projection=["i", "j", "k"], filters=[j >= TimestampMillisecond(-300000, None), j <= TimestampMillisecond(310000, None)], REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=1 (1 memtable ranges, 0 file 0 ranges), distribution=PerSeries, projection=["i", "j", "k"], filters=[j >= TimestampMillisecond(-300000, None), j <= TimestampMillisecond(310000, None)], REDACTED |_|_|_| |_|_| Total rows: 4_| +-+-+-+ @@ -114,13 +126,23 @@ TQL ANALYZE (0, 10, '5s') test; +-+-+-+ | stage | node | plan_| +-+-+-+ -| 0_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +| 0_| 0_|_SortPreservingMergeExec: [k@2 ASC, l@3 ASC, j@1 ASC] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC, j@1 ASC], preserve_partitioning=[true] REDACTED |_|_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2, l@3], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| -| 1_| 1_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 1_|_PromInstantManipulateExec: range=[0..10000], lookback=[300000], interval=[5000], time index=[j] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2, l@3], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 0_| +-+-+-+ @@ -139,14 +161,26 @@ TQL ANALYZE (0, 10, '5s') rate(test[10s]); | 0_| 0_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED |_|_|_FilterExec: prom_rate(j_range,i,j)@1 IS NOT NULL REDACTED |_|_|_ProjectionExec: expr=[j@1 as j, prom_rate(j_range@4, i@0, j@1) as 
prom_rate(j_range,i,j), k@2 as k, l@3 as l] REDACTED -|_|_|_PromRangeManipulateExec: req range=[0..10000], interval=[5000], eval range=[10000], time index=[j] REDACTED -|_|_|_PromSeriesNormalizeExec: offset=[0], time index=[j], filter NaN: [true] REDACTED -|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +|_|_|_RepartitionExec: partitioning=REDACTED +|_|_|_SortPreservingMergeExec: [k@2 ASC, l@3 ASC, j@1 ASC] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC, j@1 ASC], preserve_partitioning=[true] REDACTED |_|_|_MergeScanExec: REDACTED |_|_|_| -| 1_| 0_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 0_|_PromRangeManipulateExec: req range=[0..10000], interval=[5000], eval range=[10000], time index=[j] REDACTED +|_|_|_PromSeriesNormalizeExec: offset=[0], time index=[j], filter NaN: [true] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2, l@3], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| -| 1_| 1_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED +| 1_| 1_|_PromRangeManipulateExec: req range=[0..10000], interval=[5000], eval range=[10000], time index=[j] REDACTED +|_|_|_PromSeriesNormalizeExec: offset=[0], time index=[j], filter NaN: [true] REDACTED +|_|_|_PromSeriesDivideExec: tags=["k", "l"] REDACTED +|_|_|_SortExec: expr=[k@2 ASC, l@3 ASC], preserve_partitioning=[true] REDACTED +|_|_|_CoalesceBatchesExec: target_batch_size=8192 REDACTED +|_|_|_RepartitionExec: partitioning=Hash([k@2, l@3], 32), input_partitions=1 REDACTED +|_|_|_SeqScan: region=REDACTED, partition_count=0 (0 memtable ranges, 0 file 0 ranges), distribution=PerSeries REDACTED |_|_|_| |_|_| Total rows: 0_| +-+-+-+ diff --git a/tests/cases/standalone/common/tql-explain-analyze/explain.result b/tests/cases/standalone/common/tql-explain-analyze/explain.result index e1bbaa89e3..a8abcac4bf 100644 --- a/tests/cases/standalone/common/tql-explain-analyze/explain.result +++ b/tests/cases/standalone/common/tql-explain-analyze/explain.result @@ -12,18 +12,13 @@ Affected Rows: 3 -- SQLNESS REPLACE (peers.*) REDACTED TQL EXPLAIN (0, 10, '5s') test; -+---------------+-----------------------------------------------------------------------------------------------+ -| plan_type | plan | -+---------------+-----------------------------------------------------------------------------------------------+ -| logical_plan | PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j] | -| | PromSeriesDivide: tags=["k"] | -| | Projection: test.i, test.j, test.k | -| | MergeScan [is_placeholder=false] | -| physical_plan | PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j] | -| | PromSeriesDivideExec: tags=["k"] | -| | MergeScanExec: REDACTED -| | | -+---------------+-----------------------------------------------------------------------------------------------+ ++---------------+-------------------------------------------------+ +| plan_type | plan | ++---------------+-------------------------------------------------+ +| logical_plan | MergeScan [is_placeholder=false] | +| physical_plan | MergeScanExec: REDACTED +| | | 
++---------------+-------------------------------------------------+ -- 'lookback' parameter is not fully supported, the test has to be updated -- explain at 0s, 5s and 10s. No point at 0s. @@ -31,36 +26,26 @@ TQL EXPLAIN (0, 10, '5s') test; -- SQLNESS REPLACE (peers.*) REDACTED TQL EXPLAIN (0, 10, '1s', '2s') test; -+---------------+---------------------------------------------------------------------------------------------+ -| plan_type | plan | -+---------------+---------------------------------------------------------------------------------------------+ -| logical_plan | PromInstantManipulate: range=[0..0], lookback=[2000], interval=[300000], time index=[j] | -| | PromSeriesDivide: tags=["k"] | -| | Projection: test.i, test.j, test.k | -| | MergeScan [is_placeholder=false] | -| physical_plan | PromInstantManipulateExec: range=[0..0], lookback=[2000], interval=[300000], time index=[j] | -| | PromSeriesDivideExec: tags=["k"] | -| | MergeScanExec: REDACTED -| | | -+---------------+---------------------------------------------------------------------------------------------+ ++---------------+-------------------------------------------------+ +| plan_type | plan | ++---------------+-------------------------------------------------+ +| logical_plan | MergeScan [is_placeholder=false] | +| physical_plan | MergeScanExec: REDACTED +| | | ++---------------+-------------------------------------------------+ -- explain at 0s, 5s and 10s. No point at 0s. -- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED -- SQLNESS REPLACE (peers.*) REDACTED TQL EXPLAIN ('1970-01-01T00:00:00'::timestamp, '1970-01-01T00:00:00'::timestamp + '10 seconds'::interval, '5s') test; -+---------------+-----------------------------------------------------------------------------------------------+ -| plan_type | plan | -+---------------+-----------------------------------------------------------------------------------------------+ -| logical_plan | PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j] | -| | PromSeriesDivide: tags=["k"] | -| | Projection: test.i, test.j, test.k | -| | MergeScan [is_placeholder=false] | -| physical_plan | PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j] | -| | PromSeriesDivideExec: tags=["k"] | -| | MergeScanExec: REDACTED -| | | -+---------------+-----------------------------------------------------------------------------------------------+ ++---------------+-------------------------------------------------+ +| plan_type | plan | ++---------------+-------------------------------------------------+ +| logical_plan | MergeScan [is_placeholder=false] | +| physical_plan | MergeScanExec: REDACTED +| | | ++---------------+-------------------------------------------------+ -- explain verbose at 0s, 5s and 10s. No point at 0s. 
-- SQLNESS REPLACE (-+) - @@ -84,9 +69,7 @@ TQL EXPLAIN VERBOSE (0, 10, '5s') test; | logical_plan after expand_wildcard_rule_| SAME TEXT AS ABOVE_| | logical_plan after resolve_grouping_function_| SAME TEXT AS ABOVE_| | logical_plan after type_coercion_| SAME TEXT AS ABOVE_| -| logical_plan after DistPlannerAnalyzer_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivide: tags=["k"]_| -|_|_Projection: test.i, test.j, test.k_| +| logical_plan after DistPlannerAnalyzer_| Projection: test.i, test.j, test.k_| |_|_MergeScan [is_placeholder=false]_| | analyzed_logical_plan_| SAME TEXT AS ABOVE_| | logical_plan after eliminate_nested_union_| SAME TEXT AS ABOVE_| @@ -113,37 +96,45 @@ TQL EXPLAIN VERBOSE (0, 10, '5s') test; | logical_plan after unwrap_cast_in_comparison_| SAME TEXT AS ABOVE_| | logical_plan after common_sub_expression_eliminate_| SAME TEXT AS ABOVE_| | logical_plan after eliminate_group_by_constant_| SAME TEXT AS ABOVE_| +| logical_plan after optimize_projections_| MergeScan [is_placeholder=false]_| +| logical_plan after ScanHintRule_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_nested_union_| SAME TEXT AS ABOVE_| +| logical_plan after simplify_expressions_| SAME TEXT AS ABOVE_| +| logical_plan after unwrap_cast_in_comparison_| SAME TEXT AS ABOVE_| +| logical_plan after replace_distinct_aggregate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_join_| SAME TEXT AS ABOVE_| +| logical_plan after decorrelate_predicate_subquery_| SAME TEXT AS ABOVE_| +| logical_plan after scalar_subquery_to_join_| SAME TEXT AS ABOVE_| +| logical_plan after extract_equijoin_predicate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_duplicated_expr_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_filter_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_cross_join_| SAME TEXT AS ABOVE_| +| logical_plan after common_sub_expression_eliminate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_limit_| SAME TEXT AS ABOVE_| +| logical_plan after propagate_empty_relation_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_one_union_| SAME TEXT AS ABOVE_| +| logical_plan after filter_null_join_keys_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_outer_join_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_limit_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_filter_| SAME TEXT AS ABOVE_| +| logical_plan after single_distinct_aggregation_to_group_by | SAME TEXT AS ABOVE_| +| logical_plan after simplify_expressions_| SAME TEXT AS ABOVE_| +| logical_plan after unwrap_cast_in_comparison_| SAME TEXT AS ABOVE_| +| logical_plan after common_sub_expression_eliminate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_group_by_constant_| SAME TEXT AS ABOVE_| | logical_plan after optimize_projections_| SAME TEXT AS ABOVE_| | logical_plan after ScanHintRule_| SAME TEXT AS ABOVE_| -| logical_plan_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivide: tags=["k"]_| -|_|_Projection: test.i, test.j, test.k_| -|_|_MergeScan [is_placeholder=false]_| -| initial_physical_plan_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_ProjectionExec: expr=[i@0 as i, j@1 as j, k@2 as k]_| -|_|_MergeScanExec: REDACTED +| logical_plan_| MergeScan [is_placeholder=false]_| +| initial_physical_plan_| MergeScanExec: REDACTED |_|_| -| initial_physical_plan_with_stats_| 
PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j], statistics=[Rows=Inexact(0), Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]] | -|_|_PromSeriesDivideExec: tags=["k"], statistics=[Rows=Absent, Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]]_| -|_|_ProjectionExec: expr=[i@0 as i, j@1 as j, k@2 as k], statistics=[Rows=Absent, Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]]_| -|_|_MergeScanExec: REDACTED +| initial_physical_plan_with_stats_| MergeScanExec: REDACTED |_|_| -| initial_physical_plan_with_schema_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j], schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| -|_|_PromSeriesDivideExec: tags=["k"], schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| -|_|_ProjectionExec: expr=[i@0 as i, j@1 as j, k@2 as k], schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| -|_|_MergeScanExec: REDACTED +| initial_physical_plan_with_schema_| MergeScanExec: REDACTED |_|_| -| physical_plan after parallelize_scan_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_ProjectionExec: expr=[i@0 as i, j@1 as j, k@2 as k]_| -|_|_MergeScanExec: REDACTED +| physical_plan after parallelize_scan_| MergeScanExec: REDACTED |_|_| | physical_plan after PassDistributionRule_| SAME TEXT AS ABOVE_| | physical_plan after OutputRequirements_| OutputRequirementExec_| -|_|_PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_ProjectionExec: expr=[i@0 as i, j@1 as j, k@2 as k]_| |_|_MergeScanExec: REDACTED |_|_| | physical_plan after aggregate_statistics_| SAME TEXT AS ABOVE_| @@ -153,15 +144,9 @@ TQL EXPLAIN VERBOSE (0, 10, '5s') test; | physical_plan after CombinePartialFinalAggregate_| SAME TEXT AS ABOVE_| | physical_plan after EnforceSorting_| SAME TEXT AS ABOVE_| | physical_plan after OptimizeAggregateOrder_| SAME TEXT AS ABOVE_| -| physical_plan after ProjectionPushdown_| OutputRequirementExec_| -|_|_PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_MergeScanExec: REDACTED -|_|_| +| physical_plan after ProjectionPushdown_| SAME TEXT AS ABOVE_| | physical_plan after coalesce_batches_| SAME TEXT AS ABOVE_| -| physical_plan after OutputRequirements_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_MergeScanExec: REDACTED +| physical_plan after OutputRequirements_| MergeScanExec: REDACTED |_|_| | physical_plan after LimitAggregation_| SAME TEXT AS ABOVE_| | physical_plan after ProjectionPushdown_| SAME TEXT AS ABOVE_| @@ -169,17 +154,11 @@ TQL EXPLAIN VERBOSE (0, 10, '5s') test; | physical_plan after WindowedSortRule_| SAME TEXT AS ABOVE_| | physical_plan after RemoveDuplicateRule_| SAME TEXT AS ABOVE_| | physical_plan after SanityCheckPlan_| SAME TEXT AS ABOVE_| -| physical_plan_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| -|_|_PromSeriesDivideExec: tags=["k"]_| -|_|_MergeScanExec: REDACTED +| physical_plan_| MergeScanExec: REDACTED |_|_| -| physical_plan_with_stats_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j], statistics=[Rows=Inexact(0), Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]] | 
-|_|_PromSeriesDivideExec: tags=["k"], statistics=[Rows=Absent, Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]]_| -|_|_MergeScanExec: REDACTED +| physical_plan_with_stats_| MergeScanExec: REDACTED |_|_| -| physical_plan_with_schema_| PromInstantManipulateExec: range=[0..0], lookback=[300000], interval=[300000], time index=[j], schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| -|_|_PromSeriesDivideExec: tags=["k"], schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| -|_|_MergeScanExec: REDACTED +| physical_plan_with_schema_| MergeScanExec: REDACTED |_|_| +-+-+