diff --git a/src/catalog/src/process_manager.rs b/src/catalog/src/process_manager.rs index 353ec01714..1eebfec627 100644 --- a/src/catalog/src/process_manager.rs +++ b/src/catalog/src/process_manager.rs @@ -56,14 +56,21 @@ pub struct ProcessManager { #[derive(Debug, Clone)] pub enum QueryStatement { Sql(Statement), - Promql(EvalStmt), + // The optional string is the alias of the PromQL query. + Promql(EvalStmt, Option<String>), } impl Display for QueryStatement { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { QueryStatement::Sql(stmt) => write!(f, "{}", stmt), - QueryStatement::Promql(eval_stmt) => write!(f, "{}", eval_stmt), + QueryStatement::Promql(eval_stmt, alias) => { + if let Some(alias) = alias { + write!(f, "{} AS {}", eval_stmt, alias) + } else { + write!(f, "{}", eval_stmt) + } + } } } } @@ -338,9 +345,9 @@ impl SlowQueryTimer { }; match &self.stmt { - QueryStatement::Promql(stmt) => { + QueryStatement::Promql(stmt, _alias) => { slow_query_event.is_promql = true; - slow_query_event.query = stmt.expr.to_string(); + slow_query_event.query = self.stmt.to_string(); slow_query_event.promql_step = Some(stmt.interval.as_millis() as u64); let start = stmt diff --git a/src/flow/src/batching_mode/utils.rs b/src/flow/src/batching_mode/utils.rs index 2891cf2541..e40e0c5e49 100644 --- a/src/flow/src/batching_mode/utils.rs +++ b/src/flow/src/batching_mode/utils.rs @@ -107,6 +107,7 @@ pub async fn sql_to_df_plan( lookback: eval .lookback .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: eval.alias.clone(), }; QueryLanguageParser::parse_promql(&promql, &query_ctx) diff --git a/src/frontend/src/instance.rs b/src/frontend/src/instance.rs index 3e0a22b6d2..3b83904d89 100644 --- a/src/frontend/src/instance.rs +++ b/src/frontend/src/instance.rs @@ -736,8 +736,8 @@ impl PrometheusHandler for Instance { interceptor.pre_execute(query, Some(&plan), query_ctx.clone())?; // Take the EvalStmt from the original QueryStatement and use it to create the 
CatalogQueryStatement. - let query_statement = if let QueryStatement::Promql(eval_stmt) = stmt { - CatalogQueryStatement::Promql(eval_stmt) + let query_statement = if let QueryStatement::Promql(eval_stmt, alias) = stmt { + CatalogQueryStatement::Promql(eval_stmt, alias) } else { // It should not happen since the query is already parsed successfully. return UnexpectedResultSnafu { diff --git a/src/frontend/src/instance/grpc.rs b/src/frontend/src/instance/grpc.rs index 0dc14419f2..9eeb57ce01 100644 --- a/src/frontend/src/instance/grpc.rs +++ b/src/frontend/src/instance/grpc.rs @@ -135,6 +135,7 @@ impl GrpcQueryHandler for Instance { end: promql.end, step: promql.step, lookback: promql.lookback, + alias: None, }; let mut result = SqlQueryHandler::do_promql_query(self, &prom_query, ctx.clone()).await; diff --git a/src/operator/src/statement.rs b/src/operator/src/statement.rs index 2d361e48a7..47a8994985 100644 --- a/src/operator/src/statement.rs +++ b/src/operator/src/statement.rs @@ -174,7 +174,7 @@ impl StatementExecutor { ) -> Result { match stmt { QueryStatement::Sql(stmt) => self.execute_sql(stmt, query_ctx).await, - QueryStatement::Promql(_) => self.plan_exec(stmt, query_ctx).await, + QueryStatement::Promql(_, _) => self.plan_exec(stmt, query_ctx).await, } } diff --git a/src/operator/src/statement/tql.rs b/src/operator/src/statement/tql.rs index cc4adc7ca8..03ad3f4976 100644 --- a/src/operator/src/statement/tql.rs +++ b/src/operator/src/statement/tql.rs @@ -42,6 +42,7 @@ impl StatementExecutor { lookback: eval .lookback .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: eval.alias, }; QueryLanguageParser::parse_promql(&promql, query_ctx).context(ParseQuerySnafu)? 
} @@ -82,6 +83,7 @@ impl StatementExecutor { lookback: analyze .lookback .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: analyze.alias, }; let analyze_node_name = if analyze.is_verbose { ANALYZE_VERBOSE_NODE_NAME diff --git a/src/query/src/parser.rs b/src/query/src/parser.rs index 826ddfaa50..335045f3ef 100644 --- a/src/query/src/parser.rs +++ b/src/query/src/parser.rs @@ -46,7 +46,8 @@ pub const ANALYZE_VERBOSE_NODE_NAME: &str = "ANALYZE VERBOSE"; #[derive(Debug, Clone)] pub enum QueryStatement { Sql(Statement), - Promql(EvalStmt), + // The optional String is the alias name + Promql(EvalStmt, Option), } impl QueryStatement { @@ -56,19 +57,22 @@ impl QueryStatement { operation: "sql post process", } .fail(), - QueryStatement::Promql(eval_stmt) => { + QueryStatement::Promql(eval_stmt, alias) => { let node_name = match params.get("name") { Some(name) => name.as_str(), None => "", }; let extension_node = Self::create_extension_node(node_name, &eval_stmt.expr); - Ok(QueryStatement::Promql(EvalStmt { - expr: Extension(extension_node.unwrap()), - start: eval_stmt.start, - end: eval_stmt.end, - interval: eval_stmt.interval, - lookback_delta: eval_stmt.lookback_delta, - })) + Ok(QueryStatement::Promql( + EvalStmt { + expr: Extension(extension_node.unwrap()), + start: eval_stmt.start, + end: eval_stmt.end, + interval: eval_stmt.interval, + lookback_delta: eval_stmt.lookback_delta, + }, + alias.clone(), + )) } } } @@ -99,6 +103,7 @@ pub struct PromQuery { pub end: String, pub step: String, pub lookback: String, + pub alias: Option, } impl Default for PromQuery { @@ -109,6 +114,7 @@ impl Default for PromQuery { end: String::from("0"), step: String::from("5m"), lookback: String::from(DEFAULT_LOOKBACK_STRING), + alias: None, } } } @@ -185,7 +191,7 @@ impl QueryLanguageParser { lookback_delta, }; - Ok(QueryStatement::Promql(eval_stmt)) + Ok(QueryStatement::Promql(eval_stmt, query.alias.clone())) } pub fn parse_promql_timestamp(timestamp: &str) -> Result { @@ 
-341,6 +347,7 @@ mod test { end: "2023-02-13T17:14:00Z".to_string(), step: "1d".to_string(), lookback: "5m".to_string(), + alias: Some("my_query".to_string()), }; #[cfg(not(windows))] @@ -355,7 +362,7 @@ mod test { end: SystemTime { tv_sec: 1676308440, tv_nsec: 0 }, \ interval: 86400s, \ lookback_delta: 300s \ - })", + }, Some(\"my_query\"))", ); // Windows has different debug output for SystemTime. @@ -371,7 +378,7 @@ mod test { end: SystemTime { intervals: 133207820400000000 }, \ interval: 86400s, \ lookback_delta: 300s \ - })", + }, Some(\"my_query\"))", ); let result = QueryLanguageParser::parse_promql(&promql, &QueryContext::arc()).unwrap(); diff --git a/src/query/src/planner.rs b/src/query/src/planner.rs index b3aaad0a38..3952c3ab4c 100644 --- a/src/query/src/planner.rs +++ b/src/query/src/planner.rs @@ -200,7 +200,12 @@ impl DfLogicalPlanner { } #[tracing::instrument(skip_all)] - async fn plan_pql(&self, stmt: &EvalStmt, query_ctx: QueryContextRef) -> Result { + async fn plan_pql( + &self, + stmt: &EvalStmt, + alias: Option, + query_ctx: QueryContextRef, + ) -> Result { let plan_decoder = Arc::new(DefaultPlanDecoder::new( self.session_state.clone(), &query_ctx, @@ -215,7 +220,7 @@ impl DfLogicalPlanner { .sql_parser .enable_ident_normalization, ); - PromPlanner::stmt_to_plan(table_provider, stmt, &self.engine_state) + PromPlanner::stmt_to_plan_with_alias(table_provider, stmt, alias, &self.engine_state) .await .map_err(BoxedError::new) .context(QueryPlanSnafu) @@ -323,8 +328,10 @@ impl DfLogicalPlanner { .lookback .clone() .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: eval.alias.clone(), }; let stmt = QueryLanguageParser::parse_promql(&prom_query, &query_ctx)?; + self.plan(&stmt, query_ctx).await } Tql::Explain(_) => UnimplementedSnafu { @@ -345,7 +352,9 @@ impl LogicalPlanner for DfLogicalPlanner { async fn plan(&self, stmt: &QueryStatement, query_ctx: QueryContextRef) -> Result { match stmt { QueryStatement::Sql(stmt) => 
self.plan_sql(stmt, query_ctx).await, - QueryStatement::Promql(stmt) => self.plan_pql(stmt, query_ctx).await, + QueryStatement::Promql(stmt, alias) => { + self.plan_pql(stmt, alias.clone(), query_ctx).await + } } } diff --git a/src/query/src/promql/planner.rs b/src/query/src/promql/planner.rs index 856f97ac5f..54ef6e7988 100644 --- a/src/query/src/promql/planner.rs +++ b/src/query/src/promql/planner.rs @@ -196,9 +196,10 @@ pub fn normalize_matcher(mut matcher: Matcher) -> Matcher { } impl PromPlanner { - pub async fn stmt_to_plan( + pub async fn stmt_to_plan_with_alias( table_provider: DfTableSourceProvider, stmt: &EvalStmt, + alias: Option, query_engine_state: &QueryEngineState, ) -> Result { let mut planner = Self { @@ -206,9 +207,25 @@ impl PromPlanner { ctx: PromPlannerContext::from_eval_stmt(stmt), }; - planner + let plan = planner .prom_expr_to_plan(&stmt.expr, query_engine_state) - .await + .await?; + + // Apply alias if provided + if let Some(alias_name) = alias { + planner.apply_alias_projection(plan, alias_name) + } else { + Ok(plan) + } + } + + #[cfg(test)] + async fn stmt_to_plan( + table_provider: DfTableSourceProvider, + stmt: &EvalStmt, + query_engine_state: &QueryEngineState, + ) -> Result { + Self::stmt_to_plan_with_alias(table_provider, stmt, None, query_engine_state).await } pub async fn prom_expr_to_plan( @@ -3340,6 +3357,35 @@ impl PromPlanner { }); Ok(fn_expr) } + + /// Apply an alias to the query result by adding a projection with the alias name + fn apply_alias_projection( + &mut self, + plan: LogicalPlan, + alias_name: String, + ) -> Result { + let fields_expr = self.create_field_column_exprs()?; + + // TODO(dennis): how to support multi-value aliasing? + ensure!( + fields_expr.len() == 1, + UnsupportedExprSnafu { + name: "alias on multi-value result" + } + ); + + let project_fields = fields_expr + .into_iter() + .map(|expr| expr.alias(&alias_name)) + .chain(self.create_tag_column_exprs()?) 
+ .chain(Some(self.create_time_index_column_expr()?)); + + LogicalPlanBuilder::from(plan) + .project(project_fields) + .context(DataFusionPlanningSnafu)? + .build() + .context(DataFusionPlanningSnafu) + } } #[derive(Default, Debug)] @@ -4927,6 +4973,59 @@ Filter: up.field_0 IS NOT NULL [timestamp:Timestamp(Millisecond, None), field_0: assert_eq!(plan.display_indent_schema().to_string(), expected); } + #[tokio::test] + async fn test_value_alias() { + let mut eval_stmt = EvalStmt { + expr: PromExpr::NumberLiteral(NumberLiteral { val: 1.0 }), + start: UNIX_EPOCH, + end: UNIX_EPOCH + .checked_add(Duration::from_secs(100_000)) + .unwrap(), + interval: Duration::from_secs(5), + lookback_delta: Duration::from_secs(1), + }; + let case = r#"count_values('series', prometheus_tsdb_head_series{ip=~"(10.0.160.237:8080|10.0.160.237:9090)"}) by (ip)"#; + + let prom_expr = parser::parse(case).unwrap(); + eval_stmt.expr = prom_expr; + let table_provider = build_test_table_provider_with_fields( + &[ + ( + DEFAULT_SCHEMA_NAME.to_string(), + "prometheus_tsdb_head_series".to_string(), + ), + ( + DEFAULT_SCHEMA_NAME.to_string(), + "http_server_requests_seconds_count".to_string(), + ), + ], + &["ip"], + ) + .await; + + let alias = Some("my_series".to_string()); + let plan = PromPlanner::stmt_to_plan_with_alias( + table_provider, + &eval_stmt, + alias, + &build_query_engine_state(), + ) + .await + .unwrap(); + let expected = r#" +Projection: count(prometheus_tsdb_head_series.greptime_value) AS my_series, prometheus_tsdb_head_series.ip, prometheus_tsdb_head_series.greptime_timestamp [my_series:Int64, ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None)] + Projection: count(prometheus_tsdb_head_series.greptime_value), prometheus_tsdb_head_series.ip, prometheus_tsdb_head_series.greptime_timestamp, series [count(prometheus_tsdb_head_series.greptime_value):Int64, ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), series:Float64;N] + Sort: prometheus_tsdb_head_series.ip ASC NULLS 
LAST, prometheus_tsdb_head_series.greptime_timestamp ASC NULLS LAST, prometheus_tsdb_head_series.greptime_value ASC NULLS LAST [count(prometheus_tsdb_head_series.greptime_value):Int64, ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), series:Float64;N, greptime_value:Float64;N] + Projection: count(prometheus_tsdb_head_series.greptime_value), prometheus_tsdb_head_series.ip, prometheus_tsdb_head_series.greptime_timestamp, prometheus_tsdb_head_series.greptime_value AS series, prometheus_tsdb_head_series.greptime_value [count(prometheus_tsdb_head_series.greptime_value):Int64, ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), series:Float64;N, greptime_value:Float64;N] + Aggregate: groupBy=[[prometheus_tsdb_head_series.ip, prometheus_tsdb_head_series.greptime_timestamp, prometheus_tsdb_head_series.greptime_value]], aggr=[[count(prometheus_tsdb_head_series.greptime_value)]] [ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N, count(prometheus_tsdb_head_series.greptime_value):Int64] + PromInstantManipulate: range=[0..100000000], lookback=[1000], interval=[5000], time index=[greptime_timestamp] [ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N] + PromSeriesDivide: tags=["ip"] [ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N] + Sort: prometheus_tsdb_head_series.ip ASC NULLS FIRST, prometheus_tsdb_head_series.greptime_timestamp ASC NULLS FIRST [ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N] + Filter: prometheus_tsdb_head_series.ip ~ Utf8("^(?:(10.0.160.237:8080|10.0.160.237:9090))$") AND prometheus_tsdb_head_series.greptime_timestamp >= TimestampMillisecond(-1000, None) AND prometheus_tsdb_head_series.greptime_timestamp <= TimestampMillisecond(100001000, None) [ip:Utf8, greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N] + TableScan: prometheus_tsdb_head_series [ip:Utf8, 
greptime_timestamp:Timestamp(Millisecond, None), greptime_value:Float64;N]"#; + assert_eq!(format!("\n{}", plan.display_indent_schema()), expected); + } + #[tokio::test] async fn test_quantile_expr() { let mut eval_stmt = EvalStmt { diff --git a/src/servers/src/grpc/prom_query_gateway.rs b/src/servers/src/grpc/prom_query_gateway.rs index 755c741ace..35e4c0f712 100644 --- a/src/servers/src/grpc/prom_query_gateway.rs +++ b/src/servers/src/grpc/prom_query_gateway.rs @@ -59,6 +59,7 @@ impl PrometheusGateway for PrometheusGatewayService { end: range_query.end, step: range_query.step, lookback: range_query.lookback, + alias: None, } } Promql::InstantQuery(instant_query) => { @@ -73,6 +74,7 @@ impl PrometheusGateway for PrometheusGatewayService { end: time, step: String::from("1s"), lookback: instant_query.lookback, + alias: None, } } }; diff --git a/src/servers/src/http/handler.rs b/src/servers/src/http/handler.rs index 626dcecf24..69ba93cc5c 100644 --- a/src/servers/src/http/handler.rs +++ b/src/servers/src/http/handler.rs @@ -327,6 +327,8 @@ impl From for PromQuery { lookback: query .lookback .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + // TODO(dennis): support alias from http params or parse from query.query + alias: None, } } } diff --git a/src/servers/src/http/prometheus.rs b/src/servers/src/http/prometheus.rs index 1c9f76a0e6..8bad09a8c1 100644 --- a/src/servers/src/http/prometheus.rs +++ b/src/servers/src/http/prometheus.rs @@ -245,6 +245,7 @@ pub async fn instant_query( .lookback .or(form_params.lookback) .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: None, }; let promql_expr = try_call_return_response!(promql_parser::parser::parse(&prom_query.query)); @@ -356,6 +357,7 @@ pub async fn range_query( .lookback .or(form_params.lookback) .unwrap_or_else(|| DEFAULT_LOOKBACK_STRING.to_string()), + alias: None, }; let promql_expr = try_call_return_response!(promql_parser::parser::parse(&prom_query.query)); @@ -568,6 +570,7 @@ pub async fn 
labels_query( end: end.clone(), step: DEFAULT_LOOKBACK_STRING.to_string(), lookback: lookback.clone(), + alias: None, }; let result = handler.do_query(&prom_query, query_ctx.clone()).await; @@ -1367,6 +1370,7 @@ pub async fn series_query( // TODO: find a better value for step step: DEFAULT_LOOKBACK_STRING.to_string(), lookback: lookback.clone(), + alias: None, }; let result = handler.do_query(&prom_query, query_ctx.clone()).await; diff --git a/src/servers/tests/mod.rs b/src/servers/tests/mod.rs index aeadb74f90..7d6268215c 100644 --- a/src/servers/tests/mod.rs +++ b/src/servers/tests/mod.rs @@ -144,6 +144,7 @@ impl GrpcQueryHandler for DummyInstance { end: promql.end, step: promql.step, lookback: promql.lookback, + alias: None, }; let mut result = SqlQueryHandler::do_promql_query(self, &prom_query, ctx).await; diff --git a/src/sql/src/parsers/tql_parser.rs b/src/sql/src/parsers/tql_parser.rs index e284c19d8c..bb5e858e4a 100644 --- a/src/sql/src/parsers/tql_parser.rs +++ b/src/sql/src/parsers/tql_parser.rs @@ -148,8 +148,8 @@ impl ParserContext<'_> { _ => ("0".to_string(), "0".to_string(), "5m".to_string(), None), }; - let query = Self::parse_tql_query(parser, self.sql).context(ParserSnafu)?; - Ok(TqlParameters::new(start, end, step, lookback, query)) + let (query, alias) = Self::parse_tql_query(parser, self.sql).context(ParserSnafu)?; + Ok(TqlParameters::new(start, end, step, lookback, query, alias)) } pub fn comma_or_rparen(token: &Token) -> bool { @@ -250,7 +250,11 @@ impl ParserContext<'_> { }) } - fn parse_tql_query(parser: &mut Parser, sql: &str) -> std::result::Result { + /// Parse the TQL query and optional alias from the given [Parser] and SQL string. 
+ pub fn parse_tql_query( + parser: &mut Parser, + sql: &str, + ) -> std::result::Result<(String, Option), ParserError> { while matches!(parser.peek_token().token, Token::Comma) { let _skip_token = parser.next_token(); } @@ -262,14 +266,43 @@ impl ParserContext<'_> { let start_location = start_tql.span.start; // translate the start location to the index in the sql string let index = location_to_index(sql, &start_location); + assert!(index > 0); - let query = &sql[index - 1..]; - while parser.next_token() != Token::EOF { - // consume all tokens - // TODO(dennis): supports multi TQL statements separated by ';'? + let mut token = start_tql; + loop { + // Find AS keyword, which indicates " AS { + assert_eq!(eval.start, "0"); + assert_eq!(eval.end, "30"); + assert_eq!(eval.step, "10s"); + assert_eq!(eval.lookback, None); + assert_eq!(eval.query, "http_requests_total"); + assert_eq!(eval.alias, Some("my_metric".to_string())); + } + _ => unreachable!(), + } + + // Test TQL EVAL with complex query and alias + let sql = "TQL EVAL (1676887657, 1676887659, '1m') http_requests_total{environment=~'staging|testing|development',method!='GET'} @ 1609746000 offset 5m AS web_requests"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!(eval.start, "1676887657"); + assert_eq!(eval.end, "1676887659"); + assert_eq!(eval.step, "1m"); + assert_eq!(eval.lookback, None); + assert_eq!( + eval.query, + "http_requests_total{environment=~'staging|testing|development',method!='GET'} @ 1609746000 offset 5m" + ); + assert_eq!(eval.alias, Some("web_requests".to_string())); + } + _ => unreachable!(), + } + + // Test TQL EVAL with lookback and alias + let sql = "TQL EVAL (0, 100, '30s', '5m') cpu_usage_total AS cpu_metrics"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!(eval.start, "0"); + assert_eq!(eval.end, "100"); + assert_eq!(eval.step, "30s"); + assert_eq!(eval.lookback, Some("5m".to_string())); + 
assert_eq!(eval.query, "cpu_usage_total"); + assert_eq!(eval.alias, Some("cpu_metrics".to_string())); + } + _ => unreachable!(), + } + + // Test TQL EXPLAIN with alias + let sql = "TQL EXPLAIN (20, 100, '10s') memory_usage{app='web'} AS memory_data"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Explain(explain)) => { + assert_eq!(explain.start, "20"); + assert_eq!(explain.end, "100"); + assert_eq!(explain.step, "10s"); + assert_eq!(explain.lookback, None); + assert_eq!(explain.query, "memory_usage{app='web'}"); + assert_eq!(explain.alias, Some("memory_data".to_string())); + assert!(!explain.is_verbose); + assert_eq!(explain.format, None); + } + _ => unreachable!(), + } + + // Test TQL EXPLAIN VERBOSE with alias + let sql = "TQL EXPLAIN VERBOSE FORMAT JSON (0, 50, '5s') disk_io_rate AS disk_metrics"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Explain(explain)) => { + assert_eq!(explain.start, "0"); + assert_eq!(explain.end, "50"); + assert_eq!(explain.step, "5s"); + assert_eq!(explain.lookback, None); + assert_eq!(explain.query, "disk_io_rate"); + assert_eq!(explain.alias, Some("disk_metrics".to_string())); + assert!(explain.is_verbose); + assert_eq!(explain.format, Some(AnalyzeFormat::JSON)); + } + _ => unreachable!(), + } + + // Test TQL ANALYZE with alias + let sql = "TQL ANALYZE (100, 200, '1m') network_bytes_total AS network_stats"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Analyze(analyze)) => { + assert_eq!(analyze.start, "100"); + assert_eq!(analyze.end, "200"); + assert_eq!(analyze.step, "1m"); + assert_eq!(analyze.lookback, None); + assert_eq!(analyze.query, "network_bytes_total"); + assert_eq!(analyze.alias, Some("network_stats".to_string())); + assert!(!analyze.is_verbose); + assert_eq!(analyze.format, None); + } + _ => unreachable!(), + } + + // Test TQL ANALYZE VERBOSE with alias and lookback + let sql = "TQL ANALYZE VERBOSE FORMAT TEXT (0, 1000, '2m', '30s') error_rate{service='api'} AS api_errors"; + match 
parse_into_statement(sql) { + Statement::Tql(Tql::Analyze(analyze)) => { + assert_eq!(analyze.start, "0"); + assert_eq!(analyze.end, "1000"); + assert_eq!(analyze.step, "2m"); + assert_eq!(analyze.lookback, Some("30s".to_string())); + assert_eq!(analyze.query, "error_rate{service='api'}"); + assert_eq!(analyze.alias, Some("api_errors".to_string())); + assert!(analyze.is_verbose); + assert_eq!(analyze.format, Some(AnalyzeFormat::TEXT)); + } + _ => unreachable!(), + } + } + + #[test] + fn test_parse_tql_alias_edge_cases() { + // Test alias with underscore and numbers + let sql = "TQL EVAL (0, 10, '5s') test_metric AS metric_123"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!(eval.query, "test_metric"); + assert_eq!(eval.alias, Some("metric_123".to_string())); + } + _ => unreachable!(), + } + + // Test complex PromQL expression with AS + let sql = r#"TQL EVAL (0, 30, '10s') (sum by(host) (irate(host_cpu_seconds_total{mode!='idle'}[1m0s])) / sum by (host)((irate(host_cpu_seconds_total[1m0s])))) * 100 AS cpu_utilization;"#; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!( + eval.query, + "(sum by(host) (irate(host_cpu_seconds_total{mode!='idle'}[1m0s])) / sum by (host)((irate(host_cpu_seconds_total[1m0s])))) * 100" + ); + assert_eq!(eval.alias, Some("cpu_utilization".to_string())); + } + _ => unreachable!(), + } + + // Test query with semicolon and alias + let sql = "TQL EVAL (0, 10, '5s') simple_metric AS my_alias"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!(eval.query, "simple_metric"); + assert_eq!(eval.alias, Some("my_alias".to_string())); + } + _ => unreachable!(), + } + + // Test without alias (ensure it still works) + let sql = "TQL EVAL (0, 10, '5s') test_metric_no_alias"; + match parse_into_statement(sql) { + Statement::Tql(Tql::Eval(eval)) => { + assert_eq!(eval.query, "test_metric_no_alias"); + assert_eq!(eval.alias, None); + } + _ => 
unreachable!(), + } + } + + #[test] + fn test_parse_tql_alias_errors() { + let dialect = &GreptimeDbDialect {}; + let parse_options = ParseOptions::default(); + + // Test AS without alias identifier + let sql = "TQL EVAL (0, 10, '5s') test_metric AS"; + let result = ParserContext::create_with_dialect(sql, dialect, parse_options.clone()); + assert!(result.is_err(), "Should fail when AS has no identifier"); + + // Test AS with invalid characters after alias + let sql = "TQL EVAL (0, 10, '5s') test_metric AS alias extra_token"; + let result = ParserContext::create_with_dialect(sql, dialect, parse_options.clone()); + assert!( + result.is_err(), + "Should fail with unexpected token after alias" + ); + + // Test AS with empty promql query + let sql = "TQL EVAL (0, 10, '5s') AS alias"; + let result = ParserContext::create_with_dialect(sql, dialect, parse_options.clone()); + assert!(result.is_err(), "Should fail with empty promql query"); + } + #[test] fn test_parse_tql_error() { let dialect = &GreptimeDbDialect {}; diff --git a/src/sql/src/statements/tql.rs b/src/sql/src/statements/tql.rs index 18c9f7b197..835a957ead 100644 --- a/src/sql/src/statements/tql.rs +++ b/src/sql/src/statements/tql.rs @@ -42,15 +42,20 @@ fn format_tql( step: &str, lookback: Option<&str>, query: &str, + alias: &Option, ) -> std::fmt::Result { write!(f, "({start}, {end}, '{step}'")?; if let Some(lookback) = lookback { write!(f, ", {lookback}")?; } - write!(f, ") {query}") + if let Some(alias) = alias { + write!(f, ") {query} AS {alias}") + } else { + write!(f, ") {query}") + } } -/// TQL EVAL (, , , [lookback]) +/// TQL EVAL (, , , [lookback]) [AS alias] #[derive(Debug, Clone, PartialEq, Eq, Visit, VisitMut, Serialize)] pub struct TqlEval { pub start: String, @@ -58,6 +63,7 @@ pub struct TqlEval { pub step: String, pub lookback: Option, pub query: String, + pub alias: Option, } impl Display for TqlEval { @@ -70,11 +76,12 @@ impl Display for TqlEval { &self.step, self.lookback.as_deref(), 
&self.query, + &self.alias, ) } } -/// TQL EXPLAIN [VERBOSE] [FORMAT format] [, , , [lookback]] +/// TQL EXPLAIN [VERBOSE] [FORMAT format] [, , , [lookback]] [AS alias] /// doesn't execute the query but tells how the query would be executed (similar to SQL EXPLAIN). #[derive(Debug, Clone, PartialEq, Eq, Visit, VisitMut, Serialize)] pub struct TqlExplain { @@ -83,6 +90,7 @@ pub struct TqlExplain { pub step: String, pub lookback: Option, pub query: String, + pub alias: Option, pub is_verbose: bool, pub format: Option, } @@ -103,11 +111,12 @@ impl Display for TqlExplain { &self.step, self.lookback.as_deref(), &self.query, + &self.alias, ) } } -/// TQL ANALYZE [VERBOSE] [FORMAT format] (, , , [lookback]) +/// TQL ANALYZE [VERBOSE] [FORMAT format] (, , , [lookback]) [AS alias] /// executes the plan and tells the detailed per-step execution time (similar to SQL ANALYZE). #[derive(Debug, Clone, PartialEq, Eq, Visit, VisitMut, Serialize)] pub struct TqlAnalyze { @@ -116,6 +125,7 @@ pub struct TqlAnalyze { pub step: String, pub lookback: Option, pub query: String, + pub alias: Option, pub is_verbose: bool, pub format: Option, } @@ -136,6 +146,7 @@ impl Display for TqlAnalyze { &self.step, self.lookback.as_deref(), &self.query, + &self.alias, ) } } @@ -151,6 +162,7 @@ pub struct TqlParameters { step: String, lookback: Option, query: String, + alias: Option, pub is_verbose: bool, pub format: Option, } @@ -162,6 +174,7 @@ impl TqlParameters { step: String, lookback: Option, query: String, + alias: Option, ) -> Self { TqlParameters { start, @@ -169,6 +182,7 @@ impl TqlParameters { step, lookback, query, + alias, is_verbose: false, format: None, } @@ -183,6 +197,7 @@ impl From for TqlEval { step: params.step, lookback: params.lookback, query: params.query, + alias: params.alias, } } } @@ -194,6 +209,7 @@ impl From for TqlExplain { end: params.end, step: params.step, query: params.query, + alias: params.alias, lookback: params.lookback, is_verbose: params.is_verbose, format: 
params.format, @@ -208,6 +224,7 @@ impl From for TqlAnalyze { end: params.end, step: params.step, query: params.query, + alias: params.alias, lookback: params.lookback, is_verbose: params.is_verbose, format: params.format, diff --git a/tests-integration/src/tests/promql_test.rs b/tests-integration/src/tests/promql_test.rs index c8d1f6597a..3c8f8d8f33 100644 --- a/tests-integration/src/tests/promql_test.rs +++ b/tests-integration/src/tests/promql_test.rs @@ -32,6 +32,7 @@ use crate::tests::test_util::MockInstance; async fn promql_query( ins: Arc, promql: &str, + alias: Option, query_ctx: Arc, start: SystemTime, end: SystemTime, @@ -40,9 +41,10 @@ async fn promql_query( ) -> operator::error::Result { let query = PromQuery { query: promql.to_string(), + alias, ..PromQuery::default() }; - let QueryStatement::Promql(mut eval_stmt) = + let QueryStatement::Promql(mut eval_stmt, alias) = QueryLanguageParser::parse_promql(&query, &query_ctx).unwrap() else { unreachable!() @@ -53,7 +55,7 @@ async fn promql_query( eval_stmt.lookback_delta = lookback; ins.statement_executor() - .execute_stmt(QueryStatement::Promql(eval_stmt), query_ctx) + .execute_stmt(QueryStatement::Promql(eval_stmt, alias), query_ctx) .await } @@ -68,6 +70,25 @@ async fn create_insert_query_assert( interval: Duration, lookback: Duration, expected: &str, +) { + create_insert_query_assert_with_alias( + instance, create, insert, promql, None, start, end, interval, lookback, expected, + ) + .await +} + +#[allow(clippy::too_many_arguments)] +async fn create_insert_query_assert_with_alias( + instance: Arc, + create: &str, + insert: &str, + promql: &str, + alias: Option, + start: SystemTime, + end: SystemTime, + interval: Duration, + lookback: Duration, + expected: &str, ) { instance .do_query(create, QueryContext::arc()) @@ -87,6 +108,7 @@ async fn create_insert_query_assert( let query_output = promql_query( instance, promql, + alias, QueryContext::arc(), start, end, @@ -335,6 +357,31 @@ async fn 
aggregators_simple_count(instance: Arc) { .await; } +// Test like `aggregators_simple_count` but with value aliasing. +#[apply(both_instances_cases)] +async fn value_alias(instance: Arc) { + let instance = instance.frontend(); + + create_insert_query_assert_with_alias( + instance, + AGGREGATORS_CREATE_TABLE, + AGGREGATORS_INSERT_DATA, + "COUNT BY (group) (http_requests{job=\"api-server\"})", + Some("my_series".to_string()), + UNIX_EPOCH, + unix_epoch_plus_100s(), + Duration::from_secs(60), + Duration::from_secs(0), + "+-----------+------------+---------------------+\ + \n| my_series | group | ts |\ + \n+-----------+------------+---------------------+\ + \n| 2 | canary | 1970-01-01T00:00:00 |\ + \n| 2 | production | 1970-01-01T00:00:00 |\ + \n+-----------+------------+---------------------+", + ) + .await; +} + // # Simple without. // eval instant at 50m sum without (instance) (http_requests{job="api-server"}) // {group="canary",job="api-server"} 700 @@ -578,6 +625,7 @@ async fn cross_schema_query(instance: Arc) { let query_output = promql_query( ins.clone(), r#"http_requests{__schema__="greptime_private"}"#, + None, QueryContext::arc(), start, end, @@ -605,6 +653,7 @@ async fn cross_schema_query(instance: Arc) { let query_output = promql_query( ins.clone(), r#"http_requests{__database__="greptime_private"}"#, + None, QueryContext::arc(), start, end, @@ -632,6 +681,7 @@ async fn cross_schema_query(instance: Arc) { let query_output = promql_query( ins.clone(), r#"http_requests"#, + None, QueryContext::arc(), start, end, @@ -649,6 +699,7 @@ async fn cross_schema_query(instance: Arc) { let query_output = promql_query( ins.clone(), r#"http_requests"#, + None, QueryContext::with_db_name(Some("greptime_private")).into(), start, end, diff --git a/tests/cases/standalone/common/tql-explain-analyze/explain.result b/tests/cases/standalone/common/tql-explain-analyze/explain.result index 3ef82f9806..33cd57b327 100644 --- 
a/tests/cases/standalone/common/tql-explain-analyze/explain.result +++ b/tests/cases/standalone/common/tql-explain-analyze/explain.result @@ -205,6 +205,143 @@ TQL EXPLAIN VERBOSE (0, 10, '5s') test; |_|_| +-+-+ +-- explain verbose at 0s, 5s and 10s. No point at 0s. +-- SQLNESS REPLACE (-+) - +-- SQLNESS REPLACE (\s\s+) _ +-- SQLNESS REPLACE (elapsed_compute.*) REDACTED +-- SQLNESS REPLACE (peers.*) REDACTED +-- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED +TQL EXPLAIN VERBOSE (0, 10, '5s') test AS series; + ++-+-+ +| plan_type_| plan_| ++-+-+ +| initial_logical_plan_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| +|_|_PromSeriesDivide: tags=["k"]_| +|_|_Sort: test.k ASC NULLS FIRST, test.j ASC NULLS FIRST_| +|_|_Filter: test.j >= TimestampMillisecond(-300000, None) AND test.j <= TimestampMillisecond(300000, None)_| +|_|_TableScan: test_| +| logical_plan after count_wildcard_to_time_index_rule_| SAME TEXT AS ABOVE_| +| logical_plan after StringNormalizationRule_| SAME TEXT AS ABOVE_| +| logical_plan after TranscribeAtatRule_| SAME TEXT AS ABOVE_| +| logical_plan after resolve_grouping_function_| SAME TEXT AS ABOVE_| +| logical_plan after type_coercion_| SAME TEXT AS ABOVE_| +| logical_plan after DistPlannerAnalyzer_| Projection: test.i, test.j, test.k_| +|_|_MergeScan [is_placeholder=false, remote_input=[_| +|_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| +|_|_PromSeriesDivide: tags=["k"]_| +|_|_Sort: test.k ASC NULLS FIRST, test.j ASC NULLS FIRST_| +|_|_Filter: test.j >= TimestampMillisecond(-300000, None) AND test.j <= TimestampMillisecond(300000, None)_| +|_|_TableScan: test_| +|_| ]]_| +| logical_plan after FixStateUdafOrderingAnalyzer_| SAME TEXT AS ABOVE_| +| analyzed_logical_plan_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_nested_union_| SAME TEXT AS ABOVE_| +| logical_plan after simplify_expressions_| SAME TEXT AS ABOVE_| +| logical_plan after 
replace_distinct_aggregate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_join_| SAME TEXT AS ABOVE_| +| logical_plan after decorrelate_predicate_subquery_| SAME TEXT AS ABOVE_| +| logical_plan after scalar_subquery_to_join_| SAME TEXT AS ABOVE_| +| logical_plan after decorrelate_lateral_join_| SAME TEXT AS ABOVE_| +| logical_plan after extract_equijoin_predicate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_duplicated_expr_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_filter_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_cross_join_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_limit_| SAME TEXT AS ABOVE_| +| logical_plan after propagate_empty_relation_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_one_union_| SAME TEXT AS ABOVE_| +| logical_plan after filter_null_join_keys_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_outer_join_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_limit_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_filter_| SAME TEXT AS ABOVE_| +| logical_plan after single_distinct_aggregation_to_group_by | SAME TEXT AS ABOVE_| +| logical_plan after eliminate_group_by_constant_| SAME TEXT AS ABOVE_| +| logical_plan after common_sub_expression_eliminate_| SAME TEXT AS ABOVE_| +| logical_plan after optimize_projections_| MergeScan [is_placeholder=false, remote_input=[_| +|_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| +|_|_PromSeriesDivide: tags=["k"]_| +|_|_Sort: test.k ASC NULLS FIRST, test.j ASC NULLS FIRST_| +|_|_Filter: test.j >= TimestampMillisecond(-300000, None) AND test.j <= TimestampMillisecond(300000, None)_| +|_|_TableScan: test_| +|_| ]]_| +| logical_plan after ScanHintRule_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_nested_union_| SAME TEXT AS ABOVE_| +| logical_plan after simplify_expressions_| SAME TEXT AS ABOVE_| +| logical_plan after replace_distinct_aggregate_| SAME TEXT AS ABOVE_| +| logical_plan after 
eliminate_join_| SAME TEXT AS ABOVE_| +| logical_plan after decorrelate_predicate_subquery_| SAME TEXT AS ABOVE_| +| logical_plan after scalar_subquery_to_join_| SAME TEXT AS ABOVE_| +| logical_plan after decorrelate_lateral_join_| SAME TEXT AS ABOVE_| +| logical_plan after extract_equijoin_predicate_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_duplicated_expr_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_filter_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_cross_join_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_limit_| SAME TEXT AS ABOVE_| +| logical_plan after propagate_empty_relation_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_one_union_| SAME TEXT AS ABOVE_| +| logical_plan after filter_null_join_keys_| SAME TEXT AS ABOVE_| +| logical_plan after eliminate_outer_join_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_limit_| SAME TEXT AS ABOVE_| +| logical_plan after push_down_filter_| SAME TEXT AS ABOVE_| +| logical_plan after single_distinct_aggregation_to_group_by | SAME TEXT AS ABOVE_| +| logical_plan after eliminate_group_by_constant_| SAME TEXT AS ABOVE_| +| logical_plan after common_sub_expression_eliminate_| SAME TEXT AS ABOVE_| +| logical_plan after optimize_projections_| SAME TEXT AS ABOVE_| +| logical_plan after ScanHintRule_| SAME TEXT AS ABOVE_| +| logical_plan_| MergeScan [is_placeholder=false, remote_input=[_| +|_| PromInstantManipulate: range=[0..0], lookback=[300000], interval=[300000], time index=[j]_| +|_|_PromSeriesDivide: tags=["k"]_| +|_|_Sort: test.k ASC NULLS FIRST, test.j ASC NULLS FIRST_| +|_|_Filter: test.j >= TimestampMillisecond(-300000, None) AND test.j <= TimestampMillisecond(300000, None)_| +|_|_TableScan: test_| +|_| ]]_| +| initial_physical_plan_| MergeScanExec: REDACTED +|_|_| +| initial_physical_plan_with_stats_| MergeScanExec: REDACTED +|_|_| +| initial_physical_plan_with_schema_| MergeScanExec: REDACTED +|_|_| +| physical_plan after OutputRequirements_| 
OutputRequirementExec: order_by=[], dist_by=Unspecified_| +|_|_MergeScanExec: REDACTED +|_|_| +| physical_plan after aggregate_statistics_| SAME TEXT AS ABOVE_| +| physical_plan after join_selection_| SAME TEXT AS ABOVE_| +| physical_plan after LimitedDistinctAggregation_| SAME TEXT AS ABOVE_| +| physical_plan after FilterPushdown_| SAME TEXT AS ABOVE_| +| physical_plan after parallelize_scan_| SAME TEXT AS ABOVE_| +| physical_plan after PassDistributionRule_| SAME TEXT AS ABOVE_| +| physical_plan after EnforceSorting_| SAME TEXT AS ABOVE_| +| physical_plan after EnforceDistribution_| SAME TEXT AS ABOVE_| +| physical_plan after CombinePartialFinalAggregate_| SAME TEXT AS ABOVE_| +| physical_plan after EnforceSorting_| SAME TEXT AS ABOVE_| +| physical_plan after OptimizeAggregateOrder_| SAME TEXT AS ABOVE_| +| physical_plan after ProjectionPushdown_| SAME TEXT AS ABOVE_| +| physical_plan after coalesce_batches_| SAME TEXT AS ABOVE_| +| physical_plan after coalesce_async_exec_input_| SAME TEXT AS ABOVE_| +| physical_plan after OutputRequirements_| MergeScanExec: REDACTED +|_|_| +| physical_plan after LimitAggregation_| SAME TEXT AS ABOVE_| +| physical_plan after LimitPushdown_| SAME TEXT AS ABOVE_| +| physical_plan after ProjectionPushdown_| SAME TEXT AS ABOVE_| +| physical_plan after EnsureCooperative_| CooperativeExec_| +|_|_MergeScanExec: REDACTED +|_|_| +| physical_plan after FilterPushdown(Post)_| SAME TEXT AS ABOVE_| +| physical_plan after WindowedSortRule_| SAME TEXT AS ABOVE_| +| physical_plan after MatchesConstantTerm_| SAME TEXT AS ABOVE_| +| physical_plan after RemoveDuplicateRule_| SAME TEXT AS ABOVE_| +| physical_plan after SanityCheckPlan_| SAME TEXT AS ABOVE_| +| physical_plan_| CooperativeExec_| +|_|_MergeScanExec: REDACTED +|_|_| +| physical_plan_with_stats_| CooperativeExec, statistics=[Rows=Absent, Bytes=Absent, [(Col[0]:),(Col[1]:),(Col[2]:)]]_| +|_|_MergeScanExec: REDACTED +|_|_| +| physical_plan_with_schema_| CooperativeExec, 
schema=[i:Float64;N, j:Timestamp(Millisecond, None), k:Utf8;N]_| +|_|_MergeScanExec: REDACTED +|_|_| ++-+-+ + DROP TABLE test; Affected Rows: 0 diff --git a/tests/cases/standalone/common/tql-explain-analyze/explain.sql b/tests/cases/standalone/common/tql-explain-analyze/explain.sql index cf5618496d..5e296d4b6b 100644 --- a/tests/cases/standalone/common/tql-explain-analyze/explain.sql +++ b/tests/cases/standalone/common/tql-explain-analyze/explain.sql @@ -27,4 +27,12 @@ TQL EXPLAIN ('1970-01-01T00:00:00'::timestamp, '1970-01-01T00:00:00'::timestamp -- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED TQL EXPLAIN VERBOSE (0, 10, '5s') test; +-- explain verbose at 0s, 5s and 10s. No point at 0s. +-- SQLNESS REPLACE (-+) - +-- SQLNESS REPLACE (\s\s+) _ +-- SQLNESS REPLACE (elapsed_compute.*) REDACTED +-- SQLNESS REPLACE (peers.*) REDACTED +-- SQLNESS REPLACE (RoundRobinBatch.*) REDACTED +TQL EXPLAIN VERBOSE (0, 10, '5s') test AS series; + DROP TABLE test; diff --git a/tests/cases/standalone/common/tql/basic.result b/tests/cases/standalone/common/tql/basic.result index 5523b0bd4b..b6e7c0679d 100644 --- a/tests/cases/standalone/common/tql/basic.result +++ b/tests/cases/standalone/common/tql/basic.result @@ -32,6 +32,30 @@ TQL EVAL (0, 10, '5s') {__name__="test"}; | 2.0 | 1970-01-01T00:00:10 | a | +-----+---------------------+---+ +-- SQLNESS SORT_RESULT 2 1 +TQL EVAL (0, 10, '5s') {__name__="test"} AS test_series; + ++-------------+---+---------------------+ +| test_series | k | j | ++-------------+---+---------------------+ +| 1.0 | b | 1970-01-01T00:00:05 | +| 1.0 | b | 1970-01-01T00:00:10 | +| 2.0 | a | 1970-01-01T00:00:05 | +| 2.0 | a | 1970-01-01T00:00:10 | ++-------------+---+---------------------+ + +-- SQLNESS SORT_RESULT 2 1 +TQL EVAL (0, 10, '5s') count by (k) (test) AS c; + ++---+---+---------------------+ +| c | k | j | ++---+---+---------------------+ +| 1 | a | 1970-01-01T00:00:05 | +| 1 | a | 1970-01-01T00:00:10 | +| 1 | b | 1970-01-01T00:00:05 | +| 1 | b | 
1970-01-01T00:00:10 | ++---+---+---------------------+ + -- SQLNESS SORT_RESULT 2 1 TQL EVAL (0, 10, '5s') test{__schema__="public"}; diff --git a/tests/cases/standalone/common/tql/basic.sql b/tests/cases/standalone/common/tql/basic.sql index 939d329f8d..79008985de 100644 --- a/tests/cases/standalone/common/tql/basic.sql +++ b/tests/cases/standalone/common/tql/basic.sql @@ -10,6 +10,12 @@ TQL EVAL (0, 10, '5s') test; -- SQLNESS SORT_RESULT 2 1 TQL EVAL (0, 10, '5s') {__name__="test"}; +-- SQLNESS SORT_RESULT 2 1 +TQL EVAL (0, 10, '5s') {__name__="test"} AS test_series; + +-- SQLNESS SORT_RESULT 2 1 +TQL EVAL (0, 10, '5s') count by (k) (test) AS c; + -- SQLNESS SORT_RESULT 2 1 TQL EVAL (0, 10, '5s') test{__schema__="public"}; diff --git a/tests/cases/standalone/common/tql/join.result b/tests/cases/standalone/common/tql/join.result index 9fd2e31327..e0d957b1b8 100644 --- a/tests/cases/standalone/common/tql/join.result +++ b/tests/cases/standalone/common/tql/join.result @@ -63,6 +63,19 @@ tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) by (model) + sum(prompt * | model-b | 1970-01-01T00:00:10 | 0.00033 | +---------+---------------------+-----------------------------------------------------------------------------------------------------------+ +-- SQLNESS SORT_RESULT 3 1 +tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) by (model) + sum(prompt * 0.0015 / 1000) by (model) AS result; + ++----------+---------+---------------------+ +| result | model | ts | ++----------+---------+---------------------+ +| 0.000165 | model-a | 1970-01-01T00:00:00 | +| 0.000165 | model-a | 1970-01-01T00:00:05 | +| 0.00033 | model-b | 1970-01-01T00:00:05 | +| 0.00033 | model-b | 1970-01-01T00:00:10 | +| 0.000495 | model-a | 1970-01-01T00:00:10 | ++----------+---------+---------------------+ + -- SQLNESS SORT_RESULT 3 1 tql eval(0, 10, '5s') sum(completion / 1000) + max(completion / 1000); diff --git a/tests/cases/standalone/common/tql/join.sql 
b/tests/cases/standalone/common/tql/join.sql index e9f5c8a544..c9287c865c 100644 --- a/tests/cases/standalone/common/tql/join.sql +++ b/tests/cases/standalone/common/tql/join.sql @@ -29,6 +29,9 @@ tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) + sum(prompt * 0.0015 / 10 -- SQLNESS SORT_RESULT 3 1 tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) by (model) + sum(prompt * 0.0015 / 1000) by (model); +-- SQLNESS SORT_RESULT 3 1 +tql eval(0, 10, '5s') sum(completion * 0.0015 / 1000) by (model) + sum(prompt * 0.0015 / 1000) by (model) AS result; + -- SQLNESS SORT_RESULT 3 1 tql eval(0, 10, '5s') sum(completion / 1000) + max(completion / 1000); diff --git a/tests/cases/standalone/common/tql/tql-cte.result b/tests/cases/standalone/common/tql/tql-cte.result index 8f9124b7d6..30abd5e6f1 100644 --- a/tests/cases/standalone/common/tql/tql-cte.result +++ b/tests/cases/standalone/common/tql/tql-cte.result @@ -61,6 +61,13 @@ SELECT * FROM tql; | 1970-01-01T00:00:40 | 3.0 | +---------------------+-----------+ +-- Explain TQL CTE +EXPLAIN WITH tql AS ( + TQL EVAL (0, 40, '10s') metric +) SELECT * FROM tql; + +Error: 1001(Unsupported), SQL statement is not supported, keyword: tql + -- Hybrid CTEs (TQL + SQL) WITH tql_data (ts, val) AS (TQL EVAL (0, 40, '10s') metric), @@ -279,6 +286,31 @@ LIMIT 5; | 1970-01-01T00:00:20 | host1 | host1 | +---------------------+-----------+-------+ +-- TQL CTE with JOIN and value aliasing +-- SQLNESS SORT_RESULT 3 1 +WITH tql_summary AS ( + TQL EVAL (0, 40, '10s') avg_over_time(labels[30s]) AS cpu +) +SELECT + t.ts, + t.cpu as avg_value, + l.host +FROM tql_summary t +JOIN labels l ON DATE_TRUNC('second', t.ts) = DATE_TRUNC('second', l.ts) +WHERE l.host = 'host1' +ORDER BY t.ts, l.host, avg_value +LIMIT 5; + ++---------------------+--------------------+-------+ +| ts | avg_value | host | ++---------------------+--------------------+-------+ +| 1970-01-01T00:00:00 | 0.1 | host1 | +| 1970-01-01T00:00:00 | 0.2 | host1 | +| 1970-01-01T00:00:10 
| 0.45 | host1 | +| 1970-01-01T00:00:10 | 0.55 | host1 | +| 1970-01-01T00:00:20 | 0.5666666666666668 | host1 | ++---------------------+--------------------+-------+ + -- Error case - TQL ANALYZE should fail WITH tql_analyze AS ( TQL ANALYZE (0, 40, '10s') metric diff --git a/tests/cases/standalone/common/tql/tql-cte.sql b/tests/cases/standalone/common/tql/tql-cte.sql index 6a182b9272..6b9c197ef3 100644 --- a/tests/cases/standalone/common/tql/tql-cte.sql +++ b/tests/cases/standalone/common/tql/tql-cte.sql @@ -32,6 +32,11 @@ WITH tql (the_timestamp, the_value) as ( ) SELECT * FROM tql; +-- Explain TQL CTE +EXPLAIN WITH tql AS ( + TQL EVAL (0, 40, '10s') metric +) SELECT * FROM tql; + -- Hybrid CTEs (TQL + SQL) WITH tql_data (ts, val) AS (TQL EVAL (0, 40, '10s') metric), @@ -145,6 +150,21 @@ WHERE l.host = 'host1' ORDER BY t.ts, l.host, avg_value LIMIT 5; +-- TQL CTE with JOIN and value aliasing +-- SQLNESS SORT_RESULT 3 1 +WITH tql_summary AS ( + TQL EVAL (0, 40, '10s') avg_over_time(labels[30s]) AS cpu +) +SELECT + t.ts, + t.cpu as avg_value, + l.host +FROM tql_summary t +JOIN labels l ON DATE_TRUNC('second', t.ts) = DATE_TRUNC('second', l.ts) +WHERE l.host = 'host1' +ORDER BY t.ts, l.host, avg_value +LIMIT 5; + -- Error case - TQL ANALYZE should fail WITH tql_analyze AS ( TQL ANALYZE (0, 40, '10s') metric