-- Mirror of https://github.com/GreptimeTeam/greptimedb.git
-- Synced 2026-01-15 01:32:56 +00:00
--
-- Sqlness test: flow engine on the datafusion-based optimization path
-- (tumble windows, INTERVAL literal round-tripping, date_bin rate aggregation).
-- Source table for the basic tumble-window flow test.
CREATE TABLE numbers_input_basic (
    number INT,
    ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY(number),
    TIME INDEX(ts)
);
-- Flow that sums `number` per 1-second tumble window anchored at
-- '2021-07-01 00:00:00', sinking the result into `out_num_cnt_basic`
-- (the sink table is auto-created by the flow engine).
CREATE FLOW test_numbers_basic
SINK TO out_num_cnt_basic
AS
SELECT sum(number) FROM numbers_input_basic GROUP BY tumble(ts, '1 second', '2021-07-01 00:00:00');

-- TODO(discord9): confirm if it's necessary to flush flow here?
-- because flush_flow result is at most 1
admin flush_flow('test_numbers_basic');
-- SQLNESS ARG restart=true
-- Two rows inside the same 1-second window [00:00:00, 00:00:01).
-- String literals use single quotes: double quotes denote identifiers in
-- standard SQL, and every other INSERT in this file already uses single quotes.
INSERT INTO numbers_input_basic
VALUES
    (20, '2021-07-01 00:00:00.200'),
    (22, '2021-07-01 00:00:00.600');
admin flush_flow('test_numbers_basic');

-- ORDER BY makes the snapshot deterministic once multiple windows exist.
SELECT "SUM(numbers_input_basic.number)", window_start, window_end FROM out_num_cnt_basic ORDER BY window_start;

admin flush_flow('test_numbers_basic');
-- Two rows in the next 1-second window [00:00:01, 00:00:02).
INSERT INTO numbers_input_basic
VALUES
    (23, '2021-07-01 00:00:01.000'),
    (24, '2021-07-01 00:00:01.500');

admin flush_flow('test_numbers_basic');
-- note that this quoted column is a column name, **not** an aggregation expr; it is generated by datafusion
SELECT "SUM(numbers_input_basic.number)", window_start, window_end FROM out_num_cnt_basic ORDER BY window_start;

-- Drop the flow before its source and sink tables.
DROP FLOW test_numbers_basic;

DROP TABLE numbers_input_basic;

DROP TABLE out_num_cnt_basic;
|
-- test interpreting INTERVAL literals
-- Source table for the INTERVAL-literal flow test.
CREATE TABLE numbers_input_basic (
    number INT,
    ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY(number),
    TIME INDEX(ts)
);

-- Explicit sink table (keyword case normalized to match the rest of the file).
CREATE TABLE out_num_cnt_basic (
    number INT,
    ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP TIME INDEX);
-- Flow selecting three INTERVAL literals, to check that they survive the
-- plan round-trip intact (verified by SHOW CREATE FLOW below).
CREATE FLOW filter_numbers_basic SINK TO out_num_cnt_basic AS
SELECT
    INTERVAL '1 day 1 second',
    INTERVAL '1 month 1 day 1 second',
    INTERVAL '1 year 1 month'
FROM numbers_input_basic
WHERE number > 10;

SHOW CREATE FLOW filter_numbers_basic;
-- Drop the flow first, then its tables (keyword case normalized).
DROP FLOW filter_numbers_basic;

DROP TABLE out_num_cnt_basic;

DROP TABLE numbers_input_basic;
-- Source table for the approximate-rate flow test.
CREATE TABLE bytes_log (
    byte INT,
    ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -- event time
    TIME INDEX(ts)
);

-- TODO(discord9): remove this after auto infer table's time index is impl
CREATE TABLE approx_rate (
    rate DOUBLE,
    time_window TIMESTAMP,
    update_at TIMESTAMP,
    TIME INDEX(time_window)
);
-- Approximate byte rate per 30-second window: (max - min) / 30.0 seconds,
-- bucketed with date_bin (keyword case normalized).
CREATE FLOW find_approx_rate
SINK TO approx_rate
AS
SELECT (max(byte) - min(byte))/30.0 AS rate, date_bin(INTERVAL '30 second', ts) AS time_window FROM bytes_log GROUP BY time_window;
-- Both rows fall into the window starting 2025-01-01 00:00:00.
INSERT INTO bytes_log VALUES
    (101, '2025-01-01 00:00:01'),
    (300, '2025-01-01 00:00:29');

admin flush_flow('find_approx_rate');

-- ORDER BY makes the snapshot deterministic once multiple windows exist.
SELECT rate, time_window FROM approx_rate ORDER BY time_window;
-- Both rows fall into the next window starting 2025-01-01 00:00:30.
INSERT INTO bytes_log VALUES
    (450, '2025-01-01 00:00:32'),
    (500, '2025-01-01 00:00:37');

admin flush_flow('find_approx_rate');

-- ORDER BY makes the snapshot deterministic once multiple windows exist.
SELECT rate, time_window FROM approx_rate ORDER BY time_window;
-- Drop the flow before its source table: find_approx_rate reads from
-- bytes_log, so dropping the table first (as the original did) would leave
-- the flow referencing a dropped source. Matches the order used earlier in
-- this file for test_numbers_basic.
DROP FLOW find_approx_rate;

DROP TABLE bytes_log;

DROP TABLE approx_rate;