test: migrate duckdb tests part2, window functions (#6875)

* test: migrate window tests

Signed-off-by: Dennis Zhuang <killme2008@gmail.com>

* fix: blank line at the end

Signed-off-by: Dennis Zhuang <killme2008@gmail.com>

---------

Signed-off-by: Dennis Zhuang <killme2008@gmail.com>
dennis zhuang
2025-09-03 14:55:47 +08:00
committed by Weny Xu
parent 0cea6ae64d
commit cd761df369
18 changed files with 907 additions and 0 deletions

@@ -0,0 +1,91 @@
-- Migrated from DuckDB test: test/sql/window/test_lead_lag.test
CREATE TABLE win("id" INTEGER, v INTEGER, t INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO win VALUES
(1, 1, 2, 1000),
(1, 1, 1, 2000),
(1, 2, 3, 3000),
(2, 10, 4, 4000),
(2, 11, -1, 5000);
Affected Rows: 5
-- LAG function with offset 2
SELECT "id", v, t, LAG(v, 2, NULL) OVER (PARTITION BY "id" ORDER BY t ASC) as lag_val
FROM win ORDER BY "id", t;
+----+----+----+---------+
| id | v | t | lag_val |
+----+----+----+---------+
| 1 | 1 | 1 | |
| 1 | 1 | 2 | |
| 1 | 2 | 3 | 1 |
| 2 | 11 | -1 | |
| 2 | 10 | 4 | |
+----+----+----+---------+
-- LEAD function with offset 1
SELECT "id", v, t, LEAD(v, 1, -999) OVER (PARTITION BY "id" ORDER BY t ASC) as lead_val
FROM win ORDER BY "id", t;
+----+----+----+----------+
| id | v | t | lead_val |
+----+----+----+----------+
| 1 | 1 | 1 | 1 |
| 1 | 1 | 2 | 2 |
| 1 | 2 | 3 | -999 |
| 2 | 11 | -1 | 10 |
| 2 | 10 | 4 | -999 |
+----+----+----+----------+
-- LAG with default value
SELECT v, LAG(v, 1, 0) OVER (ORDER BY t) as lag_with_default FROM win ORDER BY t;
+----+------------------+
| v | lag_with_default |
+----+------------------+
| 11 | 0 |
| 1 | 11 |
| 1 | 1 |
| 2 | 1 |
| 10 | 2 |
+----+------------------+
-- LEAD with offset 2 and default value
SELECT "id", v, t, LEAD(v, 2, -999) OVER (PARTITION BY "id" ORDER BY t ASC) as lead_val2
FROM win ORDER BY "id", t;
+----+----+----+-----------+
| id | v | t | lead_val2 |
+----+----+----+-----------+
| 1 | 1 | 1 | 2 |
| 1 | 1 | 2 | -999 |
| 1 | 2 | 3 | -999 |
| 2 | 11 | -1 | -999 |
| 2 | 10 | 4 | -999 |
+----+----+----+-----------+
DROP TABLE win;
Affected Rows: 0
-- Test with VALUES clause (similar to the DuckDB original)
SELECT c1, LEAD(c1, 2) OVER (ORDER BY c0) as lead_val
FROM (VALUES
(1, 2, 1000),
(2, 3, 2000),
(3, 4, 3000),
(4, 5, 4000)
) a(c0, c1, ts) ORDER BY c0;
+----+----------+
| c1 | lead_val |
+----+----------+
| 2 | 4 |
| 3 | 5 |
| 4 | |
| 5 | |
+----+----------+
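
Note (illustrative, not part of the migrated test): the two-argument LEAD(c1, 2) above leaves the fill
value at its default of NULL, so it is equivalent to the three-argument form with an explicit NULL
default; the last two rows have no row two positions ahead, hence the empty lead_val cells.
SELECT c1, LEAD(c1, 2, NULL) OVER (ORDER BY c0) as lead_val
FROM (VALUES
    (1, 2, 1000),
    (2, 3, 2000),
    (3, 4, 3000),
    (4, 5, 4000)
) a(c0, c1, ts) ORDER BY c0;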

@@ -0,0 +1,36 @@
-- Migrated from DuckDB test: test/sql/window/test_lead_lag.test
CREATE TABLE win("id" INTEGER, v INTEGER, t INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO win VALUES
(1, 1, 2, 1000),
(1, 1, 1, 2000),
(1, 2, 3, 3000),
(2, 10, 4, 4000),
(2, 11, -1, 5000);
-- LAG function with offset 2
SELECT "id", v, t, LAG(v, 2, NULL) OVER (PARTITION BY "id" ORDER BY t ASC) as lag_val
FROM win ORDER BY "id", t;
-- LEAD function with offset 1
SELECT "id", v, t, LEAD(v, 1, -999) OVER (PARTITION BY "id" ORDER BY t ASC) as lead_val
FROM win ORDER BY "id", t;
-- LAG with default value
SELECT v, LAG(v, 1, 0) OVER (ORDER BY t) as lag_with_default FROM win ORDER BY t;
-- LEAD with offset 2 and default value
SELECT "id", v, t, LEAD(v, 2, -999) OVER (PARTITION BY "id" ORDER BY t ASC) as lead_val2
FROM win ORDER BY "id", t;
DROP TABLE win;
-- Test with VALUES clause (similar to the DuckDB original)
SELECT c1, LEAD(c1, 2) OVER (ORDER BY c0) as lead_val
FROM (VALUES
(1, 2, 1000),
(2, 3, 2000),
(3, 4, 3000),
(4, 5, 4000)
) a(c0, c1, ts) ORDER BY c0;

@@ -0,0 +1,96 @@
-- Migrated from DuckDB test: test/sql/window/test_ntile.test
CREATE TABLE "Scoreboard"("TeamName" VARCHAR, "Player" VARCHAR, "Score" INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO "Scoreboard" VALUES
('Mongrels', 'Apu', 350, 1000),
('Mongrels', 'Ned', 666, 2000),
('Mongrels', 'Meg', 1030, 3000),
('Mongrels', 'Burns', 1270, 4000),
('Simpsons', 'Homer', 1, 5000),
('Simpsons', 'Lisa', 710, 6000),
('Simpsons', 'Marge', 990, 7000),
('Simpsons', 'Bart', 2010, 8000);
Affected Rows: 8
-- NTILE with partition
SELECT "TeamName", "Player", "Score", NTILE(2) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
+----------+--------+-------+-----------+
| TeamName | Player | Score | ntile_val |
+----------+--------+-------+-----------+
| Mongrels | Apu | 350 | 1 |
| Mongrels | Ned | 666 | 1 |
| Mongrels | Meg | 1030 | 2 |
| Mongrels | Burns | 1270 | 2 |
| Simpsons | Homer | 1 | 1 |
| Simpsons | Lisa | 710 | 1 |
| Simpsons | Marge | 990 | 2 |
| Simpsons | Bart | 2010 | 2 |
+----------+--------+-------+-----------+
-- NTILE without partition
SELECT "TeamName", "Player", "Score", NTILE(2) OVER (ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "Score";
+----------+--------+-------+-----------+
| TeamName | Player | Score | ntile_val |
+----------+--------+-------+-----------+
| Simpsons | Homer | 1 | 1 |
| Mongrels | Apu | 350 | 1 |
| Mongrels | Ned | 666 | 1 |
| Simpsons | Lisa | 710 | 1 |
| Simpsons | Marge | 990 | 2 |
| Mongrels | Meg | 1030 | 2 |
| Mongrels | Burns | 1270 | 2 |
| Simpsons | Bart | 2010 | 2 |
+----------+--------+-------+-----------+
-- NTILE with large number
SELECT "TeamName", "Score", NTILE(1000) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
+----------+-------+-----------+
| TeamName | Score | ntile_val |
+----------+-------+-----------+
| Mongrels | 350 | 1 |
| Mongrels | 666 | 2 |
| Mongrels | 1030 | 3 |
| Mongrels | 1270 | 4 |
| Simpsons | 1 | 1 |
| Simpsons | 710 | 2 |
| Simpsons | 990 | 3 |
| Simpsons | 2010 | 4 |
+----------+-------+-----------+
-- NTILE with 1 (all rows in same tile)
SELECT "TeamName", "Score", NTILE(1) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
+----------+-------+-----------+
| TeamName | Score | ntile_val |
+----------+-------+-----------+
| Mongrels | 350 | 1 |
| Mongrels | 666 | 1 |
| Mongrels | 1030 | 1 |
| Mongrels | 1270 | 1 |
| Simpsons | 1 | 1 |
| Simpsons | 710 | 1 |
| Simpsons | 990 | 1 |
| Simpsons | 2010 | 1 |
+----------+-------+-----------+
-- NTILE with NULL (should return NULL)
-- TODO: DuckDB returns NULL, but GreptimeDB raises an error
SELECT "TeamName", "Score", NTILE(NULL) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
Error: 3001(EngineExecuteQuery), Execution error: NTILE requires a positive integer, but finds NULL
DROP TABLE "Scoreboard";
Affected Rows: 0
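
Note on the tiling above (standard NTILE semantics, not part of the migrated test): each team has 4
rows, so NTILE(2) yields two tiles of 2 rows, NTILE(1000) exceeds the row count and numbers the rows
1 through 4 one per tile, and NTILE(1) places every row in tile 1. When the bucket count does not
divide the row count evenly, the lower-numbered tiles receive the extra rows, as in this hedged
sketch with 5 rows and 2 tiles (expected tiles: 1, 1, 1, 2, 2):
SELECT v, NTILE(2) OVER (ORDER BY v) AS tile
FROM (VALUES (1, 1000), (2, 2000), (3, 3000), (4, 4000), (5, 5000)) t(v, ts);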

@@ -0,0 +1,36 @@
-- Migrated from DuckDB test: test/sql/window/test_ntile.test
CREATE TABLE "Scoreboard"("TeamName" VARCHAR, "Player" VARCHAR, "Score" INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO "Scoreboard" VALUES
('Mongrels', 'Apu', 350, 1000),
('Mongrels', 'Ned', 666, 2000),
('Mongrels', 'Meg', 1030, 3000),
('Mongrels', 'Burns', 1270, 4000),
('Simpsons', 'Homer', 1, 5000),
('Simpsons', 'Lisa', 710, 6000),
('Simpsons', 'Marge', 990, 7000),
('Simpsons', 'Bart', 2010, 8000);
-- NTILE with partition
SELECT "TeamName", "Player", "Score", NTILE(2) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
-- NTILE without partition
SELECT "TeamName", "Player", "Score", NTILE(2) OVER (ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "Score";
-- NTILE with large number
SELECT "TeamName", "Score", NTILE(1000) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
-- NTILE with 1 (all rows in same tile)
SELECT "TeamName", "Score", NTILE(1) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
-- NTILE with NULL (should return NULL)
-- TODO: DuckDB returns NULL, but GreptimeDB raises an error
SELECT "TeamName", "Score", NTILE(NULL) OVER (PARTITION BY "TeamName" ORDER BY "Score" ASC) AS ntile_val
FROM "Scoreboard" ORDER BY "TeamName", "Score";
DROP TABLE "Scoreboard";

@@ -0,0 +1,63 @@
-- Migrated from DuckDB test: test/sql/window/test_cume_dist_orderby.test
-- Tests CUME_DIST and PERCENT_RANK window functions
CREATE TABLE test_rank(x INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO test_rank VALUES
(1, 1000), (1, 2000), (2, 3000), (2, 4000), (3, 5000), (3, 6000), (4, 7000);
Affected Rows: 7
-- CUME_DIST function
SELECT x, CUME_DIST() OVER (ORDER BY x) as cume_dist_val FROM test_rank ORDER BY ts;
+---+--------------------+
| x | cume_dist_val |
+---+--------------------+
| 1 | 0.2857142857142857 |
| 1 | 0.2857142857142857 |
| 2 | 0.5714285714285714 |
| 2 | 0.5714285714285714 |
| 3 | 0.8571428571428571 |
| 3 | 0.8571428571428571 |
| 4 | 1.0 |
+---+--------------------+
-- PERCENT_RANK function
SELECT x, PERCENT_RANK() OVER (ORDER BY x) as percent_rank_val FROM test_rank ORDER BY ts;
+---+--------------------+
| x | percent_rank_val |
+---+--------------------+
| 1 | 0.0 |
| 1 | 0.0 |
| 2 | 0.3333333333333333 |
| 2 | 0.3333333333333333 |
| 3 | 0.6666666666666666 |
| 3 | 0.6666666666666666 |
| 4 | 1.0 |
+---+--------------------+
-- Combined with partitioning
SELECT x,
CUME_DIST() OVER (PARTITION BY x ORDER BY ts) as cume_dist_partition,
PERCENT_RANK() OVER (PARTITION BY x ORDER BY ts) as percent_rank_partition
FROM test_rank ORDER BY x, ts;
+---+---------------------+------------------------+
| x | cume_dist_partition | percent_rank_partition |
+---+---------------------+------------------------+
| 1 | 0.5 | 0.0 |
| 1 | 1.0 | 1.0 |
| 2 | 0.5 | 0.0 |
| 2 | 1.0 | 1.0 |
| 3 | 0.5 | 0.0 |
| 3 | 1.0 | 1.0 |
| 4 | 1.0 | 0.0 |
+---+---------------------+------------------------+
DROP TABLE test_rank;
Affected Rows: 0
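
Note (illustrative, not part of the migrated test): CUME_DIST() is (rows with a value <= the current
value) / (total rows) and PERCENT_RANK() is (RANK() - 1) / (total rows - 1). With 7 rows, x = 1 gives
2/7 ≈ 0.2857 and x = 2 has rank 3, so (3 - 1) / (7 - 1) ≈ 0.3333, matching the tables above. Assuming
standard window semantics, the same values can be recomputed manually (against test_rank before the
DROP):
SELECT x,
    CAST(COUNT(*) OVER (ORDER BY x RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS DOUBLE)
        / COUNT(*) OVER () as cume_dist_manual,
    CAST(RANK() OVER (ORDER BY x) - 1 AS DOUBLE)
        / (COUNT(*) OVER () - 1) as percent_rank_manual
FROM test_rank ORDER BY ts;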

@@ -0,0 +1,21 @@
-- Migrated from DuckDB test: test/sql/window/test_cume_dist_orderby.test
-- Tests CUME_DIST and PERCENT_RANK window functions
CREATE TABLE test_rank(x INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO test_rank VALUES
(1, 1000), (1, 2000), (2, 3000), (2, 4000), (3, 5000), (3, 6000), (4, 7000);
-- CUME_DIST function
SELECT x, CUME_DIST() OVER (ORDER BY x) as cume_dist_val FROM test_rank ORDER BY ts;
-- PERCENT_RANK function
SELECT x, PERCENT_RANK() OVER (ORDER BY x) as percent_rank_val FROM test_rank ORDER BY ts;
-- Combined with partitioning
SELECT x,
CUME_DIST() OVER (PARTITION BY x ORDER BY ts) as cume_dist_partition,
PERCENT_RANK() OVER (PARTITION BY x ORDER BY ts) as percent_rank_partition
FROM test_rank ORDER BY x, ts;
DROP TABLE test_rank;

@@ -0,0 +1,52 @@
-- Migrated from DuckDB test: test/sql/window/test_rank.test
CREATE TABLE test_data(i INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO test_data VALUES (1, 1000), (1, 2000), (2, 3000), (2, 4000), (3, 5000);
Affected Rows: 5
-- RANK function with ties
SELECT i, RANK() OVER (ORDER BY i) as rank_val FROM test_data ORDER BY ts;
+---+----------+
| i | rank_val |
+---+----------+
| 1 | 1 |
| 1 | 1 |
| 2 | 3 |
| 2 | 3 |
| 3 | 5 |
+---+----------+
-- DENSE_RANK function
SELECT i, DENSE_RANK() OVER (ORDER BY i) as dense_rank_val FROM test_data ORDER BY ts;
+---+----------------+
| i | dense_rank_val |
+---+----------------+
| 1 | 1 |
| 1 | 1 |
| 2 | 2 |
| 2 | 2 |
| 3 | 3 |
+---+----------------+
-- ROW_NUMBER function
SELECT i, ROW_NUMBER() OVER (ORDER BY i) as row_num FROM test_data ORDER BY ts;
+---+---------+
| i | row_num |
+---+---------+
| 1 | 1 |
| 1 | 2 |
| 2 | 3 |
| 2 | 4 |
| 3 | 5 |
+---+---------+
DROP TABLE test_data;
Affected Rows: 0
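
Note on the tie handling above: the two rows with i = 1 share RANK 1 and the next distinct value jumps
to 3, DENSE_RANK continues at 2 with no gap, and ROW_NUMBER breaks the tie arbitrarily (1 and 2). A
side-by-side sketch (illustrative, not part of the migrated test):
SELECT i,
    RANK() OVER (ORDER BY i) as rank_val,
    DENSE_RANK() OVER (ORDER BY i) as dense_rank_val,
    ROW_NUMBER() OVER (ORDER BY i) as row_num
FROM test_data ORDER BY i;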

@@ -0,0 +1,15 @@
-- Migrated from DuckDB test: test/sql/window/test_rank.test
CREATE TABLE test_data(i INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO test_data VALUES (1, 1000), (1, 2000), (2, 3000), (2, 4000), (3, 5000);
-- RANK function with ties
SELECT i, RANK() OVER (ORDER BY i) as rank_val FROM test_data ORDER BY ts;
-- DENSE_RANK function
SELECT i, DENSE_RANK() OVER (ORDER BY i) as dense_rank_val FROM test_data ORDER BY ts;
-- ROW_NUMBER function
SELECT i, ROW_NUMBER() OVER (ORDER BY i) as row_num FROM test_data ORDER BY ts;
DROP TABLE test_data;

@@ -0,0 +1,49 @@
-- Migrated from DuckDB test: test/sql/window/test_nthvalue.test
-- Tests NTH_VALUE, FIRST_VALUE, LAST_VALUE window functions
CREATE TABLE test_data(i INTEGER, v VARCHAR, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO test_data VALUES
(1, 'A', 1000),
(2, 'B', 2000),
(3, 'C', 3000),
(4, 'D', 4000),
(5, 'E', 5000);
Affected Rows: 5
-- NTH_VALUE function
SELECT i, v, NTH_VALUE(v, 2) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as nth_val
FROM test_data ORDER BY i;
+---+---+---------+
| i | v | nth_val |
+---+---+---------+
| 1 | A | |
| 2 | B | B |
| 3 | C | B |
| 4 | D | B |
| 5 | E | B |
+---+---+---------+
-- FIRST_VALUE and LAST_VALUE
SELECT i, v,
FIRST_VALUE(v) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as first_val,
LAST_VALUE(v) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as last_val
FROM test_data ORDER BY i;
+---+---+-----------+----------+
| i | v | first_val | last_val |
+---+---+-----------+----------+
| 1 | A | A | A |
| 2 | B | A | B |
| 3 | C | A | C |
| 4 | D | A | D |
| 5 | E | A | E |
+---+---+-----------+----------+
DROP TABLE test_data;
Affected Rows: 0
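
Note (illustrative, not part of the migrated test): NTH_VALUE is frame-sensitive. With the running
frame used above, the second value 'B' only becomes visible from the second row onward, which is why
the first nth_val is NULL. Assuming standard frame semantics, widening the frame to the whole
partition would return 'B' for every row:
SELECT i, v, NTH_VALUE(v, 2) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) as nth_val_full
FROM test_data ORDER BY i;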

@@ -0,0 +1,23 @@
-- Migrated from DuckDB test: test/sql/window/test_nthvalue.test
-- Tests NTH_VALUE, FIRST_VALUE, LAST_VALUE window functions
CREATE TABLE test_data(i INTEGER, v VARCHAR, ts TIMESTAMP TIME INDEX);
INSERT INTO test_data VALUES
(1, 'A', 1000),
(2, 'B', 2000),
(3, 'C', 3000),
(4, 'D', 4000),
(5, 'E', 5000);
-- NTH_VALUE function
SELECT i, v, NTH_VALUE(v, 2) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as nth_val
FROM test_data ORDER BY i;
-- FIRST_VALUE and LAST_VALUE
SELECT i, v,
FIRST_VALUE(v) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as first_val,
LAST_VALUE(v) OVER (ORDER BY i ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as last_val
FROM test_data ORDER BY i;
DROP TABLE test_data;

@@ -0,0 +1,79 @@
-- Migrated from DuckDB test: Multiple advanced window function tests
-- Tests complex window function scenarios
CREATE TABLE window_data(
group_id INTEGER,
seq_num INTEGER,
"value" DOUBLE,
category VARCHAR,
ts TIMESTAMP TIME INDEX
);
Affected Rows: 0
INSERT INTO window_data VALUES
(1, 1, 100.0, 'A', 1000), (1, 2, 150.0, 'A', 2000), (1, 3, 120.0, 'B', 3000),
(2, 1, 200.0, 'A', 4000), (2, 2, 180.0, 'B', 5000), (2, 3, 220.0, 'A', 6000);
Affected Rows: 6
-- Window with complex partitioning and ordering
SELECT
group_id, seq_num, "value", category,
ROW_NUMBER() OVER (PARTITION BY group_id, category ORDER BY seq_num) as row_in_group_cat,
DENSE_RANK() OVER (PARTITION BY group_id ORDER BY "value" DESC) as value_rank,
LAG("value", 1, 0) OVER (PARTITION BY group_id ORDER BY seq_num) as prev_value
FROM window_data ORDER BY group_id, seq_num;
+----------+---------+-------+----------+------------------+------------+------------+
| group_id | seq_num | value | category | row_in_group_cat | value_rank | prev_value |
+----------+---------+-------+----------+------------------+------------+------------+
| 1 | 1 | 100.0 | A | 1 | 3 | 0.0 |
| 1 | 2 | 150.0 | A | 2 | 1 | 100.0 |
| 1 | 3 | 120.0 | B | 1 | 2 | 150.0 |
| 2 | 1 | 200.0 | A | 1 | 2 | 0.0 |
| 2 | 2 | 180.0 | B | 1 | 3 | 200.0 |
| 2 | 3 | 220.0 | A | 2 | 1 | 180.0 |
+----------+---------+-------+----------+------------------+------------+------------+
-- Running calculations with frames
SELECT
group_id, seq_num, "value",
SUM("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as rolling_sum_2,
AVG("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as cumulative_avg,
MAX("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING) as max_next_2
FROM window_data ORDER BY group_id, seq_num;
+----------+---------+-------+---------------+--------------------+------------+
| group_id | seq_num | value | rolling_sum_2 | cumulative_avg | max_next_2 |
+----------+---------+-------+---------------+--------------------+------------+
| 1 | 1 | 100.0 | 100.0 | 100.0 | 150.0 |
| 1 | 2 | 150.0 | 250.0 | 125.0 | 150.0 |
| 1 | 3 | 120.0 | 270.0 | 123.33333333333333 | 120.0 |
| 2 | 1 | 200.0 | 200.0 | 200.0 | 200.0 |
| 2 | 2 | 180.0 | 380.0 | 190.0 | 220.0 |
| 2 | 3 | 220.0 | 400.0 | 200.0 | 220.0 |
+----------+---------+-------+---------------+--------------------+------------+
-- Percentage calculations
SELECT
group_id, "value", category,
"value" / SUM("value") OVER (PARTITION BY group_id) * 100 as pct_of_group,
PERCENT_RANK() OVER (ORDER BY "value") as percentile_rank,
NTILE(3) OVER (ORDER BY "value") as tertile
FROM window_data ORDER BY "value";
+----------+-------+----------+--------------------+-----------------+---------+
| group_id | value | category | pct_of_group | percentile_rank | tertile |
+----------+-------+----------+--------------------+-----------------+---------+
| 1 | 100.0 | A | 27.027027027027028 | 0.0 | 1 |
| 1 | 120.0 | B | 32.432432432432435 | 0.2 | 1 |
| 1 | 150.0 | A | 40.54054054054054 | 0.4 | 2 |
| 2 | 180.0 | B | 30.0 | 0.6 | 2 |
| 2 | 200.0 | A | 33.33333333333333 | 0.8 | 3 |
| 2 | 220.0 | A | 36.666666666666664 | 1.0 | 3 |
+----------+-------+----------+--------------------+-----------------+---------+
DROP TABLE window_data;
Affected Rows: 0
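
Cross-check of pct_of_group above: group 1 totals 100 + 150 + 120 = 370, so its rows contribute
100/370 ≈ 27.03 %, 120/370 ≈ 32.43 %, and 150/370 ≈ 40.54 %; group 2 totals 600, giving 180/600 = 30 %,
200/600 ≈ 33.33 %, and 220/600 ≈ 36.67 %, matching the table.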

@@ -0,0 +1,40 @@
-- Migrated from DuckDB test: Multiple advanced window function tests
-- Tests complex window function scenarios
CREATE TABLE window_data(
group_id INTEGER,
seq_num INTEGER,
"value" DOUBLE,
category VARCHAR,
ts TIMESTAMP TIME INDEX
);
INSERT INTO window_data VALUES
(1, 1, 100.0, 'A', 1000), (1, 2, 150.0, 'A', 2000), (1, 3, 120.0, 'B', 3000),
(2, 1, 200.0, 'A', 4000), (2, 2, 180.0, 'B', 5000), (2, 3, 220.0, 'A', 6000);
-- Window with complex partitioning and ordering
SELECT
group_id, seq_num, "value", category,
ROW_NUMBER() OVER (PARTITION BY group_id, category ORDER BY seq_num) as row_in_group_cat,
DENSE_RANK() OVER (PARTITION BY group_id ORDER BY "value" DESC) as value_rank,
LAG("value", 1, 0) OVER (PARTITION BY group_id ORDER BY seq_num) as prev_value
FROM window_data ORDER BY group_id, seq_num;
-- Running calculations with frames
SELECT
group_id, seq_num, "value",
SUM("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as rolling_sum_2,
AVG("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as cumulative_avg,
MAX("value") OVER (PARTITION BY group_id ORDER BY seq_num ROWS BETWEEN CURRENT ROW AND 1 FOLLOWING) as max_next_2
FROM window_data ORDER BY group_id, seq_num;
-- Percentage calculations
SELECT
group_id, "value", category,
"value" / SUM("value") OVER (PARTITION BY group_id) * 100 as pct_of_group,
PERCENT_RANK() OVER (ORDER BY "value") as percentile_rank,
NTILE(3) OVER (ORDER BY "value") as tertile
FROM window_data ORDER BY "value";
DROP TABLE window_data;

@@ -0,0 +1,82 @@
-- Migrated from DuckDB test: Multiple window aggregate tests
-- Tests window aggregate functions
CREATE TABLE sales(region VARCHAR, "quarter" INTEGER, amount INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO sales VALUES
('North', 1, 1000, 1000), ('North', 2, 1200, 2000), ('North', 3, 1100, 3000),
('South', 1, 800, 4000), ('South', 2, 900, 5000), ('South', 3, 950, 6000);
Affected Rows: 6
-- Running totals with SUM window function
SELECT region, "quarter", amount,
SUM(amount) OVER (PARTITION BY region ORDER BY "quarter") as running_total
FROM sales ORDER BY region, "quarter";
+--------+---------+--------+---------------+
| region | quarter | amount | running_total |
+--------+---------+--------+---------------+
| North | 1 | 1000 | 1000 |
| North | 2 | 1200 | 2200 |
| North | 3 | 1100 | 3300 |
| South | 1 | 800 | 800 |
| South | 2 | 900 | 1700 |
| South | 3 | 950 | 2650 |
+--------+---------+--------+---------------+
-- Moving averages with AVG window function
SELECT region, "quarter", amount,
AVG(amount) OVER (PARTITION BY region ORDER BY "quarter" ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as moving_avg
FROM sales ORDER BY region, "quarter";
+--------+---------+--------+------------+
| region | quarter | amount | moving_avg |
+--------+---------+--------+------------+
| North | 1 | 1000 | 1000.0 |
| North | 2 | 1200 | 1100.0 |
| North | 3 | 1100 | 1150.0 |
| South | 1 | 800 | 800.0 |
| South | 2 | 900 | 850.0 |
| South | 3 | 950 | 925.0 |
+--------+---------+--------+------------+
-- MIN/MAX window functions
SELECT region, "quarter", amount,
MIN(amount) OVER (PARTITION BY region) as min_amount,
MAX(amount) OVER (PARTITION BY region) as max_amount
FROM sales ORDER BY region, "quarter";
+--------+---------+--------+------------+------------+
| region | quarter | amount | min_amount | max_amount |
+--------+---------+--------+------------+------------+
| North | 1 | 1000 | 1000 | 1200 |
| North | 2 | 1200 | 1000 | 1200 |
| North | 3 | 1100 | 1000 | 1200 |
| South | 1 | 800 | 800 | 950 |
| South | 2 | 900 | 800 | 950 |
| South | 3 | 950 | 800 | 950 |
+--------+---------+--------+------------+------------+
-- COUNT window function
SELECT region, "quarter",
COUNT(*) OVER (PARTITION BY region) as region_count,
COUNT(*) OVER () as total_count
FROM sales ORDER BY region, "quarter";
+--------+---------+--------------+-------------+
| region | quarter | region_count | total_count |
+--------+---------+--------------+-------------+
| North | 1 | 3 | 6 |
| North | 2 | 3 | 6 |
| North | 3 | 3 | 6 |
| South | 1 | 3 | 6 |
| South | 2 | 3 | 6 |
| South | 3 | 3 | 6 |
+--------+---------+--------------+-------------+
DROP TABLE sales;
Affected Rows: 0
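
Note (illustrative, not part of the migrated test): the running_total query relies on the fact that an
OVER clause with ORDER BY but no explicit frame defaults to RANGE BETWEEN UNBOUNDED PRECEDING AND
CURRENT ROW. Assuming that standard default, spelling the frame out should produce the same column:
SELECT region, "quarter", amount,
    SUM(amount) OVER (PARTITION BY region ORDER BY "quarter"
                      RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as running_total
FROM sales ORDER BY region, "quarter";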

@@ -0,0 +1,32 @@
-- Migrated from DuckDB test: Multiple window aggregate tests
-- Tests window aggregate functions
CREATE TABLE sales(region VARCHAR, "quarter" INTEGER, amount INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO sales VALUES
('North', 1, 1000, 1000), ('North', 2, 1200, 2000), ('North', 3, 1100, 3000),
('South', 1, 800, 4000), ('South', 2, 900, 5000), ('South', 3, 950, 6000);
-- Running totals with SUM window function
SELECT region, "quarter", amount,
SUM(amount) OVER (PARTITION BY region ORDER BY "quarter") as running_total
FROM sales ORDER BY region, "quarter";
-- Moving averages with AVG window function
SELECT region, "quarter", amount,
AVG(amount) OVER (PARTITION BY region ORDER BY "quarter" ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as moving_avg
FROM sales ORDER BY region, "quarter";
-- MIN/MAX window functions
SELECT region, "quarter", amount,
MIN(amount) OVER (PARTITION BY region) as min_amount,
MAX(amount) OVER (PARTITION BY region) as max_amount
FROM sales ORDER BY region, "quarter";
-- COUNT window function
SELECT region, "quarter",
COUNT(*) OVER (PARTITION BY region) as region_count,
COUNT(*) OVER () as total_count
FROM sales ORDER BY region, "quarter";
DROP TABLE sales;

@@ -0,0 +1,58 @@
-- Migrated from DuckDB test: test/sql/window/test_window_rows.test
-- Tests window frame specifications
CREATE TABLE t3(a VARCHAR, b VARCHAR, c INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO t3 VALUES
('AA', 'aa', 100, 1000), ('BB', 'aa', 200, 2000), ('CC', 'bb', 300, 3000),
('DD', 'aa', 400, 4000), ('EE', 'bb', 500, 5000);
Affected Rows: 5
-- Window with ROWS frame
SELECT a, c, SUM(c) OVER (ORDER BY c ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING) as sum_val
FROM t3 ORDER BY c;
+----+-----+---------+
| a | c | sum_val |
+----+-----+---------+
| AA | 100 | 300 |
| BB | 200 | 600 |
| CC | 300 | 900 |
| DD | 400 | 1200 |
| EE | 500 | 900 |
+----+-----+---------+
-- Window with UNBOUNDED frame
SELECT a, c, SUM(c) OVER (ORDER BY c ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as cumsum
FROM t3 ORDER BY c;
+----+-----+--------+
| a | c | cumsum |
+----+-----+--------+
| AA | 100 | 100 |
| BB | 200 | 300 |
| CC | 300 | 600 |
| DD | 400 | 1000 |
| EE | 500 | 1500 |
+----+-----+--------+
-- Window with partition and frame
SELECT a, b, c, AVG(c) OVER (PARTITION BY b ORDER BY c ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as avg_val
FROM t3 ORDER BY b, c;
+----+----+-----+---------+
| a | b | c | avg_val |
+----+----+-----+---------+
| AA | aa | 100 | 100.0 |
| BB | aa | 200 | 150.0 |
| DD | aa | 400 | 300.0 |
| CC | bb | 300 | 300.0 |
| EE | bb | 500 | 400.0 |
+----+----+-----+---------+
DROP TABLE t3;
Affected Rows: 0
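
Cross-check of the ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING frame above: for c = 300 the frame covers
200, 300, and 400, so sum_val = 900; at the edges the frame is clipped, so c = 100 sums 100 + 200 = 300
and c = 500 sums 400 + 500 = 900, matching the first table.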

@@ -0,0 +1,22 @@
-- Migrated from DuckDB test: test/sql/window/test_window_rows.test
-- Tests window frame specifications
CREATE TABLE t3(a VARCHAR, b VARCHAR, c INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO t3 VALUES
('AA', 'aa', 100, 1000), ('BB', 'aa', 200, 2000), ('CC', 'bb', 300, 3000),
('DD', 'aa', 400, 4000), ('EE', 'bb', 500, 5000);
-- Window with ROWS frame
SELECT a, c, SUM(c) OVER (ORDER BY c ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING) as sum_val
FROM t3 ORDER BY c;
-- Window with UNBOUNDED frame
SELECT a, c, SUM(c) OVER (ORDER BY c ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as cumsum
FROM t3 ORDER BY c;
-- Window with partition and frame
SELECT a, b, c, AVG(c) OVER (PARTITION BY b ORDER BY c ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) as avg_val
FROM t3 ORDER BY b, c;
DROP TABLE t3;

@@ -0,0 +1,79 @@
-- Migrated from DuckDB test: Multiple window tests with NULL handling
-- Tests window functions with NULL values
CREATE TABLE null_test("id" INTEGER, val INTEGER, ts TIMESTAMP TIME INDEX);
Affected Rows: 0
INSERT INTO null_test VALUES
(1, 100, 1000), (2, NULL, 2000), (3, 300, 3000), (4, NULL, 4000), (5, 500, 5000);
Affected Rows: 5
-- Window functions with NULL values
SELECT "id", val,
ROW_NUMBER() OVER (ORDER BY val NULLS LAST) as row_num,
RANK() OVER (ORDER BY val NULLS LAST) as rank_val
FROM null_test ORDER BY "id";
+----+-----+---------+----------+
| id | val | row_num | rank_val |
+----+-----+---------+----------+
| 1 | 100 | 1 | 1 |
| 2 | | 4 | 4 |
| 3 | 300 | 2 | 2 |
| 4 | | 5 | 4 |
| 5 | 500 | 3 | 3 |
+----+-----+---------+----------+
-- LEAD/LAG with NULL values
SELECT "id", val,
LAG(val, 1) OVER (ORDER BY "id") as prev_val,
LEAD(val, 1) OVER (ORDER BY "id") as next_val
FROM null_test ORDER BY "id";
+----+-----+----------+----------+
| id | val | prev_val | next_val |
+----+-----+----------+----------+
| 1 | 100 | | |
| 2 | | 100 | 300 |
| 3 | 300 | | |
| 4 | | 300 | 500 |
| 5 | 500 | | |
+----+-----+----------+----------+
-- Aggregate window functions with NULL
SELECT "id", val,
SUM(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as running_sum,
COUNT(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as running_count
FROM null_test ORDER BY "id";
+----+-----+-------------+---------------+
| id | val | running_sum | running_count |
+----+-----+-------------+---------------+
| 1 | 100 | 100 | 1 |
| 2 | | 100 | 1 |
| 3 | 300 | 400 | 2 |
| 4 | | 400 | 2 |
| 5 | 500 | 900 | 3 |
+----+-----+-------------+---------------+
-- FIRST_VALUE/LAST_VALUE with NULL
SELECT "id", val,
FIRST_VALUE(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as first_val,
LAST_VALUE(val) OVER (ORDER BY "id" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as last_val
FROM null_test ORDER BY "id";
+----+-----+-----------+----------+
| id | val | first_val | last_val |
+----+-----+-----------+----------+
| 1 | 100 | 100 | 100 |
| 2 | | 100 | |
| 3 | 300 | 100 | 300 |
| 4 | | 100 | |
| 5 | 500 | 100 | 500 |
+----+-----+-----------+----------+
DROP TABLE null_test;
Affected Rows: 0
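
Note (illustrative, not part of the migrated test): COUNT(val) skips NULLs while COUNT(*) counts every
row, which is why running_count above reaches only 3 across 5 rows; SUM likewise ignores the NULL
values. A sketch contrasting the two counts:
SELECT "id", val,
    COUNT(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as non_null_count,
    COUNT(*) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as all_rows_count
FROM null_test ORDER BY "id";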

@@ -0,0 +1,33 @@
-- Migrated from DuckDB test: Multiple window tests with NULL handling
-- Tests window functions with NULL values
CREATE TABLE null_test("id" INTEGER, val INTEGER, ts TIMESTAMP TIME INDEX);
INSERT INTO null_test VALUES
(1, 100, 1000), (2, NULL, 2000), (3, 300, 3000), (4, NULL, 4000), (5, 500, 5000);
-- Window functions with NULL values
SELECT "id", val,
ROW_NUMBER() OVER (ORDER BY val NULLS LAST) as row_num,
RANK() OVER (ORDER BY val NULLS LAST) as rank_val
FROM null_test ORDER BY "id";
-- LEAD/LAG with NULL values
SELECT "id", val,
LAG(val, 1) OVER (ORDER BY "id") as prev_val,
LEAD(val, 1) OVER (ORDER BY "id") as next_val
FROM null_test ORDER BY "id";
-- Aggregate window functions with NULL
SELECT "id", val,
SUM(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as running_sum,
COUNT(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as running_count
FROM null_test ORDER BY "id";
-- FIRST_VALUE/LAST_VALUE with NULL
SELECT "id", val,
FIRST_VALUE(val) OVER (ORDER BY "id" ROWS UNBOUNDED PRECEDING) as first_val,
LAST_VALUE(val) OVER (ORDER BY "id" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as last_val
FROM null_test ORDER BY "id";
DROP TABLE null_test;