Mirror of https://github.com/GreptimeTeam/greptimedb.git, synced 2026-01-05 04:42:56 +00:00

Compare commits: oldrelease...v0.14.0 (3 commits)
| Author | SHA1       | Date |
|--------|------------|------|
|        | 66e2242e46 |      |
|        | 489b16ae30 |      |
|        | 85d564b0fb |      |
Cargo.lock (generated): 85 changed lines
@@ -1619,9 +1619,9 @@ dependencies = [

 [[package]]
 name = "cc"
-version = "1.1.24"
+version = "1.2.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
+checksum = "04da6a0d40b948dfc4fa8f5bbf402b0fc1a64a28dbf7d12ffd683550f2c1b63a"
 dependencies = [
  "jobserver",
  "libc",
@@ -2510,7 +2510,7 @@ dependencies = [
  "futures-util",
  "serde",
  "snafu 0.8.5",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "sqlparser_derive 0.1.1",
  "statrs",
  "store-api",
@@ -2946,9 +2946,9 @@ dependencies = [

 [[package]]
 name = "crossbeam-channel"
-version = "0.5.13"
+version = "0.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
 dependencies = [
  "crossbeam-utils",
 ]
@@ -3117,7 +3117,7 @@ checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 [[package]]
 name = "datafusion"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-array 54.2.1",
@@ -3168,7 +3168,7 @@ dependencies = [
 [[package]]
 name = "datafusion-catalog"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "async-trait",
@@ -3188,7 +3188,7 @@ dependencies = [
 [[package]]
 name = "datafusion-catalog-listing"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-schema 54.3.1",
@@ -3211,7 +3211,7 @@ dependencies = [
 [[package]]
 name = "datafusion-common"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3236,7 +3236,7 @@ dependencies = [
 [[package]]
 name = "datafusion-common-runtime"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "log",
  "tokio",
@@ -3245,12 +3245,12 @@ dependencies = [
 [[package]]
 name = "datafusion-doc"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"

 [[package]]
 name = "datafusion-execution"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "dashmap",
@@ -3268,7 +3268,7 @@ dependencies = [
 [[package]]
 name = "datafusion-expr"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "chrono",
@@ -3288,7 +3288,7 @@ dependencies = [
 [[package]]
 name = "datafusion-expr-common"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "datafusion-common",
@@ -3299,7 +3299,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-buffer 54.3.1",
@@ -3328,7 +3328,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-aggregate"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3349,7 +3349,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-aggregate-common"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3361,7 +3361,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-nested"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-array 54.2.1",
@@ -3383,7 +3383,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-table"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "async-trait",
@@ -3398,7 +3398,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-window"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "datafusion-common",
  "datafusion-doc",
@@ -3414,7 +3414,7 @@ dependencies = [
 [[package]]
 name = "datafusion-functions-window-common"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "datafusion-common",
  "datafusion-physical-expr-common",
@@ -3423,7 +3423,7 @@ dependencies = [
 [[package]]
 name = "datafusion-macros"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "datafusion-expr",
  "quote",
@@ -3433,7 +3433,7 @@ dependencies = [
 [[package]]
 name = "datafusion-optimizer"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "chrono",
@@ -3451,7 +3451,7 @@ dependencies = [
 [[package]]
 name = "datafusion-physical-expr"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3474,7 +3474,7 @@ dependencies = [
 [[package]]
 name = "datafusion-physical-expr-common"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3487,7 +3487,7 @@ dependencies = [
 [[package]]
 name = "datafusion-physical-optimizer"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-schema 54.3.1",
@@ -3508,7 +3508,7 @@ dependencies = [
 [[package]]
 name = "datafusion-physical-plan"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "ahash 0.8.11",
  "arrow 54.2.1",
@@ -3538,7 +3538,7 @@ dependencies = [
 [[package]]
 name = "datafusion-sql"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "arrow 54.2.1",
  "arrow-array 54.2.1",
@@ -3556,7 +3556,7 @@ dependencies = [
 [[package]]
 name = "datafusion-substrait"
 version = "45.0.0"
-source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=5bbedc6704162afb03478f56ffb629405a4e1220#5bbedc6704162afb03478f56ffb629405a4e1220"
+source = "git+https://github.com/waynexia/arrow-datafusion.git?rev=e104c7cf62b11dd5fe41461b82514978234326b4#e104c7cf62b11dd5fe41461b82514978234326b4"
 dependencies = [
  "async-recursion",
  "async-trait",
@@ -3656,7 +3656,7 @@ dependencies = [
  "serde",
  "serde_json",
  "snafu 0.8.5",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "sqlparser_derive 0.1.1",
 ]

@@ -4553,7 +4553,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "store-api",
  "strfmt",
  "substrait 0.14.0",
@@ -6509,7 +6509,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
- "windows-targets 0.52.6",
+ "windows-targets 0.48.5",
 ]

 [[package]]
@@ -8166,7 +8166,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "store-api",
  "substrait 0.14.0",
  "table",
@@ -8443,7 +8443,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "store-api",
  "table",
 ]
@@ -9477,7 +9477,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "statrs",
  "store-api",
  "substrait 0.14.0",
@@ -10005,15 +10005,14 @@ dependencies = [

 [[package]]
 name = "ring"
-version = "0.17.8"
+version = "0.17.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
 dependencies = [
  "cc",
  "cfg-if",
  "getrandom 0.2.15",
  "libc",
- "spin",
  "untrusted",
  "windows-sys 0.52.0",
 ]
@@ -11304,7 +11303,7 @@ dependencies = [
  "serde",
  "serde_json",
  "snafu 0.8.5",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "sqlparser_derive 0.1.1",
  "store-api",
  "table",
@@ -11373,7 +11372,7 @@ dependencies = [
 [[package]]
 name = "sqlparser"
 version = "0.54.0"
-source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089#e98e6b322426a9d397a71efef17075966223c089"
+source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e#0cf6c04490d59435ee965edd2078e8855bd8471e"
 dependencies = [
  "lazy_static",
  "log",
@@ -11381,7 +11380,7 @@ dependencies = [
  "regex",
  "serde",
  "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "sqlparser_derive 0.3.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser_derive 0.3.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
 ]

 [[package]]
@@ -11409,7 +11408,7 @@ dependencies = [
 [[package]]
 name = "sqlparser_derive"
 version = "0.3.0"
-source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089#e98e6b322426a9d397a71efef17075966223c089"
+source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e#0cf6c04490d59435ee965edd2078e8855bd8471e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -12264,7 +12263,7 @@ dependencies = [
  "serde_yaml",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "sqlx",
  "store-api",
  "strum 0.27.1",
Cargo.toml: 20 changed lines
@@ -112,15 +112,15 @@ clap = { version = "4.4", features = ["derive"] }
 config = "0.13.0"
 crossbeam-utils = "0.8"
 dashmap = "6.1"
-datafusion = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-common = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-functions = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-optimizer = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-physical-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-physical-plan = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-sql = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
-datafusion-substrait = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "5bbedc6704162afb03478f56ffb629405a4e1220" }
+datafusion = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-common = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-functions = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-optimizer = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-physical-expr = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-physical-plan = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-sql = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
+datafusion-substrait = { git = "https://github.com/waynexia/arrow-datafusion.git", rev = "e104c7cf62b11dd5fe41461b82514978234326b4" }
 deadpool = "0.12"
 deadpool-postgres = "0.14"
 derive_builder = "0.20"
@@ -191,7 +191,7 @@ simd-json = "0.15"
 similar-asserts = "1.6.0"
 smallvec = { version = "1", features = ["serde"] }
 snafu = "0.8"
-sqlparser = { git = "https://github.com/GreptimeTeam/sqlparser-rs.git", rev = "e98e6b322426a9d397a71efef17075966223c089", features = [
+sqlparser = { git = "https://github.com/GreptimeTeam/sqlparser-rs.git", rev = "0cf6c04490d59435ee965edd2078e8855bd8471e", features = [
     "visitor",
     "serde",
 ] } # branch = "v0.54.x"
@@ -115,6 +115,13 @@ impl Function for UddSketchCalcFunction {
                 }
             };

+            // Check if the sketch is empty, if so, return null
+            // This is important to avoid panics when calling estimate_quantile on an empty sketch
+            // In practice, this will happen if input is all null
+            if sketch.bucket_iter().count() == 0 {
+                builder.push_null();
+                continue;
+            }
             // Compute the estimated quantile from the sketch
             let result = sketch.estimate_quantile(perc);
             builder.push(Some(result));
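For context on the guard just added: a column of all-null input yields an empty sketch, and estimating a quantile over it would panic. A minimal, self-contained sketch of the same check-then-compute pattern on plain Rust types (the helper below is illustrative, not GreptimeDB's UDDSketch API):

```rust
// Hypothetical stand-in for `sketch.estimate_quantile(perc)`: returns None
// for empty input instead of panicking, mirroring `builder.push_null()`.
fn estimate_quantile(samples: &[f64], perc: f64) -> Option<f64> {
    // The guard: an empty "sketch" has no quantiles to estimate.
    if samples.is_empty() {
        return None;
    }
    let mut sorted = samples.to_vec();
    sorted.sort_by(|a, b| a.total_cmp(b));
    let idx = ((sorted.len() - 1) as f64 * perc).round() as usize;
    Some(sorted[idx])
}

fn main() {
    assert_eq!(estimate_quantile(&[], 0.99), None); // all-null input case
    assert_eq!(estimate_quantile(&[1.0, 2.0, 3.0], 0.5), Some(2.0));
}
```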
@@ -32,3 +32,9 @@ pub const SLOW_QUERY_THRESHOLD: Duration = Duration::from_secs(60);

 /// The minimum duration between two queries execution by batching mode task
 const MIN_REFRESH_DURATION: Duration = Duration::new(5, 0);
+
+/// Grpc connection timeout
+const GRPC_CONN_TIMEOUT: Duration = Duration::from_secs(5);
+
+/// Grpc max retry number
+const GRPC_MAX_RETRIES: u32 = 3;
@@ -25,12 +25,15 @@ use common_meta::cluster::{NodeInfo, NodeInfoKey, Role};
 use common_meta::peer::Peer;
 use common_meta::rpc::store::RangeRequest;
 use common_query::Output;
+use common_telemetry::warn;
 use meta_client::client::MetaClient;
 use servers::query_handler::grpc::GrpcQueryHandler;
 use session::context::{QueryContextBuilder, QueryContextRef};
 use snafu::{OptionExt, ResultExt};

-use crate::batching_mode::DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT;
+use crate::batching_mode::{
+    DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT, GRPC_CONN_TIMEOUT, GRPC_MAX_RETRIES,
+};
 use crate::error::{ExternalSnafu, InvalidRequestSnafu, UnexpectedSnafu};
 use crate::Error;
@@ -99,7 +102,9 @@ impl FrontendClient {
         Self::Distributed {
             meta_client,
             chnl_mgr: {
-                let cfg = ChannelConfig::new().timeout(DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT);
+                let cfg = ChannelConfig::new()
+                    .connect_timeout(GRPC_CONN_TIMEOUT)
+                    .timeout(DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT);
                 ChannelManager::with_config(cfg)
             },
         }
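The channel now carries two separate budgets: GRPC_CONN_TIMEOUT (5 s) bounds establishing the connection, while the pre-existing timeout still bounds each request. A toy builder illustrating the distinction (ChannelConfig itself is GreptimeDB's; this mirror of its shape is only for illustration, and the 600 s value is a placeholder for DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT, which is defined elsewhere):

```rust
use std::time::Duration;

// Illustrative stand-in for the real ChannelConfig builder.
#[derive(Debug, Default)]
struct ToyChannelConfig {
    connect_timeout: Option<Duration>, // how long dialing the peer may take
    timeout: Option<Duration>,         // how long a single request may take
}

impl ToyChannelConfig {
    fn connect_timeout(mut self, d: Duration) -> Self {
        self.connect_timeout = Some(d);
        self
    }
    fn timeout(mut self, d: Duration) -> Self {
        self.timeout = Some(d);
        self
    }
}

fn main() {
    // Mirrors the new call site: a short dial budget, a long query budget.
    let cfg = ToyChannelConfig::default()
        .connect_timeout(Duration::from_secs(5))   // GRPC_CONN_TIMEOUT
        .timeout(Duration::from_secs(600));        // placeholder query timeout
    println!("{cfg:?}");
}
```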
@@ -223,12 +228,32 @@ impl FrontendClient {
                     peer: db.peer.clone(),
                 });

-                db.database
-                    .handle(req.clone())
-                    .await
-                    .with_context(|_| InvalidRequestSnafu {
-                        context: format!("Failed to handle request: {:?}", req),
-                    })
+                let mut retry = 0;
+
+                loop {
+                    let ret = db.database.handle(req.clone()).await.with_context(|_| {
+                        InvalidRequestSnafu {
+                            context: format!("Failed to handle request: {:?}", req),
+                        }
+                    });
+                    if let Err(err) = ret {
+                        if retry < GRPC_MAX_RETRIES {
+                            retry += 1;
+                            warn!(
+                                "Failed to send request to grpc handle at Peer={:?}, retry = {}, error = {:?}",
+                                db.peer, retry, err
+                            );
+                            continue;
+                        } else {
+                            common_telemetry::error!(
+                                "Failed to send request to grpc handle at Peer={:?} after {} retries, error = {:?}",
+                                db.peer, retry, err
+                            );
+                            return Err(err);
+                        }
+                    }
+                    return ret;
+                }
             }
             FrontendClient::Standalone { database_client } => {
                 let ctx = QueryContextBuilder::default()
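The replacement turns a single gRPC call into a bounded retry loop: up to GRPC_MAX_RETRIES immediate re-attempts, a warn! per failure, then an error! and the final Err. A condensed synchronous sketch of that control flow (send is a hypothetical stand-in for db.database.handle(req)):

```rust
const MAX_RETRIES: u32 = 3; // plays the role of GRPC_MAX_RETRIES

// Hypothetical stand-in for `db.database.handle(req).await`;
// here the first two attempts fail, the third succeeds.
fn send(attempt: u32) -> Result<&'static str, String> {
    if attempt < 2 {
        Err(format!("transient failure on attempt {attempt}"))
    } else {
        Ok("response")
    }
}

fn call_with_retry() -> Result<&'static str, String> {
    let mut retry = 0;
    loop {
        match send(retry) {
            Ok(resp) => return Ok(resp),
            Err(err) if retry < MAX_RETRIES => {
                retry += 1;
                // warn! in the committed code
                eprintln!("request failed, retry = {retry}, error = {err}");
            }
            // retries exhausted: error! and return the failure
            Err(err) => return Err(err),
        }
    }
}

fn main() {
    assert_eq!(call_with_retry(), Ok("response"));
}
```

Note that the committed loop retries immediately, with no backoff between attempts.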
@@ -53,6 +53,7 @@ use crate::batching_mode::utils::{
 use crate::batching_mode::{
     DEFAULT_BATCHING_ENGINE_QUERY_TIMEOUT, MIN_REFRESH_DURATION, SLOW_QUERY_THRESHOLD,
 };
+use crate::df_optimizer::apply_df_optimizer;
 use crate::error::{
     ConvertColumnSchemaSnafu, DatafusionSnafu, ExternalSnafu, InvalidQuerySnafu,
     SubstraitEncodeLogicalPlanSnafu, UnexpectedSnafu,
@@ -541,7 +542,10 @@ impl BatchingTask {
             .clone()
             .rewrite(&mut add_auto_column)
             .with_context(|_| DatafusionSnafu {
-                context: format!("Failed to rewrite plan {:?}", self.config.plan),
+                context: format!(
+                    "Failed to rewrite plan:\n {}\n",
+                    self.config.plan
+                ),
             })?
             .data;
         let schema_len = plan.schema().fields().len();
@@ -573,16 +577,19 @@ impl BatchingTask {

         let mut add_filter = AddFilterRewriter::new(expr);
         let mut add_auto_column = AddAutoColumnRewriter::new(sink_table_schema.clone());
+        // make a not optimized plan for clearer unparse

         let plan = sql_to_df_plan(query_ctx.clone(), engine.clone(), &self.config.query, false)
             .await?;
-        plan.clone()
+        let rewrite = plan
+            .clone()
             .rewrite(&mut add_filter)
             .and_then(|p| p.data.rewrite(&mut add_auto_column))
             .with_context(|_| DatafusionSnafu {
-                context: format!("Failed to rewrite plan {plan:?}"),
+                context: format!("Failed to rewrite plan:\n {}\n", plan),
             })?
-            .data
+            .data;
+        // only apply optimize after complex rewrite is done
+        apply_df_optimizer(rewrite).await?
     };

     Ok(Some((new_plan, schema_len)))
@@ -704,6 +704,28 @@ mod test {
             ),
             "SELECT arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)') AS time_window FROM numbers_with_ts WHERE ((ts >= CAST('2025-02-24 10:48:00' AS TIMESTAMP)) AND (ts <= CAST('2025-02-24 10:49:00' AS TIMESTAMP))) GROUP BY arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)')"
         ),
+        // complex time window index with where
+        (
+            "SELECT arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)') AS time_window FROM numbers_with_ts WHERE number in (2, 3, 4) GROUP BY time_window;",
+            Timestamp::new(1740394109, TimeUnit::Second),
+            (
+                "ts".to_string(),
+                Some(Timestamp::new(1740394080, TimeUnit::Second)),
+                Some(Timestamp::new(1740394140, TimeUnit::Second)),
+            ),
+            "SELECT arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)') AS time_window FROM numbers_with_ts WHERE numbers_with_ts.number IN (2, 3, 4) AND ((ts >= CAST('2025-02-24 10:48:00' AS TIMESTAMP)) AND (ts <= CAST('2025-02-24 10:49:00' AS TIMESTAMP))) GROUP BY arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)')"
+        ),
+        // complex time window index with between and
+        (
+            "SELECT arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)') AS time_window FROM numbers_with_ts WHERE number BETWEEN 2 AND 4 GROUP BY time_window;",
+            Timestamp::new(1740394109, TimeUnit::Second),
+            (
+                "ts".to_string(),
+                Some(Timestamp::new(1740394080, TimeUnit::Second)),
+                Some(Timestamp::new(1740394140, TimeUnit::Second)),
+            ),
+            "SELECT arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)') AS time_window FROM numbers_with_ts WHERE (numbers_with_ts.number BETWEEN 2 AND 4) AND ((ts >= CAST('2025-02-24 10:48:00' AS TIMESTAMP)) AND (ts <= CAST('2025-02-24 10:49:00' AS TIMESTAMP))) GROUP BY arrow_cast(date_bin(INTERVAL '1 MINS', numbers_with_ts.ts), 'Timestamp(Second, None)')"
+        ),
         // no time index
         (
             "SELECT date_bin('5 minutes', ts) FROM numbers_with_ts;",
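The expected rewrites pin down the arithmetic: the dirty timestamp 1740394109 s falls in the 1-minute bin [1740394080, 1740394140), which is exactly the ts filter spliced into the WHERE clause. The bound computation as plain integer math:

```rust
// Floor a timestamp to its date_bin-style window and return [start, end).
fn window_bounds(ts_secs: i64, stride_secs: i64) -> (i64, i64) {
    let start = ts_secs - ts_secs.rem_euclid(stride_secs);
    (start, start + stride_secs)
}

fn main() {
    // Matches Some(1740394080) / Some(1740394140) in the test expectations.
    assert_eq!(window_bounds(1740394109, 60), (1740394080, 1740394140));
}
```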
@@ -342,8 +342,8 @@ impl TreeNodeRewriter for AddAutoColumnRewriter {
             }
         } else {
             return Err(DataFusionError::Plan(format!(
-                "Expect table have 0,1 or 2 columns more than query columns, found {} query columns {:?}, {} table columns {:?} at node {:?}",
-                query_col_cnt, exprs, table_col_cnt, self.schema.column_schemas(), node
+                "Expect table have 0,1 or 2 columns more than query columns, found {} query columns {:?}, {} table columns {:?}",
+                query_col_cnt, exprs, table_col_cnt, self.schema.column_schemas()
             )));
         }

@@ -406,7 +406,9 @@ mod test {
     use datatypes::prelude::ConcreteDataType;
     use datatypes::schema::{ColumnSchema, Schema};
     use pretty_assertions::assert_eq;
+    use query::query_engine::DefaultSerializer;
     use session::context::QueryContext;
+    use substrait::{DFLogicalSubstraitConvertor, SubstraitPlan};

     use super::*;
     use crate::test_utils::create_test_query_engine;
@@ -701,4 +703,18 @@ mod test {
             );
         }
     }
+
+    #[tokio::test]
+    async fn test_null_cast() {
+        let query_engine = create_test_query_engine();
+        let ctx = QueryContext::arc();
+        let sql = "SELECT NULL::DOUBLE FROM numbers_with_ts";
+        let plan = sql_to_df_plan(ctx, query_engine.clone(), sql, false)
+            .await
+            .unwrap();
+
+        let _sub_plan = DFLogicalSubstraitConvertor {}
+            .encode(&plan, DefaultSerializer)
+            .unwrap();
+    }
 }
@@ -25,7 +25,6 @@ use datafusion::config::ConfigOptions;
 use datafusion::error::DataFusionError;
 use datafusion::functions_aggregate::count::count_udaf;
 use datafusion::functions_aggregate::sum::sum_udaf;
-use datafusion::optimizer::analyzer::count_wildcard_rule::CountWildcardRule;
 use datafusion::optimizer::analyzer::type_coercion::TypeCoercion;
 use datafusion::optimizer::common_subexpr_eliminate::CommonSubexprEliminate;
 use datafusion::optimizer::optimize_projections::OptimizeProjections;
@@ -42,6 +41,7 @@ use datafusion_expr::{
     BinaryExpr, ColumnarValue, Expr, Operator, Projection, ScalarFunctionArgs, ScalarUDFImpl,
     Signature, TypeSignature, Volatility,
 };
+use query::optimizer::count_wildcard::CountWildcardToTimeIndexRule;
 use query::parser::QueryLanguageParser;
 use query::query_engine::DefaultSerializer;
 use query::QueryEngine;
@@ -61,9 +61,9 @@ pub async fn apply_df_optimizer(
 ) -> Result<datafusion_expr::LogicalPlan, Error> {
     let cfg = ConfigOptions::new();
     let analyzer = Analyzer::with_rules(vec![
-        Arc::new(CountWildcardRule::new()),
-        Arc::new(AvgExpandRule::new()),
-        Arc::new(TumbleExpandRule::new()),
+        Arc::new(CountWildcardToTimeIndexRule),
+        Arc::new(AvgExpandRule),
+        Arc::new(TumbleExpandRule),
         Arc::new(CheckGroupByRule::new()),
         Arc::new(TypeCoercion::new()),
     ]);
@@ -128,13 +128,7 @@ pub async fn sql_to_flow_plan(
 }

 #[derive(Debug)]
-struct AvgExpandRule {}
-
-impl AvgExpandRule {
-    pub fn new() -> Self {
-        Self {}
-    }
-}
+struct AvgExpandRule;

 impl AnalyzerRule for AvgExpandRule {
     fn analyze(
@@ -331,13 +325,7 @@ impl TreeNodeRewriter for ExpandAvgRewriter<'_> {

 /// expand tumble in aggr expr to tumble_start and tumble_end with column name like `window_start`
 #[derive(Debug)]
-struct TumbleExpandRule {}
-
-impl TumbleExpandRule {
-    pub fn new() -> Self {
-        Self {}
-    }
-}
+struct TumbleExpandRule;

 impl AnalyzerRule for TumbleExpandRule {
     fn analyze(
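Both analyzer rules drop their empty braces and new() constructors in favor of unit structs, matching the new registration sites (Arc::new(AvgExpandRule) rather than Arc::new(AvgExpandRule::new())). The idiom in miniature:

```rust
// Old shape: a field-less struct still needs `{}` and a constructor.
struct BeforeRule {}
impl BeforeRule {
    fn new() -> Self {
        Self {}
    }
}

// New shape: a unit struct is its own value.
struct AfterRule;

fn main() {
    let _old = BeforeRule::new();
    let _new = AfterRule; // no constructor needed
}
```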
@@ -28,7 +28,7 @@ pub mod error;
 pub mod executor;
 pub mod log_query;
 pub mod metrics;
-mod optimizer;
+pub mod optimizer;
 pub mod options;
 pub mod parser;
 mod part_sort;
tests/cases/standalone/common/flow/flow_step_aggr.result (new file): 266 lines
@@ -0,0 +1,266 @@
CREATE TABLE access_log (
    "url" STRING,
    user_id BIGINT,
    ts TIMESTAMP TIME INDEX,
    PRIMARY KEY ("url", user_id)
);

Affected Rows: 0

CREATE TABLE access_log_10s (
    "url" STRING,
    time_window timestamp time INDEX,
    state BINARY,
    PRIMARY KEY ("url")
);

Affected Rows: 0

CREATE FLOW calc_access_log_10s SINK TO access_log_10s
AS
SELECT
    "url",
    date_bin('10s'::INTERVAL, ts) AS time_window,
    hll(user_id) AS state
FROM
    access_log
GROUP BY
    "url",
    time_window;

Affected Rows: 0

-- insert 5 rows of data
INSERT INTO access_log VALUES
    ("/dashboard", 1, "2025-03-04 00:00:00"),
    ("/dashboard", 1, "2025-03-04 00:00:01"),
    ("/dashboard", 2, "2025-03-04 00:00:05"),
    ("/not_found", 3, "2025-03-04 00:00:11"),
    ("/dashboard", 4, "2025-03-04 00:00:15");

Affected Rows: 5

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_access_log_10s');

+-----------------------------------------+
| ADMIN FLUSH_FLOW('calc_access_log_10s') |
+-----------------------------------------+
| FLOW_FLUSHED |
+-----------------------------------------+

-- query should return 3 rows
SELECT "url", time_window FROM access_log_10s
ORDER BY
    time_window;

+------------+---------------------+
| url        | time_window         |
+------------+---------------------+
| /dashboard | 2025-03-04T00:00:00 |
| /dashboard | 2025-03-04T00:00:10 |
| /not_found | 2025-03-04T00:00:10 |
+------------+---------------------+

-- use hll_count to query the approximate data in access_log_10s
SELECT "url", time_window, hll_count(state) FROM access_log_10s
ORDER BY
    time_window;

+------------+---------------------+----------------------------------+
| url        | time_window         | hll_count(access_log_10s.state)  |
+------------+---------------------+----------------------------------+
| /dashboard | 2025-03-04T00:00:00 | 2                                |
| /dashboard | 2025-03-04T00:00:10 | 1                                |
| /not_found | 2025-03-04T00:00:10 | 1                                |
+------------+---------------------+----------------------------------+

-- further, we can aggregate 10 seconds of data to every minute, by using hll_merge to merge 10 seconds of hyperloglog state
SELECT
    "url",
    date_bin('1 minute'::INTERVAL, time_window) AS time_window_1m,
    hll_count(hll_merge(state)) as uv_per_min
FROM
    access_log_10s
GROUP BY
    "url",
    time_window_1m
ORDER BY
    time_window_1m;

+------------+---------------------+------------+
| url        | time_window_1m      | uv_per_min |
+------------+---------------------+------------+
| /not_found | 2025-03-04T00:00:00 | 1          |
| /dashboard | 2025-03-04T00:00:00 | 3          |
+------------+---------------------+------------+

DROP FLOW calc_access_log_10s;

Affected Rows: 0

DROP TABLE access_log_10s;

Affected Rows: 0

DROP TABLE access_log;

Affected Rows: 0

CREATE TABLE percentile_base (
    "id" INT PRIMARY KEY,
    "value" DOUBLE,
    ts timestamp(0) time index
);

Affected Rows: 0

CREATE TABLE percentile_5s (
    "percentile_state" BINARY,
    time_window timestamp(0) time index
);

Affected Rows: 0

CREATE FLOW calc_percentile_5s SINK TO percentile_5s
AS
SELECT
    uddsketch_state(128, 0.01, "value") AS "value",
    date_bin('5 seconds'::INTERVAL, ts) AS time_window
FROM
    percentile_base
WHERE
    "value" > 0 AND "value" < 70
GROUP BY
    time_window;

Affected Rows: 0

INSERT INTO percentile_base ("id", "value", ts) VALUES
    (1, 10.0, 1),
    (2, 20.0, 2),
    (3, 30.0, 3),
    (4, 40.0, 4),
    (5, 50.0, 5),
    (6, 60.0, 6),
    (7, 70.0, 7),
    (8, 80.0, 8),
    (9, 90.0, 9),
    (10, 100.0, 10);

Affected Rows: 10

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_percentile_5s');

+----------------------------------------+
| ADMIN FLUSH_FLOW('calc_percentile_5s') |
+----------------------------------------+
| FLOW_FLUSHED |
+----------------------------------------+

SELECT
    time_window,
    uddsketch_calc(0.99, `percentile_state`) AS p99
FROM
    percentile_5s
ORDER BY
    time_window;

+---------------------+--------------------+
| time_window         | p99                |
+---------------------+--------------------+
| 1970-01-01T00:00:00 | 40.04777053326359  |
| 1970-01-01T00:00:05 | 59.745049810145126 |
+---------------------+--------------------+

DROP FLOW calc_percentile_5s;

Affected Rows: 0

DROP TABLE percentile_5s;

Affected Rows: 0

DROP TABLE percentile_base;

Affected Rows: 0

CREATE TABLE percentile_base (
    "id" INT PRIMARY KEY,
    "value" DOUBLE,
    ts timestamp(0) time index
);

Affected Rows: 0

CREATE TABLE percentile_5s (
    "percentile_state" BINARY,
    time_window timestamp(0) time index
);

Affected Rows: 0

CREATE FLOW calc_percentile_5s SINK TO percentile_5s
AS
SELECT
    uddsketch_state(128, 0.01, CASE WHEN "value" > 0 AND "value" < 70 THEN "value" ELSE NULL END) AS "value",
    date_bin('5 seconds'::INTERVAL, ts) AS time_window
FROM
    percentile_base
GROUP BY
    time_window;

Affected Rows: 0

INSERT INTO percentile_base ("id", "value", ts) VALUES
    (1, 10.0, 1),
    (2, 20.0, 2),
    (3, 30.0, 3),
    (4, 40.0, 4),
    (5, 50.0, 5),
    (6, 60.0, 6),
    (7, 70.0, 7),
    (8, 80.0, 8),
    (9, 90.0, 9),
    (10, 100.0, 10);

Affected Rows: 10

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_percentile_5s');

+----------------------------------------+
| ADMIN FLUSH_FLOW('calc_percentile_5s') |
+----------------------------------------+
| FLOW_FLUSHED |
+----------------------------------------+

SELECT
    time_window,
    uddsketch_calc(0.99, percentile_state) AS p99
FROM
    percentile_5s
ORDER BY
    time_window;

+---------------------+--------------------+
| time_window         | p99                |
+---------------------+--------------------+
| 1970-01-01T00:00:00 | 40.04777053326359  |
| 1970-01-01T00:00:05 | 59.745049810145126 |
| 1970-01-01T00:00:10 |                    |
+---------------------+--------------------+

DROP FLOW calc_percentile_5s;

Affected Rows: 0

DROP TABLE percentile_5s;

Affected Rows: 0

DROP TABLE percentile_base;

Affected Rows: 0
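The pattern exercised by this test is two-step aggregation over mergeable sketch states: hll() builds a per-10s partial state, hll_merge() unions those states into coarser windows, and hll_count() reads the distinct-count estimate out. A toy Rust illustration using exact HashSets in place of HyperLogLog (real HLL states are compact and approximate; the set here only shows the merge semantics):

```rust
use std::collections::HashSet;

// Per-window partial state: plays the role of hll(user_id).
fn state(user_ids: &[i64]) -> HashSet<i64> {
    user_ids.iter().copied().collect()
}

// Union of partial states: plays the role of hll_merge(state).
fn merge<'a>(states: impl IntoIterator<Item = &'a HashSet<i64>>) -> HashSet<i64> {
    let mut merged = HashSet::new();
    for s in states {
        merged.extend(s.iter().copied());
    }
    merged
}

fn main() {
    // /dashboard's two 10 s windows from the test data:
    let w0 = state(&[1, 1, 2]); // 00:00:00 bin -> users {1, 2}
    let w1 = state(&[4]);       // 00:00:10 bin -> users {4}
    // hll_count(hll_merge(state)) over the 1-minute bin is 3,
    // matching uv_per_min for /dashboard in the result file.
    assert_eq!(merge([&w0, &w1]).len(), 3);
}
```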
tests/cases/standalone/common/flow/flow_step_aggr.sql (new file): 161 lines
@@ -0,0 +1,161 @@
CREATE TABLE access_log (
    "url" STRING,
    user_id BIGINT,
    ts TIMESTAMP TIME INDEX,
    PRIMARY KEY ("url", user_id)
);

CREATE TABLE access_log_10s (
    "url" STRING,
    time_window timestamp time INDEX,
    state BINARY,
    PRIMARY KEY ("url")
);

CREATE FLOW calc_access_log_10s SINK TO access_log_10s
AS
SELECT
    "url",
    date_bin('10s'::INTERVAL, ts) AS time_window,
    hll(user_id) AS state
FROM
    access_log
GROUP BY
    "url",
    time_window;

-- insert 5 rows of data
INSERT INTO access_log VALUES
    ("/dashboard", 1, "2025-03-04 00:00:00"),
    ("/dashboard", 1, "2025-03-04 00:00:01"),
    ("/dashboard", 2, "2025-03-04 00:00:05"),
    ("/not_found", 3, "2025-03-04 00:00:11"),
    ("/dashboard", 4, "2025-03-04 00:00:15");

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_access_log_10s');

-- query should return 3 rows
SELECT "url", time_window FROM access_log_10s
ORDER BY
    time_window;

-- use hll_count to query the approximate data in access_log_10s
SELECT "url", time_window, hll_count(state) FROM access_log_10s
ORDER BY
    time_window;

-- further, we can aggregate 10 seconds of data to every minute, by using hll_merge to merge 10 seconds of hyperloglog state
SELECT
    "url",
    date_bin('1 minute'::INTERVAL, time_window) AS time_window_1m,
    hll_count(hll_merge(state)) as uv_per_min
FROM
    access_log_10s
GROUP BY
    "url",
    time_window_1m
ORDER BY
    time_window_1m;

DROP FLOW calc_access_log_10s;
DROP TABLE access_log_10s;
DROP TABLE access_log;

CREATE TABLE percentile_base (
    "id" INT PRIMARY KEY,
    "value" DOUBLE,
    ts timestamp(0) time index
);

CREATE TABLE percentile_5s (
    "percentile_state" BINARY,
    time_window timestamp(0) time index
);

CREATE FLOW calc_percentile_5s SINK TO percentile_5s
AS
SELECT
    uddsketch_state(128, 0.01, "value") AS "value",
    date_bin('5 seconds'::INTERVAL, ts) AS time_window
FROM
    percentile_base
WHERE
    "value" > 0 AND "value" < 70
GROUP BY
    time_window;

INSERT INTO percentile_base ("id", "value", ts) VALUES
    (1, 10.0, 1),
    (2, 20.0, 2),
    (3, 30.0, 3),
    (4, 40.0, 4),
    (5, 50.0, 5),
    (6, 60.0, 6),
    (7, 70.0, 7),
    (8, 80.0, 8),
    (9, 90.0, 9),
    (10, 100.0, 10);

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_percentile_5s');

SELECT
    time_window,
    uddsketch_calc(0.99, `percentile_state`) AS p99
FROM
    percentile_5s
ORDER BY
    time_window;

DROP FLOW calc_percentile_5s;
DROP TABLE percentile_5s;
DROP TABLE percentile_base;

CREATE TABLE percentile_base (
    "id" INT PRIMARY KEY,
    "value" DOUBLE,
    ts timestamp(0) time index
);

CREATE TABLE percentile_5s (
    "percentile_state" BINARY,
    time_window timestamp(0) time index
);

CREATE FLOW calc_percentile_5s SINK TO percentile_5s
AS
SELECT
    uddsketch_state(128, 0.01, CASE WHEN "value" > 0 AND "value" < 70 THEN "value" ELSE NULL END) AS "value",
    date_bin('5 seconds'::INTERVAL, ts) AS time_window
FROM
    percentile_base
GROUP BY
    time_window;

INSERT INTO percentile_base ("id", "value", ts) VALUES
    (1, 10.0, 1),
    (2, 20.0, 2),
    (3, 30.0, 3),
    (4, 40.0, 4),
    (5, 50.0, 5),
    (6, 60.0, 6),
    (7, 70.0, 7),
    (8, 80.0, 8),
    (9, 90.0, 9),
    (10, 100.0, 10);

-- SQLNESS REPLACE (ADMIN\sFLUSH_FLOW\('\w+'\)\s+\|\n\+-+\+\n\|\s+)[0-9]+\s+\| $1 FLOW_FLUSHED |
ADMIN FLUSH_FLOW('calc_percentile_5s');

SELECT
    time_window,
    uddsketch_calc(0.99, percentile_state) AS p99
FROM
    percentile_5s
ORDER BY
    time_window;

DROP FLOW calc_percentile_5s;
DROP TABLE percentile_5s;
DROP TABLE percentile_base;
@@ -54,7 +54,11 @@ Error: 2000(InvalidSyntax), Invalid SQL syntax: sql parser error: Can't use the
 -- 2.2 no align param
 SELECT min(val) RANGE '5s' FROM host;

-Error: 3000(PlanQuery), Error during planning: Missing argument in range select query
+Error: 2000(InvalidSyntax), Invalid SQL syntax: sql parser error: ALIGN argument cannot be omitted in the range select query
+
+SELECT min(val) RANGE '5s' FILL PREV FROM host;
+
+Error: 2000(InvalidSyntax), Invalid SQL syntax: sql parser error: ALIGN argument cannot be omitted in the range select query

 -- 2.3 type mismatch
 SELECT covar(ceil(val), floor(val)) RANGE '20s' FROM host ALIGN '10s';
@@ -40,6 +40,8 @@ SELECT 1 RANGE '10s' FILL NULL FROM host ALIGN '1h' FILL NULL;

 SELECT min(val) RANGE '5s' FROM host;

+SELECT min(val) RANGE '5s' FILL PREV FROM host;
+
 -- 2.3 type mismatch

 SELECT covar(ceil(val), floor(val)) RANGE '20s' FROM host ALIGN '10s';