Mirror of https://github.com/GreptimeTeam/greptimedb.git, synced 2025-12-23 14:40:01 +00:00

Compare commits: feature/df ... v0.14.1 (1 commit)

| Author | SHA1       | Date |
|--------|------------|------|
|        | e2df38d0d1 |      |

Cargo.lock (generated): 146 changed lines
@@ -185,7 +185,7 @@ checksum = "d301b3b94cb4b2f23d7917810addbbaff90738e0ca2be692bd027e70d7e0330c"

 [[package]]
 name = "api"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-base",
  "common-decimal",
@@ -915,7 +915,7 @@ dependencies = [

 [[package]]
 name = "auth"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -1537,7 +1537,7 @@ dependencies = [

 [[package]]
 name = "cache"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "catalog",
  "common-error",
@@ -1561,7 +1561,7 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"

 [[package]]
 name = "catalog"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arrow 54.2.1",
@@ -1874,7 +1874,7 @@ checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"

 [[package]]
 name = "cli"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "auth",
@@ -1917,7 +1917,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tempfile",
  "tokio",
@@ -1926,7 +1926,7 @@ dependencies = [

 [[package]]
 name = "client"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arc-swap",
@@ -1955,7 +1955,7 @@ dependencies = [
  "rand 0.9.0",
  "serde_json",
  "snafu 0.8.5",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "substrait 0.37.3",
  "tokio",
  "tokio-stream",
@@ -1996,7 +1996,7 @@ dependencies = [

 [[package]]
 name = "cmd"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "auth",
@@ -2056,7 +2056,7 @@ dependencies = [
  "similar-asserts",
  "snafu 0.8.5",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "temp-env",
  "tempfile",
@@ -2102,7 +2102,7 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335"

 [[package]]
 name = "common-base"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "anymap2",
  "async-trait",
@@ -2124,11 +2124,11 @@ dependencies = [

 [[package]]
 name = "common-catalog"
-version = "0.14.0"
+version = "0.14.1"

 [[package]]
 name = "common-config"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-base",
  "common-error",
@@ -2153,7 +2153,7 @@ dependencies = [

 [[package]]
 name = "common-datasource"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arrow 54.2.1",
  "arrow-schema 54.3.1",
@@ -2190,7 +2190,7 @@ dependencies = [

 [[package]]
 name = "common-decimal"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "bigdecimal 0.4.8",
  "common-error",
@@ -2203,7 +2203,7 @@ dependencies = [

 [[package]]
 name = "common-error"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-macro",
  "http 1.1.0",
@@ -2214,7 +2214,7 @@ dependencies = [

 [[package]]
 name = "common-frontend"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "common-error",
@@ -2224,7 +2224,7 @@ dependencies = [

 [[package]]
 name = "common-function"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "api",
@@ -2277,7 +2277,7 @@ dependencies = [

 [[package]]
 name = "common-greptimedb-telemetry"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "common-runtime",
@@ -2294,7 +2294,7 @@ dependencies = [

 [[package]]
 name = "common-grpc"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arrow-flight",
@@ -2325,7 +2325,7 @@ dependencies = [

 [[package]]
 name = "common-grpc-expr"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "common-base",
@@ -2344,7 +2344,7 @@ dependencies = [

 [[package]]
 name = "common-macro"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arc-swap",
  "common-query",
@@ -2358,7 +2358,7 @@ dependencies = [

 [[package]]
 name = "common-mem-prof"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-error",
  "common-macro",
@@ -2371,7 +2371,7 @@ dependencies = [

 [[package]]
 name = "common-meta"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "anymap2",
  "api",
@@ -2432,7 +2432,7 @@ dependencies = [

 [[package]]
 name = "common-options"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-grpc",
  "humantime-serde",
@@ -2441,11 +2441,11 @@ dependencies = [

 [[package]]
 name = "common-plugins"
-version = "0.14.0"
+version = "0.14.1"

 [[package]]
 name = "common-pprof"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-error",
  "common-macro",
@@ -2457,7 +2457,7 @@ dependencies = [

 [[package]]
 name = "common-procedure"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-stream",
  "async-trait",
@@ -2484,7 +2484,7 @@ dependencies = [

 [[package]]
 name = "common-procedure-test"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "common-procedure",
@@ -2493,7 +2493,7 @@ dependencies = [

 [[package]]
 name = "common-query"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -2519,7 +2519,7 @@ dependencies = [

 [[package]]
 name = "common-recordbatch"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arc-swap",
  "common-error",
@@ -2539,7 +2539,7 @@ dependencies = [

 [[package]]
 name = "common-runtime"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "clap 4.5.19",
@@ -2569,14 +2569,14 @@ dependencies = [

 [[package]]
 name = "common-session"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "strum 0.27.1",
 ]

 [[package]]
 name = "common-telemetry"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "atty",
  "backtrace",
@@ -2604,7 +2604,7 @@ dependencies = [

 [[package]]
 name = "common-test-util"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "client",
  "common-query",
@@ -2616,7 +2616,7 @@ dependencies = [

 [[package]]
 name = "common-time"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arrow 54.2.1",
  "chrono",
@@ -2634,7 +2634,7 @@ dependencies = [

 [[package]]
 name = "common-version"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "build-data",
  "const_format",
@@ -2644,7 +2644,7 @@ dependencies = [

 [[package]]
 name = "common-wal"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "common-base",
  "common-error",
@@ -3572,7 +3572,7 @@ dependencies = [

 [[package]]
 name = "datanode"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arrow-flight",
@@ -3624,7 +3624,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tokio",
  "toml 0.8.19",
@@ -3633,7 +3633,7 @@ dependencies = [

 [[package]]
 name = "datatypes"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arrow 54.2.1",
  "arrow-array 54.2.1",
@@ -4259,7 +4259,7 @@ dependencies = [

 [[package]]
 name = "file-engine"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -4382,7 +4382,7 @@ checksum = "8bf7cc16383c4b8d58b9905a8509f02926ce3058053c056376248d958c9df1e8"

 [[package]]
 name = "flow"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arrow 54.2.1",
@@ -4444,7 +4444,7 @@ dependencies = [
  "snafu 0.8.5",
  "store-api",
  "strum 0.27.1",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tokio",
  "tonic 0.12.3",
@@ -4499,7 +4499,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa"

 [[package]]
 name = "frontend"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arc-swap",
@@ -4556,7 +4556,7 @@ dependencies = [
  "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "store-api",
  "strfmt",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tokio",
  "toml 0.8.19",
@@ -5795,7 +5795,7 @@ dependencies = [

 [[package]]
 name = "index"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "asynchronous-codec",
@@ -6605,7 +6605,7 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"

 [[package]]
 name = "log-query"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "chrono",
  "common-error",
@@ -6617,7 +6617,7 @@ dependencies = [

 [[package]]
 name = "log-store"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-stream",
  "async-trait",
@@ -6911,7 +6911,7 @@ dependencies = [

 [[package]]
 name = "meta-client"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -6939,7 +6939,7 @@ dependencies = [

 [[package]]
 name = "meta-srv"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -7029,7 +7029,7 @@ dependencies = [

 [[package]]
 name = "metric-engine"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "aquamarine",
@@ -7118,7 +7118,7 @@ dependencies = [

 [[package]]
 name = "mito2"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "aquamarine",
@@ -7824,7 +7824,7 @@ dependencies = [

 [[package]]
 name = "object-store"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "anyhow",
  "bytes",
@@ -8119,7 +8119,7 @@ dependencies = [

 [[package]]
 name = "operator"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "api",
@@ -8168,7 +8168,7 @@ dependencies = [
  "sql",
  "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tokio",
  "tokio-util",
@@ -8423,7 +8423,7 @@ dependencies = [

 [[package]]
 name = "partition"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -8705,7 +8705,7 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"

 [[package]]
 name = "pipeline"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "api",
@@ -8847,7 +8847,7 @@ dependencies = [

 [[package]]
 name = "plugins"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "auth",
  "clap 4.5.19",
@@ -9127,7 +9127,7 @@ dependencies = [

 [[package]]
 name = "promql"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "async-trait",
@@ -9373,7 +9373,7 @@ dependencies = [

 [[package]]
 name = "puffin"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-compression 0.4.13",
  "async-trait",
@@ -9414,7 +9414,7 @@ dependencies = [

 [[package]]
 name = "query"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "api",
@@ -9480,7 +9480,7 @@ dependencies = [
  "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=0cf6c04490d59435ee965edd2078e8855bd8471e)",
  "statrs",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tokio",
  "tokio-stream",
@@ -10830,7 +10830,7 @@ dependencies = [

 [[package]]
 name = "servers"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "ahash 0.8.11",
  "api",
@@ -10950,7 +10950,7 @@ dependencies = [

 [[package]]
 name = "session"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arc-swap",
@@ -11275,7 +11275,7 @@ dependencies = [

 [[package]]
 name = "sql"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "chrono",
@@ -11330,7 +11330,7 @@ dependencies = [

 [[package]]
 name = "sqlness-runner"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "clap 4.5.19",
@@ -11649,7 +11649,7 @@ dependencies = [

 [[package]]
 name = "store-api"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "aquamarine",
@@ -11798,7 +11798,7 @@ dependencies = [

 [[package]]
 name = "substrait"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "async-trait",
  "bytes",
@@ -11978,7 +11978,7 @@ dependencies = [

 [[package]]
 name = "table"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "async-trait",
@@ -12229,7 +12229,7 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"

 [[package]]
 name = "tests-fuzz"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "arbitrary",
  "async-trait",
@@ -12273,7 +12273,7 @@ dependencies = [

 [[package]]
 name = "tests-integration"
-version = "0.14.0"
+version = "0.14.1"
 dependencies = [
  "api",
  "arrow-flight",
@@ -12340,7 +12340,7 @@ dependencies = [
  "sql",
  "sqlx",
  "store-api",
- "substrait 0.14.0",
+ "substrait 0.14.1",
  "table",
  "tempfile",
  "time",
@@ -68,7 +68,7 @@ members = [
 resolver = "2"

 [workspace.package]
-version = "0.14.0"
+version = "0.14.1"
 edition = "2021"
 license = "Apache-2.0"

@@ -13,10 +13,8 @@
 // limitations under the License.

 use std::sync::Arc;
-mod greatest;
 mod to_unixtime;

-use greatest::GreatestFunction;
 use to_unixtime::ToUnixtimeFunction;

 use crate::function_registry::FunctionRegistry;
@@ -26,6 +24,5 @@ pub(crate) struct TimestampFunction;
 impl TimestampFunction {
     pub fn register(registry: &FunctionRegistry) {
         registry.register(Arc::new(ToUnixtimeFunction));
-        registry.register(Arc::new(GreatestFunction));
     }
 }
@@ -1,328 +0,0 @@
-// Copyright 2023 Greptime Team
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::fmt::{self};
-
-use common_query::error::{
-    self, ArrowComputeSnafu, InvalidFuncArgsSnafu, Result, UnsupportedInputDataTypeSnafu,
-};
-use common_query::prelude::{Signature, Volatility};
-use datafusion::arrow::compute::kernels::cmp::gt;
-use datatypes::arrow::array::AsArray;
-use datatypes::arrow::compute::cast;
-use datatypes::arrow::compute::kernels::zip;
-use datatypes::arrow::datatypes::{
-    DataType as ArrowDataType, Date32Type, TimeUnit, TimestampMicrosecondType,
-    TimestampMillisecondType, TimestampNanosecondType, TimestampSecondType,
-};
-use datatypes::prelude::ConcreteDataType;
-use datatypes::types::TimestampType;
-use datatypes::vectors::{Helper, VectorRef};
-use snafu::{ensure, ResultExt};
-
-use crate::function::{Function, FunctionContext};
-
-#[derive(Clone, Debug, Default)]
-pub struct GreatestFunction;
-
-const NAME: &str = "greatest";
-
-macro_rules! gt_time_types {
-    ($ty: ident, $columns:expr) => {{
-        let column1 = $columns[0].to_arrow_array();
-        let column2 = $columns[1].to_arrow_array();
-
-        let column1 = column1.as_primitive::<$ty>();
-        let column2 = column2.as_primitive::<$ty>();
-        let boolean_array = gt(&column1, &column2).context(ArrowComputeSnafu)?;
-
-        let result = zip::zip(&boolean_array, &column1, &column2).context(ArrowComputeSnafu)?;
-        Helper::try_into_vector(&result).context(error::FromArrowArraySnafu)
-    }};
-}
-
-impl Function for GreatestFunction {
-    fn name(&self) -> &str {
-        NAME
-    }
-
-    fn return_type(&self, input_types: &[ConcreteDataType]) -> Result<ConcreteDataType> {
-        ensure!(
-            input_types.len() == 2,
-            InvalidFuncArgsSnafu {
-                err_msg: format!(
-                    "The length of the args is not correct, expect exactly two, have: {}",
-                    input_types.len()
-                )
-            }
-        );
-
-        match &input_types[0] {
-            ConcreteDataType::String(_) => Ok(ConcreteDataType::timestamp_millisecond_datatype()),
-            ConcreteDataType::Date(_) => Ok(ConcreteDataType::date_datatype()),
-            ConcreteDataType::Timestamp(ts_type) => Ok(ConcreteDataType::Timestamp(*ts_type)),
-            _ => UnsupportedInputDataTypeSnafu {
-                function: NAME,
-                datatypes: input_types,
-            }
-            .fail(),
-        }
-    }
-
-    fn signature(&self) -> Signature {
-        Signature::uniform(
-            2,
-            vec![
-                ConcreteDataType::string_datatype(),
-                ConcreteDataType::date_datatype(),
-                ConcreteDataType::timestamp_nanosecond_datatype(),
-                ConcreteDataType::timestamp_microsecond_datatype(),
-                ConcreteDataType::timestamp_millisecond_datatype(),
-                ConcreteDataType::timestamp_second_datatype(),
-            ],
-            Volatility::Immutable,
-        )
-    }
-
-    fn eval(&self, _func_ctx: &FunctionContext, columns: &[VectorRef]) -> Result<VectorRef> {
-        ensure!(
-            columns.len() == 2,
-            InvalidFuncArgsSnafu {
-                err_msg: format!(
-                    "The length of the args is not correct, expect exactly two, have: {}",
-                    columns.len()
-                ),
-            }
-        );
-        match columns[0].data_type() {
-            ConcreteDataType::String(_) => {
-                let column1 = cast(
-                    &columns[0].to_arrow_array(),
-                    &ArrowDataType::Timestamp(TimeUnit::Millisecond, None),
-                )
-                .context(ArrowComputeSnafu)?;
-                let column1 = column1.as_primitive::<TimestampMillisecondType>();
-                let column2 = cast(
-                    &columns[1].to_arrow_array(),
-                    &ArrowDataType::Timestamp(TimeUnit::Millisecond, None),
-                )
-                .context(ArrowComputeSnafu)?;
-                let column2 = column2.as_primitive::<TimestampMillisecondType>();
-                let boolean_array = gt(&column1, &column2).context(ArrowComputeSnafu)?;
-                let result =
-                    zip::zip(&boolean_array, &column1, &column2).context(ArrowComputeSnafu)?;
-                Ok(Helper::try_into_vector(&result).context(error::FromArrowArraySnafu)?)
-            }
-            ConcreteDataType::Date(_) => gt_time_types!(Date32Type, columns),
-            ConcreteDataType::Timestamp(ts_type) => match ts_type {
-                TimestampType::Second(_) => gt_time_types!(TimestampSecondType, columns),
-                TimestampType::Millisecond(_) => {
-                    gt_time_types!(TimestampMillisecondType, columns)
-                }
-                TimestampType::Microsecond(_) => {
-                    gt_time_types!(TimestampMicrosecondType, columns)
-                }
-                TimestampType::Nanosecond(_) => {
-                    gt_time_types!(TimestampNanosecondType, columns)
-                }
-            },
-            _ => UnsupportedInputDataTypeSnafu {
-                function: NAME,
-                datatypes: columns.iter().map(|c| c.data_type()).collect::<Vec<_>>(),
-            }
-            .fail(),
-        }
-    }
-}
-
-impl fmt::Display for GreatestFunction {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "GREATEST")
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use std::sync::Arc;
-
-    use common_time::timestamp::TimeUnit;
-    use common_time::{Date, Timestamp};
-    use datatypes::types::{
-        DateType, TimestampMicrosecondType, TimestampMillisecondType, TimestampNanosecondType,
-        TimestampSecondType,
-    };
-    use datatypes::value::Value;
-    use datatypes::vectors::{
-        DateVector, StringVector, TimestampMicrosecondVector, TimestampMillisecondVector,
-        TimestampNanosecondVector, TimestampSecondVector, Vector,
-    };
-    use paste::paste;
-
-    use super::*;
-    #[test]
-    fn test_greatest_takes_string_vector() {
-        let function = GreatestFunction;
-        assert_eq!(
-            function
-                .return_type(&[
-                    ConcreteDataType::string_datatype(),
-                    ConcreteDataType::string_datatype()
-                ])
-                .unwrap(),
-            ConcreteDataType::timestamp_millisecond_datatype()
-        );
-        let columns = vec![
-            Arc::new(StringVector::from(vec![
-                "1970-01-01".to_string(),
-                "2012-12-23".to_string(),
-            ])) as _,
-            Arc::new(StringVector::from(vec![
-                "2001-02-01".to_string(),
-                "1999-01-01".to_string(),
-            ])) as _,
-        ];
-
-        let result = function
-            .eval(&FunctionContext::default(), &columns)
-            .unwrap();
-        let result = result
-            .as_any()
-            .downcast_ref::<TimestampMillisecondVector>()
-            .unwrap();
-        assert_eq!(result.len(), 2);
-        assert_eq!(
-            result.get(0),
-            Value::Timestamp(Timestamp::from_str("2001-02-01 00:00:00", None).unwrap())
-        );
-        assert_eq!(
-            result.get(1),
-            Value::Timestamp(Timestamp::from_str("2012-12-23 00:00:00", None).unwrap())
-        );
-    }
-
-    #[test]
-    fn test_greatest_takes_date_vector() {
-        let function = GreatestFunction;
-        assert_eq!(
-            function
-                .return_type(&[
-                    ConcreteDataType::date_datatype(),
-                    ConcreteDataType::date_datatype()
-                ])
-                .unwrap(),
-            ConcreteDataType::Date(DateType)
-        );
-
-        let columns = vec![
-            Arc::new(DateVector::from_slice(vec![-1, 2])) as _,
-            Arc::new(DateVector::from_slice(vec![0, 1])) as _,
-        ];
-
-        let result = function
-            .eval(&FunctionContext::default(), &columns)
-            .unwrap();
-        let result = result.as_any().downcast_ref::<DateVector>().unwrap();
-        assert_eq!(result.len(), 2);
-        assert_eq!(
-            result.get(0),
-            Value::Date(Date::from_str_utc("1970-01-01").unwrap())
-        );
-        assert_eq!(
-            result.get(1),
-            Value::Date(Date::from_str_utc("1970-01-03").unwrap())
-        );
-    }
-
-    #[test]
-    fn test_greatest_takes_datetime_vector() {
-        let function = GreatestFunction;
-        assert_eq!(
-            function
-                .return_type(&[
-                    ConcreteDataType::timestamp_millisecond_datatype(),
-                    ConcreteDataType::timestamp_millisecond_datatype()
-                ])
-                .unwrap(),
-            ConcreteDataType::timestamp_millisecond_datatype()
-        );
-
-        let columns = vec![
-            Arc::new(TimestampMillisecondVector::from_slice(vec![-1, 2])) as _,
-            Arc::new(TimestampMillisecondVector::from_slice(vec![0, 1])) as _,
-        ];
-
-        let result = function
-            .eval(&FunctionContext::default(), &columns)
-            .unwrap();
-        let result = result
-            .as_any()
-            .downcast_ref::<TimestampMillisecondVector>()
-            .unwrap();
-        assert_eq!(result.len(), 2);
-        assert_eq!(
-            result.get(0),
-            Value::Timestamp(Timestamp::from_str("1970-01-01 00:00:00", None).unwrap())
-        );
-        assert_eq!(
-            result.get(1),
-            Value::Timestamp(Timestamp::from_str("1970-01-01 00:00:00.002", None).unwrap())
-        );
-    }
-
-    macro_rules! test_timestamp {
-        ($type: expr,$unit: ident) => {
-            paste! {
-                #[test]
-                fn [<test_greatest_takes_ $unit:lower _vector>]() {
-                    let function = GreatestFunction;
-                    assert_eq!(
-                        function.return_type(&[$type, $type]).unwrap(),
-                        ConcreteDataType::Timestamp(TimestampType::$unit([<Timestamp $unit Type>]))
-                    );
-
-                    let columns = vec![
-                        Arc::new([<Timestamp $unit Vector>]::from_slice(vec![-1, 2])) as _,
-                        Arc::new([<Timestamp $unit Vector>]::from_slice(vec![0, 1])) as _,
-                    ];
-
-                    let result = function.eval(&FunctionContext::default(), &columns).unwrap();
-                    let result = result.as_any().downcast_ref::<[<Timestamp $unit Vector>]>().unwrap();
-                    assert_eq!(result.len(), 2);
-                    assert_eq!(
-                        result.get(0),
-                        Value::Timestamp(Timestamp::new(0, TimeUnit::$unit))
-                    );
-                    assert_eq!(
-                        result.get(1),
-                        Value::Timestamp(Timestamp::new(2, TimeUnit::$unit))
-                    );
-                }
-            }
-        }
-    }
-
-    test_timestamp!(
-        ConcreteDataType::timestamp_nanosecond_datatype(),
-        Nanosecond
-    );
-    test_timestamp!(
-        ConcreteDataType::timestamp_microsecond_datatype(),
-        Microsecond
-    );
-    test_timestamp!(
-        ConcreteDataType::timestamp_millisecond_datatype(),
-        Millisecond
-    );
-    test_timestamp!(ConcreteDataType::timestamp_second_datatype(), Second);
-}
@@ -302,7 +302,10 @@ impl PartitionTreeMemtable {
     fn update_stats(&self, metrics: &WriteMetrics) {
         // Only let the tracker tracks value bytes.
         self.alloc_tracker.on_allocation(metrics.value_bytes);
-        metrics.update_timestamp_range(&self.max_timestamp, &self.min_timestamp);
+        self.max_timestamp
+            .fetch_max(metrics.max_ts, Ordering::SeqCst);
+        self.min_timestamp
+            .fetch_min(metrics.min_ts, Ordering::SeqCst);
     }
 }

@@ -14,8 +14,6 @@

 //! Internal metrics of the memtable.

-use std::sync::atomic::{AtomicI64, Ordering};
-
 /// Metrics of writing memtables.
 pub(crate) struct WriteMetrics {
     /// Size allocated by keys.
@@ -28,51 +26,6 @@ pub(crate) struct WriteMetrics {
     pub(crate) max_ts: i64,
 }

-impl WriteMetrics {
-    /// Update the min/max timestamp range according to current write metric.
-    pub(crate) fn update_timestamp_range(&self, prev_max_ts: &AtomicI64, prev_min_ts: &AtomicI64) {
-        loop {
-            let current_min = prev_min_ts.load(Ordering::Relaxed);
-            if self.min_ts >= current_min {
-                break;
-            }
-
-            let Err(updated) = prev_min_ts.compare_exchange(
-                current_min,
-                self.min_ts,
-                Ordering::Relaxed,
-                Ordering::Relaxed,
-            ) else {
-                break;
-            };
-
-            if updated == self.min_ts {
-                break;
-            }
-        }
-
-        loop {
-            let current_max = prev_max_ts.load(Ordering::Relaxed);
-            if self.max_ts <= current_max {
-                break;
-            }
-
-            let Err(updated) = prev_max_ts.compare_exchange(
-                current_max,
-                self.max_ts,
-                Ordering::Relaxed,
-                Ordering::Relaxed,
-            ) else {
-                break;
-            };
-
-            if updated == self.max_ts {
-                break;
-            }
-        }
-    }
-}
-
 impl Default for WriteMetrics {
     fn default() -> Self {
         Self {
@@ -147,7 +147,8 @@ impl TimeSeriesMemtable {
     fn update_stats(&self, stats: WriteMetrics) {
         self.alloc_tracker
            .on_allocation(stats.key_bytes + stats.value_bytes);
-        stats.update_timestamp_range(&self.max_timestamp, &self.min_timestamp);
+        self.max_timestamp.fetch_max(stats.max_ts, Ordering::SeqCst);
+        self.min_timestamp.fetch_min(stats.min_ts, Ordering::SeqCst);
     }

     fn write_key_value(&self, kv: KeyValue, stats: &mut WriteMetrics) -> Result<()> {
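The two memtable hunks above (PartitionTreeMemtable and TimeSeriesMemtable) stop calling the removed WriteMetrics::update_timestamp_range helper, whose hand-rolled compare_exchange retry loops were deleted in the metrics hunk, and instead update the min/max timestamps with AtomicI64::fetch_max and fetch_min, single atomic read-modify-write operations from the standard library. The following is a minimal sketch of that equivalence; it is not code from the repository, and the function names and the main driver are invented for illustration.

// Minimal sketch (not GreptimeDB code): why AtomicI64::fetch_max/fetch_min can
// replace a hand-rolled compare_exchange retry loop for "keep the largest value".
use std::sync::atomic::{AtomicI64, Ordering};

/// Old style: retry loop that only stores `candidate` if it is larger than the
/// currently stored value.
fn update_max_cas(prev_max: &AtomicI64, candidate: i64) {
    loop {
        let current = prev_max.load(Ordering::Relaxed);
        if candidate <= current {
            break;
        }
        // Another writer may race us; retry until the stored value is >= candidate.
        if prev_max
            .compare_exchange(current, candidate, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            break;
        }
    }
}

/// New style: the standard library already provides the same read-modify-write
/// as one atomic operation.
fn update_max_fetch(prev_max: &AtomicI64, candidate: i64) {
    prev_max.fetch_max(candidate, Ordering::SeqCst);
}

fn main() {
    let max_ts = AtomicI64::new(i64::MIN);
    update_max_cas(&max_ts, 10);
    update_max_fetch(&max_ts, 5); // smaller value is ignored
    update_max_fetch(&max_ts, 42);
    assert_eq!(max_ts.load(Ordering::Relaxed), 42);
}

Both forms are safe under concurrent writers; the fetch_max/fetch_min form is simply shorter and harder to get wrong, and the symmetric fetch_min case follows the same pattern.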
@@ -322,13 +322,10 @@ impl ScanRegion {
         let memtables: Vec<_> = memtables
             .into_iter()
             .filter(|mem| {
-                if mem.is_empty() {
+                // check if memtable is empty by reading stats.
+                let Some((start, end)) = mem.stats().time_range() else {
                     return false;
-                }
-                let stats = mem.stats();
-                // Safety: the memtable is not empty.
-                let (start, end) = stats.time_range().unwrap();
-
+                };
                 // The time range of the memtable is inclusive.
                 let memtable_range = TimestampRange::new_inclusive(Some(start), Some(end));
                 memtable_range.intersects(&time_range)
@@ -134,6 +134,7 @@ impl WriteFormat {

 /// Helper for reading the SST format.
 pub struct ReadFormat {
+    /// The metadata stored in the SST.
     metadata: RegionMetadataRef,
     /// SST file schema.
     arrow_schema: SchemaRef,
@@ -305,17 +306,23 @@ impl ReadFormat {
         &self,
         row_groups: &[impl Borrow<RowGroupMetaData>],
         column_id: ColumnId,
-    ) -> Option<ArrayRef> {
-        let column = self.metadata.column_by_id(column_id)?;
+    ) -> StatValues {
+        let Some(column) = self.metadata.column_by_id(column_id) else {
+            // No such column in the SST.
+            return StatValues::NoColumn;
+        };
         match column.semantic_type {
             SemanticType::Tag => self.tag_values(row_groups, column, true),
             SemanticType::Field => {
-                let index = self.field_id_to_index.get(&column_id)?;
-                Self::column_values(row_groups, column, *index, true)
+                // Safety: `field_id_to_index` is initialized by the semantic type.
+                let index = self.field_id_to_index.get(&column_id).unwrap();
+                let stats = Self::column_values(row_groups, column, *index, true);
+                StatValues::from_stats_opt(stats)
             }
             SemanticType::Timestamp => {
                 let index = self.time_index_position();
-                Self::column_values(row_groups, column, index, true)
+                let stats = Self::column_values(row_groups, column, index, true);
+                StatValues::from_stats_opt(stats)
             }
         }
     }
@@ -325,17 +332,23 @@ impl ReadFormat {
         &self,
         row_groups: &[impl Borrow<RowGroupMetaData>],
         column_id: ColumnId,
-    ) -> Option<ArrayRef> {
-        let column = self.metadata.column_by_id(column_id)?;
+    ) -> StatValues {
+        let Some(column) = self.metadata.column_by_id(column_id) else {
+            // No such column in the SST.
+            return StatValues::NoColumn;
+        };
         match column.semantic_type {
             SemanticType::Tag => self.tag_values(row_groups, column, false),
             SemanticType::Field => {
-                let index = self.field_id_to_index.get(&column_id)?;
-                Self::column_values(row_groups, column, *index, false)
+                // Safety: `field_id_to_index` is initialized by the semantic type.
+                let index = self.field_id_to_index.get(&column_id).unwrap();
+                let stats = Self::column_values(row_groups, column, *index, false);
+                StatValues::from_stats_opt(stats)
             }
             SemanticType::Timestamp => {
                 let index = self.time_index_position();
-                Self::column_values(row_groups, column, index, false)
+                let stats = Self::column_values(row_groups, column, index, false);
+                StatValues::from_stats_opt(stats)
             }
         }
     }
@@ -345,17 +358,23 @@ impl ReadFormat {
         &self,
         row_groups: &[impl Borrow<RowGroupMetaData>],
         column_id: ColumnId,
-    ) -> Option<ArrayRef> {
-        let column = self.metadata.column_by_id(column_id)?;
+    ) -> StatValues {
+        let Some(column) = self.metadata.column_by_id(column_id) else {
+            // No such column in the SST.
+            return StatValues::NoColumn;
+        };
         match column.semantic_type {
-            SemanticType::Tag => None,
+            SemanticType::Tag => StatValues::NoStats,
             SemanticType::Field => {
-                let index = self.field_id_to_index.get(&column_id)?;
-                Self::column_null_counts(row_groups, *index)
+                // Safety: `field_id_to_index` is initialized by the semantic type.
+                let index = self.field_id_to_index.get(&column_id).unwrap();
+                let stats = Self::column_null_counts(row_groups, *index);
+                StatValues::from_stats_opt(stats)
             }
             SemanticType::Timestamp => {
                 let index = self.time_index_position();
-                Self::column_null_counts(row_groups, index)
+                let stats = Self::column_null_counts(row_groups, index);
+                StatValues::from_stats_opt(stats)
             }
         }
     }
@@ -390,8 +409,7 @@ impl ReadFormat {
         row_groups: &[impl Borrow<RowGroupMetaData>],
         column: &ColumnMetadata,
         is_min: bool,
-    ) -> Option<ArrayRef> {
-        let primary_key_encoding = self.metadata.primary_key_encoding;
+    ) -> StatValues {
         let is_first_tag = self
             .metadata
             .primary_key
@@ -400,9 +418,28 @@ impl ReadFormat {
             .unwrap_or(false);
         if !is_first_tag {
             // Only the min-max of the first tag is available in the primary key.
-            return None;
+            return StatValues::NoStats;
         }

+        StatValues::from_stats_opt(self.first_tag_values(row_groups, column, is_min))
+    }
+
+    /// Returns min/max values of the first tag.
+    /// Returns None if the tag does not have statistics.
+    fn first_tag_values(
+        &self,
+        row_groups: &[impl Borrow<RowGroupMetaData>],
+        column: &ColumnMetadata,
+        is_min: bool,
+    ) -> Option<ArrayRef> {
+        debug_assert!(self
+            .metadata
+            .primary_key
+            .first()
+            .map(|id| *id == column.column_id)
+            .unwrap_or(false));
+
+        let primary_key_encoding = self.metadata.primary_key_encoding;
         let converter = build_primary_key_codec_with_fields(
             primary_key_encoding,
             [(
@@ -452,6 +489,7 @@ impl ReadFormat {
     }

     /// Returns min/max values of specific non-tag columns.
+    /// Returns None if the column does not have statistics.
     fn column_values(
         row_groups: &[impl Borrow<RowGroupMetaData>],
         column: &ColumnMetadata,
@@ -544,6 +582,29 @@ impl ReadFormat {
     }
 }

+/// Values of column statistics of the SST.
+///
+/// It also distinguishes the case that a column is not found and
+/// the column exists but has no statistics.
+pub enum StatValues {
+    /// Values of each row group.
+    Values(ArrayRef),
+    /// No such column.
+    NoColumn,
+    /// Column exists but has no statistics.
+    NoStats,
+}
+
+impl StatValues {
+    /// Creates a new `StatValues` instance from optional statistics.
+    pub fn from_stats_opt(stats: Option<ArrayRef>) -> Self {
+        match stats {
+            Some(stats) => StatValues::Values(stats),
+            None => StatValues::NoStats,
+        }
+    }
+}
+
 #[cfg(test)]
 impl ReadFormat {
     /// Creates a helper with existing `metadata` and all columns.
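The ReadFormat hunks above change min_values, max_values and null_counts to return the new StatValues enum instead of Option<ArrayRef>, so a caller can tell a column that is missing from the SST apart from a column that exists but carries no statistics. Below is a simplified, self-contained sketch of that three-state pattern; it uses Vec<i64> as a stand-in for Arrow's ArrayRef and a made-up caller, so it illustrates the idea rather than reproducing the repository's code.

// Simplified sketch of a three-state statistics value (stand-in types, not mito2 code).
/// Per-row-group statistics for one column.
enum StatValues {
    /// Statistics values, one entry per row group.
    Values(Vec<i64>),
    /// The SST does not contain this column at all.
    NoColumn,
    /// The column exists but carries no statistics.
    NoStats,
}

impl StatValues {
    fn from_stats_opt(stats: Option<Vec<i64>>) -> Self {
        match stats {
            Some(values) => StatValues::Values(values),
            None => StatValues::NoStats,
        }
    }
}

/// How a pruning caller can distinguish the two "no data" cases: a missing
/// column may fall back to a default value, while a column without statistics
/// simply yields nothing.
fn min_values_for_pruning(stats: StatValues, default_for_missing: Vec<i64>) -> Option<Vec<i64>> {
    match stats {
        StatValues::Values(values) => Some(values),
        StatValues::NoColumn => Some(default_for_missing),
        StatValues::NoStats => None,
    }
}

fn main() {
    assert_eq!(
        min_values_for_pruning(StatValues::from_stats_opt(Some(vec![1, 2])), vec![0]),
        Some(vec![1, 2])
    );
    assert_eq!(min_values_for_pruning(StatValues::NoColumn, vec![0]), Some(vec![0]));
    assert!(min_values_for_pruning(StatValues::from_stats_opt(None), vec![0]).is_none());
}

The next hunks (RowGroupPruningStats) match on the real enum in exactly this way: values are used directly, a missing column falls back to a compatibility default, and missing statistics yield None.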
@@ -25,7 +25,7 @@ use parquet::file::metadata::RowGroupMetaData;
 use store_api::metadata::RegionMetadataRef;
 use store_api::storage::ColumnId;

-use crate::sst::parquet::format::ReadFormat;
+use crate::sst::parquet::format::{ReadFormat, StatValues};

 /// Statistics for pruning row groups.
 pub(crate) struct RowGroupPruningStats<'a, T> {
@@ -100,16 +100,18 @@ impl<T: Borrow<RowGroupMetaData>> PruningStatistics for RowGroupPruningStats<'_,
     fn min_values(&self, column: &Column) -> Option<ArrayRef> {
         let column_id = self.column_id_to_prune(&column.name)?;
         match self.read_format.min_values(self.row_groups, column_id) {
-            Some(values) => Some(values),
-            None => self.compat_default_value(&column.name),
+            StatValues::Values(values) => Some(values),
+            StatValues::NoColumn => self.compat_default_value(&column.name),
+            StatValues::NoStats => None,
         }
     }

     fn max_values(&self, column: &Column) -> Option<ArrayRef> {
         let column_id = self.column_id_to_prune(&column.name)?;
         match self.read_format.max_values(self.row_groups, column_id) {
-            Some(values) => Some(values),
-            None => self.compat_default_value(&column.name),
+            StatValues::Values(values) => Some(values),
+            StatValues::NoColumn => self.compat_default_value(&column.name),
+            StatValues::NoStats => None,
         }
     }

@@ -118,10 +120,12 @@ impl<T: Borrow<RowGroupMetaData>> PruningStatistics for RowGroupPruningStats<'_,
     }

     fn null_counts(&self, column: &Column) -> Option<ArrayRef> {
-        let Some(column_id) = self.column_id_to_prune(&column.name) else {
-            return self.compat_null_count(&column.name);
-        };
-        self.read_format.null_counts(self.row_groups, column_id)
+        let column_id = self.column_id_to_prune(&column.name)?;
+        match self.read_format.null_counts(self.row_groups, column_id) {
+            StatValues::Values(values) => Some(values),
+            StatValues::NoColumn => self.compat_null_count(&column.name),
+            StatValues::NoStats => None,
+        }
     }

     fn row_counts(&self, _column: &Column) -> Option<ArrayRef> {
@@ -50,6 +50,7 @@ ADMIN FLUSH_FLOW('calc_access_log_10s');
 +-----------------------------------------+

 -- query should return 3 rows
+-- SQLNESS SORT_RESULT 3 1
 SELECT "url", time_window FROM access_log_10s
 ORDER BY
 time_window;
@@ -63,6 +64,7 @@ ORDER BY
 +------------+---------------------+

 -- use hll_count to query the approximate data in access_log_10s
+-- SQLNESS SORT_RESULT 3 1
 SELECT "url", time_window, hll_count(state) FROM access_log_10s
 ORDER BY
 time_window;
@@ -76,6 +78,7 @@ ORDER BY
 +------------+---------------------+---------------------------------+

 -- further, we can aggregate 10 seconds of data to every minute, by using hll_merge to merge 10 seconds of hyperloglog state
+-- SQLNESS SORT_RESULT 3 1
 SELECT
 "url",
 date_bin('1 minute'::INTERVAL, time_window) AS time_window_1m,
@@ -91,8 +94,8 @@ ORDER BY
 +------------+---------------------+------------+
 | url        | time_window_1m      | uv_per_min |
 +------------+---------------------+------------+
-| /not_found | 2025-03-04T00:00:00 | 1          |
 | /dashboard | 2025-03-04T00:00:00 | 3          |
+| /not_found | 2025-03-04T00:00:00 | 1          |
 +------------+---------------------+------------+

 DROP FLOW calc_access_log_10s;
@@ -36,16 +36,19 @@ INSERT INTO access_log VALUES
 ADMIN FLUSH_FLOW('calc_access_log_10s');

 -- query should return 3 rows
+-- SQLNESS SORT_RESULT 3 1
 SELECT "url", time_window FROM access_log_10s
 ORDER BY
 time_window;

 -- use hll_count to query the approximate data in access_log_10s
+-- SQLNESS SORT_RESULT 3 1
 SELECT "url", time_window, hll_count(state) FROM access_log_10s
 ORDER BY
 time_window;

 -- further, we can aggregate 10 seconds of data to every minute, by using hll_merge to merge 10 seconds of hyperloglog state
+-- SQLNESS SORT_RESULT 3 1
 SELECT
 "url",
 date_bin('1 minute'::INTERVAL, time_window) AS time_window_1m,
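Both hunks above are cut off right after the date_bin projection, so the full 1-minute rollup query is not visible here. As a minimal sketch of the pattern the comment describes — merging the 10-second hyperloglog states with hll_merge and reading the merged cardinality with hll_count — the query presumably looks roughly like the following; the exact aliases and grouping in the real test file are an assumption, although the column names match the uv_per_min result table shown earlier.

-- Sketch only (assumed shape, not copied from the test file):
-- roll the 10-second HLL states up into 1-minute buckets.
SELECT
    "url",
    date_bin('1 minute'::INTERVAL, time_window) AS time_window_1m,
    hll_count(hll_merge(state)) AS uv_per_min
FROM access_log_10s
GROUP BY "url", date_bin('1 minute'::INTERVAL, time_window)
ORDER BY time_window_1m;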
@@ -9,7 +9,7 @@ select GREATEST('1999-01-30', '2023-03-01');
 +-------------------------------------------------+
 | greatest(Utf8("1999-01-30"),Utf8("2023-03-01")) |
 +-------------------------------------------------+
-| 2023-03-01T00:00:00                             |
+| 2023-03-01                                      |
 +-------------------------------------------------+

 select GREATEST('2000-02-11'::Date, '2020-12-30'::Date);
158 tests/cases/standalone/common/select/prune_pk.result Normal file
@@ -0,0 +1,158 @@
+CREATE TABLE IF NOT EXISTS `test_multi_pk_filter` ( `namespace` STRING NULL, `env` STRING NULL DEFAULT 'NULL', `flag` INT NULL, `total` BIGINT NULL, `greptime_timestamp` TIMESTAMP(9) NOT NULL, TIME INDEX (`greptime_timestamp`), PRIMARY KEY (`namespace`, `env`, `flag`) ) ENGINE=mito;
+
+Affected Rows: 0
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5289, '2023-05-15 10:00:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 0, 421, '2023-05-15 10:05:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 356, '2023-05-15 10:10:00');
+
+Affected Rows: 1
+
+ADMIN FLUSH_TABLE('test_multi_pk_filter');
+
++-------------------------------------------+
+| ADMIN FLUSH_TABLE('test_multi_pk_filter') |
++-------------------------------------------+
+| 0                                         |
++-------------------------------------------+
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 412, '2023-05-15 10:15:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 298, '2023-05-15 10:20:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5289, '2023-05-15 10:25:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5874, '2023-05-15 10:30:00');
+
+Affected Rows: 1
+
+ADMIN FLUSH_TABLE('test_multi_pk_filter');
+
++-------------------------------------------+
+| ADMIN FLUSH_TABLE('test_multi_pk_filter') |
++-------------------------------------------+
+| 0                                         |
++-------------------------------------------+
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 6132, '2023-05-15 10:35:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1287, '2023-05-15 10:40:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1432, '2023-05-15 10:45:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1056, '2023-05-15 10:50:00');
+
+Affected Rows: 1
+
+SELECT greptime_timestamp, namespace, env, total FROM test_multi_pk_filter WHERE
+greptime_timestamp BETWEEN '2023-05-15 10:00:00' AND '2023-05-15 11:00:00' AND flag = 1 AND namespace = 'thermostat_v2'
+ORDER BY greptime_timestamp;
+
++---------------------+---------------+------------+-------+
+| greptime_timestamp  | namespace     | env        | total |
++---------------------+---------------+------------+-------+
+| 2023-05-15T10:00:00 | thermostat_v2 | production | 5289  |
+| 2023-05-15T10:10:00 | thermostat_v2 | dev        | 356   |
+| 2023-05-15T10:15:00 | thermostat_v2 | dev        | 412   |
+| 2023-05-15T10:20:00 | thermostat_v2 | dev        | 298   |
+| 2023-05-15T10:25:00 | thermostat_v2 | production | 5289  |
+| 2023-05-15T10:30:00 | thermostat_v2 | production | 5874  |
+| 2023-05-15T10:35:00 | thermostat_v2 | production | 6132  |
+| 2023-05-15T10:40:00 | thermostat_v2 | testing    | 1287  |
+| 2023-05-15T10:45:00 | thermostat_v2 | testing    | 1432  |
+| 2023-05-15T10:50:00 | thermostat_v2 | testing    | 1056  |
++---------------------+---------------+------------+-------+
+
+SELECT greptime_timestamp, namespace, env, total FROM test_multi_pk_filter WHERE
+greptime_timestamp BETWEEN '2023-05-15 10:00:00' AND '2023-05-15 11:00:00' AND flag = 1 AND namespace = 'thermostat_v2' AND env='dev'
+ORDER BY greptime_timestamp;
+
++---------------------+---------------+-----+-------+
+| greptime_timestamp  | namespace     | env | total |
++---------------------+---------------+-----+-------+
+| 2023-05-15T10:10:00 | thermostat_v2 | dev | 356   |
+| 2023-05-15T10:15:00 | thermostat_v2 | dev | 412   |
+| 2023-05-15T10:20:00 | thermostat_v2 | dev | 298   |
++---------------------+---------------+-----+-------+
+
+DROP TABLE test_multi_pk_filter;
+
+Affected Rows: 0
+
+CREATE TABLE IF NOT EXISTS `test_multi_pk_null` ( `namespace` STRING NULL, `env` STRING NULL DEFAULT 'NULL', `total` BIGINT NULL, `greptime_timestamp` TIMESTAMP(9) NOT NULL, TIME INDEX (`greptime_timestamp`), PRIMARY KEY (`namespace`, `env`) ) ENGINE=mito;
+
+Affected Rows: 0
+
+INSERT INTO test_multi_pk_null
+(namespace, env, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 5289, '2023-05-15 10:00:00');
+
+Affected Rows: 1
+
+INSERT INTO test_multi_pk_null
+(namespace, env, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 421, '2023-05-15 10:05:00');
+
+Affected Rows: 1
+
+ADMIN FLUSH_TABLE('test_multi_pk_null');
+
++-----------------------------------------+
+| ADMIN FLUSH_TABLE('test_multi_pk_null') |
++-----------------------------------------+
+| 0                                       |
++-----------------------------------------+
+
+SELECT * FROM test_multi_pk_null WHERE env IS NOT NULL;
+
++---------------+------------+-------+---------------------+
+| namespace     | env        | total | greptime_timestamp  |
++---------------+------------+-------+---------------------+
+| thermostat_v2 | production | 5289  | 2023-05-15T10:00:00 |
+| thermostat_v2 | production | 421   | 2023-05-15T10:05:00 |
++---------------+------------+-------+---------------------+
+
+DROP TABLE test_multi_pk_null;
+
+Affected Rows: 0
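A note on the second table in this new result file: env is declared as STRING NULL DEFAULT 'NULL', so its default is the three-character string 'NULL', not SQL NULL, and an IS NOT NULL filter on that primary-key column still matches rows. A hypothetical illustration of that distinction (these statements are not part of the test file):

-- Hypothetical, not in the test file: omit env so the column default applies.
INSERT INTO test_multi_pk_null (namespace, total, greptime_timestamp)
VALUES ('thermostat_v2', 7, '2023-05-15 10:10:00');

-- The defaulted row would still be returned, because the stored value is the
-- string 'NULL' rather than a SQL NULL.
SELECT namespace, env FROM test_multi_pk_null WHERE env IS NOT NULL;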
66 tests/cases/standalone/common/select/prune_pk.sql Normal file
@@ -0,0 +1,66 @@
+CREATE TABLE IF NOT EXISTS `test_multi_pk_filter` ( `namespace` STRING NULL, `env` STRING NULL DEFAULT 'NULL', `flag` INT NULL, `total` BIGINT NULL, `greptime_timestamp` TIMESTAMP(9) NOT NULL, TIME INDEX (`greptime_timestamp`), PRIMARY KEY (`namespace`, `env`, `flag`) ) ENGINE=mito;
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5289, '2023-05-15 10:00:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 0, 421, '2023-05-15 10:05:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 356, '2023-05-15 10:10:00');
+
+ADMIN FLUSH_TABLE('test_multi_pk_filter');
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 412, '2023-05-15 10:15:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'dev', 1, 298, '2023-05-15 10:20:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5289, '2023-05-15 10:25:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 5874, '2023-05-15 10:30:00');
+
+ADMIN FLUSH_TABLE('test_multi_pk_filter');
+
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 1, 6132, '2023-05-15 10:35:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1287, '2023-05-15 10:40:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1432, '2023-05-15 10:45:00');
+INSERT INTO test_multi_pk_filter
+(namespace, env, flag, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'testing', 1, 1056, '2023-05-15 10:50:00');
+
+SELECT greptime_timestamp, namespace, env, total FROM test_multi_pk_filter WHERE
+greptime_timestamp BETWEEN '2023-05-15 10:00:00' AND '2023-05-15 11:00:00' AND flag = 1 AND namespace = 'thermostat_v2'
+ORDER BY greptime_timestamp;
+
+SELECT greptime_timestamp, namespace, env, total FROM test_multi_pk_filter WHERE
+greptime_timestamp BETWEEN '2023-05-15 10:00:00' AND '2023-05-15 11:00:00' AND flag = 1 AND namespace = 'thermostat_v2' AND env='dev'
+ORDER BY greptime_timestamp;
+
+DROP TABLE test_multi_pk_filter;
+
+CREATE TABLE IF NOT EXISTS `test_multi_pk_null` ( `namespace` STRING NULL, `env` STRING NULL DEFAULT 'NULL', `total` BIGINT NULL, `greptime_timestamp` TIMESTAMP(9) NOT NULL, TIME INDEX (`greptime_timestamp`), PRIMARY KEY (`namespace`, `env`) ) ENGINE=mito;
+
+INSERT INTO test_multi_pk_null
+(namespace, env, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 5289, '2023-05-15 10:00:00');
+INSERT INTO test_multi_pk_null
+(namespace, env, total, greptime_timestamp)
+VALUES ('thermostat_v2', 'production', 421, '2023-05-15 10:05:00');
+
+ADMIN FLUSH_TABLE('test_multi_pk_null');
+
+SELECT * FROM test_multi_pk_null WHERE env IS NOT NULL;
+
+DROP TABLE test_multi_pk_null;
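The .sql file only asserts on the rows that come back; if one also wanted to check that the flushed data is actually pruned by the primary-key predicates, a hypothetical follow-up (not part of this change) could inspect the query plan, assuming the EXPLAIN ANALYZE output is stable or redacted enough to assert on in sqlness:

-- Hypothetical follow-up, not part of this test:
-- inspect how the scan handles the primary-key predicates.
EXPLAIN ANALYZE
SELECT greptime_timestamp, total
FROM test_multi_pk_filter
WHERE namespace = 'thermostat_v2' AND env = 'dev' AND flag = 1;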