From 311727939d6dfb4838cb9add3090f84dca9647cd Mon Sep 17 00:00:00 2001
From: LFC <990479+MichaelScofield@users.noreply.github.com>
Date: Wed, 9 Apr 2025 10:20:55 +0800
Subject: [PATCH] chore: update datafusion family (#5814)

---
 Cargo.lock                                    | 817 ++++++++++--------
 Cargo.toml                                    |  42 +-
 src/catalog/src/system_schema/predicate.rs    |   5 +-
 src/common/datasource/Cargo.toml              |   3 +-
 src/common/datasource/src/compression.rs      |  13 +
 src/common/datasource/src/file_format/csv.rs  |  68 +-
 src/common/datasource/src/file_format/json.rs |  49 +-
 .../datasource/src/file_format/tests.rs       |  54 +-
 src/common/datasource/src/test_util.rs        |  35 +-
 src/common/function/src/scalars/udf.rs        |   2 +-
 src/common/query/src/error.rs                 |   1 +
 src/common/query/src/logical_plan/expr.rs     |   5 +-
 src/common/query/src/signature.rs             |   6 +-
 src/common/query/src/stream.rs                |   8 +-
 src/common/recordbatch/src/filter.rs          |  40 +-
 src/file-engine/Cargo.toml                    |   1 +
 src/file-engine/src/error.rs                  |  11 +-
 src/file-engine/src/query/file_stream.rs      |  48 +-
 src/flow/src/df_optimizer.rs                  |   9 +-
 src/flow/src/transform/expr.rs                |  17 +-
 src/frontend/src/instance.rs                  |   3 +-
 src/mito2/src/memtable/partition_tree.rs      |   5 +-
 .../partition_tree/primary_key_filter.rs      |   5 +-
 .../sst/index/bloom_filter/applier/builder.rs |   5 +-
 .../index/fulltext_index/applier/builder.rs   |  70 +-
 .../index/inverted_index/applier/builder.rs   |  20 +-
 src/mito2/src/sst/parquet.rs                  |  10 +-
 src/operator/Cargo.toml                       |   1 +
 src/operator/src/error.rs                     |   9 -
 .../src/req_convert/insert/stmt_to_region.rs  |   3 +-
 src/operator/src/statement/copy_table_from.rs |  37 +-
 src/operator/src/statement/set.rs             |   2 +
 src/promql/src/extension_plan/empty_metric.rs |   8 +-
 .../src/extension_plan/histogram_fold.rs      |   8 +-
 .../src/extension_plan/range_manipulate.rs    |  17 +-
 .../src/extension_plan/scalar_calculate.rs    |   9 +-
 .../src/extension_plan/union_distinct_on.rs   |   8 +-
 src/promql/src/functions/round.rs             |   8 +-
 src/promql/src/functions/test_util.rs         |  23 +-
 src/query/src/analyze.rs                      |  11 +-
 src/query/src/datafusion.rs                   |  10 +-
 src/query/src/dist_plan/merge_scan.rs         |  13 +-
 src/query/src/error.rs                        |   2 +-
 src/query/src/part_sort.rs                    |   4 +-
 src/query/src/planner.rs                      |  22 +-
 src/query/src/range_select/plan.rs            |   9 +-
 src/query/src/range_select/plan_rewrite.rs    |   2 +-
 src/query/src/sql.rs                          |  12 +-
 src/query/src/test_util.rs                    |   8 +-
 src/query/src/window_sort.rs                  |   4 +-
 src/servers/Cargo.toml                        |   2 +-
 src/sql/src/parser.rs                         |  25 +-
 src/sql/src/parsers/alter_parser.rs           |  36 +-
 src/sql/src/parsers/create_parser.rs          |  37 +-
 src/sql/src/parsers/deallocate_parser.rs      |   2 +-
 src/sql/src/parsers/execute_parser.rs         |   2 +-
 src/sql/src/parsers/explain_parser.rs         |   5 +
 src/sql/src/parsers/prepare_parser.rs         |   2 +-
 src/sql/src/parsers/set_var_parser.rs         |   5 +-
 src/sql/src/parsers/show_parser.rs            |  15 +-
 src/sql/src/parsers/tql_parser.rs             |   2 +-
 src/sql/src/statements.rs                     |  31 +-
 src/sql/src/statements/insert.rs              |  16 +-
 src/sql/src/statements/show.rs                |  13 +-
 .../src/statements/transform/type_alias.rs    |  21 +-
 src/table/src/predicate.rs                    |  25 +-
 src/table/src/table/scan.rs                   |   6 +-
 tests-fuzz/src/translator/postgres.rs         |   2 +-
 tests-integration/tests/http.rs               |   2 +-
 .../common/create/current_timestamp.result    |   2 +-
 .../standalone/common/function/geo.result     |  20 +-
 .../common/insert/special_value.result        |   2 +-
 .../standalone/common/order/limit.result      |   8 +-
 .../common/order/order_by_exceptions.result   |   2 +-
 .../common/prepare/mysql_prepare.result       |   2 +-
 tests/cases/standalone/common/range/by.result |   2 +-
 .../standalone/common/range/error.result      |  14 +-
 .../standalone/common/range/interval.result   |   2 +-
 .../common/range/special_aggr.result          |  18 +-
 tests/cases/standalone/common/range/to.result |   8 +-
 .../standalone/common/select/unnest.result    |  10 +-
 .../standalone/common/types/json/json.result  |   4 +-
 .../common/types/timestamp/timestamp.result   |   4 +-
 83 files changed, 902 insertions(+), 1027 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index d71947e5aa..3aca7caa6b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -195,7 +195,7 @@ dependencies = [
  "datatypes",
  "greptime-proto",
  "paste",
- "prost 0.13.3",
+ "prost 0.13.5",
  "serde_json",
  "snafu 0.8.5",
  "tonic-build 0.11.0",
@@ -230,7 +230,7 @@ dependencies = [
  "proc-macro-error2",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -268,9 +268,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
 [[package]]
 name = "arrow"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eaf3437355979f1e93ba84ba108c38be5767713051f3c8ffbf07c094e2e61f9f"
+checksum = "dc208515aa0151028e464cc94a692156e945ce5126abd3537bb7fd6ba2143ed1"
 dependencies = [
  "arrow-arith",
  "arrow-array",
@@ -289,24 +289,23 @@ dependencies = [
 
 [[package]]
 name = "arrow-arith"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31dce77d2985522288edae7206bffd5fc4996491841dda01a13a58415867e681"
+checksum = "e07e726e2b3f7816a85c6a45b6ec118eeeabf0b2a8c208122ad949437181f49a"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
  "arrow-data",
  "arrow-schema",
  "chrono",
- "half",
  "num",
 ]
 
 [[package]]
 name = "arrow-array"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d45fe6d3faed0435b7313e59a02583b14c6c6339fa7729e94c32a20af319a79"
+checksum = "a2262eba4f16c78496adfd559a29fe4b24df6088efc9985a873d58e92be022d5"
 dependencies = [
  "ahash 0.8.11",
  "arrow-buffer",
@@ -321,9 +320,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-buffer"
-version = "53.4.0"
+version = "54.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b02656a35cc103f28084bc80a0159668e0a680d919cef127bd7e0aaccb06ec1"
+checksum = "263f4801ff1839ef53ebd06f99a56cecd1dbaf314ec893d93168e2e860e0291c"
 dependencies = [
  "bytes",
  "half",
@@ -332,9 +331,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-cast"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c73c6233c5b5d635a56f6010e6eb1ab9e30e94707db21cea03da317f67d84cf3"
+checksum = "4103d88c5b441525ed4ac23153be7458494c2b0c9a11115848fdb9b81f6f886a"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -353,28 +352,25 @@ dependencies = [
 
 [[package]]
 name = "arrow-csv"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec222848d70fea5a32af9c3602b08f5d740d5e2d33fbd76bf6fd88759b5b13a7"
+checksum = "43d3cb0914486a3cae19a5cad2598e44e225d53157926d0ada03c20521191a65"
 dependencies = [
  "arrow-array",
- "arrow-buffer",
  "arrow-cast",
- "arrow-data",
  "arrow-schema",
  "chrono",
  "csv",
  "csv-core",
  "lazy_static",
- "lexical-core",
  "regex",
 ]
 
 [[package]]
 name = "arrow-data"
-version = "53.4.0"
+version = "54.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7f2861ffa86f107b8ab577d86cff7c7a490243eabe961ba1e1af4f27542bb79"
+checksum = "61cfdd7d99b4ff618f167e548b2411e5dd2c98c0ddebedd7df433d34c20a4429"
 dependencies = [
  "arrow-buffer",
  "arrow-schema",
@@ -384,9 +380,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-flight"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ab7635558f3f803b492eae56c03cde97ea5f85a1c768f94181cb7db69cd81be"
+checksum = "c7408f2bf3b978eddda272c7699f439760ebc4ac70feca25fefa82c5b8ce808d"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -396,22 +392,19 @@ dependencies = [
  "base64 0.22.1",
  "bytes",
  "futures",
- "paste",
- "prost 0.13.3",
- "prost-types 0.13.3",
- "tokio",
+ "prost 0.13.5",
+ "prost-types 0.13.5",
  "tonic 0.12.3",
 ]
 
 [[package]]
 name = "arrow-ipc"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0270dc511f11bb5fa98a25020ad51a99ca5b08d8a8dfbd17503bb9dba0388f0b"
+checksum = "ddecdeab02491b1ce88885986e25002a3da34dd349f682c7cfe67bab7cc17b86"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
- "arrow-cast",
  "arrow-data",
  "arrow-schema",
  "flatbuffers",
@@ -421,9 +414,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-json"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0eff38eeb8a971ad3a4caf62c5d57f0cff8a48b64a55e3207c4fd696a9234aad"
+checksum = "d03b9340013413eb84868682ace00a1098c81a5ebc96d279f7ebf9a4cac3c0fd"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -432,7 +425,7 @@ dependencies = [
  "arrow-schema",
  "chrono",
  "half",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "lexical-core",
  "num",
  "serde",
@@ -441,26 +434,23 @@ dependencies = [
 
 [[package]]
 name = "arrow-ord"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6f202a879d287099139ff0d121e7f55ae5e0efe634b8cf2106ebc27a8715dee"
+checksum = "f841bfcc1997ef6ac48ee0305c4dfceb1f7c786fe31e67c1186edf775e1f1160"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
  "arrow-data",
  "arrow-schema",
  "arrow-select",
- "half",
- "num",
 ]
 
 [[package]]
 name = "arrow-row"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f936954991c360ba762dff23f5dda16300774fafd722353d9683abd97630ae"
+checksum = "1eeb55b0a0a83851aa01f2ca5ee5648f607e8506ba6802577afdda9d75cdedcd"
 dependencies = [
- "ahash 0.8.11",
  "arrow-array",
  "arrow-buffer",
  "arrow-data",
@@ -470,18 +460,18 @@ dependencies = [
 
 [[package]]
 name = "arrow-schema"
-version = "53.4.0"
+version = "54.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9579b9d8bce47aa41389fe344f2c6758279983b7c0ebb4013e283e3e91bb450e"
+checksum = "39cfaf5e440be44db5413b75b72c2a87c1f8f0627117d110264048f2969b99e9"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "arrow-select"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7471ba126d0b0aaa24b50a36bc6c25e4e74869a1fd1a5553357027a0b1c8d1f1"
+checksum = "7e2932aece2d0c869dd2125feb9bd1709ef5c445daa3838ac4112dcfa0fda52c"
 dependencies = [
  "ahash 0.8.11",
  "arrow-array",
@@ -493,9 +483,9 @@ dependencies = [
 
 [[package]]
 name = "arrow-string"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72993b01cb62507b06f1fb49648d7286c8989ecfabdb7b77a750fcb54410731b"
+checksum = "912e38bd6a7a7714c1d9b61df80315685553b7455e8a6045c27531d8ecd5b458"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -555,7 +545,7 @@ version = "0.3.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "942c7cd7ae39e91bde4820d74132e9862e62c2f386c3aa90ccf55949f5bad63a"
 dependencies = [
- "bzip2",
+ "bzip2 0.4.4",
  "flate2",
  "futures-core",
  "futures-io",
@@ -574,7 +564,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7e614738943d3f68c628ae3dbce7c3daffb196665f82f8c8ea6b65de73c79429"
 dependencies = [
  "brotli",
- "bzip2",
+ "bzip2 0.4.4",
  "flate2",
  "futures-core",
  "futures-io",
@@ -616,7 +606,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -638,7 +628,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -655,7 +645,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -735,7 +725,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -936,7 +926,7 @@ checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -1024,9 +1014,9 @@ dependencies = [
 
 [[package]]
 name = "bigdecimal"
-version = "0.4.5"
+version = "0.4.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee"
+checksum = "1a22f228ab7a1b23027ccc6c350b72868017af7ea8356fbdf19f8d991c690013"
 dependencies = [
  "autocfg",
  "libm",
@@ -1059,7 +1049,7 @@ dependencies = [
  "regex",
  "rustc-hash 1.1.0",
  "shlex",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -1171,7 +1161,7 @@ dependencies = [
  "proc-macro-crate 3.2.0",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "syn_derive",
 ]
 
@@ -1274,9 +1264,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "bytes"
-version = "1.7.2"
+version = "1.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
+checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
 dependencies = [
  "serde",
 ]
@@ -1292,13 +1282,21 @@ dependencies = [
 ]
 
 [[package]]
-name = "bzip2-sys"
-version = "0.1.11+1.0.8"
+name = "bzip2"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
+checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47"
+dependencies = [
+ "bzip2-sys",
+]
+
+[[package]]
+name = "bzip2-sys"
+version = "0.1.13+1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14"
 dependencies = [
  "cc",
- "libc",
  "pkg-config",
 ]
 
@@ -1475,7 +1473,7 @@ version = "0.13.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6026d8cd82ada8bbcfe337805dd1eb6afdc9e80fa4d57e977b3a36315e0c5525"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "lazy_static",
  "num-traits",
  "regex",
@@ -1641,7 +1639,7 @@ dependencies = [
  "heck 0.5.0",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -1735,7 +1733,7 @@ dependencies = [
  "moka",
  "parking_lot 0.12.3",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "rand 0.9.0",
  "serde_json",
@@ -1828,7 +1826,7 @@ dependencies = [
  "nu-ansi-term",
  "plugins",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "rand 0.9.0",
  "regex",
@@ -1959,6 +1957,7 @@ dependencies = [
  "futures",
  "lazy_static",
  "object-store",
+ "object_store_opendal",
  "orc-rust",
  "parquet",
  "paste",
@@ -1977,7 +1976,7 @@ name = "common-decimal"
 version = "0.14.0"
 dependencies = [
- "bigdecimal 0.4.5",
+ "bigdecimal 0.4.8",
  "common-error",
  "common-macro",
  "rust_decimal",
@@ -2095,7 +2094,7 @@ dependencies = [
  "hyper 1.4.1",
  "hyper-util",
  "lazy_static",
- "prost 0.13.3",
+ "prost 0.13.5",
  "rand 0.9.0",
  "snafu 0.8.5",
  "tokio",
@@ -2116,7 +2115,7 @@ dependencies = [
  "common-time",
  "datatypes",
  "paste",
- "prost 0.13.3",
+ "prost 0.13.5",
  "snafu 0.8.5",
  "store-api",
  "table",
@@ -2133,7 +2132,7 @@ dependencies = [
  "quote",
  "snafu 0.8.5",
  "static_assertions",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -2190,7 +2189,7 @@ dependencies = [
  "lazy_static",
  "moka",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "rand 0.9.0",
  "regex",
  "rskafka",
@@ -2230,7 +2229,7 @@ dependencies = [
  "common-error",
  "common-macro",
  "pprof",
- "prost 0.13.3",
+ "prost 0.13.5",
  "snafu 0.8.5",
  "tokio",
 ]
 
@@ -2290,7 +2289,7 @@ dependencies = [
  "futures-util",
  "serde",
  "snafu 0.8.5",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "sqlparser_derive 0.1.1",
  "statrs",
  "store-api",
@@ -2848,7 +2847,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "strsim 0.11.1",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -2870,7 +2869,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
 dependencies = [
  "darling_core 0.20.10",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -2895,8 +2894,8 @@ checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2"
 
 [[package]]
 name = "datafusion"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -2905,10 +2904,10 @@ dependencies = [
  "async-compression 0.4.13",
  "async-trait",
  "bytes",
- "bzip2",
+ "bzip2 0.5.2",
  "chrono",
- "dashmap",
  "datafusion-catalog",
+ "datafusion-catalog-listing",
  "datafusion-common",
  "datafusion-common-runtime",
  "datafusion-execution",
@@ -2927,14 +2926,14 @@ dependencies = [
  "flate2",
  "futures",
  "glob",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "log",
  "object_store",
  "parking_lot 0.12.3",
  "parquet",
- "paste",
  "rand 0.8.5",
- "sqlparser 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "tempfile",
  "tokio",
  "tokio-util",
@@ -2946,46 +2945,76 @@ dependencies = [
 
 [[package]]
 name = "datafusion-catalog"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
- "arrow-schema",
+ "arrow",
  "async-trait",
+ "dashmap",
  "datafusion-common",
  "datafusion-execution",
  "datafusion-expr",
  "datafusion-physical-plan",
+ "datafusion-sql",
+ "futures",
+ "itertools 0.14.0",
+ "log",
  "parking_lot 0.12.3",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "datafusion-catalog-listing"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
+dependencies = [
+ "arrow",
+ "arrow-schema",
+ "chrono",
+ "datafusion-catalog",
+ "datafusion-common",
+ "datafusion-execution",
+ "datafusion-expr",
+ "datafusion-physical-expr",
+ "datafusion-physical-expr-common",
+ "datafusion-physical-plan",
+ "futures",
+ "glob",
+ "itertools 0.14.0",
+ "log",
+ "object_store",
+ "url",
+]
 
 [[package]]
 name = "datafusion-common"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
  "arrow-array",
- "arrow-buffer",
+ "arrow-ipc",
  "arrow-schema",
- "chrono",
+ "base64 0.22.1",
  "half",
  "hashbrown 0.14.5",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "libc",
+ "log",
  "object_store",
  "parquet",
  "paste",
  "recursive",
- "sqlparser 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio",
  "web-time 1.1.0",
 ]
 
 [[package]]
 name = "datafusion-common-runtime"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "log",
  "tokio",
@@ -2993,16 +3022,15 @@ dependencies = [
 
 [[package]]
 name = "datafusion-doc"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 
 [[package]]
 name = "datafusion-execution"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
- "chrono",
  "dashmap",
  "datafusion-common",
  "datafusion-expr",
@@ -3017,13 +3045,10 @@
 
 [[package]]
 name = "datafusion-expr"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
- "ahash 0.8.11",
  "arrow",
- "arrow-array",
- "arrow-buffer",
  "chrono",
  "datafusion-common",
  "datafusion-doc",
@@ -3031,30 +3056,28 @@ dependencies = [
  "datafusion-functions-aggregate-common",
  "datafusion-functions-window-common",
  "datafusion-physical-expr-common",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "paste",
  "recursive",
  "serde_json",
- "sqlparser 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "strum 0.26.3",
- "strum_macros 0.26.4",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "datafusion-expr-common"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
  "datafusion-common",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "paste",
 ]
 
 [[package]]
 name = "datafusion-functions"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
  "arrow-buffer",
@@ -3066,10 +3089,11 @@ dependencies = [
  "datafusion-doc",
  "datafusion-execution",
  "datafusion-expr",
+ "datafusion-expr-common",
  "datafusion-macros",
  "hashbrown 0.14.5",
  "hex",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "log",
  "md-5",
  "rand 0.8.5",
@@ -3081,8 +3105,8 @@
 
 [[package]]
 name = "datafusion-functions-aggregate"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
@@ -3102,71 +3126,63 @@ dependencies = [
 
 [[package]]
 name = "datafusion-functions-aggregate-common"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
  "datafusion-common",
  "datafusion-expr-common",
  "datafusion-physical-expr-common",
- "rand 0.8.5",
 ]
 
 [[package]]
 name = "datafusion-functions-nested"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
  "arrow-array",
- "arrow-buffer",
  "arrow-ord",
  "arrow-schema",
  "datafusion-common",
+ "datafusion-doc",
  "datafusion-execution",
  "datafusion-expr",
  "datafusion-functions",
  "datafusion-functions-aggregate",
+ "datafusion-macros",
  "datafusion-physical-expr-common",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "log",
  "paste",
- "rand 0.8.5",
 ]
 
 [[package]]
 name = "datafusion-functions-table"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
- "ahash 0.8.11",
  "arrow",
- "arrow-schema",
  "async-trait",
  "datafusion-catalog",
  "datafusion-common",
- "datafusion-execution",
  "datafusion-expr",
- "datafusion-functions-aggregate-common",
- "datafusion-physical-expr",
- "datafusion-physical-expr-common",
  "datafusion-physical-plan",
- "half",
- "indexmap 2.7.1",
- "log",
  "parking_lot 0.12.3",
  "paste",
 ]
 
 [[package]]
 name = "datafusion-functions-window"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "datafusion-common",
+ "datafusion-doc",
  "datafusion-expr",
  "datafusion-functions-window-common",
+ "datafusion-macros",
  "datafusion-physical-expr",
  "datafusion-physical-expr-common",
  "log",
@@ -3175,8 +3191,8 @@ dependencies = [
 
 [[package]]
 name = "datafusion-functions-window-common"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "datafusion-common",
  "datafusion-physical-expr-common",
@@ -3184,28 +3200,26 @@ dependencies = [
 
 [[package]]
 name = "datafusion-macros"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
- "datafusion-doc",
- "proc-macro2",
+ "datafusion-expr",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
 name = "datafusion-optimizer"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
- "async-trait",
  "chrono",
  "datafusion-common",
  "datafusion-expr",
  "datafusion-physical-expr",
- "indexmap 2.7.1",
- "itertools 0.13.0",
+ "indexmap 2.9.0",
+ "itertools 0.14.0",
  "log",
  "recursive",
  "regex",
@@ -3214,13 +3228,12 @@ dependencies = [
 
 [[package]]
 name = "datafusion-physical-expr"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
  "arrow-array",
- "arrow-buffer",
  "arrow-schema",
  "datafusion-common",
  "datafusion-expr",
@@ -3229,51 +3242,55 @@ dependencies = [
  "datafusion-physical-expr-common",
  "half",
  "hashbrown 0.14.5",
- "indexmap 2.7.1",
- "itertools 0.13.0",
+ "indexmap 2.9.0",
+ "itertools 0.14.0",
  "log",
  "paste",
- "petgraph",
+ "petgraph 0.7.1",
 ]
 
 [[package]]
 name = "datafusion-physical-expr-common"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
  "datafusion-common",
  "datafusion-expr-common",
  "hashbrown 0.14.5",
- "itertools 0.13.0",
+ "itertools 0.14.0",
 ]
 
 [[package]]
 name = "datafusion-physical-optimizer"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
+ "arrow-schema",
  "datafusion-common",
  "datafusion-execution",
+ "datafusion-expr",
  "datafusion-expr-common",
  "datafusion-physical-expr",
+ "datafusion-physical-expr-common",
  "datafusion-physical-plan",
- "itertools 0.13.0",
+ "futures",
+ "itertools 0.14.0",
  "log",
  "recursive",
+ "url",
 ]
 
 [[package]]
 name = "datafusion-physical-plan"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "ahash 0.8.11",
  "arrow",
  "arrow-array",
- "arrow-buffer",
  "arrow-ord",
  "arrow-schema",
  "async-trait",
@@ -3288,48 +3305,46 @@ dependencies = [
  "futures",
  "half",
  "hashbrown 0.14.5",
- "indexmap 2.7.1",
- "itertools 0.13.0",
+ "indexmap 2.9.0",
+ "itertools 0.14.0",
  "log",
- "once_cell",
  "parking_lot 0.12.3",
  "pin-project-lite",
- "rand 0.8.5",
  "tokio",
 ]
 
 [[package]]
 name = "datafusion-sql"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
  "arrow",
  "arrow-array",
  "arrow-schema",
+ "bigdecimal 0.4.8",
  "datafusion-common",
  "datafusion-expr",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "log",
  "recursive",
  "regex",
- "sqlparser 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "datafusion-substrait"
-version = "43.0.0"
-source = "git+https://github.com/apache/datafusion.git?rev=2464703c84c400a09cc59277018813f0e797bb4e#2464703c84c400a09cc59277018813f0e797bb4e"
+version = "45.0.0"
+source = "git+https://github.com/apache/datafusion.git?rev=8ebed674dd71f8a466f658626877944cd16a4375#8ebed674dd71f8a466f658626877944cd16a4375"
 dependencies = [
- "arrow-buffer",
  "async-recursion",
  "async-trait",
  "chrono",
  "datafusion",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "object_store",
  "pbjson-types",
- "prost 0.13.3",
- "substrait 0.49.5",
+ "prost 0.13.5",
+ "substrait 0.53.2",
  "url",
 ]
 
@@ -3378,7 +3393,7 @@ dependencies = [
  "num_cpus",
  "object-store",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "reqwest",
  "serde",
@@ -3419,7 +3434,7 @@ dependencies = [
  "serde",
  "serde_json",
  "snafu 0.8.5",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "sqlparser_derive 0.1.1",
 ]
 
@@ -3515,7 +3530,7 @@ checksum = "2cdc8d50f426189eef89dac62fabfa0abb27d5cc008f25bf4156a0203325becc"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3526,7 +3541,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3568,7 +3583,7 @@ dependencies = [
  "darling 0.20.10",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3588,7 +3603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc"
 dependencies = [
  "derive_builder_core 0.20.1",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3608,7 +3623,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "unicode-xid",
 ]
 
@@ -3620,7 +3635,7 @@ checksum = "65f152f4b8559c4da5d574bafc7af85454d706b4c5fe8b530d508cacbb6807ea"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3696,7 +3711,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3831,7 +3846,7 @@ dependencies = [
  "heck 0.5.0",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3843,7 +3858,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3863,7 +3878,7 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -3909,7 +3924,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "39bde3ce50a626efeb1caa9ab1083972d178bebb55ca627639c8ded507dfcbde"
 dependencies = [
  "http 1.1.0",
- "prost 0.13.3",
+ "prost 0.13.5",
  "tokio",
  "tokio-stream",
  "tonic 0.12.3",
@@ -4041,6 +4056,7 @@ dependencies = [
  "datatypes",
  "futures",
  "object-store",
+ "object_store_opendal",
  "serde",
  "serde_json",
  "snafu 0.8.5",
@@ -4079,6 +4095,18 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
+[[package]]
+name = "fixedbitset"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
+
+[[package]]
+name = "flagset"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
+
 [[package]]
 name = "flatbuffers"
 version = "24.12.23"
@@ -4184,7 +4212,7 @@ dependencies = [
  "partition",
  "pretty_assertions",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "serde",
  "serde_json",
@@ -4293,7 +4321,7 @@ dependencies = [
  "pipeline",
  "prometheus",
  "promql-parser",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "serde",
  "serde_json",
@@ -4301,7 +4329,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "store-api",
  "strfmt",
  "table",
@@ -4341,7 +4369,7 @@ checksum = "e99b8b3c28ae0e84b604c75f721c21dc77afb3706076af5e8216d15fd1deaae3"
 dependencies = [
  "frunk_proc_macro_helpers",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -4353,7 +4381,7 @@ dependencies = [
  "frunk_core",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -4365,7 +4393,7 @@ dependencies = [
  "frunk_core",
  "frunk_proc_macro_helpers",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -4489,7 +4517,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -4693,7 +4721,7 @@ name = "greptime-proto"
 version = "0.1.0"
 source = "git+https://github.com/GreptimeTeam/greptime-proto.git?rev=dd4a1996982534636734674db66e44464b0c0d83#dd4a1996982534636734674db66e44464b0c0d83"
 dependencies = [
- "prost 0.13.3",
+ "prost 0.13.5",
  "serde",
  "serde_json",
  "strum 0.25.0",
@@ -4714,7 +4742,7 @@ dependencies = [
  "futures-sink",
  "futures-util",
  "http 0.2.12",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "slab",
  "tokio",
  "tokio-util",
@@ -4733,7 +4761,7 @@ dependencies = [
  "futures-core",
  "futures-sink",
  "http 1.1.0",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "slab",
  "tokio",
  "tokio-util",
@@ -5114,7 +5142,7 @@ dependencies = [
  "proc-macro-crate 1.3.1",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -5129,7 +5157,7 @@ dependencies = [
  "rust-sitter",
  "rust-sitter-tool",
  "slotmap",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -5148,7 +5176,7 @@ dependencies = [
  "serde",
  "serde_json",
  "slotmap",
- "syn 2.0.96",
+ "syn 2.0.100",
  "webbrowser",
 ]
 
@@ -5162,7 +5190,7 @@ dependencies = [
  "proc-macro-crate 1.3.1",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -5485,7 +5513,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -5562,7 +5590,7 @@ dependencies = [
  "jieba-rs",
  "mockall",
  "pin-project",
- "prost 0.13.3",
+ "prost 0.13.5",
  "puffin",
  "rand 0.9.0",
  "regex",
@@ -5592,9 +5620,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.7.1"
+version = "2.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652"
+checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
 dependencies = [
  "equivalent",
  "hashbrown 0.15.2",
@@ -5608,7 +5636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88"
 dependencies = [
  "ahash 0.8.11",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "is-terminal",
  "itoa",
  "log",
@@ -5965,7 +5993,7 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ee7893dab2e44ae5f9d0173f26ff4aa327c10b01b06a72b52dd9405b628640d"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
 ]
 
 [[package]]
@@ -6066,7 +6094,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "serde_json",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -6127,7 +6155,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "regex",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -6407,8 +6435,8 @@ name = "loki-proto"
 version = "0.1.0"
 source = "git+https://github.com/GreptimeTeam/loki-proto.git?rev=1434ecf23a2654025d86188fb5205e7a74b225d3#1434ecf23a2654025d86188fb5205e7a74b225d3"
 dependencies = [
- "prost 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-types 0.13.5",
  "tonic 0.12.3",
  "tonic-build 0.12.3",
 ]
 
@@ -6454,7 +6482,7 @@ dependencies = [
  "cactus",
  "cfgrammar",
  "filetime",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "lazy_static",
  "lrtable",
  "num-traits",
@@ -6727,7 +6755,7 @@ dependencies = [
  "once_cell",
  "parking_lot 0.12.3",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "rand 0.9.0",
  "regex",
  "rskafka",
@@ -6906,7 +6934,7 @@ dependencies = [
  "paste",
  "pin-project",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "puffin",
  "rand 0.9.0",
  "regex",
@@ -6953,7 +6981,7 @@ dependencies = [
  "cfg-if",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -7052,7 +7080,7 @@ dependencies = [
  "proc-macro-error",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "termcolor",
  "thiserror 1.0.64",
 ]
 
@@ -7070,7 +7098,7 @@ dependencies = [
  "proc-macro-error2",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "termcolor",
  "thiserror 1.0.64",
 ]
 
@@ -7116,7 +7144,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "478b0ff3f7d67b79da2b96f56f334431aef65e15ba4b29dd74a4236e29582bdc"
 dependencies = [
  "base64 0.21.7",
- "bigdecimal 0.4.5",
+ "bigdecimal 0.4.8",
  "bindgen",
  "bitflags 2.9.0",
  "bitvec",
@@ -7235,7 +7263,7 @@ checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -7444,7 +7472,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -7583,6 +7611,23 @@ dependencies = [
  "walkdir",
 ]
 
+[[package]]
+name = "object_store_opendal"
+version = "0.49.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b18eb960885330175ec89daa991b0bc9050dc7f259b31a887fbfbb297312f83"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "flagset",
+ "futures",
+ "futures-util",
+ "object_store",
+ "opendal",
+ "pin-project",
+ "tokio",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.20.1"
@@ -7606,9 +7651,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
 
 [[package]]
 name = "opendal"
-version = "0.51.1"
+version = "0.51.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c9dcfa7a3615e3c60eb662ed6b46b6f244cf2658098f593c0c0915430b3a268"
+checksum = "5b1063ea459fa9e94584115743b06330f437902dd1d9f692b863ef1875a20548"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -7677,7 +7722,7 @@ checksum = "1e32339a5dc40459130b3bd269e9892439f55b33e772d2a9d402a789baaf4e8a"
 dependencies = [
  "futures-core",
  "futures-sink",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "js-sys",
  "once_cell",
  "pin-project-lite",
@@ -7739,7 +7784,7 @@ dependencies = [
  "hex",
  "opentelemetry 0.27.1",
  "opentelemetry_sdk 0.27.1",
- "prost 0.13.3",
+ "prost 0.13.5",
  "serde",
  "tonic 0.12.3",
 ]
 
@@ -7832,6 +7877,7 @@ dependencies = [
  "meter-macros",
  "moka",
  "object-store",
+ "object_store_opendal",
  "partition",
  "path-slash",
  "prometheus",
@@ -7841,7 +7887,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "store-api",
  "substrait 0.14.0",
  "table",
@@ -7852,9 +7898,8 @@
 
 [[package]]
 name = "orc-rust"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b307d095f0481b043b27d94eeb95cd67a8b19c6a34685ad0ccd884740a9306e"
+version = "0.6.0"
+source = "git+https://github.com/datafusion-contrib/orc-rust?rev=3134cab581a8e91b942d6a23aca2916ea965f6bb#3134cab581a8e91b942d6a23aca2916ea965f6bb"
 dependencies = [
  "arrow",
  "async-trait",
@@ -7869,7 +7914,7 @@ dependencies = [
  "lz4_flex",
  "lzokay-native",
  "num",
- "prost 0.13.3",
+ "prost 0.13.5",
  "snafu 0.8.5",
  "snap",
  "tokio",
@@ -8026,7 +8071,7 @@ dependencies = [
  "backtrace",
  "cfg-if",
  "libc",
- "petgraph",
+ "petgraph 0.6.5",
  "redox_syscall 0.5.7",
  "smallvec",
  "thread-id",
@@ -8035,9 +8080,9 @@ dependencies = [
 
 [[package]]
 name = "parquet"
-version = "53.4.0"
+version = "54.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8957c0c95a6a1804f3e51a18f69df29be53856a8c5768cc9b6d00fcafcd2917c"
+checksum = "f88838dca3b84d41444a0341b19f347e8098a3898b0f21536654b8b799e11abd"
 dependencies = [
  "ahash 0.8.11",
  "arrow-array",
@@ -8061,6 +8106,7 @@ dependencies = [
  "object_store",
  "paste",
  "seq-macro",
+ "simdutf8",
  "snap",
  "thrift",
  "tokio",
@@ -8097,7 +8143,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "store-api",
  "table",
 ]
 
@@ -8138,8 +8184,8 @@ checksum = "6eea3058763d6e656105d1403cb04e0a41b7bbac6362d413e7c33be0c32279c9"
 dependencies = [
  "heck 0.5.0",
  "itertools 0.13.0",
- "prost 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-types 0.13.5",
 ]
 
 [[package]]
@@ -8152,8 +8198,8 @@ dependencies = [
  "chrono",
  "pbjson",
  "pbjson-build",
- "prost 0.13.3",
- "prost-build 0.13.3",
+ "prost 0.13.5",
+ "prost-build 0.13.5",
  "serde",
 ]
 
@@ -8229,7 +8275,7 @@ dependencies = [
  "pest_meta",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -8249,8 +8295,18 @@ version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
 dependencies = [
- "fixedbitset",
- "indexmap 2.7.1",
+ "fixedbitset 0.4.2",
+ "indexmap 2.9.0",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772"
+dependencies = [
+ "fixedbitset 0.5.7",
+ "indexmap 2.9.0",
 ]
 
 [[package]]
@@ -8332,7 +8388,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -8640,7 +8696,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac"
 dependencies = [
  "proc-macro2",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -8705,7 +8761,7 @@ dependencies = [
  "proc-macro-error-attr2",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -8788,7 +8844,7 @@ dependencies = [
  "greptime-proto",
  "lazy_static",
  "prometheus",
- "prost 0.13.3",
+ "prost 0.13.5",
  "snafu 0.8.5",
  "tokio",
 ]
 
@@ -8831,12 +8887,12 @@ dependencies = [
 
 [[package]]
 name = "prost"
-version = "0.13.3"
+version = "0.13.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f"
+checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
 dependencies = [
  "bytes",
- "prost-derive 0.13.3",
+ "prost-derive 0.13.5",
 ]
 
 [[package]]
@@ -8851,33 +8907,32 @@ dependencies = [
  "log",
  "multimap",
  "once_cell",
- "petgraph",
+ "petgraph 0.6.5",
  "prettyplease",
  "prost 0.12.6",
  "prost-types 0.12.6",
  "regex",
- "syn 2.0.96",
+ "syn 2.0.100",
  "tempfile",
 ]
 
 [[package]]
 name = "prost-build"
-version = "0.13.3"
+version = "0.13.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15"
+checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf"
 dependencies = [
- "bytes",
  "heck 0.5.0",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "log",
  "multimap",
  "once_cell",
- "petgraph",
+ "petgraph 0.6.5",
  "prettyplease",
- "prost 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-types 0.13.5",
  "regex",
- "syn 2.0.96",
+ "syn 2.0.100",
  "tempfile",
 ]
 
@@ -8904,20 +8959,20 @@ dependencies = [
  "itertools 0.12.1",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
 name = "prost-derive"
-version = "0.13.3"
+version = "0.13.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5"
+checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
 dependencies = [
  "anyhow",
- "itertools 0.13.0",
+ "itertools 0.14.0",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -8940,11 +8995,11 @@ dependencies = [
 
 [[package]]
 name = "prost-types"
-version = "0.13.3"
+version = "0.13.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670"
+checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"
 dependencies = [
- "prost 0.13.3",
+ "prost 0.13.5",
 ]
 
 [[package]]
@@ -9114,7 +9169,7 @@ dependencies = [
  "prometheus",
  "promql",
  "promql-parser",
- "prost 0.13.3",
+ "prost 0.13.5",
  "rand 0.9.0",
  "regex",
  "serde",
@@ -9122,7 +9177,7 @@ dependencies = [
  "session",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "statrs",
  "store-api",
  "substrait 0.14.0",
@@ -9412,7 +9467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b"
 dependencies = [
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -9461,7 +9516,7 @@ checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -9819,7 +9874,7 @@ dependencies = [
  "regex",
  "relative-path",
  "rustc_version",
- "syn 2.0.96",
+ "syn 2.0.100",
  "unicode-ident",
 ]
 
@@ -9831,7 +9886,7 @@ checksum = "b3a8fb4672e840a587a66fc577a5491375df51ddb88f2a2c2a792598c326fe14"
 dependencies = [
  "quote",
  "rand 0.8.5",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -9854,7 +9909,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rust-embed-utils",
- "syn 2.0.96",
+ "syn 2.0.100",
  "walkdir",
 ]
 
@@ -10231,7 +10286,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "serde_derive_internals",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10272,7 +10327,7 @@ dependencies = [
  "heck 0.4.1",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10310,9 +10365,9 @@ dependencies = [
 
 [[package]]
 name = "semver"
-version = "1.0.23"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
 dependencies = [
  "serde",
 ]
 
@@ -10350,7 +10405,7 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10361,16 +10416,16 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
 name = "serde_json"
-version = "1.0.137"
+version = "1.0.140"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "itoa",
  "memchr",
  "ryu",
@@ -10395,7 +10450,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10416,7 +10471,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "serde",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10441,7 +10496,7 @@ dependencies = [
  "chrono",
  "hex",
  "indexmap 1.9.3",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "serde",
  "serde_derive",
  "serde_json",
@@ -10458,7 +10513,7 @@ dependencies = [
  "darling 0.20.10",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10467,7 +10522,7 @@ version = "0.9.34+deprecated"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "itoa",
  "ryu",
  "serde",
@@ -10529,7 +10584,7 @@ dependencies = [
  "humantime",
  "humantime-serde",
  "hyper 1.4.1",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "influxdb_line_protocol",
  "itertools 0.14.0",
  "json5",
@@ -10555,7 +10610,7 @@ dependencies = [
  "pprof",
  "prometheus",
  "promql-parser",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "quoted-string",
  "rand 0.9.0",
@@ -10856,7 +10911,7 @@ dependencies = [
  "heck 0.5.0",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -10948,7 +11003,7 @@ dependencies = [
  "serde",
  "serde_json",
  "snafu 0.8.5",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "sqlparser_derive 0.1.1",
  "store-api",
  "table",
@@ -11004,25 +11059,27 @@ dependencies = [
 
 [[package]]
 name = "sqlparser"
-version = "0.52.0"
+version = "0.54.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a875d8cd437cc8a97e9aeaeea352ec9a19aea99c23e9effb17757291de80b08"
+checksum = "c66e3b7374ad4a6af849b08b3e7a6eda0edbd82f0fd59b57e22671bf16979899"
 dependencies = [
  "log",
- "sqlparser_derive 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "recursive",
+ "sqlparser_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "sqlparser"
-version = "0.52.0"
-source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170#71dd86058d2af97b9925093d40c4e03360403170"
+version = "0.54.0"
+source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089#e98e6b322426a9d397a71efef17075966223c089"
 dependencies = [
  "lazy_static",
  "log",
+ "recursive",
  "regex",
  "serde",
- "sqlparser 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "sqlparser_derive 0.2.2 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sqlparser_derive 0.3.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
 ]
 
 [[package]]
@@ -11038,23 +11095,23 @@ dependencies = [
 
 [[package]]
 name = "sqlparser_derive"
-version = "0.2.2"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01b2e185515564f15375f593fb966b5718bc624ba77fe49fa4616ad619690554"
+checksum = "da5fc6819faabb412da764b99d3b713bb55083c11e7e0c00144d386cd6a1939c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
 name = "sqlparser_derive"
-version = "0.2.2"
-source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170#71dd86058d2af97b9925093d40c4e03360403170"
+version = "0.3.0"
+source = "git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089#e98e6b322426a9d397a71efef17075966223c089"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11088,7 +11145,7 @@ dependencies = [
  "futures-util",
  "hashbrown 0.15.2",
  "hashlink",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "log",
  "memchr",
  "once_cell",
@@ -11117,7 +11174,7 @@ dependencies = [
  "quote",
  "sqlx-core",
  "sqlx-macros-core",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11140,7 +11197,7 @@ dependencies = [
  "sqlx-mysql",
  "sqlx-postgres",
  "sqlx-sqlite",
- "syn 2.0.96",
+ "syn 2.0.100",
  "tempfile",
  "tokio",
  "url",
@@ -11310,7 +11367,7 @@ dependencies = [
  "derive_builder 0.20.1",
  "futures",
  "humantime",
- "prost 0.13.3",
+ "prost 0.13.5",
  "serde",
  "serde_json",
  "snafu 0.8.5",
@@ -11379,9 +11436,6 @@ name = "strum"
 version = "0.26.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
-dependencies = [
- "strum_macros 0.26.4",
-]
 
 [[package]]
 name = "strum"
@@ -11402,7 +11456,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustversion",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11415,7 +11469,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustversion",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11428,7 +11482,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustversion",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11456,7 +11510,7 @@ dependencies = [
  "datafusion-substrait",
  "datatypes",
  "promql",
- "prost 0.13.3",
+ "prost 0.13.5",
  "snafu 0.8.5",
  "substrait 0.37.3",
  "tokio",
@@ -11470,41 +11524,41 @@ checksum = "9ec889155c56a34200d2c5aee147b8d29545fa7cce7f68b38d927f5d24ced8ef"
 dependencies = [
  "heck 0.5.0",
  "prettyplease",
- "prost 0.13.3",
- "prost-build 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-build 0.13.5",
+ "prost-types 0.13.5",
  "schemars",
  "semver",
  "serde",
  "serde_json",
  "serde_yaml",
- "syn 2.0.96",
+ "syn 2.0.100",
  "typify 0.1.0",
  "walkdir",
 ]
 
 [[package]]
 name = "substrait"
-version = "0.49.5"
+version = "0.53.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c271a596176d3b82bfc5b4107fe9fbd30e6a9a99c0dca146777f05d8f0e08e4"
+checksum = "6fac3d70185423235f37b889764e184b81a5af4bb7c95833396ee9bd92577e1b"
 dependencies = [
  "heck 0.5.0",
  "pbjson",
  "pbjson-build",
  "pbjson-types",
  "prettyplease",
- "prost 0.13.3",
- "prost-build 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-build 0.13.5",
+ "prost-types 0.13.5",
  "regress 0.10.3",
  "schemars",
  "semver",
  "serde",
  "serde_json",
  "serde_yaml",
- "syn 2.0.96",
- "typify 0.2.0",
+ "syn 2.0.100",
+ "typify 0.3.0",
  "walkdir",
 ]
 
@@ -11550,9 +11604,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.96"
+version = "2.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
+checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -11578,7 +11632,7 @@ dependencies = [
  "proc-macro-error",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11604,7 +11658,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -11908,7 +11962,7 @@ dependencies = [
  "serde_yaml",
  "snafu 0.8.5",
  "sql",
- "sqlparser 0.52.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=71dd86058d2af97b9925093d40c4e03360403170)",
+ "sqlparser 0.54.0 (git+https://github.com/GreptimeTeam/sqlparser-rs.git?rev=e98e6b322426a9d397a71efef17075966223c089)",
  "sqlx",
  "store-api",
  "strum 0.27.1",
@@ -11972,7 +12026,7 @@ dependencies = [
  "partition",
  "paste",
  "pipeline",
- "prost 0.13.3",
+ "prost 0.13.5",
  "query",
  "rand 0.9.0",
  "rstest",
@@ -12040,7 +12094,7 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12051,7 +12105,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12207,9 +12261,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.42.0"
+version = "1.44.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551"
+checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
 dependencies = [
  "backtrace",
  "bytes",
@@ -12236,13 +12290,13 @@ dependencies = [
 
 [[package]]
 name = "tokio-macros"
-version = "2.4.0"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
+checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12396,7 +12450,7 @@ version = "0.19.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "toml_datetime",
  "winnow 0.5.40",
 ]
 
@@ -12407,7 +12461,7 @@ version = "0.22.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
 dependencies = [
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "serde",
  "serde_spanned",
  "toml_datetime",
@@ -12463,7 +12517,7 @@ dependencies = [
  "hyper-util",
  "percent-encoding",
  "pin-project",
- "prost 0.13.3",
+ "prost 0.13.5",
  "rustls-pemfile",
  "socket2",
  "tokio",
@@ -12486,7 +12540,7 @@ dependencies = [
  "proc-macro2",
  "prost-build 0.12.6",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12497,10 +12551,10 @@ checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11"
 dependencies = [
  "prettyplease",
  "proc-macro2",
- "prost-build 0.13.3",
- "prost-types 0.13.3",
+ "prost-build 0.13.5",
+ "prost-types 0.13.5",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12509,8 +12563,8 @@ version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "878d81f52e7fcfd80026b7fdb6a9b578b3c3653ba987f87f0dce4b64043cba27"
 dependencies = [
- "prost 0.13.3",
- "prost-types 0.13.3",
+ "prost 0.13.5",
+ "prost-types 0.13.5",
  "tokio",
  "tokio-stream",
  "tonic 0.12.3",
@@ -12545,7 +12599,7 @@ dependencies = [
  "futures-core",
  "futures-util",
  "hdrhistogram",
- "indexmap 2.7.1",
+ "indexmap 2.9.0",
  "pin-project-lite",
  "slab",
  "sync_wrapper 1.0.1",
@@ -12650,7 +12704,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12906,7 +12960,7 @@ checksum = "70b20a22c42c8f1cd23ce5e34f165d4d37038f5b663ad20fb6adbdf029172483"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -12921,12 +12975,12 @@ dependencies = [
 
 [[package]]
 name = "typify"
-version = "0.2.0"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4c644dda9862f0fef3a570d8ddb3c2cfb1d5ac824a1f2ddfa7bc8f071a5ad8a"
+checksum = "e03ba3643450cfd95a1aca2e1938fef63c1c1994489337998aff4ad771f21ef8"
 dependencies = [
- "typify-impl 0.2.0",
- "typify-macro 0.2.0",
+ "typify-impl 0.3.0",
+ "typify-macro 0.3.0",
 ]
 
 [[package]]
@@ -12944,16 +12998,16 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
- "syn 2.0.96",
+ "syn 2.0.100",
  "thiserror 1.0.64",
  "unicode-ident",
 ]
 
 [[package]]
 name = "typify-impl"
-version = "0.2.0"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d59ab345b6c0d8ae9500b9ff334a4c7c0d316c1c628dc55726b95887eb8dbd11"
+checksum = "bce48219a2f3154aaa2c56cbf027728b24a3c8fe0a47ed6399781de2b3f3eeaf"
 dependencies = [
  "heck 0.5.0",
  "log",
@@ -12964,8 +13018,8 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
- "syn 2.0.96",
- "thiserror 1.0.64",
+ "syn 2.0.100",
+ "thiserror 2.0.12",
  "unicode-ident",
 ]
 
@@ -12982,15 +13036,15 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_tokenstream",
- "syn 2.0.96",
+ "syn 2.0.100",
  "typify-impl 0.1.0",
 ]
 
 [[package]]
 name = "typify-macro"
-version = "0.2.0"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "785e2cdcef0df8160fdd762ed548a637aaec1e83704fdbc14da0df66013ee8d0"
+checksum = "68b5780d745920ed73c5b7447496a9b5c42ed2681a9b70859377aec423ecf02b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -12999,8 +13053,8 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_tokenstream",
- "syn 2.0.96",
- "typify-impl 0.2.0",
+ "syn 2.0.100",
+ "typify-impl 0.3.0",
 ]
 
 [[package]]
@@ -13069,9 +13123,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.13"
+version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
 
 [[package]]
 name = "unicode-normalization"
@@ -13174,6 +13228,7 @@ dependencies = [
  "getrandom 0.2.15",
  "rand 0.8.5",
  "serde",
+ "wasm-bindgen",
 ]
 
 [[package]]
@@ -13304,7 +13359,7 @@ dependencies = [
  "log",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "wasm-bindgen-shared",
 ]
 
@@ -13338,7 +13393,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
 
@@ -13557,7 +13612,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -13568,7 +13623,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -13579,7 +13634,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -13590,7 +13645,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -13980,7 +14035,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "synstructure",
 ]
 
@@ -14011,7 +14066,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -14022,7 +14077,7 @@ checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -14042,7 +14097,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
  "synstructure",
 ]
 
@@ -14063,7 +14118,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
@@ -14085,7 +14140,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.100",
 ]
 
 [[package]]
diff --git a/Cargo.toml b/Cargo.toml
index 5046dafc56..9c36a76805 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -90,11 +90,11 @@ rust.unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tokio_unstable)'] }
 # See for more detaiils: https://github.com/rust-lang/cargo/issues/11329
 ahash = { version = "0.8", features = ["compile-time-rng"] }
 aquamarine = "0.6"
-arrow = { version = "53.0.0", features = ["prettyprint"] }
-arrow-array = { version = "53.0.0", default-features = false, features = ["chrono-tz"] }
-arrow-flight = "53.0"
-arrow-ipc = { version = "53.0.0", default-features = false, features = ["lz4", "zstd"] }
-arrow-schema = { version = "53.0", features = ["serde"] }
+arrow = { version = "54.2", features = ["prettyprint"] }
+arrow-array = { version = 
"54.2", default-features = false, features = ["chrono-tz"] } +arrow-flight = "54.2" +arrow-ipc = { version = "54.2", default-features = false, features = ["lz4", "zstd"] } +arrow-schema = { version = "54.2", features = ["serde"] } async-stream = "0.3" async-trait = "0.1" # Remember to update axum-extra, axum-macros when updating axum @@ -113,15 +113,15 @@ clap = { version = "4.4", features = ["derive"] } config = "0.13.0" crossbeam-utils = "0.8" dashmap = "6.1" -datafusion = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-common = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-expr = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-functions = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-optimizer = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-physical-expr = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-physical-plan = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-sql = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } -datafusion-substrait = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" } +datafusion = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-common = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-expr = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-functions = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-optimizer = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-physical-expr = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-physical-plan = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-sql = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } +datafusion-substrait = { git = "https://github.com/apache/datafusion.git", rev = "8ebed674dd71f8a466f658626877944cd16a4375" } deadpool = "0.12" deadpool-postgres = "0.14" derive_builder = "0.20" @@ -148,6 +148,7 @@ moka = "0.12" nalgebra = "0.33" notify = "8.0" num_cpus = "1.16" +object_store_opendal = "0.49.0" once_cell = "1.18" opentelemetry-proto = { version = "0.27", features = [ "gen-tonic", @@ -157,7 +158,7 @@ opentelemetry-proto = { version = "0.27", features = [ "logs", ] } parking_lot = "0.12" -parquet = { version = "53.0.0", default-features = false, features = ["arrow", "async", "object_store"] } +parquet = { version = "54.2", default-features = false, features = ["arrow", "async", "object_store"] } paste = "1.0" pin-project = "1.0" prometheus = { version = "0.13.3", features = ["process"] } @@ -191,19 +192,18 @@ simd-json = "0.15" similar-asserts = "1.6.0" smallvec = { version = "1", features = ["serde"] } snafu = "0.8" +sqlparser = { 
git = "https://github.com/GreptimeTeam/sqlparser-rs.git", rev = "e98e6b322426a9d397a71efef17075966223c089", features = [ + "visitor", + "serde", +] } # branch = "v0.54.x" sqlx = { version = "0.8", features = [ "runtime-tokio-rustls", "mysql", "postgres", "chrono", ] } -sysinfo = "0.33" -# on branch v0.52.x -sqlparser = { git = "https://github.com/GreptimeTeam/sqlparser-rs.git", rev = "71dd86058d2af97b9925093d40c4e03360403170", features = [ - "visitor", - "serde", -] } # on branch v0.44.x strum = { version = "0.27", features = ["derive"] } +sysinfo = "0.33" tempfile = "3" tokio = { version = "1.40", features = ["full"] } tokio-postgres = "0.7" diff --git a/src/catalog/src/system_schema/predicate.rs b/src/catalog/src/system_schema/predicate.rs index 3bf04124c3..c94141a947 100644 --- a/src/catalog/src/system_schema/predicate.rs +++ b/src/catalog/src/system_schema/predicate.rs @@ -437,10 +437,7 @@ mod tests { } fn column(name: &str) -> Expr { - Expr::Column(Column { - relation: None, - name: name.to_string(), - }) + Expr::Column(Column::from_name(name)) } fn string_literal(v: &str) -> Expr { diff --git a/src/common/datasource/Cargo.toml b/src/common/datasource/Cargo.toml index 63db292a52..890832758c 100644 --- a/src/common/datasource/Cargo.toml +++ b/src/common/datasource/Cargo.toml @@ -31,7 +31,8 @@ derive_builder.workspace = true futures.workspace = true lazy_static.workspace = true object-store.workspace = true -orc-rust = { version = "0.5", default-features = false, features = [ +object_store_opendal.workspace = true +orc-rust = { git = "https://github.com/datafusion-contrib/orc-rust", rev = "3134cab581a8e91b942d6a23aca2916ea965f6bb", default-features = false, features = [ "async", ] } parquet.workspace = true diff --git a/src/common/datasource/src/compression.rs b/src/common/datasource/src/compression.rs index 0d74849177..b7eb2d8b64 100644 --- a/src/common/datasource/src/compression.rs +++ b/src/common/datasource/src/compression.rs @@ -19,6 +19,7 @@ use std::str::FromStr; use async_compression::tokio::bufread::{BzDecoder, GzipDecoder, XzDecoder, ZstdDecoder}; use async_compression::tokio::write; use bytes::Bytes; +use datafusion::datasource::file_format::file_compression_type::FileCompressionType; use futures::Stream; use serde::{Deserialize, Serialize}; use strum::EnumIter; @@ -192,3 +193,15 @@ macro_rules! 
impl_compression_type {
 }
 
 impl_compression_type!((Gzip, Gzip), (Bzip2, Bz), (Xz, Xz), (Zstd, Zstd));
+
+impl From<CompressionType> for FileCompressionType {
+    fn from(t: CompressionType) -> Self {
+        match t {
+            CompressionType::Gzip => FileCompressionType::GZIP,
+            CompressionType::Bzip2 => FileCompressionType::BZIP2,
+            CompressionType::Xz => FileCompressionType::XZ,
+            CompressionType::Zstd => FileCompressionType::ZSTD,
+            CompressionType::Uncompressed => FileCompressionType::UNCOMPRESSED,
+        }
+    }
+}
diff --git a/src/common/datasource/src/file_format/csv.rs b/src/common/datasource/src/file_format/csv.rs
index c62f442a78..757d207e89 100644
--- a/src/common/datasource/src/file_format/csv.rs
+++ b/src/common/datasource/src/file_format/csv.rs
@@ -14,18 +14,14 @@
 use std::collections::HashMap;
 use std::str::FromStr;
-use std::sync::Arc;
 
 use arrow::csv;
 use arrow::csv::reader::Format;
 use arrow::record_batch::RecordBatch;
-use arrow_schema::{Schema, SchemaRef};
+use arrow_schema::Schema;
 use async_trait::async_trait;
 use common_runtime;
-use datafusion::datasource::physical_plan::{FileMeta, FileOpenFuture, FileOpener};
-use datafusion::error::Result as DataFusionResult;
 use datafusion::physical_plan::SendableRecordBatchStream;
-use derive_builder::Builder;
 use object_store::ObjectStore;
 use snafu::ResultExt;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
@@ -34,7 +30,7 @@ use tokio_util::io::SyncIoBridge;
 use crate::buffered_writer::DfRecordBatchEncoder;
 use crate::compression::CompressionType;
 use crate::error::{self, Result};
-use crate::file_format::{self, open_with_decoder, stream_to_file, FileFormat};
+use crate::file_format::{self, stream_to_file, FileFormat};
 use crate::share_buffer::SharedBuffer;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -99,66 +95,6 @@ impl Default for CsvFormat {
     }
 }
 
-#[derive(Debug, Clone, Builder)]
-pub struct CsvConfig {
-    batch_size: usize,
-    file_schema: SchemaRef,
-    #[builder(default = "None")]
-    file_projection: Option<Vec<usize>>,
-    #[builder(default = "true")]
-    has_header: bool,
-    #[builder(default = "b','")]
-    delimiter: u8,
-}
-
-impl CsvConfig {
-    fn builder(&self) -> csv::ReaderBuilder {
-        let mut builder = csv::ReaderBuilder::new(self.file_schema.clone())
-            .with_delimiter(self.delimiter)
-            .with_batch_size(self.batch_size)
-            .with_header(self.has_header);
-
-        if let Some(proj) = &self.file_projection {
-            builder = builder.with_projection(proj.clone());
-        }
-
-        builder
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct CsvOpener {
-    config: Arc<CsvConfig>,
-    object_store: Arc<ObjectStore>,
-    compression_type: CompressionType,
-}
-
-impl CsvOpener {
-    /// Return a new [`CsvOpener`]. The caller must ensure [`CsvConfig`].file_schema must correspond to the opening file.
-    pub fn new(
-        config: CsvConfig,
-        object_store: ObjectStore,
-        compression_type: CompressionType,
-    ) -> Self {
-        CsvOpener {
-            config: Arc::new(config),
-            object_store: Arc::new(object_store),
-            compression_type,
-        }
-    }
-}
-
-impl FileOpener for CsvOpener {
-    fn open(&self, meta: FileMeta) -> DataFusionResult<FileOpenFuture> {
-        open_with_decoder(
-            self.object_store.clone(),
-            meta.location().to_string(),
-            self.compression_type,
-            || Ok(self.config.builder().build_decoder()),
-        )
-    }
-}
-
 #[async_trait]
 impl FileFormat for CsvFormat {
     async fn infer_schema(&self, store: &ObjectStore, path: &str) -> Result<Schema> {
diff --git a/src/common/datasource/src/file_format/json.rs b/src/common/datasource/src/file_format/json.rs
index 977d9e7d65..099313af55 100644
--- a/src/common/datasource/src/file_format/json.rs
+++ b/src/common/datasource/src/file_format/json.rs
@@ -15,18 +15,14 @@
 use std::collections::HashMap;
 use std::io::BufReader;
 use std::str::FromStr;
-use std::sync::Arc;
 
-use arrow::datatypes::SchemaRef;
+use arrow::json;
 use arrow::json::reader::{infer_json_schema_from_iterator, ValueIter};
 use arrow::json::writer::LineDelimited;
-use arrow::json::{self, ReaderBuilder};
 use arrow::record_batch::RecordBatch;
 use arrow_schema::Schema;
 use async_trait::async_trait;
 use common_runtime;
-use datafusion::datasource::physical_plan::{FileMeta, FileOpenFuture, FileOpener};
-use datafusion::error::{DataFusionError, Result as DataFusionResult};
 use datafusion::physical_plan::SendableRecordBatchStream;
 use object_store::ObjectStore;
 use snafu::ResultExt;
@@ -36,7 +32,7 @@
 use crate::buffered_writer::DfRecordBatchEncoder;
 use crate::compression::CompressionType;
 use crate::error::{self, Result};
-use crate::file_format::{self, open_with_decoder, stream_to_file, FileFormat};
+use crate::file_format::{self, stream_to_file, FileFormat};
 use crate::share_buffer::SharedBuffer;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -113,47 +109,6 @@ impl FileFormat for JsonFormat {
     }
 }
 
-#[derive(Debug, Clone)]
-pub struct JsonOpener {
-    batch_size: usize,
-    projected_schema: SchemaRef,
-    object_store: Arc<ObjectStore>,
-    compression_type: CompressionType,
-}
-
-impl JsonOpener {
-    /// Return a new [`JsonOpener`]. Any fields not present in `projected_schema` will be ignored.
-    pub fn new(
-        batch_size: usize,
-        projected_schema: SchemaRef,
-        object_store: ObjectStore,
-        compression_type: CompressionType,
-    ) -> Self {
-        Self {
-            batch_size,
-            projected_schema,
-            object_store: Arc::new(object_store),
-            compression_type,
-        }
-    }
-}
-
-impl FileOpener for JsonOpener {
-    fn open(&self, meta: FileMeta) -> DataFusionResult<FileOpenFuture> {
-        open_with_decoder(
-            self.object_store.clone(),
-            meta.location().to_string(),
-            self.compression_type,
-            || {
-                ReaderBuilder::new(self.projected_schema.clone())
-                    .with_batch_size(self.batch_size)
-                    .build_decoder()
-                    .map_err(DataFusionError::from)
-            },
-        )
-    }
-}
-
 pub async fn stream_to_json(
     stream: SendableRecordBatchStream,
     store: ObjectStore,
diff --git a/src/common/datasource/src/file_format/tests.rs b/src/common/datasource/src/file_format/tests.rs
index 57c5f0e380..b2a687dc51 100644
--- a/src/common/datasource/src/file_format/tests.rs
+++ b/src/common/datasource/src/file_format/tests.rs
@@ -19,7 +19,10 @@ use std::vec;
 
 use common_test_util::find_workspace_path;
 use datafusion::assert_batches_eq;
-use datafusion::datasource::physical_plan::{FileOpener, FileScanConfig, FileStream, ParquetExec};
+use datafusion::datasource::file_format::file_compression_type::FileCompressionType;
+use datafusion::datasource::physical_plan::{
+    CsvConfig, CsvOpener, FileOpener, FileScanConfig, FileStream, JsonOpener, ParquetExec,
+};
 use datafusion::execution::context::TaskContext;
 use datafusion::physical_plan::metrics::ExecutionPlanMetricsSet;
 use datafusion::physical_plan::ExecutionPlan;
@@ -27,14 +30,11 @@ use datafusion::prelude::SessionContext;
 use futures::StreamExt;
 
 use super::FORMAT_TYPE;
-use crate::compression::CompressionType;
-use crate::error;
-use crate::file_format::csv::{CsvConfigBuilder, CsvOpener};
-use crate::file_format::json::JsonOpener;
 use crate::file_format::orc::{OrcFormat, OrcOpener};
 use crate::file_format::parquet::DefaultParquetFileReaderFactory;
 use crate::file_format::{FileFormat, Format};
-use crate::test_util::{self, scan_config, test_basic_schema, test_store};
+use crate::test_util::{scan_config, test_basic_schema, test_store};
+use crate::{error, test_util};
 
 struct Test<'a, T: FileOpener> {
     config: FileScanConfig,
@@ -62,15 +62,18 @@ impl<T: FileOpener> Test<'_, T> {
 #[tokio::test]
 async fn test_json_opener() {
     let store = test_store("/");
+    let store = Arc::new(object_store_opendal::OpendalStore::new(store));
 
     let schema = test_basic_schema();
 
-    let json_opener = JsonOpener::new(
-        100,
-        schema.clone(),
-        store.clone(),
-        CompressionType::Uncompressed,
-    );
+    let json_opener = || {
+        JsonOpener::new(
+            test_util::TEST_BATCH_SIZE,
+            schema.clone(),
+            FileCompressionType::UNCOMPRESSED,
+            store.clone(),
+        )
+    };
 
     let path = &find_workspace_path("/src/common/datasource/tests/json/basic.json")
         .display()
         .to_string();
     let tests = [
         Test {
             config: scan_config(schema.clone(), None, path),
-            opener: json_opener.clone(),
+            opener: json_opener(),
             expected: vec![
                 "+-----+-------+",
                 "| num | str |",
                 "+-----+-------+",
                 "| 5 | test |",
                 "| 2 | hello |",
                 "| 4 | foo |",
                 "+-----+-------+",
             ],
         },
         Test {
             config: scan_config(schema.clone(), Some(1), path),
-            opener: json_opener.clone(),
+            opener: json_opener(),
             expected: vec![
                 "+-----+------+",
                 "| num | str |",
                 "+-----+------+",
                 "| 5 | test |",
                 "+-----+------+",
             ],
         },
@@ -110,23 +113,30 @@
 #[tokio::test]
 async fn test_csv_opener() {
     let store = test_store("/");
+    let store = Arc::new(object_store_opendal::OpendalStore::new(store));
 
     let schema = test_basic_schema();
     let path = &find_workspace_path("/src/common/datasource/tests/csv/basic.csv")
        .display()
         .to_string();
 
-    let csv_conf = CsvConfigBuilder::default()
-        .batch_size(test_util::TEST_BATCH_SIZE)
-        .file_schema(schema.clone())
-        .build()
-        .unwrap();
+    let csv_config = Arc::new(CsvConfig::new(
+        test_util::TEST_BATCH_SIZE,
+        schema.clone(),
+        None,
+        true,
+        b',',
+        b'"',
+        None,
+        store,
+        None,
+    ));
 
-    let csv_opener = CsvOpener::new(csv_conf, store, CompressionType::Uncompressed);
+    let csv_opener = || CsvOpener::new(csv_config.clone(), FileCompressionType::UNCOMPRESSED);
 
     let tests = [
         Test {
             config: scan_config(schema.clone(), None, path),
-            opener: csv_opener.clone(),
+            opener: csv_opener(),
             expected: vec![
                 "+-----+-------+",
                 "| num | str |",
                 "+-----+-------+",
                 "| 5 | test |",
                 "| 2 | hello |",
                 "| 4 | foo |",
                 "+-----+-------+",
             ],
         },
         Test {
             config: scan_config(schema.clone(), Some(1), path),
-            opener: csv_opener.clone(),
+            opener: csv_opener(),
             expected: vec![
                 "+-----+------+",
                 "| num | str |",
                 "+-----+------+",
                 "| 5 | test |",
                 "+-----+------+",
             ],
         },
diff --git a/src/common/datasource/src/test_util.rs b/src/common/datasource/src/test_util.rs
index 64fa41ad62..be2b288f2f 100644
--- a/src/common/datasource/src/test_util.rs
+++ b/src/common/datasource/src/test_util.rs
@@ -16,17 +16,19 @@ use std::sync::Arc;
 
 use arrow_schema::{DataType, Field, Schema, SchemaRef};
 use common_test_util::temp_dir::{create_temp_dir, TempDir};
-use datafusion::common::Statistics;
+use datafusion::common::{Constraints, Statistics};
+use datafusion::datasource::file_format::file_compression_type::FileCompressionType;
 use datafusion::datasource::listing::PartitionedFile;
 use datafusion::datasource::object_store::ObjectStoreUrl;
-use datafusion::datasource::physical_plan::{FileScanConfig, FileStream};
+use datafusion::datasource::physical_plan::{
+    CsvConfig, CsvOpener, FileScanConfig, FileStream, JsonOpener,
+};
 use datafusion::physical_plan::metrics::ExecutionPlanMetricsSet;
 use object_store::services::Fs;
 use object_store::ObjectStore;
 
-use crate::compression::CompressionType;
-use crate::file_format::csv::{stream_to_csv, CsvConfigBuilder, CsvOpener};
-use crate::file_format::json::{stream_to_json, JsonOpener};
+use crate::file_format::csv::stream_to_csv;
+use crate::file_format::json::stream_to_json;
 use crate::test_util;
 
 pub const TEST_BATCH_SIZE: usize = 100;
@@ -74,6 +76,7 @@ pub fn scan_config(file_schema: SchemaRef, limit: Option<usize>, filename: &str)
         object_store_url: ObjectStoreUrl::parse("empty://").unwrap(), // won't be used
         file_schema,
         file_groups: vec![vec![PartitionedFile::new(filename.to_string(), 10)]],
+        constraints: Constraints::empty(),
         statistics,
         projection: None,
         limit,
@@ -90,8 +93,8 @@ pub async fn setup_stream_to_json_test(origin_path: &str, threshold: impl Fn(usi
     let json_opener = JsonOpener::new(
         test_util::TEST_BATCH_SIZE,
         schema.clone(),
-        store.clone(),
-        CompressionType::Uncompressed,
+        FileCompressionType::UNCOMPRESSED,
+        Arc::new(object_store_opendal::OpendalStore::new(store.clone())),
     );
 
     let size = store.read(origin_path).await.unwrap().len();
@@ -124,13 +127,19 @@ pub async fn setup_stream_to_csv_test(origin_path: &str, threshold: impl Fn(usiz
     let schema = test_basic_schema();
 
-    let csv_conf = CsvConfigBuilder::default()
-        .batch_size(test_util::TEST_BATCH_SIZE)
-        .file_schema(schema.clone())
-        .build()
-        .unwrap();
+    let csv_config = Arc::new(CsvConfig::new(
+        TEST_BATCH_SIZE,
+        schema.clone(),
+        None,
+        true,
+        b',',
+        b'"',
+        None,
+        Arc::new(object_store_opendal::OpendalStore::new(store.clone())),
+        None,
+    ));
 
-    let csv_opener = CsvOpener::new(csv_conf, store.clone(), CompressionType::Uncompressed);
+    let csv_opener = CsvOpener::new(csv_config, FileCompressionType::UNCOMPRESSED);
 
     let size = store.read(origin_path).await.unwrap().len();
diff --git a/src/common/function/src/scalars/udf.rs b/src/common/function/src/scalars/udf.rs
index 65c094bc6b..bca879fd4e 100644
--- a/src/common/function/src/scalars/udf.rs
+++ b/src/common/function/src/scalars/udf.rs
@@ -163,7 +163,7 @@ mod tests {
         ];
 
         let args = ScalarFunctionArgs {
-            args: &args,
+            args,
             number_rows: 4,
             return_type: &ConcreteDataType::boolean_datatype().as_arrow_type(),
         };
diff --git a/src/common/query/src/error.rs b/src/common/query/src/error.rs
index 408bbab95d..a6d2997e95 100644
--- a/src/common/query/src/error.rs
+++ b/src/common/query/src/error.rs
@@ -295,6 +295,7 @@ pub fn datafusion_status_code<T: ErrorExt + 'static>(
                 default_status.unwrap_or(StatusCode::EngineExecuteQuery)
             }
         }
+        DataFusionError::Diagnostic(_, e) => datafusion_status_code::<T>(e, default_status),
         _ => default_status.unwrap_or(StatusCode::EngineExecuteQuery),
     }
 }
diff --git a/src/common/query/src/logical_plan/expr.rs b/src/common/query/src/logical_plan/expr.rs
index ec12324252..625c2fe5fd 100644
--- a/src/common/query/src/logical_plan/expr.rs
+++ b/src/common/query/src/logical_plan/expr.rs
@@ -54,10 +54,7 @@ pub fn build_filter_from_timestamp(
     time_range: Option<&TimestampRange>,
 ) -> Option<Expr> {
     let time_range = time_range?;
-    let ts_col_expr = Expr::Column(Column {
-        relation: None,
-        name: ts_col_name.to_string(),
-    });
+    let ts_col_expr = Expr::Column(Column::from_name(ts_col_name));
 
     match (time_range.start(), time_range.end()) {
         (None, None) => None,
diff --git a/src/common/query/src/signature.rs b/src/common/query/src/signature.rs
index e74baeddcf..6aab88d8e8 100644
--- a/src/common/query/src/signature.rs
+++ b/src/common/query/src/signature.rs
@@ -134,7 +134,7 @@ impl From<TypeSignature> for DfTypeSignature {
             }
             TypeSignature::Uniform(n, types) => {
                 if n == 0 {
-                    return DfTypeSignature::NullAry;
+                    return DfTypeSignature::Nullary;
                 }
                 DfTypeSignature::Uniform(n, concrete_types_to_arrow_types(types))
             }
@@ -143,7 +143,7 @@
             }
             TypeSignature::Any(n) => {
                 if n == 0 {
-                    return DfTypeSignature::NullAry;
+                    return DfTypeSignature::Nullary;
                 }
                 DfTypeSignature::Any(n)
             }
@@ -151,7 +151,7 @@
                 DfTypeSignature::OneOf(ts.into_iter().map(Into::into).collect())
             }
             TypeSignature::VariadicAny => DfTypeSignature::VariadicAny,
-            TypeSignature::NullAry => DfTypeSignature::NullAry,
+            TypeSignature::NullAry => DfTypeSignature::Nullary,
         }
     }
 }
diff --git a/src/common/query/src/stream.rs b/src/common/query/src/stream.rs
index 2fefb4745d..c1a67fb631 100644
--- a/src/common/query/src/stream.rs
+++ b/src/common/query/src/stream.rs
@@ -21,9 +21,8 @@ use common_recordbatch::SendableRecordBatchStream;
 use datafusion::execution::context::TaskContext;
 use datafusion::execution::SendableRecordBatchStream as DfSendableRecordBatchStream;
 use datafusion::physical_expr::{EquivalenceProperties, Partitioning, PhysicalSortExpr};
-use datafusion::physical_plan::{
-    DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, PlanProperties,
-};
+use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType};
+use datafusion::physical_plan::{DisplayAs, DisplayFormatType, ExecutionPlan, PlanProperties};
 use datafusion_common::DataFusionError;
 use datatypes::arrow::datatypes::SchemaRef as ArrowSchemaRef;
 use datatypes::schema::SchemaRef;
@@ -53,7 +52,8 @@ impl StreamScanAdapter {
         let properties = PlanProperties::new(
EquivalenceProperties::new(arrow_schema.clone()), Partitioning::UnknownPartitioning(1), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, ); Self { diff --git a/src/common/recordbatch/src/filter.rs b/src/common/recordbatch/src/filter.rs index 0d8a7632d9..b16ee401c0 100644 --- a/src/common/recordbatch/src/filter.rs +++ b/src/common/recordbatch/src/filter.rs @@ -271,10 +271,7 @@ mod test { fn unsupported_filter_op() { // `+` is not supported let expr = Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Plus, right: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), }); @@ -290,25 +287,16 @@ mod test { // two column is not supported let expr = Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Eq, - right: Box::new(Expr::Column(Column { - relation: None, - name: "bar".to_string(), - })), + right: Box::new(Expr::Column(Column::from_name("bar"))), }); assert!(SimpleFilterEvaluator::try_new(&expr).is_none()); // compound expr is not supported let expr = Expr::BinaryExpr(BinaryExpr { left: Box::new(Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Eq, right: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), })), @@ -322,10 +310,7 @@ mod test { fn supported_filter_op() { // equal let expr = Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Eq, right: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), }); @@ -335,10 +320,7 @@ mod test { let expr = Expr::BinaryExpr(BinaryExpr { left: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), op: Operator::Lt, - right: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + right: Box::new(Expr::Column(Column::from_name("foo"))), }); let evaluator = SimpleFilterEvaluator::try_new(&expr).unwrap(); assert_eq!(evaluator.op, Operator::Gt); @@ -348,10 +330,7 @@ mod test { #[test] fn run_on_array() { let expr = Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Eq, right: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), }); @@ -373,10 +352,7 @@ mod test { #[test] fn run_on_scalar() { let expr = Expr::BinaryExpr(BinaryExpr { - left: Box::new(Expr::Column(Column { - relation: None, - name: "foo".to_string(), - })), + left: Box::new(Expr::Column(Column::from_name("foo"))), op: Operator::Lt, right: Box::new(Expr::Literal(ScalarValue::Int64(Some(1)))), }); diff --git a/src/file-engine/Cargo.toml b/src/file-engine/Cargo.toml index 6c8c9e887d..9d031cb279 100644 --- a/src/file-engine/Cargo.toml +++ b/src/file-engine/Cargo.toml @@ -29,6 +29,7 @@ datafusion-expr.workspace = true datatypes.workspace = true futures.workspace = true object-store.workspace = true +object_store_opendal.workspace = true serde = { version = "1.0", features = ["derive"] } serde_json.workspace = true snafu.workspace = true diff --git a/src/file-engine/src/error.rs b/src/file-engine/src/error.rs index b5ddbe842d..2447fe1fde 
100644
--- a/src/file-engine/src/error.rs
+++ b/src/file-engine/src/error.rs
@@ -128,14 +128,6 @@ pub enum Error {
         source: common_datasource::error::Error,
     },
 
-    #[snafu(display("Failed to build csv config"))]
-    BuildCsvConfig {
-        #[snafu(source)]
-        error: common_datasource::file_format::csv::CsvConfigBuilderError,
-        #[snafu(implicit)]
-        location: Location,
-    },
-
     #[snafu(display("Failed to build stream"))]
     BuildStream {
         #[snafu(source)]
@@ -224,8 +216,7 @@ impl ErrorExt for Error {
         use Error::*;
 
         match self {
-            BuildCsvConfig { .. }
-            | ProjectArrowSchema { .. }
+            ProjectArrowSchema { .. }
             | ProjectSchema { .. }
             | MissingRequiredField { .. }
             | Unsupported { .. }
diff --git a/src/file-engine/src/query/file_stream.rs b/src/file-engine/src/query/file_stream.rs
index 70b352c281..fdf7b687ce 100644
--- a/src/file-engine/src/query/file_stream.rs
+++ b/src/file-engine/src/query/file_stream.rs
@@ -14,17 +14,19 @@
 
 use std::sync::Arc;
 
-use common_datasource::file_format::csv::{CsvConfigBuilder, CsvFormat, CsvOpener};
-use common_datasource::file_format::json::{JsonFormat, JsonOpener};
+use common_datasource::file_format::csv::CsvFormat;
+use common_datasource::file_format::json::JsonFormat;
 use common_datasource::file_format::orc::{OrcFormat, OrcOpener};
 use common_datasource::file_format::parquet::{DefaultParquetFileReaderFactory, ParquetFormat};
 use common_datasource::file_format::Format;
 use common_recordbatch::adapter::RecordBatchStreamAdapter;
 use common_recordbatch::SendableRecordBatchStream;
-use datafusion::common::{Statistics, ToDFSchema};
+use datafusion::common::{Constraints, Statistics, ToDFSchema};
 use datafusion::datasource::listing::PartitionedFile;
 use datafusion::datasource::object_store::ObjectStoreUrl;
-use datafusion::datasource::physical_plan::{FileOpener, FileScanConfig, FileStream, ParquetExec};
+use datafusion::datasource::physical_plan::{
+    CsvConfig, CsvOpener, FileOpener, FileScanConfig, FileStream, JsonOpener, ParquetExec,
+};
 use datafusion::physical_expr::create_physical_expr;
 use datafusion::physical_expr::execution_props::ExecutionProps;
 use datafusion::physical_plan::metrics::ExecutionPlanMetricsSet;
@@ -48,20 +50,21 @@ fn build_csv_opener(
     file_schema: Arc<Schema>,
     config: &ScanPlanConfig,
     format: &CsvFormat,
-) -> Result<CsvOpener> {
-    let csv_config = CsvConfigBuilder::default()
-        .batch_size(DEFAULT_BATCH_SIZE)
-        .file_schema(file_schema)
-        .file_projection(config.projection.cloned())
-        .delimiter(format.delimiter)
-        .has_header(format.has_header)
-        .build()
-        .context(error::BuildCsvConfigSnafu)?;
-    Ok(CsvOpener::new(
-        csv_config,
-        config.store.clone(),
-        format.compression_type,
-    ))
+) -> CsvOpener {
+    let csv_config = Arc::new(CsvConfig::new(
+        DEFAULT_BATCH_SIZE,
+        file_schema,
+        config.projection.cloned(),
+        format.has_header,
+        format.delimiter,
+        b'"',
+        None,
+        Arc::new(object_store_opendal::OpendalStore::new(
+            config.store.clone(),
+        )),
+        None,
+    ));
+    CsvOpener::new(csv_config, format.compression_type.into())
 }
 
 fn build_json_opener(
@@ -78,11 +81,12 @@
     } else {
         file_schema
     };
+    let store = object_store_opendal::OpendalStore::new(config.store.clone());
     Ok(JsonOpener::new(
         DEFAULT_BATCH_SIZE,
         projected_schema,
-        config.store.clone(),
-        format.compression_type,
+        format.compression_type.into(),
+        Arc::new(store),
     ))
 }
 
@@ -115,6 +119,7 @@ fn build_record_batch_stream(
         limit,
         table_partition_cols: vec![],
         output_ordering: vec![],
+        constraints: Constraints::empty(),
     },
     0, // partition: hard-code
     opener,
@@ -132,7 +137,7 @@ fn new_csv_stream(
     format: &CsvFormat,
) -> Result<SendableRecordBatchStream> {
     let file_schema = config.file_schema.arrow_schema().clone();
-    let opener = build_csv_opener(file_schema.clone(), config, format)?;
+    let opener = build_csv_opener(file_schema.clone(), config, format);
     // push down limit only if there is no filter
     let limit = config.filters.is_empty().then_some(config.limit).flatten();
     build_record_batch_stream(opener, file_schema, config.files, config.projection, limit)
@@ -173,6 +178,7 @@ fn new_parquet_stream_with_exec_plan(
         .iter()
         .map(|filename| PartitionedFile::new(filename.to_string(), 0))
         .collect::<Vec<_>>()],
+    constraints: Constraints::empty(),
     statistics: Statistics::new_unknown(file_schema.as_ref()),
     projection: projection.cloned(),
     limit: *limit,
diff --git a/src/flow/src/df_optimizer.rs b/src/flow/src/df_optimizer.rs
index 189b6de162..d83bb77718 100644
--- a/src/flow/src/df_optimizer.rs
+++ b/src/flow/src/df_optimizer.rs
@@ -39,7 +39,8 @@ use datafusion_common::tree_node::{
 use datafusion_common::{Column, DFSchema, ScalarValue};
 use datafusion_expr::utils::merge_schema;
 use datafusion_expr::{
-    BinaryExpr, Expr, Operator, Projection, ScalarUDFImpl, Signature, TypeSignature, Volatility,
+    BinaryExpr, ColumnarValue, Expr, Operator, Projection, ScalarFunctionArgs, ScalarUDFImpl,
+    Signature, TypeSignature, Volatility,
 };
 use query::parser::QueryLanguageParser;
 use query::query_engine::DefaultSerializer;
@@ -518,10 +519,10 @@ impl ScalarUDFImpl for TumbleExpand {
         })
     }
 
-    fn invoke(
+    fn invoke_with_args(
         &self,
-        _args: &[datafusion_expr::ColumnarValue],
-    ) -> Result<datafusion_expr::ColumnarValue> {
+        _args: ScalarFunctionArgs,
+    ) -> datafusion_common::Result<ColumnarValue> {
         Err(DataFusionError::Plan(
             "This function should not be executed by datafusion".to_string(),
         ))
diff --git a/src/flow/src/transform/expr.rs b/src/flow/src/transform/expr.rs
index ed75252ee2..fba7b7717e 100644
--- a/src/flow/src/transform/expr.rs
+++ b/src/flow/src/transform/expr.rs
@@ -21,6 +21,7 @@ use common_telemetry::debug;
 use datafusion::execution::SessionStateBuilder;
 use datafusion::functions::all_default_functions;
 use datafusion_physical_expr::PhysicalExpr;
+use datafusion_substrait::logical_plan::consumer::DefaultSubstraitConsumer;
 use datatypes::data_type::ConcreteDataType as CDT;
 use snafu::{ensure, OptionExt, ResultExt};
 use substrait_proto::proto::expression::field_reference::ReferenceType::DirectReference;
@@ -88,15 +89,13 @@ pub(crate) async fn from_scalar_fn_to_df_fn_impl(
     };
     let schema = input_schema.to_df_schema()?;
-    let df_expr = substrait::df_logical_plan::consumer::from_substrait_rex(
-        &SessionStateBuilder::new()
-            .with_scalar_functions(all_default_functions())
-            .build(),
-        &e,
-        &schema,
-        &extensions.to_extensions(),
-    )
-    .await;
+    let extensions = extensions.to_extensions();
+    let session_state = SessionStateBuilder::new()
+        .with_scalar_functions(all_default_functions())
+        .build();
+    let consumer = DefaultSubstraitConsumer::new(&extensions, &session_state);
+    let df_expr =
+        substrait::df_logical_plan::consumer::from_substrait_rex(&consumer, &e, &schema).await;
     let expr = df_expr.context({
         DatafusionSnafu {
             context: "Failed to convert substrait scalar function to datafusion scalar function",
diff --git a/src/frontend/src/instance.rs b/src/frontend/src/instance.rs
index f672655378..9c94ec326a 100644
--- a/src/frontend/src/instance.rs
+++ b/src/frontend/src/instance.rs
@@ -500,7 +500,8 @@ pub fn check_permission(
         Statement::ShowCharset(_) | Statement::ShowCollation(_) => {}
         Statement::Insert(insert) => {
-            validate_param(insert.table_name(), query_ctx)?;
+            let name = insert.table_name().context(ParseSqlSnafu)?;
+            validate_param(name, query_ctx)?;
         }
         Statement::CreateTable(stmt) => {
             validate_param(&stmt.name, query_ctx)?;
diff --git a/src/mito2/src/memtable/partition_tree.rs b/src/mito2/src/memtable/partition_tree.rs
index 3000707418..8dd0af2881 100644
--- a/src/mito2/src/memtable/partition_tree.rs
+++ b/src/mito2/src/memtable/partition_tree.rs
@@ -606,10 +606,7 @@ mod tests {
         for i in 0..100 {
             let timestamps: Vec<_> = (0..10).map(|v| i as i64 * 1000 + v).collect();
             let expr = Expr::BinaryExpr(BinaryExpr {
-                left: Box::new(Expr::Column(Column {
-                    relation: None,
-                    name: "k1".to_string(),
-                })),
+                left: Box::new(Expr::Column(Column::from_name("k1"))),
                 op: Operator::Eq,
                 right: Box::new(Expr::Literal(ScalarValue::UInt32(Some(i)))),
             });
diff --git a/src/mito2/src/memtable/partition_tree/primary_key_filter.rs b/src/mito2/src/memtable/partition_tree/primary_key_filter.rs
index 3002df6d6a..ecca5631b6 100644
--- a/src/mito2/src/memtable/partition_tree/primary_key_filter.rs
+++ b/src/mito2/src/memtable/partition_tree/primary_key_filter.rs
@@ -220,10 +220,7 @@ mod tests {
 
     fn create_filter(column_name: &str, value: &str) -> SimpleFilterEvaluator {
         let expr = Expr::BinaryExpr(BinaryExpr {
-            left: Box::new(Expr::Column(Column {
-                relation: None,
-                name: column_name.to_string(),
-            })),
+            left: Box::new(Expr::Column(Column::from_name(column_name))),
             op: Operator::Eq,
             right: Box::new(Expr::Literal(ScalarValue::Utf8(Some(value.to_string())))),
         });
diff --git a/src/mito2/src/sst/index/bloom_filter/applier/builder.rs b/src/mito2/src/sst/index/bloom_filter/applier/builder.rs
index fa58d7976e..8d6a8db6f2 100644
--- a/src/mito2/src/sst/index/bloom_filter/applier/builder.rs
+++ b/src/mito2/src/sst/index/bloom_filter/applier/builder.rs
@@ -287,10 +287,7 @@ mod tests {
     }
 
     fn column(name: &str) -> Expr {
-        Expr::Column(Column {
-            relation: None,
-            name: name.to_string(),
-        })
+        Expr::Column(Column::from_name(name))
     }
 
     fn string_lit(s: impl Into<String>) -> Expr {
diff --git a/src/mito2/src/sst/index/fulltext_index/applier/builder.rs b/src/mito2/src/sst/index/fulltext_index/applier/builder.rs
index a2de41da66..e5cb6cf765 100644
--- a/src/mito2/src/sst/index/fulltext_index/applier/builder.rs
+++ b/src/mito2/src/sst/index/fulltext_index/applier/builder.rs
@@ -300,10 +300,7 @@ mod tests {
 
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(),
@@ -320,10 +317,7 @@
         let metadata = mock_metadata();
 
         let func = ScalarFunction {
-            args: vec![Expr::Column(Column {
-                name: "text".to_string(),
-                relation: None,
-            })],
+            args: vec![Expr::Column(Column::from_name("text"))],
             func: matches_func(),
         };
 
@@ -336,10 +330,7 @@
 
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "not_found".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("not_found")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(),
@@ -354,10 +345,7 @@
 
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "ts".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("ts")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(),
@@ -372,10 +360,7 @@
 
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                Expr::Literal(ScalarValue::Int64(Some(42))),
             ],
             func: matches_func(),
@@ -390,10 +375,7 @@
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_term_func(),
@@ -416,10 +398,7 @@
         let metadata = mock_metadata();
 
         let lower_func_expr = ScalarFunction {
-            args: vec![Expr::Column(Column {
-                name: "text".to_string(),
-                relation: None,
-            })],
+            args: vec![Expr::Column(Column::from_name("text"))],
             func: lower(),
         };
 
@@ -448,10 +427,7 @@
         let metadata = mock_metadata();
 
         let func = ScalarFunction {
-            args: vec![Expr::Column(Column {
-                name: "text".to_string(),
-                relation: None,
-            })],
+            args: vec![Expr::Column(Column::from_name("text"))],
             func: matches_term_func(),
         };
 
@@ -464,10 +440,7 @@
         let func = ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(), // Using 'matches' instead of 'matches_term'
@@ -479,10 +452,7 @@
     #[test]
     fn test_extract_lower_arg() {
         let func = ScalarFunction {
-            args: vec![Expr::Column(Column {
-                name: "text".to_string(),
-                relation: None,
-            })],
+            args: vec![Expr::Column(Column::from_name("text"))],
             func: lower(),
         };
 
@@ -498,10 +468,7 @@
    #[test]
    fn test_extract_lower_arg_wrong_function() {
        let func = ScalarFunction {
-            args: vec![Expr::Column(Column {
-                name: "text".to_string(),
-                relation: None,
-            })],
+            args: vec![Expr::Column(Column::from_name("text"))],
             func: matches_func(), // Not 'lower'
         };
 
@@ -515,10 +482,7 @@
         // Create a matches expression
         let matches_expr = Expr::ScalarFunction(ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(),
@@ -541,10 +505,7 @@
         // Create a matches expression
         let matches_expr = Expr::ScalarFunction(ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("foo".to_string()))),
             ],
             func: matches_func(),
         });
 
         // Create a matches_term expression
         let matches_term_expr = Expr::ScalarFunction(ScalarFunction {
             args: vec![
-                Expr::Column(Column {
-                    name: "text".to_string(),
-                    relation: None,
-                }),
+                Expr::Column(Column::from_name("text")),
                 Expr::Literal(ScalarValue::Utf8(Some("bar".to_string()))),
             ],
             func: matches_term_func(),
diff --git a/src/mito2/src/sst/index/inverted_index/applier/builder.rs b/src/mito2/src/sst/index/inverted_index/applier/builder.rs
index 721a534220..5daa01b9f5 100644
--- a/src/mito2/src/sst/index/inverted_index/applier/builder.rs
+++ b/src/mito2/src/sst/index/inverted_index/applier/builder.rs
@@ -287,31 +287,19 @@ mod tests {
     }
 
     pub(crate) fn tag_column() -> Expr {
-        Expr::Column(Column {
-            relation: None,
-            name: "a".to_string(),
-        })
+        Expr::Column(Column::from_name("a"))
     }
 
     pub(crate) fn tag_column2() -> Expr {
-        Expr::Column(Column {
-            relation: None,
-            name: "b".to_string(),
-        })
+        Expr::Column(Column::from_name("b"))
     }
 
     pub(crate) fn field_column() -> Expr {
-        Expr::Column(Column {
-            relation: None,
-            name: "c".to_string(),
-        })
+        Expr::Column(Column::from_name("c"))
     }
 
     pub(crate) fn nonexistent_column() -> Expr {
-        Expr::Column(Column {
-            relation: None,
-            name: "nonexistent".to_string(),
-        })
+        Expr::Column(Column::from_name("nonexistent"))
     }
 
     pub(crate) fn string_lit(s: impl Into<String>) -> Expr {
diff --git a/src/mito2/src/sst/parquet.rs b/src/mito2/src/sst/parquet.rs
index 00a8b3ab3a..fd06f92fc6 100644
--- a/src/mito2/src/sst/parquet.rs
+++ b/src/mito2/src/sst/parquet.rs
@@ -342,10 +342,7 @@ mod tests {
 
         // Predicate
         let predicate = Some(Predicate::new(vec![Expr::BinaryExpr(BinaryExpr {
-            left: Box::new(Expr::Column(Column {
-                relation: None,
-                name: "tag_0".to_string(),
-            })),
+            left: Box::new(Expr::Column(Column::from_name("tag_0"))),
             op: Operator::Eq,
             right: Box::new(Expr::Literal(ScalarValue::Utf8(Some("a".to_string())))),
         })]));
@@ -435,10 +432,7 @@
 
         // Predicate
         let predicate = Some(Predicate::new(vec![Expr::BinaryExpr(BinaryExpr {
-            left: Box::new(Expr::Column(Column {
-                relation: None,
-                name: "field_0".to_string(),
-            })),
+            left: Box::new(Expr::Column(Column::from_name("field_0"))),
             op: Operator::GtEq,
             right: Box::new(Expr::Literal(ScalarValue::UInt64(Some(150)))),
         })]));
diff --git a/src/operator/Cargo.toml b/src/operator/Cargo.toml
index 889f0a8acd..f4ee399b42 100644
--- a/src/operator/Cargo.toml
+++ b/src/operator/Cargo.toml
@@ -45,6 +45,7 @@ meter-core.workspace = true
 meter-macros.workspace = true
 moka.workspace = true
 object-store.workspace = true
+object_store_opendal.workspace = true
 partition.workspace = true
 prometheus.workspace = true
 query.workspace = true
diff --git a/src/operator/src/error.rs b/src/operator/src/error.rs
index 6cfbab5646..c0c102ceda 100644
--- a/src/operator/src/error.rs
+++ b/src/operator/src/error.rs
@@ -514,14 +514,6 @@ pub enum Error {
         source: common_datasource::error::Error,
     },
 
-    #[snafu(display("Failed to build csv config"))]
-    BuildCsvConfig {
-        #[snafu(source)]
-        error: common_datasource::file_format::csv::CsvConfigBuilderError,
-        #[snafu(implicit)]
-        location: Location,
-    },
-
     #[snafu(display("Failed to write stream to path: {}", path))]
     WriteStreamToFile {
         path: String,
@@ -825,7 +817,6 @@ impl ErrorExt for Error {
             | Error::ColumnNotFound { .. }
             | Error::BuildRegex { .. }
             | Error::InvalidSchema { .. }
-            | Error::BuildCsvConfig { .. }
             | Error::ProjectSchema { .. }
             | Error::UnsupportedFormat { .. }
             | Error::ColumnNoneDefaultValue { ..
} diff --git a/src/operator/src/req_convert/insert/stmt_to_region.rs b/src/operator/src/req_convert/insert/stmt_to_region.rs index cb1b005730..69137c4908 100644 --- a/src/operator/src/req_convert/insert/stmt_to_region.rs +++ b/src/operator/src/req_convert/insert/stmt_to_region.rs @@ -63,7 +63,8 @@ impl<'a> StatementToRegion<'a> { stmt: &Insert, query_ctx: &QueryContextRef, ) -> Result<(InstantAndNormalInsertRequests, TableInfoRef)> { - let (catalog, schema, table_name) = self.get_full_name(stmt.table_name())?; + let name = stmt.table_name().context(ParseSqlSnafu)?; + let (catalog, schema, table_name) = self.get_full_name(name)?; let table = self.get_table(&catalog, &schema, &table_name).await?; let table_schema = table.schema(); let table_info = table.table_info(); diff --git a/src/operator/src/statement/copy_table_from.rs b/src/operator/src/statement/copy_table_from.rs index b3a151b581..2d9fbef6b9 100644 --- a/src/operator/src/statement/copy_table_from.rs +++ b/src/operator/src/statement/copy_table_from.rs @@ -18,8 +18,8 @@ use std::sync::Arc; use client::{Output, OutputData, OutputMeta}; use common_base::readable_size::ReadableSize; -use common_datasource::file_format::csv::{CsvConfigBuilder, CsvFormat, CsvOpener}; -use common_datasource::file_format::json::{JsonFormat, JsonOpener}; +use common_datasource::file_format::csv::CsvFormat; +use common_datasource::file_format::json::JsonFormat; use common_datasource::file_format::orc::{infer_orc_schema, new_orc_stream_reader, ReaderAdapter}; use common_datasource::file_format::{FileFormat, Format}; use common_datasource::lister::{Lister, Source}; @@ -31,11 +31,13 @@ use common_recordbatch::DfSendableRecordBatchStream; use common_telemetry::{debug, tracing}; use datafusion::datasource::listing::PartitionedFile; use datafusion::datasource::object_store::ObjectStoreUrl; -use datafusion::datasource::physical_plan::{FileOpener, FileScanConfig, FileStream}; +use datafusion::datasource::physical_plan::{ + CsvConfig, CsvOpener, FileOpener, FileScanConfig, FileStream, JsonOpener, +}; use datafusion::parquet::arrow::arrow_reader::ArrowReaderMetadata; use datafusion::parquet::arrow::ParquetRecordBatchStreamBuilder; use datafusion::physical_plan::metrics::ExecutionPlanMetricsSet; -use datafusion_common::Statistics; +use datafusion_common::{Constraints, Statistics}; use datafusion_expr::Expr; use datatypes::arrow::compute::can_cast_types; use datatypes::arrow::datatypes::{Schema, SchemaRef}; @@ -210,6 +212,7 @@ impl StatementExecutor { limit: None, table_partition_cols: vec![], output_ordering: vec![], + constraints: Constraints::empty(), }, 0, opener, @@ -239,16 +242,23 @@ impl StatementExecutor { .project(&projection) .context(error::ProjectSchemaSnafu)?, ); - let csv_conf = CsvConfigBuilder::default() - .batch_size(DEFAULT_BATCH_SIZE) - .file_schema(schema.clone()) - .file_projection(Some(projection.clone())) - .build() - .context(error::BuildCsvConfigSnafu)?; + let csv_config = Arc::new(CsvConfig::new( + DEFAULT_BATCH_SIZE, + schema.clone(), + Some(projection.clone()), + format.has_header, + format.delimiter, + b'"', + None, + Arc::new(object_store_opendal::OpendalStore::new( + object_store.clone(), + )), + None, + )); let stream = self .build_file_stream( - CsvOpener::new(csv_conf, object_store.clone(), format.compression_type), + CsvOpener::new(csv_config, format.compression_type.into()), path, schema.clone(), ) @@ -275,13 +285,14 @@ impl StatementExecutor { .project(&projection) .context(error::ProjectSchemaSnafu)?, ); + let store = 
object_store_opendal::OpendalStore::new(object_store.clone());
         let stream = self
             .build_file_stream(
                 JsonOpener::new(
                     DEFAULT_BATCH_SIZE,
                     projected_file_schema,
-                    object_store.clone(),
-                    format.compression_type,
+                    format.compression_type.into(),
+                    Arc::new(store),
                 ),
                 path,
                 schema.clone(),
diff --git a/src/operator/src/statement/set.rs b/src/operator/src/statement/set.rs
index 6df8e3e630..2b84ced3eb 100644
--- a/src/operator/src/statement/set.rs
+++ b/src/operator/src/statement/set.rs
@@ -147,6 +147,7 @@ pub fn validate_client_encoding(set: SetVariables) -> Result<()> {
         | Expr::Identifier(Ident {
             value: x,
             quote_style: _,
+            span: _,
         }) => x.to_uppercase(),
         _ => {
             return InvalidSqlSnafu {
@@ -203,6 +204,7 @@ fn try_parse_datestyle(expr: &Expr) -> Result<(Option<PGDateTimeStyle>, Option

{
diff --git a/src/promql/src/extension_plan/empty_metric.rs b/src/promql/src/extension_plan/empty_metric.rs
index a1c8bc68e4..8aa10a251d 100644
--- a/src/promql/src/extension_plan/empty_metric.rs
+++ b/src/promql/src/extension_plan/empty_metric.rs
@@ -30,10 +30,11 @@ use datafusion::error::DataFusionError;
 use datafusion::execution::context::{SessionState, TaskContext};
 use datafusion::logical_expr::{ExprSchemable, LogicalPlan, UserDefinedLogicalNodeCore};
 use datafusion::physical_expr::{EquivalenceProperties, PhysicalExprRef};
+use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType};
 use datafusion::physical_plan::metrics::{BaselineMetrics, ExecutionPlanMetricsSet, MetricsSet};
 use datafusion::physical_plan::{
-    DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, Partitioning, PlanProperties,
-    RecordBatchStream, SendableRecordBatchStream,
+    DisplayAs, DisplayFormatType, ExecutionPlan, Partitioning, PlanProperties, RecordBatchStream,
+    SendableRecordBatchStream,
 };
 use datafusion::physical_planner::PhysicalPlanner;
 use datafusion::prelude::{col, lit, Expr};
@@ -112,7 +113,8 @@ impl EmptyMetric {
         let properties = Arc::new(PlanProperties::new(
             EquivalenceProperties::new(result_schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            ExecutionMode::Bounded,
+            EmissionType::Incremental,
+            Boundedness::Bounded,
         ));
         Ok(Arc::new(EmptyMetricExec {
             start: self.start,
diff --git a/src/promql/src/extension_plan/histogram_fold.rs b/src/promql/src/extension_plan/histogram_fold.rs
index 4d574122bd..905e2482a7 100644
--- a/src/promql/src/extension_plan/histogram_fold.rs
+++ b/src/promql/src/extension_plan/histogram_fold.rs
@@ -30,6 +30,7 @@ use datafusion::error::{DataFusionError, Result as DataFusionResult};
 use datafusion::execution::TaskContext;
 use datafusion::logical_expr::{LogicalPlan, UserDefinedLogicalNodeCore};
 use datafusion::physical_expr::{EquivalenceProperties, LexRequirement, PhysicalSortRequirement};
+use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType};
 use datafusion::physical_plan::expressions::{CastExpr as PhyCast, Column as PhyColumn};
 use datafusion::physical_plan::metrics::{BaselineMetrics, ExecutionPlanMetricsSet, MetricsSet};
 use datafusion::physical_plan::{
@@ -182,7 +183,8 @@ impl HistogramFold {
         let properties = PlanProperties::new(
             EquivalenceProperties::new(output_schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            exec_input.properties().execution_mode(),
+            EmissionType::Incremental,
+            Boundedness::Bounded,
         );
         Arc::new(HistogramFoldExec {
             le_column_index,
@@ -728,7 +730,6 @@ mod test {
     use datafusion::arrow::datatypes::{Field, Schema};
     use datafusion::common::ToDFSchema;
     use datafusion::physical_plan::memory::MemoryExec;
-    use datafusion::physical_plan::ExecutionMode;
     use datafusion::prelude::SessionContext;
     use datatypes::arrow_array::StringArray;
@@ -806,7 +807,8 @@
         let properties = PlanProperties::new(
             EquivalenceProperties::new(output_schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            ExecutionMode::Bounded,
+            EmissionType::Incremental,
+            Boundedness::Bounded,
         );
         let fold_exec = Arc::new(HistogramFoldExec {
             le_column_index: 1,
diff --git a/src/promql/src/extension_plan/range_manipulate.rs b/src/promql/src/extension_plan/range_manipulate.rs
index d4c8fc5a55..464cba8735 100644
--- a/src/promql/src/extension_plan/range_manipulate.rs
+++ b/src/promql/src/extension_plan/range_manipulate.rs
@@ -158,10 +158,12 @@ impl RangeManipulate {
     pub fn to_execution_plan(&self, exec_input: Arc<dyn ExecutionPlan>) -> Arc<dyn ExecutionPlan> {
        let output_schema: SchemaRef = SchemaRef::new(self.output_schema.as_ref().into());
+        let properties = exec_input.properties();
         let properties = PlanProperties::new(
             EquivalenceProperties::new(output_schema.clone()),
-            exec_input.properties().partitioning.clone(),
-            exec_input.properties().execution_mode,
+            properties.partitioning.clone(),
+            properties.emission_type,
+            properties.boundedness,
         );
         Arc::new(RangeManipulateExec {
             start: self.start,
@@ -336,10 +338,12 @@ impl ExecutionPlan for RangeManipulateExec {
     ) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
         assert!(!children.is_empty());
         let exec_input = children[0].clone();
+        let properties = exec_input.properties();
         let properties = PlanProperties::new(
             EquivalenceProperties::new(self.output_schema.clone()),
-            exec_input.properties().partitioning.clone(),
-            exec_input.properties().execution_mode,
+            properties.partitioning.clone(),
+            properties.emission_type,
+            properties.boundedness,
         );
         Ok(Arc::new(Self {
             start: self.start,
@@ -625,8 +629,8 @@ mod test {
     };
     use datafusion::common::ToDFSchema;
     use datafusion::physical_expr::Partitioning;
+    use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType};
     use datafusion::physical_plan::memory::MemoryExec;
-    use datafusion::physical_plan::ExecutionMode;
     use datafusion::prelude::SessionContext;
     use datatypes::arrow::array::TimestampMillisecondArray;
@@ -685,7 +689,8 @@
         let properties = PlanProperties::new(
             EquivalenceProperties::new(manipulate_output_schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            ExecutionMode::Bounded,
+            EmissionType::Incremental,
+            Boundedness::Bounded,
         );
         let normalize_exec = Arc::new(RangeManipulateExec {
             start,
diff --git a/src/promql/src/extension_plan/scalar_calculate.rs b/src/promql/src/extension_plan/scalar_calculate.rs
index 5634fbdf44..86363cc154 100644
--- a/src/promql/src/extension_plan/scalar_calculate.rs
+++ b/src/promql/src/extension_plan/scalar_calculate.rs
@@ -128,10 +128,12 @@ impl ScalarCalculate {
             .index_of(&self.field_column)
             .map_err(|e| DataFusionError::ArrowError(e, None))?;
         let schema = Arc::new(Schema::new(fields)));
+        let properties = exec_input.properties();
         let properties = PlanProperties::new(
             EquivalenceProperties::new(schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            exec_input.properties().execution_mode,
+            properties.emission_type,
+            properties.boundedness,
         );
         Ok(Arc::new(ScalarCalculateExec {
             start: self.start,
@@ -533,8 +535,8 @@ impl Stream for ScalarCalculateStream {
 #[cfg(test)]
 mod test {
     use datafusion::arrow::datatypes::{DataType, Field, Schema};
+    use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType};
     use datafusion::physical_plan::memory::MemoryExec;
-    use datafusion::physical_plan::ExecutionMode;
     use datafusion::prelude::SessionContext;
     use datatypes::arrow::array::{Float64Array, TimestampMillisecondArray};
     use datatypes::arrow::datatypes::TimeUnit;
@@ -560,7 +562,8 @@
         let properties = PlanProperties::new(
             EquivalenceProperties::new(schema.clone()),
             Partitioning::UnknownPartitioning(1),
-            ExecutionMode::Bounded,
+            EmissionType::Incremental,
+            Boundedness::Bounded,
         );
         let scalar_exec = Arc::new(ScalarCalculateExec {
             start: 0,
diff --git a/src/promql/src/extension_plan/union_distinct_on.rs b/src/promql/src/extension_plan/union_distinct_on.rs
index e68c2ce37f..892a56ec1e 100644
--- a/src/promql/src/extension_plan/union_distinct_on.rs
+++ b/src/promql/src/extension_plan/union_distinct_on.rs
@@ -26,10 +26,11 @@ use datafusion::error::{DataFusionError, Result as DataFusionResult};
datafusion::execution::context::TaskContext; use datafusion::logical_expr::{Expr, LogicalPlan, UserDefinedLogicalNodeCore}; use datafusion::physical_expr::EquivalenceProperties; +use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType}; use datafusion::physical_plan::metrics::{BaselineMetrics, ExecutionPlanMetricsSet, MetricsSet}; use datafusion::physical_plan::{ - hash_utils, DisplayAs, DisplayFormatType, Distribution, ExecutionMode, ExecutionPlan, - Partitioning, PlanProperties, RecordBatchStream, SendableRecordBatchStream, + hash_utils, DisplayAs, DisplayFormatType, Distribution, ExecutionPlan, Partitioning, + PlanProperties, RecordBatchStream, SendableRecordBatchStream, }; use datatypes::arrow::compute; use futures::future::BoxFuture; @@ -95,7 +96,8 @@ impl UnionDistinctOn { let properties = Arc::new(PlanProperties::new( EquivalenceProperties::new(output_schema.clone()), Partitioning::UnknownPartitioning(1), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, )); Arc::new(UnionDistinctOnExec { left: left_exec, diff --git a/src/promql/src/functions/round.rs b/src/promql/src/functions/round.rs index 11779db22e..d52f84f261 100644 --- a/src/promql/src/functions/round.rs +++ b/src/promql/src/functions/round.rs @@ -74,6 +74,7 @@ impl Round { #[cfg(test)] mod tests { + use datafusion_expr::ScalarFunctionArgs; use datatypes::arrow::array::Float64Array; use super::*; @@ -81,7 +82,12 @@ mod tests { fn test_round_f64(value: Vec, nearest: f64, expected: Vec) { let round_udf = Round::scalar_udf(nearest); let input = vec![ColumnarValue::Array(Arc::new(Float64Array::from(value)))]; - let result = round_udf.invoke_batch(&input, 1).unwrap(); + let args = ScalarFunctionArgs { + args: input, + number_rows: 1, + return_type: &DataType::Float64, + }; + let result = round_udf.invoke_with_args(args).unwrap(); let result_array = extract_array(&result).unwrap(); assert_eq!(result_array.len(), 1); assert_eq!( diff --git a/src/promql/src/functions/test_util.rs b/src/promql/src/functions/test_util.rs index a4e3f16ee3..46ad6ec1a8 100644 --- a/src/promql/src/functions/test_util.rs +++ b/src/promql/src/functions/test_util.rs @@ -17,6 +17,8 @@ use std::sync::Arc; use datafusion::arrow::array::Float64Array; use datafusion::logical_expr::ScalarUDF; use datafusion::physical_plan::ColumnarValue; +use datafusion_expr::ScalarFunctionArgs; +use datatypes::arrow::datatypes::DataType; use crate::functions::extract_array; use crate::range_array::RangeArray; @@ -33,14 +35,19 @@ pub fn simple_range_udf_runner( ColumnarValue::Array(Arc::new(input_ts.into_dict())), ColumnarValue::Array(Arc::new(input_value.into_dict())), ]; - let eval_result: Vec> = - extract_array(&range_fn.invoke_batch(&input, num_rows).unwrap()) - .unwrap() - .as_any() - .downcast_ref::() - .unwrap() - .iter() - .collect(); + let args = ScalarFunctionArgs { + args: input, + number_rows: num_rows, + return_type: &DataType::Float64, + }; + let value = range_fn.invoke_with_args(args).unwrap(); + let eval_result: Vec> = extract_array(&value) + .unwrap() + .as_any() + .downcast_ref::() + .unwrap() + .iter() + .collect(); assert_eq!(eval_result.len(), expected.len()); assert!(eval_result .iter() diff --git a/src/query/src/analyze.rs b/src/query/src/analyze.rs index fdc7d07e1d..6f87e91267 100644 --- a/src/query/src/analyze.rs +++ b/src/query/src/analyze.rs @@ -30,7 +30,7 @@ use datafusion::execution::TaskContext; use datafusion::physical_plan::coalesce_partitions::CoalescePartitionsExec; use 
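Note for reviewers: the two promql test hunks above migrate from the removed `ScalarUDF::invoke_batch(&args, num_rows)` to `invoke_with_args`, which bundles the argument arrays, row count, and resolved return type into `ScalarFunctionArgs`. A sketch of the new call shape, assuming an already-built `ScalarUDF` (a stand-in for the promql `Round` UDF):

use std::sync::Arc;

use datafusion::arrow::array::Float64Array;
use datafusion::arrow::datatypes::DataType;
use datafusion::logical_expr::{ScalarFunctionArgs, ScalarUDF};
use datafusion::physical_plan::ColumnarValue;

fn invoke_udf(udf: &ScalarUDF, value: f64) -> ColumnarValue {
    let args = ScalarFunctionArgs {
        args: vec![ColumnarValue::Array(Arc::new(Float64Array::from(vec![value])))],
        // How many rows the arguments stand for (replaces the old second
        // positional argument of invoke_batch).
        number_rows: 1,
        // The return type the planner resolved for this call site.
        return_type: &DataType::Float64,
    };
    udf.invoke_with_args(args).unwrap()
}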
datafusion::physical_plan::stream::RecordBatchStreamAdapter; use datafusion::physical_plan::{ - accept, DisplayAs, DisplayFormatType, ExecutionPlan, ExecutionPlanProperties, PlanProperties, + accept, DisplayAs, DisplayFormatType, ExecutionPlan, PlanProperties, }; use datafusion_common::tree_node::{TreeNode, TreeNodeRecursion}; use datafusion_common::{internal_err, DataFusionError}; @@ -76,8 +76,13 @@ impl DistAnalyzeExec { fn compute_properties(input: &Arc, schema: SchemaRef) -> PlanProperties { let eq_properties = EquivalenceProperties::new(schema); let output_partitioning = Partitioning::UnknownPartitioning(1); - let exec_mode = input.execution_mode(); - PlanProperties::new(eq_properties, output_partitioning, exec_mode) + let properties = input.properties(); + PlanProperties::new( + eq_properties, + output_partitioning, + properties.emission_type, + properties.boundedness, + ) } } diff --git a/src/query/src/datafusion.rs b/src/query/src/datafusion.rs index 2ebc0d71af..e0f020cd3a 100644 --- a/src/query/src/datafusion.rs +++ b/src/query/src/datafusion.rs @@ -50,9 +50,9 @@ use crate::dataframe::DataFrame; pub use crate::datafusion::planner::DfContextProviderAdapter; use crate::dist_plan::MergeScanLogicalPlan; use crate::error::{ - CatalogSnafu, ConvertSchemaSnafu, CreateRecordBatchSnafu, MissingTableMutationHandlerSnafu, - MissingTimestampColumnSnafu, QueryExecutionSnafu, Result, TableMutationSnafu, - TableNotFoundSnafu, TableReadOnlySnafu, UnsupportedExprSnafu, + CatalogSnafu, ConvertSchemaSnafu, CreateRecordBatchSnafu, DataFusionSnafu, + MissingTableMutationHandlerSnafu, MissingTimestampColumnSnafu, QueryExecutionSnafu, Result, + TableMutationSnafu, TableNotFoundSnafu, TableReadOnlySnafu, UnsupportedExprSnafu, }; use crate::executor::QueryExecutor; use crate::metrics::{OnDone, QUERY_STAGE_ELAPSED}; @@ -309,9 +309,7 @@ impl DatafusionQueryEngine { .query_planner() .create_physical_plan(&optimized_plan, state) .await - .context(error::DatafusionSnafu) - .map_err(BoxedError::new) - .context(QueryExecutionSnafu)?; + .context(DataFusionSnafu)?; Ok(physical_plan) } diff --git a/src/query/src/dist_plan/merge_scan.rs b/src/query/src/dist_plan/merge_scan.rs index 8e9892ae60..c98206dbc5 100644 --- a/src/query/src/dist_plan/merge_scan.rs +++ b/src/query/src/dist_plan/merge_scan.rs @@ -30,11 +30,12 @@ use common_recordbatch::{ }; use common_telemetry::tracing_context::TracingContext; use datafusion::execution::{SessionState, TaskContext}; +use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType}; use datafusion::physical_plan::metrics::{ Count, ExecutionPlanMetricsSet, Gauge, MetricBuilder, MetricsSet, Time, }; use datafusion::physical_plan::{ - DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, Partitioning, PlanProperties, + DisplayAs, DisplayFormatType, ExecutionPlan, Partitioning, PlanProperties, }; use datafusion_common::{Column as ColumnExpr, Result}; use datafusion_expr::{Expr, Extension, LogicalPlan, UserDefinedLogicalNodeCore}; @@ -222,7 +223,12 @@ impl MergeScanExec { .collect(); let partitioning = Partitioning::Hash(partition_exprs, target_partition); - let properties = PlanProperties::new(eq_properties, partitioning, ExecutionMode::Bounded); + let properties = PlanProperties::new( + eq_properties, + partitioning, + EmissionType::Incremental, + Boundedness::Bounded, + ); let schema = Self::arrow_schema_to_schema(arrow_schema.clone())?; Ok(Self { table, @@ -387,7 +393,8 @@ impl MergeScanExec { properties: PlanProperties::new( self.properties.eq_properties.clone(), 
Partitioning::Hash(hash_exprs, self.target_partition), - self.properties.execution_mode, + self.properties.emission_type, + self.properties.boundedness, ), sub_stage_metrics: self.sub_stage_metrics.clone(), query_ctx: self.query_ctx.clone(), diff --git a/src/query/src/error.rs b/src/query/src/error.rs index e696008cf5..1ebba8de5d 100644 --- a/src/query/src/error.rs +++ b/src/query/src/error.rs @@ -126,7 +126,7 @@ pub enum Error { location: Location, }, - #[snafu(display("DataFusion error"))] + #[snafu(display(""))] DataFusion { #[snafu(source)] error: DataFusionError, diff --git a/src/query/src/part_sort.rs b/src/query/src/part_sort.rs index 1b5fbfe201..1c784c8b33 100644 --- a/src/query/src/part_sort.rs +++ b/src/query/src/part_sort.rs @@ -68,10 +68,12 @@ impl PartSortExec { input: Arc, ) -> Self { let metrics = ExecutionPlanMetricsSet::new(); + let properties = input.properties(); let properties = PlanProperties::new( input.equivalence_properties().clone(), input.output_partitioning().clone(), - input.execution_mode(), + properties.emission_type, + properties.boundedness, ); Self { diff --git a/src/query/src/planner.rs b/src/query/src/planner.rs index 58e2bca937..b0d0063d70 100644 --- a/src/query/src/planner.rs +++ b/src/query/src/planner.rs @@ -95,13 +95,13 @@ impl DfLogicalPlanner { .await?; let config_options = self.session_state.config().options(); + let parser_options = &config_options.sql_parser; let parser_options = ParserOptions { - enable_ident_normalization: config_options.sql_parser.enable_ident_normalization, - parse_float_as_decimal: config_options.sql_parser.parse_float_as_decimal, - support_varchar_with_length: config_options.sql_parser.support_varchar_with_length, - enable_options_value_normalization: config_options - .sql_parser - .enable_options_value_normalization, + enable_ident_normalization: parser_options.enable_ident_normalization, + parse_float_as_decimal: parser_options.parse_float_as_decimal, + support_varchar_with_length: parser_options.support_varchar_with_length, + enable_options_value_normalization: parser_options.enable_options_value_normalization, + collect_spans: parser_options.collect_spans, }; let sql_to_rel = SqlToRel::new_with_options(&context_provider, parser_options); @@ -143,13 +143,13 @@ impl DfLogicalPlanner { .await?; let config_options = self.session_state.config().options(); + let parser_options = &config_options.sql_parser; let parser_options = ParserOptions { enable_ident_normalization: normalize_ident, - parse_float_as_decimal: config_options.sql_parser.parse_float_as_decimal, - support_varchar_with_length: config_options.sql_parser.support_varchar_with_length, - enable_options_value_normalization: config_options - .sql_parser - .enable_options_value_normalization, + parse_float_as_decimal: parser_options.parse_float_as_decimal, + support_varchar_with_length: parser_options.support_varchar_with_length, + enable_options_value_normalization: parser_options.enable_options_value_normalization, + collect_spans: parser_options.collect_spans, }; let sql_to_rel = SqlToRel::new_with_options(&context_provider, parser_options); diff --git a/src/query/src/range_select/plan.rs b/src/query/src/range_select/plan.rs index 859a6260d9..eb28aacf1e 100644 --- a/src/query/src/range_select/plan.rs +++ b/src/query/src/range_select/plan.rs @@ -29,9 +29,10 @@ use datafusion::common::{Result as DataFusionResult, Statistics}; use datafusion::error::Result as DfResult; use datafusion::execution::context::SessionState; use datafusion::execution::TaskContext; +use 
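Note for reviewers: the `query/src/error.rs` hunk above empties the display string of the `DataFusion` variant. Judging by the `.result` updates later in this patch (e.g. "DataFusion error: Cast error: ..." becoming "Cast error: ..."), GreptimeDB's error formatting concatenates each layer's `Display` along the source chain, so the old text duplicated the prefix. A minimal sketch of the pattern, assuming snafu 0.8 and omitting the `location` field of the real variant:

use snafu::Snafu;

#[derive(Debug, Snafu)]
pub enum Error {
    // Empty display: the rendered chain now shows only the underlying
    // DataFusionError message instead of prefixing "DataFusion error".
    #[snafu(display(""))]
    DataFusion {
        #[snafu(source)]
        error: datafusion::error::DataFusionError,
    },
}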
datafusion::physical_plan::execution_plan::{Boundedness, EmissionType}; use datafusion::physical_plan::metrics::{BaselineMetrics, ExecutionPlanMetricsSet, MetricsSet}; use datafusion::physical_plan::{ - DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, PlanProperties, RecordBatchStream, + DisplayAs, DisplayFormatType, ExecutionPlan, PlanProperties, RecordBatchStream, SendableRecordBatchStream, }; use datafusion::physical_planner::create_physical_sort_expr; @@ -691,7 +692,8 @@ impl RangeSelect { let cache = PlanProperties::new( EquivalenceProperties::new(schema.clone()), Partitioning::UnknownPartitioning(1), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, ); Ok(Arc::new(RangeSelectExec { input: exec_input, @@ -1341,7 +1343,8 @@ mod test { let cache = PlanProperties::new( EquivalenceProperties::new(schema.clone()), Partitioning::UnknownPartitioning(1), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, ); let input_schema = memory_exec.schema().clone(); let range_select_exec = Arc::new(RangeSelectExec { diff --git a/src/query/src/range_select/plan_rewrite.rs b/src/query/src/range_select/plan_rewrite.rs index 9e826e337d..ff05a26706 100644 --- a/src/query/src/range_select/plan_rewrite.rs +++ b/src/query/src/range_select/plan_rewrite.rs @@ -492,7 +492,7 @@ impl RangePlanRewriter { async fn get_index_by(&mut self, schema: &Arc) -> Result<(Expr, Vec)> { let mut time_index_expr = Expr::Wildcard { qualifier: None, - options: WildcardOptions::default(), + options: Box::new(WildcardOptions::default()), }; let mut default_by = vec![]; for i in 0..schema.fields().len() { diff --git a/src/query/src/sql.rs b/src/query/src/sql.rs index 2d82d771e8..fbda344427 100644 --- a/src/query/src/sql.rs +++ b/src/query/src/sql.rs @@ -453,7 +453,7 @@ pub async fn show_index( null().alias(INDEX_EXPRESSION_COLUMN), Expr::Wildcard { qualifier: None, - options: WildcardOptions::default(), + options: Box::new(WildcardOptions::default()), }, ]; @@ -793,10 +793,7 @@ pub async fn show_search_path(_query_ctx: QueryContextRef) -> Result { pub fn show_create_database(database_name: &str, options: OptionMap) -> Result { let stmt = CreateDatabase { - name: ObjectName(vec![Ident { - value: database_name.to_string(), - quote_style: None, - }]), + name: ObjectName(vec![Ident::new(database_name)]), if_not_exists: true, options, }; @@ -999,10 +996,7 @@ pub fn show_create_flow( let stmt = CreateFlow { flow_name, - sink_table_name: ObjectName(vec![Ident { - value: flow_val.sink_table_name().table_name.clone(), - quote_style: None, - }]), + sink_table_name: ObjectName(vec![Ident::new(&flow_val.sink_table_name().table_name)]), // notice we don't want `OR REPLACE` and `IF NOT EXISTS` in same sql since it's unclear what to do // so we set `or_replace` to false. 
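Note for reviewers: the `plan_rewrite.rs` and `sql.rs` hunks above (and the continuation just below) track a datafusion-expr change that boxes the wildcard payload, keeping the `Expr` enum itself small. A sketch, with the `WildcardOptions` import path assumed from datafusion-expr's `expr` module:

use datafusion_expr::expr::WildcardOptions;
use datafusion_expr::Expr;

fn select_all() -> Expr {
    Expr::Wildcard {
        qualifier: None,
        // options is now Box<WildcardOptions> rather than the bare struct.
        options: Box::new(WildcardOptions::default()),
    }
}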
or_replace: false, diff --git a/src/query/src/test_util.rs b/src/query/src/test_util.rs index bec2099da3..f64718b84a 100644 --- a/src/query/src/test_util.rs +++ b/src/query/src/test_util.rs @@ -24,9 +24,8 @@ use arrow::array::{ use arrow_schema::{SchemaRef, TimeUnit}; use common_recordbatch::{DfRecordBatch, DfSendableRecordBatchStream}; use datafusion::execution::{RecordBatchStream, TaskContext}; -use datafusion::physical_plan::{ - DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, PlanProperties, -}; +use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType}; +use datafusion::physical_plan::{DisplayAs, DisplayFormatType, ExecutionPlan, PlanProperties}; use datafusion_physical_expr::{EquivalenceProperties, Partitioning}; use futures::Stream; @@ -58,7 +57,8 @@ impl MockInputExec { properties: PlanProperties::new( EquivalenceProperties::new(schema.clone()), Partitioning::UnknownPartitioning(1), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, ), input, schema, diff --git a/src/query/src/window_sort.rs b/src/query/src/window_sort.rs index be8fe52549..57309f3c2a 100644 --- a/src/query/src/window_sort.rs +++ b/src/query/src/window_sort.rs @@ -117,13 +117,15 @@ impl WindowedSortExec { ) -> Result { check_partition_range_monotonicity(&ranges, expression.options.descending)?; + let properties = input.properties(); let properties = PlanProperties::new( input .equivalence_properties() .clone() .with_reorder(LexOrdering::new(vec![expression.clone()])), input.output_partitioning().clone(), - input.execution_mode(), + properties.emission_type, + properties.boundedness, ); let mut all_avail_working_range = Vec::with_capacity(ranges.len()); diff --git a/src/servers/Cargo.toml b/src/servers/Cargo.toml index aa0ea0def2..2ad288c1f2 100644 --- a/src/servers/Cargo.toml +++ b/src/servers/Cargo.toml @@ -66,7 +66,7 @@ http-body = "1" humantime.workspace = true humantime-serde.workspace = true hyper = { workspace = true, features = ["full"] } -indexmap = "2.7" +indexmap = "2.8" influxdb_line_protocol = { git = "https://github.com/evenyag/influxdb_iox", branch = "feat/line-protocol" } itertools.workspace = true jsonb.workspace = true diff --git a/src/sql/src/parser.rs b/src/sql/src/parser.rs index 50d68897e3..2265a30bdd 100644 --- a/src/sql/src/parser.rs +++ b/src/sql/src/parser.rs @@ -17,7 +17,7 @@ use sqlparser::ast::{Ident, Query}; use sqlparser::dialect::Dialect; use sqlparser::keywords::Keyword; use sqlparser::parser::{Parser, ParserError, ParserOptions}; -use sqlparser::tokenizer::{Token, TokenWithLocation}; +use sqlparser::tokenizer::{Token, TokenWithSpan}; use crate::ast::{Expr, ObjectName}; use crate::error::{self, Result, SyntaxSnafu}; @@ -112,7 +112,7 @@ impl ParserContext<'_> { .try_with_sql(sql) .context(SyntaxSnafu)?; - let function_name = parser.parse_identifier(false).context(SyntaxSnafu)?; + let function_name = parser.parse_identifier().context(SyntaxSnafu)?; parser .parse_function(ObjectName(vec![function_name])) .context(SyntaxSnafu) @@ -178,12 +178,12 @@ impl ParserContext<'_> { Keyword::USE => { let _ = self.parser.next_token(); - let database_name = self.parser.parse_identifier(false).context( + let database_name = self.parser.parse_identifier().with_context(|_| { error::UnexpectedSnafu { expected: "a database name", actual: self.peek_token_as_string(), - }, - )?; + } + })?; Ok(Statement::Use( Self::canonicalize_identifier(database_name).value, )) @@ -222,7 +222,7 @@ impl ParserContext<'_> { } // Report unexpected token - pub(crate) fn 
expected(&self, expected: &str, found: TokenWithLocation) -> Result { + pub(crate) fn expected(&self, expected: &str, found: TokenWithSpan) -> Result { Err(ParserError::ParserError(format!( "Expected {expected}, found: {found}", ))) @@ -255,10 +255,7 @@ impl ParserContext<'_> { if ident.quote_style.is_some() { ident } else { - Ident { - value: ident.value.to_lowercase(), - quote_style: None, - } + Ident::new(ident.value.to_lowercase()) } } @@ -280,14 +277,6 @@ impl ParserContext<'_> { pub(crate) fn parse_object_name(&mut self) -> std::result::Result { self.parser.parse_object_name(false) } - - /// Simply a shortcut for sqlparser's same name method `parse_identifier`, - /// but with constant argument "false". - /// Because the argument is always "false" for us (it's introduced by BigQuery), - /// we don't want to write it again and again. - pub(crate) fn parse_identifier(parser: &mut Parser) -> std::result::Result { - parser.parse_identifier(false) - } } #[cfg(test)] diff --git a/src/sql/src/parsers/alter_parser.rs b/src/sql/src/parsers/alter_parser.rs index 1569ede6ed..67ab86f2ca 100644 --- a/src/sql/src/parsers/alter_parser.rs +++ b/src/sql/src/parsers/alter_parser.rs @@ -20,7 +20,7 @@ use snafu::{ensure, ResultExt}; use sqlparser::ast::Ident; use sqlparser::keywords::Keyword; use sqlparser::parser::{Parser, ParserError}; -use sqlparser::tokenizer::{Token, TokenWithLocation}; +use sqlparser::tokenizer::{Token, TokenWithSpan}; use crate::error::{self, InvalidColumnOptionSnafu, Result, SetFulltextOptionSnafu}; use crate::parser::ParserContext; @@ -124,8 +124,7 @@ impl ParserContext<'_> { .expect_keyword(Keyword::COLUMN) .context(error::SyntaxSnafu)?; let name = Self::canonicalize_identifier( - Self::parse_identifier(&mut self.parser) - .context(error::SyntaxSnafu)?, + self.parser.parse_identifier().context(error::SyntaxSnafu)?, ); AlterTableOperation::DropColumn { name } } @@ -205,9 +204,7 @@ impl ParserContext<'_> { .expect_keyword(Keyword::COLUMN) .context(error::SyntaxSnafu)?; let column_name = Self::canonicalize_identifier( - self.parser - .parse_identifier(false) - .context(error::SyntaxSnafu)?, + self.parser.parse_identifier().context(error::SyntaxSnafu)?, ); match self.parser.peek_token().token { @@ -240,7 +237,7 @@ impl ParserContext<'_> { column_name: Ident, ) -> Result { match self.parser.next_token() { - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.keyword == Keyword::FULLTEXT => { @@ -252,7 +249,7 @@ impl ParserContext<'_> { }) } - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.value.eq_ignore_ascii_case(INVERTED) => { @@ -264,7 +261,7 @@ impl ParserContext<'_> { }) } - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.value.eq_ignore_ascii_case("SKIPPING") => { @@ -288,7 +285,7 @@ impl ParserContext<'_> { fn parse_alter_column_set_index(&mut self, column_name: Ident) -> Result { match self.parser.next_token() { - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.keyword == Keyword::FULLTEXT => { @@ -298,7 +295,7 @@ impl ParserContext<'_> { self.parse_alter_column_fulltext(column_name) } - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.value.eq_ignore_ascii_case(INVERTED) => { @@ -310,7 +307,7 @@ impl ParserContext<'_> { }) } - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. 
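Note for reviewers: two sqlparser migrations repeat throughout the parser hunks: `TokenWithLocation` is renamed to `TokenWithSpan` (a token plus the source span it covers), and `parse_identifier` dropped its BigQuery-only boolean, which is also why the local `parse_identifier` shortcut above is deleted outright. A sketch of the new shapes, assuming sqlparser in the 0.54 line; the FULLTEXT special case is only illustrative:

use sqlparser::ast::Ident;
use sqlparser::keywords::Keyword;
use sqlparser::parser::{Parser, ParserError};
use sqlparser::tokenizer::{Token, TokenWithSpan};

fn next_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
    match parser.peek_token() {
        // TokenWithSpan replaces TokenWithLocation; fields stay public.
        TokenWithSpan {
            token: Token::Word(w),
            ..
        } if w.keyword == Keyword::FULLTEXT => {
            parser.next_token();
            Ok(Ident::new("FULLTEXT"))
        }
        // parse_identifier no longer takes the BigQuery flag.
        _ => parser.parse_identifier(),
    }
}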
} if w.value.eq_ignore_ascii_case("SKIPPING") => { @@ -416,8 +413,7 @@ fn parse_add_columns(parser: &mut Parser) -> std::result::Result { assert_eq!("a", column_name.value); - assert_eq!(DataType::Text, *target_type); + assert_eq!(DataType::MediumText, *target_type); } _ => unreachable!(), } @@ -1012,10 +1008,7 @@ mod tests { alter_operation, &AlterTableOperation::UnsetIndex { options: UnsetIndexOperation::Fulltext { - column_name: Ident { - value: "a".to_string(), - quote_style: None - } + column_name: Ident::new("a"), } } ); @@ -1079,10 +1072,7 @@ mod tests { alter_operation, &AlterTableOperation::UnsetIndex { options: UnsetIndexOperation::Inverted { - column_name: Ident { - value: "a".to_string(), - quote_style: None - } + column_name: Ident::new("a"), } } ); diff --git a/src/sql/src/parsers/create_parser.rs b/src/sql/src/parsers/create_parser.rs index d0f5ceef8e..35797c380b 100644 --- a/src/sql/src/parsers/create_parser.rs +++ b/src/sql/src/parsers/create_parser.rs @@ -25,7 +25,7 @@ use sqlparser::dialect::keywords::Keyword; use sqlparser::keywords::ALL_KEYWORDS; use sqlparser::parser::IsOptional::Mandatory; use sqlparser::parser::{Parser, ParserError}; -use sqlparser::tokenizer::{Token, TokenWithLocation, Word}; +use sqlparser::tokenizer::{Token, TokenWithSpan, Word}; use table::requests::validate_table_option; use crate::ast::{ColumnDef, Ident}; @@ -299,7 +299,7 @@ impl<'a> ParserContext<'a> { let comment = if self.parser.parse_keyword(Keyword::COMMENT) { match self.parser.next_token() { - TokenWithLocation { + TokenWithSpan { token: Token::SingleQuotedString(value, ..), .. } => Some(value), @@ -496,10 +496,7 @@ impl<'a> ParserContext<'a> { time_index_opt_idx = Some(index); let constraint = TableConstraint::TimeIndex { - column: Ident { - value: column.name().value.clone(), - quote_style: None, - }, + column: Ident::new(column.name().value.clone()), }; constraints.push(constraint); } @@ -547,7 +544,7 @@ impl<'a> ParserContext<'a> { /// Parse the column name and check if it's valid. fn parse_column_name(&mut self) -> std::result::Result { - let name = self.parser.parse_identifier(false)?; + let name = self.parser.parse_identifier()?; if name.quote_style.is_none() && // "ALL_KEYWORDS" are sorted. ALL_KEYWORDS.binary_search(&name.value.to_uppercase().as_str()).is_ok() @@ -587,7 +584,7 @@ impl<'a> ParserContext<'a> { let mut extensions = ColumnExtensions::default(); loop { if parser.parse_keyword(Keyword::CONSTRAINT) { - let name = Some(parser.parse_identifier(false).context(SyntaxSnafu)?); + let name = Some(parser.parse_identifier().context(SyntaxSnafu)?); if let Some(option) = Self::parse_optional_column_option(parser)? { options.push(ColumnOptionDef { name, option }); } else { @@ -625,7 +622,7 @@ impl<'a> ParserContext<'a> { Ok(Some(ColumnOption::NotNull)) } else if parser.parse_keywords(&[Keyword::COMMENT]) { match parser.next_token() { - TokenWithLocation { + TokenWithSpan { token: Token::SingleQuotedString(value, ..), .. } => Ok(Some(ColumnOption::Comment(value))), @@ -844,7 +841,7 @@ impl<'a> ParserContext<'a> { fn parse_optional_table_constraint(&mut self) -> Result> { match self.parser.next_token() { - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. } if w.keyword == Keyword::PRIMARY => { @@ -864,7 +861,7 @@ impl<'a> ParserContext<'a> { .collect(); Ok(Some(TableConstraint::PrimaryKey { columns })) } - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), .. 
} if w.keyword == Keyword::TIME => { @@ -1313,20 +1310,8 @@ SELECT max(c1), min(c2) FROM schema_2.table_2;"; }; let expected = CreateFlow { - flow_name: ObjectName(vec![Ident { - value: "task_1".to_string(), - quote_style: None, - }]), - sink_table_name: ObjectName(vec![ - Ident { - value: "schema_1".to_string(), - quote_style: None, - }, - Ident { - value: "table_1".to_string(), - quote_style: None, - }, - ]), + flow_name: ObjectName(vec![Ident::new("task_1")]), + sink_table_name: ObjectName(vec![Ident::new("schema_1"), Ident::new("table_1")]), or_replace: true, if_not_exists: true, expire_after: Some(300), @@ -1825,7 +1810,7 @@ ENGINE=mito"; ParserContext::create_with_dialect(sql, &GreptimeDbDialect {}, ParseOptions::default()); assert_eq!( result.unwrap_err().output_msg(), - "Invalid SQL, error: Partition rule expr Identifier(Ident { value: \"b\", quote_style: None }) is not a binary expr" + r#"Invalid SQL, error: Partition rule expr Identifier(Ident { value: "b", quote_style: None, span: Span(Location(4,5)..Location(4,6)) }) is not a binary expr"# ); } diff --git a/src/sql/src/parsers/deallocate_parser.rs b/src/sql/src/parsers/deallocate_parser.rs index 3fac296d86..65cfcab48d 100644 --- a/src/sql/src/parsers/deallocate_parser.rs +++ b/src/sql/src/parsers/deallocate_parser.rs @@ -24,7 +24,7 @@ impl ParserContext<'_> { self.parser .expect_keyword(Keyword::DEALLOCATE) .context(SyntaxSnafu)?; - let stmt_name = self.parser.parse_identifier(false).context(SyntaxSnafu)?; + let stmt_name = self.parser.parse_identifier().context(SyntaxSnafu)?; Ok(stmt_name.value) } } diff --git a/src/sql/src/parsers/execute_parser.rs b/src/sql/src/parsers/execute_parser.rs index 454b024663..7a5a5a8145 100644 --- a/src/sql/src/parsers/execute_parser.rs +++ b/src/sql/src/parsers/execute_parser.rs @@ -27,7 +27,7 @@ impl ParserContext<'_> { self.parser .expect_keyword(Keyword::EXECUTE) .context(SyntaxSnafu)?; - let stmt_name = self.parser.parse_identifier(false).context(SyntaxSnafu)?; + let stmt_name = self.parser.parse_identifier().context(SyntaxSnafu)?; if self.parser.parse_keyword(Keyword::USING) { let param_list = self .parser diff --git a/src/sql/src/parsers/explain_parser.rs b/src/sql/src/parsers/explain_parser.rs index 7574c4a68f..3769825351 100644 --- a/src/sql/src/parsers/explain_parser.rs +++ b/src/sql/src/parsers/explain_parser.rs @@ -37,6 +37,7 @@ impl ParserContext<'_> { #[cfg(test)] mod tests { + use sqlparser::ast::helpers::attached_token::AttachedToken; use sqlparser::ast::{ GroupByExpr, Query as SpQuery, Statement as SpStatement, WildcardAdditionalOptions, }; @@ -69,6 +70,8 @@ mod tests { partitions: vec![], version: None, with_ordinality: false, + json_path: None, + sample: None, }, joins: vec![], }], @@ -86,6 +89,7 @@ mod tests { prewhere: None, window_before_qualify: false, connect_by: None, + select_token: AttachedToken::empty(), }; let sp_statement = SpStatement::Query(Box::new(SpQuery { @@ -110,6 +114,7 @@ mod tests { format: None, query_plan: false, options: None, + estimate: false, }) .unwrap(); diff --git a/src/sql/src/parsers/prepare_parser.rs b/src/sql/src/parsers/prepare_parser.rs index c5c8c808b7..c7410cff3b 100644 --- a/src/sql/src/parsers/prepare_parser.rs +++ b/src/sql/src/parsers/prepare_parser.rs @@ -26,7 +26,7 @@ impl ParserContext<'_> { self.parser .expect_keyword(Keyword::PREPARE) .context(SyntaxSnafu)?; - let stmt_name = self.parser.parse_identifier(false).context(SyntaxSnafu)?; + let stmt_name = self.parser.parse_identifier().context(SyntaxSnafu)?; self.parser 
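Note for reviewers: the explain_parser test hunk above shows the knock-on cost of the sqlparser bump: AST structs grew fields (`select_token` on `Select`, `json_path` and `sample` on table factors, `estimate` on EXPLAIN), so hand-built literals need neutral fillers even though the semantics are unchanged. A tiny sketch of the token-bookkeeping filler; the helper name is purely illustrative:

use sqlparser::ast::helpers::attached_token::AttachedToken;

fn neutral_select_token() -> AttachedToken {
    // A placeholder token attachment for hand-constructed AST nodes,
    // matching the `select_token: AttachedToken::empty()` filler above.
    AttachedToken::empty()
}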
.expect_keyword(Keyword::FROM) .context(SyntaxSnafu)?; diff --git a/src/sql/src/parsers/set_var_parser.rs b/src/sql/src/parsers/set_var_parser.rs index 63119198ae..0abde77b4a 100644 --- a/src/sql/src/parsers/set_var_parser.rs +++ b/src/sql/src/parsers/set_var_parser.rs @@ -38,10 +38,7 @@ impl ParserContext<'_> { })), SpStatement::SetTimeZone { value, .. } => Ok(Statement::SetVariables(SetVariables { - variable: ObjectName(vec![Ident { - value: "TIMEZONE".to_string(), - quote_style: None, - }]), + variable: ObjectName(vec![Ident::new("TIMEZONE")]), value: vec![value], })), diff --git a/src/sql/src/parsers/show_parser.rs b/src/sql/src/parsers/show_parser.rs index 5444d09d00..511c539f33 100644 --- a/src/sql/src/parsers/show_parser.rs +++ b/src/sql/src/parsers/show_parser.rs @@ -295,7 +295,7 @@ impl ParserContext<'_> { Keyword::LIKE => { self.parser.next_token(); Ok(ShowKind::Like( - Self::parse_identifier(&mut self.parser).with_context(|_| { + self.parser.parse_identifier().with_context(|_| { error::UnexpectedSnafu { expected: "LIKE", actual: self.peek_token_as_string(), @@ -498,12 +498,12 @@ impl ParserContext<'_> { ))), Token::Word(w) => match w.keyword { Keyword::LIKE => Ok(Statement::ShowDatabases(ShowDatabases::new( - ShowKind::Like(Self::parse_identifier(&mut self.parser).with_context( - |_| error::UnexpectedSnafu { + ShowKind::Like(self.parser.parse_identifier().with_context(|_| { + error::UnexpectedSnafu { expected: "LIKE", actual: tok.to_string(), - }, - )?), + } + })?), full, ))), Keyword::WHERE => Ok(Statement::ShowDatabases(ShowDatabases::new( @@ -639,6 +639,7 @@ mod tests { kind: ShowKind::Like(sqlparser::ast::Ident { value: _, quote_style: None, + span: _, }), .. }) @@ -698,6 +699,7 @@ mod tests { kind: ShowKind::Like(sqlparser::ast::Ident { value: _, quote_style: None, + span: _, }), database: None, full: false @@ -716,6 +718,7 @@ mod tests { kind: ShowKind::Like(sqlparser::ast::Ident { value: _, quote_style: None, + span: _, }), database: Some(_), full: false @@ -806,6 +809,7 @@ mod tests { kind: ShowKind::Like(sqlparser::ast::Ident { value: _, quote_style: None, + span: _, }), database: None, full: true @@ -824,6 +828,7 @@ mod tests { kind: ShowKind::Like(sqlparser::ast::Ident { value: _, quote_style: None, + span: _, }), database: Some(_), full: true diff --git a/src/sql/src/parsers/tql_parser.rs b/src/sql/src/parsers/tql_parser.rs index 67651d9561..797c72381c 100644 --- a/src/sql/src/parsers/tql_parser.rs +++ b/src/sql/src/parsers/tql_parser.rs @@ -217,7 +217,7 @@ impl ParserContext<'_> { while matches!(parser.peek_token().token, Token::Comma) { let _skip_token = parser.next_token(); } - let index = parser.next_token().location.column as usize; + let index = parser.next_token().span.start.column as usize; if index == 0 { return Err(ParserError::ParserError("empty TQL query".to_string())); } diff --git a/src/sql/src/statements.rs b/src/sql/src/statements.rs index 91c6597cc3..984ec4c6be 100644 --- a/src/sql/src/statements.rs +++ b/src/sql/src/statements.rs @@ -281,12 +281,7 @@ pub fn sql_value_to_value( if let Some(unary_op) = unary_op { match unary_op { UnaryOperator::Plus | UnaryOperator::Minus | UnaryOperator::Not => {} - UnaryOperator::PGBitwiseNot - | UnaryOperator::PGSquareRoot - | UnaryOperator::PGCubeRoot - | UnaryOperator::PGPostfixFactorial - | UnaryOperator::PGPrefixFactorial - | UnaryOperator::PGAbs => { + _ => { return UnsupportedUnaryOpSnafu { unary_op }.fail(); } } @@ -570,9 +565,12 @@ pub fn sql_data_type_to_concrete_data_type(data_type: &SqlDataType) -> 
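Note for reviewers: the show_parser hunks add `span: _` to every `Ident` pattern because sqlparser's `Ident` gained a field recording its source span; the same upgrade is why the patch swaps verbose struct literals for `Ident::new`, and why tql_parser reads `span.start.column` instead of `location.column`. A sketch mirroring `canonicalize_identifier` above:

use sqlparser::ast::Ident;

fn canonicalize(ident: Ident) -> Ident {
    if ident.quote_style.is_some() {
        ident
    } else {
        // Ident::new fills quote_style with None and span with an empty
        // span, so literals no longer need to spell out every field.
        Ident::new(ident.value.to_lowercase())
    }
}

fn is_unquoted(ident: &Ident) -> bool {
    // Exhaustive struct patterns must now name the span field too.
    matches!(
        ident,
        Ident {
            value: _,
            quote_style: None,
            span: _,
        }
    )
}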
Result Ok(ConcreteDataType::string_datatype()), SqlDataType::Float(_) => Ok(ConcreteDataType::float32_datatype()), - SqlDataType::Double | SqlDataType::Float64 => Ok(ConcreteDataType::float64_datatype()), + SqlDataType::Double(_) | SqlDataType::Float64 => Ok(ConcreteDataType::float64_datatype()), SqlDataType::Boolean => Ok(ConcreteDataType::boolean_datatype()), SqlDataType::Date => Ok(ConcreteDataType::date_datatype()), SqlDataType::Binary(_) @@ -636,7 +634,7 @@ pub fn concrete_data_type_to_sql_data_type(data_type: &ConcreteDataType) -> Resu ConcreteDataType::UInt8(_) => Ok(SqlDataType::UnsignedTinyInt(None)), ConcreteDataType::String(_) => Ok(SqlDataType::String(None)), ConcreteDataType::Float32(_) => Ok(SqlDataType::Float(None)), - ConcreteDataType::Float64(_) => Ok(SqlDataType::Double), + ConcreteDataType::Float64(_) => Ok(SqlDataType::Double(ExactNumberInfo::None)), ConcreteDataType::Boolean(_) => Ok(SqlDataType::Boolean), ConcreteDataType::Date(_) => Ok(SqlDataType::Date), ConcreteDataType::Timestamp(ts_type) => Ok(SqlDataType::Timestamp( @@ -721,7 +719,10 @@ mod tests { SqlDataType::Float(None), ConcreteDataType::float32_datatype(), ); - check_type(SqlDataType::Double, ConcreteDataType::float64_datatype()); + check_type( + SqlDataType::Double(ExactNumberInfo::None), + ConcreteDataType::float64_datatype(), + ); check_type(SqlDataType::Boolean, ConcreteDataType::boolean_datatype()); check_type(SqlDataType::Date, ConcreteDataType::date_datatype()); check_type( @@ -1187,7 +1188,7 @@ mod tests { // test basic let column_def = ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![], }; @@ -1203,7 +1204,7 @@ mod tests { // test not null let column_def = ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![ColumnOptionDef { name: None, @@ -1217,7 +1218,7 @@ mod tests { // test primary key let column_def = ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![ColumnOptionDef { name: None, @@ -1290,7 +1291,7 @@ mod tests { pub fn test_has_primary_key_option() { let column_def = ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![], }; @@ -1298,7 +1299,7 @@ mod tests { let column_def = ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![ColumnOptionDef { name: None, @@ -1316,7 +1317,7 @@ mod tests { let column_def = Column { column_def: ColumnDef { name: "col".into(), - data_type: SqlDataType::Double, + data_type: SqlDataType::Double(ExactNumberInfo::None), collation: None, options: vec![], }, diff --git a/src/sql/src/statements/insert.rs b/src/sql/src/statements/insert.rs index 6dad03fe95..6a3f044c7b 100644 --- a/src/sql/src/statements/insert.rs +++ b/src/sql/src/statements/insert.rs @@ -14,13 +14,13 @@ use serde::Serialize; use sqlparser::ast::{ - Insert as SpInsert, ObjectName, Query, SetExpr, Statement, UnaryOperator, Values, + Insert as SpInsert, ObjectName, Query, SetExpr, Statement, TableObject, UnaryOperator, Values, }; use sqlparser::parser::ParserError; use sqlparser_derive::{Visit, VisitMut}; use crate::ast::{Expr, Value}; -use crate::error::Result; +use crate::error::{Result, 
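Note for reviewers: `SqlDataType::Double` is now a tuple variant carrying optional precision/scale, so constructions in the hunks above and below become `Double(ExactNumberInfo::None)` and matches become `Double(_)`. Sketch:

use sqlparser::ast::{DataType as SqlDataType, ExactNumberInfo};

fn float64_sql_type() -> SqlDataType {
    // Plain DOUBLE with no precision/scale attached.
    SqlDataType::Double(ExactNumberInfo::None)
}

fn is_double(data_type: &SqlDataType) -> bool {
    // Ignore any precision info, as the postgres translator hunk below does.
    matches!(data_type, SqlDataType::Double(_))
}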
UnsupportedSnafu}; use crate::statements::query::Query as GtQuery; #[derive(Debug, Clone, PartialEq, Eq, Visit, VisitMut, Serialize)] @@ -39,9 +39,17 @@ macro_rules! parse_fail { } impl Insert { - pub fn table_name(&self) -> &ObjectName { + pub fn table_name(&self) -> Result<&ObjectName> { match &self.inner { - Statement::Insert(insert) => &insert.table_name, + Statement::Insert(insert) => { + let TableObject::TableName(name) = &insert.table else { + return UnsupportedSnafu { + keyword: "TABLE FUNCTION".to_string(), + } + .fail(); + }; + Ok(name) + } _ => unreachable!(), } } diff --git a/src/sql/src/statements/show.rs b/src/sql/src/statements/show.rs index 239ac84abd..73ef8fe28e 100644 --- a/src/sql/src/statements/show.rs +++ b/src/sql/src/statements/show.rs @@ -334,13 +334,7 @@ mod tests { assert_eq!("", format!("{}", ShowKind::All)); assert_eq!( "LIKE test", - format!( - "{}", - ShowKind::Like(Ident { - value: "test".to_string(), - quote_style: None, - }) - ) + format!("{}", ShowKind::Like(Ident::new("test")),) ); assert_eq!( "WHERE NOT a", @@ -348,10 +342,7 @@ mod tests { "{}", ShowKind::Where(Expr::UnaryOp { op: UnaryOperator::Not, - expr: Box::new(Expr::Identifier(Ident { - value: "a".to_string(), - quote_style: None, - })), + expr: Box::new(Expr::Identifier(Ident::new("a"))), }) ) ); diff --git a/src/sql/src/statements/transform/type_alias.rs b/src/sql/src/statements/transform/type_alias.rs index b29354009b..340634dadd 100644 --- a/src/sql/src/statements/transform/type_alias.rs +++ b/src/sql/src/statements/transform/type_alias.rs @@ -16,8 +16,8 @@ use std::ops::ControlFlow; use datatypes::data_type::DataType as GreptimeDataType; use sqlparser::ast::{ - DataType, Expr, Function, FunctionArg, FunctionArgExpr, FunctionArgumentList, Ident, - ObjectName, Value, + DataType, ExactNumberInfo, Expr, Function, FunctionArg, FunctionArgExpr, FunctionArgumentList, + Ident, ObjectName, Value, }; use crate::error::Result; @@ -91,6 +91,7 @@ impl TransformRule for TypeAliasTransformRule { over: None, parameters: sqlparser::ast::FunctionArguments::None, within_group: vec![], + uses_odbc_syntax: false, } } @@ -166,7 +167,7 @@ pub(crate) fn get_type_by_alias(data_type: &DataType) -> Option { DataType::UInt32 => Some(DataType::UnsignedInt(None)), DataType::UInt64 => Some(DataType::UnsignedBigInt(None)), DataType::Float32 => Some(DataType::Float(None)), - DataType::Float64 => Some(DataType::Double), + DataType::Float64 => Some(DataType::Double(ExactNumberInfo::None)), DataType::Bool => Some(DataType::Boolean), DataType::Datetime(_) => Some(DataType::Timestamp(Some(6), TimezoneInfo::None)), _ => None, @@ -207,7 +208,7 @@ pub(crate) fn get_data_type_by_alias_name(name: &str) -> Option { "UINT32" => Some(DataType::UnsignedInt(None)), "UINT64" => Some(DataType::UnsignedBigInt(None)), "FLOAT32" => Some(DataType::Float(None)), - "FLOAT64" => Some(DataType::Double), + "FLOAT64" => Some(DataType::Double(ExactNumberInfo::None)), // String type alias "TINYTEXT" | "MEDIUMTEXT" | "LONGTEXT" => Some(DataType::Text), _ => None, @@ -226,15 +227,15 @@ mod tests { fn test_get_data_type_by_alias_name() { assert_eq!( get_data_type_by_alias_name("float64"), - Some(DataType::Double) + Some(DataType::Double(ExactNumberInfo::None)) ); assert_eq!( get_data_type_by_alias_name("Float64"), - Some(DataType::Double) + Some(DataType::Double(ExactNumberInfo::None)) ); assert_eq!( get_data_type_by_alias_name("FLOAT64"), - Some(DataType::Double) + Some(DataType::Double(ExactNumberInfo::None)) ); assert_eq!( @@ -410,9 +411,9 @@ CREATE 
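Note for reviewers: the insert.rs hunk above reacts to sqlparser replacing `Insert::table_name` with a `TableObject`, whose target can now also be a table function (ClickHouse `INSERT INTO FUNCTION ...`), so `table_name` becomes fallible. A condensed sketch of the match, with an `Option` return standing in for GreptimeDB's `UnsupportedSnafu` error:

use sqlparser::ast::{ObjectName, TableObject};

fn insert_target(table: &TableObject) -> Option<&ObjectName> {
    match table {
        // The ordinary `INSERT INTO t ...` case.
        TableObject::TableName(name) => Some(name),
        // Table-function targets are rejected by this patch.
        TableObject::TableFunction(_) => None,
    }
}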
TABLE data_types ( Statement::CreateTable(c) => { let expected = r#"CREATE TABLE data_types ( s STRING, - tt TEXT, - mt TEXT, - lt TEXT, + tt TINYTEXT, + mt MEDIUMTEXT, + lt LONGTEXT, tint TINYINT, sint SMALLINT, i INT, diff --git a/src/table/src/predicate.rs b/src/table/src/predicate.rs index 1fd5cdcbd3..037902ee53 100644 --- a/src/table/src/predicate.rs +++ b/src/table/src/predicate.rs @@ -254,30 +254,7 @@ fn extract_from_binary_expr( let right = extract_time_range_from_expr(ts_col_name, ts_col_unit, right)?; Some(left.or(&right)) } - Operator::NotEq - | Operator::Plus - | Operator::Minus - | Operator::Multiply - | Operator::Divide - | Operator::Modulo - | Operator::IsDistinctFrom - | Operator::IsNotDistinctFrom - | Operator::RegexMatch - | Operator::RegexIMatch - | Operator::RegexNotMatch - | Operator::RegexNotIMatch - | Operator::BitwiseAnd - | Operator::BitwiseOr - | Operator::BitwiseXor - | Operator::BitwiseShiftRight - | Operator::BitwiseShiftLeft - | Operator::StringConcat - | Operator::ArrowAt - | Operator::AtArrow - | Operator::LikeMatch - | Operator::ILikeMatch - | Operator::NotLikeMatch - | Operator::NotILikeMatch => None, + _ => None, } } diff --git a/src/table/src/table/scan.rs b/src/table/src/table/scan.rs index 6e24b44ea0..cd69b22b5a 100644 --- a/src/table/src/table/scan.rs +++ b/src/table/src/table/scan.rs @@ -25,9 +25,10 @@ use common_telemetry::tracing_context::TracingContext; use common_telemetry::warn; use datafusion::error::Result as DfResult; use datafusion::execution::context::TaskContext; +use datafusion::physical_plan::execution_plan::{Boundedness, EmissionType}; use datafusion::physical_plan::metrics::{ExecutionPlanMetricsSet, MetricsSet}; use datafusion::physical_plan::{ - DisplayAs, DisplayFormatType, ExecutionMode, ExecutionPlan, PlanProperties, + DisplayAs, DisplayFormatType, ExecutionPlan, PlanProperties, RecordBatchStream as DfRecordBatchStream, }; use datafusion_common::stats::Precision; @@ -134,7 +135,8 @@ impl RegionScanExec { let properties = PlanProperties::new( eq_props, Partitioning::UnknownPartitioning(num_output_partition), - ExecutionMode::Bounded, + EmissionType::Incremental, + Boundedness::Bounded, ); let append_mode = scanner_props.append_mode(); let total_rows = scanner_props.total_rows(); diff --git a/tests-fuzz/src/translator/postgres.rs b/tests-fuzz/src/translator/postgres.rs index 25e0ff6ed4..3b89b22cc7 100644 --- a/tests-fuzz/src/translator/postgres.rs +++ b/tests-fuzz/src/translator/postgres.rs @@ -19,7 +19,7 @@ pub mod create_expr; pub fn sql_data_type_to_postgres_data_type(data_type: SqlDataType) -> String { match data_type { - SqlDataType::Double => "DOUBLE PRECISION".to_string(), + SqlDataType::Double(_) => "DOUBLE PRECISION".to_string(), _ => data_type.to_string(), } } diff --git a/tests-integration/tests/http.rs b/tests-integration/tests/http.rs index 1c665d96e1..b4690cf9e1 100644 --- a/tests-integration/tests/http.rs +++ b/tests-integration/tests/http.rs @@ -438,7 +438,7 @@ pub async fn test_sql_api(store_type: StorageType) { assert_eq!(res.status(), StatusCode::OK); assert_eq!( res.text().await, - "[{\"DescribeTable\":{\"name\":[{\"value\":\"t\",\"quote_style\":null}]}}]" + r#"[{"DescribeTable":{"name":[{"value":"t","quote_style":null,"span":{"start":{"line":0,"column":0},"end":{"line":0,"column":0}}}]}}]"#, ); // test timezone header diff --git a/tests/cases/standalone/common/create/current_timestamp.result b/tests/cases/standalone/common/create/current_timestamp.result index 6bb53d8380..19ed385981 100644 --- 
a/tests/cases/standalone/common/create/current_timestamp.result +++ b/tests/cases/standalone/common/create/current_timestamp.result @@ -54,7 +54,7 @@ show create table t3; create table t4 (ts timestamp time index default now); -Error: 1001(Unsupported), Unsupported expr in default constraint: Identifier(Ident { value: "now", quote_style: None }) for column: ts +Error: 1001(Unsupported), Unsupported expr in default constraint: Identifier(Ident { value: "now", quote_style: None, span: Span(Location(1,50)..Location(1,53)) }) for column: ts drop table t1; diff --git a/tests/cases/standalone/common/function/geo.result b/tests/cases/standalone/common/function/geo.result index 7b051a35ed..3c7bfe5547 100644 --- a/tests/cases/standalone/common/function/geo.result +++ b/tests/cases/standalone/common/function/geo.result @@ -335,11 +335,11 @@ FROM cell_cte; SELECT UNNEST(geo_path(37.76938, -122.3889, 1728083375::TimestampSecond)); -+--------------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------+ -| unnest_placeholder(geo_path(Float64(37.76938),Float64(-122.3889),arrow_cast(Int64(1728083375),Utf8("Timestamp(Second, None)")))).lat | unnest_placeholder(geo_path(Float64(37.76938),Float64(-122.3889),arrow_cast(Int64(1728083375),Utf8("Timestamp(Second, None)")))).lng | -+--------------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------+ -| [37.76938] | [-122.3889] | -+--------------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------+ ++----------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| __unnest_placeholder(geo_path(Float64(37.76938),Float64(-122.3889),arrow_cast(Int64(1728083375),Utf8("Timestamp(Second, None)")))).lat | __unnest_placeholder(geo_path(Float64(37.76938),Float64(-122.3889),arrow_cast(Int64(1728083375),Utf8("Timestamp(Second, None)")))).lng | ++----------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ +| [37.76938] | [-122.3889] | ++----------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ SELECT UNNEST(geo_path(lat, lon, ts)) FROM( @@ -352,11 +352,11 @@ FROM( SELECT 37.77001 AS lat, -122.3888 AS lon, 1728083372::TimestampSecond AS ts ); -+----------------------------------------------+----------------------------------------------+ -| unnest_placeholder(geo_path(lat,lon,ts)).lat | unnest_placeholder(geo_path(lat,lon,ts)).lng | 
-+----------------------------------------------+----------------------------------------------+ -| [37.77001, 37.76928, 37.76938, 37.7693] | [-122.3888, -122.3839, -122.3889, -122.382] | -+----------------------------------------------+----------------------------------------------+ ++------------------------------------------------+------------------------------------------------+ +| __unnest_placeholder(geo_path(lat,lon,ts)).lat | __unnest_placeholder(geo_path(lat,lon,ts)).lng | ++------------------------------------------------+------------------------------------------------+ +| [37.77001, 37.76928, 37.76938, 37.7693] | [-122.3888, -122.3839, -122.3889, -122.382] | ++------------------------------------------------+------------------------------------------------+ SELECT wkt_point_from_latlng(37.76938, -122.3889) AS point; diff --git a/tests/cases/standalone/common/insert/special_value.result b/tests/cases/standalone/common/insert/special_value.result index 33048217d4..e4b030e64a 100644 --- a/tests/cases/standalone/common/insert/special_value.result +++ b/tests/cases/standalone/common/insert/special_value.result @@ -23,7 +23,7 @@ select * from data; insert into data values (4, 'infinityyyy'::double); -Error: 3001(EngineExecuteQuery), DataFusion error: Cast error: Cannot cast string 'infinityyyy' to value of Float64 type +Error: 3001(EngineExecuteQuery), Cast error: Cannot cast string 'infinityyyy' to value of Float64 type drop table data; diff --git a/tests/cases/standalone/common/order/limit.result b/tests/cases/standalone/common/order/limit.result index bd2fe2ce67..fb13ba699e 100644 --- a/tests/cases/standalone/common/order/limit.result +++ b/tests/cases/standalone/common/order/limit.result @@ -45,11 +45,11 @@ Error: 3000(PlanQuery), Failed to plan SQL: No field named a. 
SELECT a FROM test LIMIT SUM(42); -Error: 3001(EngineExecuteQuery), DataFusion error: This feature is not implemented: Unsupported LIMIT expression: Some(AggregateFunction(AggregateFunction { func: AggregateUDF { inner: Sum { signature: Signature { type_signature: UserDefined, volatility: Immutable } } }, args: [Literal(Int64(42))], distinct: false, filter: None, order_by: None, null_treatment: None })) +Error: 1001(Unsupported), This feature is not implemented: Unsupported LIMIT expression: Some(AggregateFunction(AggregateFunction { func: AggregateUDF { inner: Sum { signature: Signature { type_signature: UserDefined, volatility: Immutable } } }, args: [Literal(Int64(42))], distinct: false, filter: None, order_by: None, null_treatment: None })) SELECT a FROM test LIMIT row_number() OVER (); -Error: 3001(EngineExecuteQuery), DataFusion error: This feature is not implemented: Unsupported LIMIT expression: Some(Cast(Cast { expr: WindowFunction(WindowFunction { fun: WindowUDF(WindowUDF { inner: RowNumber { signature: Signature { type_signature: NullAry, volatility: Immutable } } }), args: [], partition_by: [], order_by: [], window_frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(NULL)), end_bound: Following(UInt64(NULL)), is_causal: false }, null_treatment: None }), data_type: Int64 })) +Error: 3001(EngineExecuteQuery), This feature is not implemented: Unsupported LIMIT expression: Some(Cast(Cast { expr: WindowFunction(WindowFunction { fun: WindowUDF(WindowUDF { inner: RowNumber { signature: Signature { type_signature: Nullary, volatility: Immutable } } }), args: [], partition_by: [], order_by: [], window_frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(NULL)), end_bound: Following(UInt64(NULL)), is_causal: false }, null_treatment: None }), data_type: Int64 })) CREATE TABLE test2 (a STRING, ts TIMESTAMP TIME INDEX); @@ -122,11 +122,11 @@ Error: 3001(EngineExecuteQuery), DataFusion error: Error during planning: Cannot SELECT * FROM integers as int LIMIT (SELECT NULL); -Error: 3001(EngineExecuteQuery), DataFusion error: This feature is not implemented: Unsupported LIMIT expression: Some(ScalarSubquery()) +Error: 1001(Unsupported), This feature is not implemented: Unsupported LIMIT expression: Some(ScalarSubquery()) SELECT * FROM integers as int LIMIT (SELECT -1); -Error: 3001(EngineExecuteQuery), DataFusion error: This feature is not implemented: Unsupported LIMIT expression: Some(ScalarSubquery()) +Error: 1001(Unsupported), This feature is not implemented: Unsupported LIMIT expression: Some(ScalarSubquery()) SELECT * FROM integers as int LIMIT (SELECT 'ab'); diff --git a/tests/cases/standalone/common/order/order_by_exceptions.result b/tests/cases/standalone/common/order/order_by_exceptions.result index f10e30dbf1..6385b9d721 100644 --- a/tests/cases/standalone/common/order/order_by_exceptions.result +++ b/tests/cases/standalone/common/order/order_by_exceptions.result @@ -82,7 +82,7 @@ EXPLAIN SELECT a % 2, b FROM test UNION SELECT a % 2 AS k, b FROM test ORDER BY SELECT a % 2, b FROM test UNION SELECT a % 2 AS k FROM test ORDER BY -1; -Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: UNION queries have different number of columns: left has 2 columns whereas right has 1 columns +Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: UNION queries have different number of columns DROP TABLE test; diff --git a/tests/cases/standalone/common/prepare/mysql_prepare.result b/tests/cases/standalone/common/prepare/mysql_prepare.result index 
f6c041f074..37540834a1 100644 --- a/tests/cases/standalone/common/prepare/mysql_prepare.result +++ b/tests/cases/standalone/common/prepare/mysql_prepare.result @@ -3,7 +3,7 @@ -- SQLNESS PROTOCOL MYSQL SELECT ?; -Failed to execute query, err: MySqlError { ERROR 1815 (HY000): (EngineExecuteQuery): DataFusion error: Execution error: Placeholder '?' was not provided a value for execution. } +Failed to execute query, err: MySqlError { ERROR 1815 (HY000): (EngineExecuteQuery): Execution error: Placeholder '?' was not provided a value for execution. } -- SQLNESS PROTOCOL MYSQL PREPARE stmt FROM 'SELECT ?::int;'; diff --git a/tests/cases/standalone/common/range/by.result b/tests/cases/standalone/common/range/by.result index a31e4d42e8..a97d90ce86 100644 --- a/tests/cases/standalone/common/range/by.result +++ b/tests/cases/standalone/common/range/by.result @@ -63,7 +63,7 @@ SELECT ts, length(host)::INT64 + 2, max(val) RANGE '5s' FROM host ALIGN '20s' BY -- project non-aggregation key SELECT ts, host, max(val) RANGE '5s' FROM host ALIGN '20s' BY () ORDER BY ts; -Error: 3001(EngineExecuteQuery), DataFusion error: No field named host.host. Valid fields are "max(host.val) RANGE 5s", host.ts, "Int64(1)". +Error: 3001(EngineExecuteQuery), No field named host.host. Valid fields are "max(host.val) RANGE 5s", host.ts, "Int64(1)". DROP TABLE host; diff --git a/tests/cases/standalone/common/range/error.result b/tests/cases/standalone/common/range/error.result index 11bcfef6de..f7236d6096 100644 --- a/tests/cases/standalone/common/range/error.result +++ b/tests/cases/standalone/common/range/error.result @@ -41,7 +41,7 @@ Error: 2000(InvalidSyntax), Invalid SQL syntax: sql parser error: Illegal Range SELECT min(val) RANGE '10s', max(val) FROM host ALIGN '5s'; -Error: 3001(EngineExecuteQuery), DataFusion error: No field named "max(host.val)". Valid fields are "min(host.val) RANGE 10s", host.ts, host.host. +Error: 3001(EngineExecuteQuery), No field named "max(host.val)". Valid fields are "min(host.val) RANGE 10s", host.ts, host.host. SELECT min(val) * 2 RANGE '10s' FROM host ALIGN '5s'; @@ -54,7 +54,7 @@ Error: 2000(InvalidSyntax), Invalid SQL syntax: sql parser error: Can't use the -- 2.2 no align param SELECT min(val) RANGE '5s' FROM host; -Error: 3000(PlanQuery), DataFusion error: Error during planning: Missing argument in range select query +Error: 3000(PlanQuery), Error during planning: Missing argument in range select query -- 2.3 type mismatch SELECT covar(ceil(val), floor(val)) RANGE '20s' FROM host ALIGN '10s'; @@ -85,24 +85,24 @@ Error: 2000(InvalidSyntax), Range Query: Window functions is not allowed in Rang -- 2.6 invalid fill SELECT min(val) RANGE '5s' FROM host ALIGN '5s' FILL 3.0; -Error: 3000(PlanQuery), DataFusion error: Error during planning: 3.0 is not a valid fill option, fail to convert to a const value. { Arrow error: Cast error: Cannot cast string '3.0' to value of Int64 type } +Error: 3000(PlanQuery), Error during planning: 3.0 is not a valid fill option, fail to convert to a const value. 
{ Arrow error: Cast error: Cannot cast string '3.0' to value of Int64 type }
 
 -- 2.7 zero align/range
 SELECT min(val) RANGE '5s' FROM host ALIGN '0s';
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: duration must be greater than 0
+Error: 3000(PlanQuery), Error during planning: duration must be greater than 0
 
 SELECT min(val) RANGE '0s' FROM host ALIGN '5s';
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: duration must be greater than 0
+Error: 3000(PlanQuery), Error during planning: duration must be greater than 0
 
 SELECT min(val) RANGE '5s' FROM host ALIGN (INTERVAL '0' day);
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 0, nanoseconds: 0 }")` in range select query
+Error: 3000(PlanQuery), Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 0, nanoseconds: 0 }")` in range select query
 
 SELECT min(val) RANGE (INTERVAL '0' day) FROM host ALIGN '5s';
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 0, nanoseconds: 0 }")` in range select query
+Error: 3000(PlanQuery), Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 0, nanoseconds: 0 }")` in range select query
 
 DROP TABLE host;
 
diff --git a/tests/cases/standalone/common/range/interval.result b/tests/cases/standalone/common/range/interval.result
index 20d61aeb17..35bb4bc020 100644
--- a/tests/cases/standalone/common/range/interval.result
+++ b/tests/cases/standalone/common/range/interval.result
@@ -20,7 +20,7 @@ Affected Rows: 8
 
 SELECT ts, host, min(val) RANGE (INTERVAL '1 year') FROM host ALIGN (INTERVAL '1 year') ORDER BY host, ts;
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Year or month interval is not allowed in range query: IntervalMonthDayNano("IntervalMonthDayNano { months: 12, days: 0, nanoseconds: 0 }")
+Error: 3000(PlanQuery), Error during planning: Year or month interval is not allowed in range query: IntervalMonthDayNano("IntervalMonthDayNano { months: 12, days: 0, nanoseconds: 0 }")
 
 SELECT ts, host, min(val) RANGE (INTERVAL '1' day) FROM host ALIGN (INTERVAL '1' day) ORDER BY host, ts;
 
diff --git a/tests/cases/standalone/common/range/special_aggr.result b/tests/cases/standalone/common/range/special_aggr.result
index fa2b5cb47e..93933b6247 100644
--- a/tests/cases/standalone/common/range/special_aggr.result
+++ b/tests/cases/standalone/common/range/special_aggr.result
@@ -64,12 +64,12 @@ SELECT ts, host, first_value(addon ORDER BY val DESC) RANGE '5s', last_value(add
 | ts                  | host  | first_value(host.addon) ORDER BY [host.val DESC NULLS FIRST] RANGE 5s | last_value(host.addon) ORDER BY [host.val DESC NULLS FIRST] RANGE 5s |
 +---------------------+-------+------------------------------------------------------------------------+-----------------------------------------------------------------------+
 | 1970-01-01T00:00:00 | host1 | 3                                                                      | 1                                                                     |
-| 1970-01-01T00:00:05 | host1 | 4                                                                      | 4                                                                     |
+| 1970-01-01T00:00:05 | host1 | 4                                                                      | 6                                                                     |
 | 1970-01-01T00:00:10 | host1 | 7                                                                      | 8                                                                     |
 | 1970-01-01T00:00:15 | host1 | 11                                                                     | 10                                                                    |
 | 1970-01-01T00:00:20 | host1 | 15                                                                     | 13                                                                    |
 | 1970-01-01T00:00:00 | host2 | 18                                                                     | 16                                                                    |
-| 1970-01-01T00:00:05 | host2 | 19                                                                     | 19                                                                    |
+| 1970-01-01T00:00:05 | host2 | 19                                                                     | 21                                                                    |
 | 1970-01-01T00:00:10 | host2 | 22                                                                     | 23                                                                    |
 | 1970-01-01T00:00:15 | host2 | 26                                                                     | 25                                                                    |
 | 1970-01-01T00:00:20 | host2 | 30                                                                     | 28                                                                    |
@@ -81,12 +81,12 @@ SELECT ts, host, first_value(addon ORDER BY val DESC NULLS LAST) RANGE '5s', las
 | ts                  | host  | first_value(host.addon) ORDER BY [host.val DESC NULLS LAST] RANGE 5s | last_value(host.addon) ORDER BY [host.val DESC NULLS LAST] RANGE 5s |
 +---------------------+-------+-----------------------------------------------------------------------+----------------------------------------------------------------------+
 | 1970-01-01T00:00:00 | host1 | 3                                                                     | 1                                                                    |
-| 1970-01-01T00:00:05 | host1 | 4                                                                     | 4                                                                    |
+| 1970-01-01T00:00:05 | host1 | 4                                                                     | 6                                                                    |
 | 1970-01-01T00:00:10 | host1 | 9                                                                     | 7                                                                    |
 | 1970-01-01T00:00:15 | host1 | 12                                                                    | 11                                                                   |
 | 1970-01-01T00:00:20 | host1 | 14                                                                    | 15                                                                   |
 | 1970-01-01T00:00:00 | host2 | 18                                                                    | 16                                                                   |
-| 1970-01-01T00:00:05 | host2 | 19                                                                    | 19                                                                   |
+| 1970-01-01T00:00:05 | host2 | 19                                                                    | 21                                                                   |
 | 1970-01-01T00:00:10 | host2 | 24                                                                    | 22                                                                   |
 | 1970-01-01T00:00:15 | host2 | 27                                                                    | 26                                                                   |
 | 1970-01-01T00:00:20 | host2 | 29                                                                    | 30                                                                   |
@@ -98,12 +98,12 @@ SELECT ts, host, first_value(addon ORDER BY val ASC) RANGE '5s', last_value(addo
 | ts                  | host  | first_value(host.addon) ORDER BY [host.val ASC NULLS LAST] RANGE 5s | last_value(host.addon) ORDER BY [host.val ASC NULLS LAST] RANGE 5s |
 +---------------------+-------+----------------------------------------------------------------------+---------------------------------------------------------------------+
 | 1970-01-01T00:00:00 | host1 | 1                                                                    | 3                                                                   |
-| 1970-01-01T00:00:05 | host1 | 4                                                                    | 4                                                                   |
+| 1970-01-01T00:00:05 | host1 | 4                                                                    | 6                                                                   |
 | 1970-01-01T00:00:10 | host1 | 8                                                                    | 7                                                                   |
 | 1970-01-01T00:00:15 | host1 | 10                                                                   | 11                                                                  |
 | 1970-01-01T00:00:20 | host1 | 13                                                                   | 15                                                                  |
 | 1970-01-01T00:00:00 | host2 | 16                                                                   | 18                                                                  |
-| 1970-01-01T00:00:05 | host2 | 19                                                                   | 19                                                                  |
+| 1970-01-01T00:00:05 | host2 | 19                                                                   | 21                                                                  |
 | 1970-01-01T00:00:10 | host2 | 23                                                                   | 22                                                                  |
 | 1970-01-01T00:00:15 | host2 | 25                                                                   | 26                                                                  |
 | 1970-01-01T00:00:20 | host2 | 28                                                                   | 30                                                                  |
@@ -115,12 +115,12 @@ SELECT ts, host, first_value(addon ORDER BY val ASC NULLS FIRST) RANGE '5s', las
 | ts                  | host  | first_value(host.addon) ORDER BY [host.val ASC NULLS FIRST] RANGE 5s | last_value(host.addon) ORDER BY [host.val ASC NULLS FIRST] RANGE 5s |
 +---------------------+-------+-----------------------------------------------------------------------+----------------------------------------------------------------------+
 | 1970-01-01T00:00:00 | host1 | 1                                                                     | 3                                                                    |
-| 1970-01-01T00:00:05 | host1 | 4                                                                     | 4                                                                    |
+| 1970-01-01T00:00:05 | host1 | 4                                                                     | 6                                                                    |
 | 1970-01-01T00:00:10 | host1 | 7                                                                     | 9                                                                    |
 | 1970-01-01T00:00:15 | host1 | 11                                                                    | 12                                                                   |
 | 1970-01-01T00:00:20 | host1 | 15                                                                    | 14                                                                   |
 | 1970-01-01T00:00:00 | host2 | 16                                                                    | 18                                                                   |
-| 1970-01-01T00:00:05 | host2 | 19                                                                    | 19                                                                   |
+| 1970-01-01T00:00:05 | host2 | 19                                                                    | 21                                                                   |
 | 1970-01-01T00:00:10 | host2 | 22                                                                    | 24                                                                   |
 | 1970-01-01T00:00:15 | host2 | 26                                                                    | 27                                                                   |
 | 1970-01-01T00:00:20 | host2 | 30                                                                    | 29                                                                   |
@@ -231,7 +231,7 @@ SELECT ts, host, count(distinct *) RANGE '5s' FROM host ALIGN '5s' ORDER BY host
 
 -- Test error first_value/last_value
 SELECT ts, host, first_value(val, val) RANGE '5s' FROM host ALIGN '5s' ORDER BY host, ts;
 
-Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: Error during planning: The function expected 1 arguments but received 2 No function matches the given name and argument types 'first_value(Int64, Int64)'. You might need to add explicit type casts.
+Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: The function 'first_value' expected 1 arguments but received 2 No function matches the given name and argument types 'first_value(Int64, Int64)'. You might need to add explicit type casts.
 	Candidate functions:
 	first_value(Any)
diff --git a/tests/cases/standalone/common/range/to.result b/tests/cases/standalone/common/range/to.result
index 1488eca688..5a94fc2c52 100644
--- a/tests/cases/standalone/common/range/to.result
+++ b/tests/cases/standalone/common/range/to.result
@@ -31,7 +31,7 @@ SELECT ts, host, min(val) RANGE '1d' FROM host ALIGN '1d' ORDER BY host, ts;
 
 SELECT ts, host, min(val) RANGE '1d' FROM host ALIGN '1d' TO UNKNOWN ORDER BY host, ts;
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal `align to` argument `UNKNOWN` in range select query, can't be parse as NOW/CALENDAR/Timestamp, error: Failed to parse a string into Timestamp, raw string: UNKNOWN
+Error: 3000(PlanQuery), Error during planning: Illegal `align to` argument `UNKNOWN` in range select query, can't be parse as NOW/CALENDAR/Timestamp, error: Failed to parse a string into Timestamp, raw string: UNKNOWN
 
 SELECT ts, host, min(val) RANGE '1d' FROM host ALIGN '1d' TO '1900-01-01T00:00:00+01:00' ORDER BY host, ts;
 
@@ -95,16 +95,16 @@ SELECT ts, min(val) RANGE (INTERVAL '2' day - INTERVAL '1' day) FROM host ALIGN
 -- non-positive duration
 SELECT ts, min(val) RANGE (INTERVAL '1' day - INTERVAL '2' day) FROM host ALIGN (INTERVAL '1' day) TO '1900-01-01T00:00:00+01:00' by (1) ORDER BY ts;
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }") - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 2, nanoseconds: 0 }")` in range select query
+Error: 3000(PlanQuery), Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }") - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 2, nanoseconds: 0 }")` in range select query
 
 SELECT ts, min(val) RANGE (INTERVAL '1' day - INTERVAL '1' day) FROM host ALIGN (INTERVAL '1' day) TO '1900-01-01T00:00:00+01:00' by (1) ORDER BY ts;
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }") - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }")` in range select query
+Error: 3000(PlanQuery), Error during planning: Illegal argument `IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }") - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }")` in range select query
 
 -- duration not all interval
 SELECT ts, min(val) RANGE (now() - INTERVAL '1' day) FROM host ALIGN (INTERVAL '1' day) TO '1900-01-01T00:00:00+01:00' by (1) ORDER BY ts;
 
-Error: 3000(PlanQuery), DataFusion error: Error during planning: Illegal argument `now() - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }")` in range select query
+Error: 3000(PlanQuery), Error during planning: Illegal argument `now() - IntervalMonthDayNano("IntervalMonthDayNano { months: 0, days: 1, nanoseconds: 0 }")` in range select query
 
 --- ALIGN TO with time zone ---
 set time_zone='Asia/Shanghai';
diff --git a/tests/cases/standalone/common/select/unnest.result b/tests/cases/standalone/common/select/unnest.result
index e24a140b9a..e082a8a6c1 100644
--- a/tests/cases/standalone/common/select/unnest.result
+++ b/tests/cases/standalone/common/select/unnest.result
@@ -29,11 +29,11 @@ SELECT unnest([1,2,3]);
 
 SELECT unnest(struct(1,2,3));
 
-+-----------------------------------------------------------+-----------------------------------------------------------+-----------------------------------------------------------+
-| unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c0 | unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c1 | unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c2 |
-+-----------------------------------------------------------+-----------------------------------------------------------+-----------------------------------------------------------+
-| 1                                                         | 2                                                         | 3                                                         |
-+-----------------------------------------------------------+-----------------------------------------------------------+-----------------------------------------------------------+
++-------------------------------------------------------------+-------------------------------------------------------------+-------------------------------------------------------------+
+| __unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c0 | __unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c1 | __unnest_placeholder(struct(Int64(1),Int64(2),Int64(3))).c2 |
++-------------------------------------------------------------+-------------------------------------------------------------+-------------------------------------------------------------+
+| 1                                                           | 2                                                           | 3                                                           |
++-------------------------------------------------------------+-------------------------------------------------------------+-------------------------------------------------------------+
 
 -- Table function is not supported for now
 -- SELECT * FROM unnest([1,2,3]);
diff --git a/tests/cases/standalone/common/types/json/json.result b/tests/cases/standalone/common/types/json/json.result
index 8392b4cb4a..1fc69c9c28 100644
--- a/tests/cases/standalone/common/types/json/json.result
+++ b/tests/cases/standalone/common/types/json/json.result
@@ -119,11 +119,11 @@ Affected Rows: 25
 
 INSERT INTO jsons VALUES(parse_json('{"a":1, "b":2, "c":3'), 4);
 
-Error: 3001(EngineExecuteQuery), DataFusion error: Invalid function args: Cannot convert the string to json, have: {"a":1, "b":2, "c":3
+Error: 3001(EngineExecuteQuery), Invalid function args: Cannot convert the string to json, have: {"a":1, "b":2, "c":3
 
 INSERT INTO jsons VALUES(parse_json('Morning my friends, have a nice day :)'), 5);
 
-Error: 3001(EngineExecuteQuery), DataFusion error: Invalid function args: Cannot convert the string to json, have: Morning my friends, have a nice day :)
+Error: 3001(EngineExecuteQuery), Invalid function args: Cannot convert the string to json, have: Morning my friends, have a nice day :)
 
 SELECT json_to_string(j), t FROM jsons;
 
diff --git a/tests/cases/standalone/common/types/timestamp/timestamp.result b/tests/cases/standalone/common/types/timestamp/timestamp.result
index 4c5398ae10..51bc812814 100644
--- a/tests/cases/standalone/common/types/timestamp/timestamp.result
+++ b/tests/cases/standalone/common/types/timestamp/timestamp.result
@@ -75,13 +75,13 @@ SELECT MAX(t) FROM timestamp;
 
 SELECT SUM(t) FROM timestamp;
 
-Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: Execution error: User-defined coercion failed with Execution("Sum not supported for Timestamp(Millisecond, None)") No function matches the given name and argument types 'sum(Timestamp(Millisecond, None))'. You might need to add explicit type casts.
+Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: Execution error: Function 'sum' user-defined coercion failed with Execution("Sum not supported for Timestamp(Millisecond, None)") No function matches the given name and argument types 'sum(Timestamp(Millisecond, None))'. You might need to add explicit type casts.
 	Candidate functions:
 	sum(UserDefined)
 
 SELECT AVG(t) FROM timestamp;
 
-Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: Execution error: User-defined coercion failed with Plan("The function \"avg\" does not support inputs of type Timestamp(Millisecond, None).") No function matches the given name and argument types 'avg(Timestamp(Millisecond, None))'. You might need to add explicit type casts.
+Error: 3000(PlanQuery), Failed to plan SQL: Error during planning: Execution error: Function 'avg' user-defined coercion failed with Plan("The function \"avg\" does not support inputs of type Timestamp(Millisecond, None).") No function matches the given name and argument types 'avg(Timestamp(Millisecond, None))'. You might need to add explicit type casts.
 	Candidate functions:
 	avg(UserDefined)