From b2377d4b878dd0e96670d9a9d72093c442dcbe41 Mon Sep 17 00:00:00 2001
From: shuiyisong <113876041+shuiyisong@users.noreply.github.com>
Date: Tue, 20 May 2025 12:29:40 +0800
Subject: [PATCH] chore: update toolchain to 2025-05-19 (#6124)

* chore: update toolchain to 2025-05-19

* chore: update nix sha

* chore: rebase main and fix
---
 Cargo.toml                                      |  1 +
 flake.nix                                       |  2 +-
 rust-toolchain.toml                             |  2 +-
 src/common/function/src/aggr/uddsketch_state.rs |  2 +-
 src/common/function/src/scalars/ip/cidr.rs      |  6 +++---
 src/common/function/src/scalars/ip/ipv6.rs      |  2 +-
 src/common/meta/src/key/flow/flow_route.rs      | 11 ++++-------
 src/flow/src/compute/state.rs                   |  1 +
 src/meta-srv/src/lease.rs                       |  2 +-
 src/operator/src/req_convert/common.rs          |  4 ++--
 src/operator/src/statement/ddl.rs               |  2 +-
 src/promql/src/functions.rs                     |  2 +-
 src/query/src/window_sort.rs                    |  9 +++++----
 13 files changed, 23 insertions(+), 23 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index e0db30062b..8818ca6897 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -79,6 +79,7 @@ clippy.implicit_clone = "warn"
 clippy.result_large_err = "allow"
 clippy.large_enum_variant = "allow"
 clippy.doc_overindented_list_items = "allow"
+clippy.uninlined_format_args = "allow"
 rust.unknown_lints = "deny"
 rust.unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tokio_unstable)'] }
 
diff --git a/flake.nix b/flake.nix
index d98d60505e..3f5ff02978 100644
--- a/flake.nix
+++ b/flake.nix
@@ -21,7 +21,7 @@
         lib = nixpkgs.lib;
         rustToolchain = fenix.packages.${system}.fromToolchainName {
           name = (lib.importTOML ./rust-toolchain.toml).toolchain.channel;
-          sha256 = "sha256-arzEYlWLGGYeOhECHpBxQd2joZ4rPKV3qLNnZ+eql6A=";
+          sha256 = "sha256-tJJr8oqX3YD+ohhPK7jlt/7kvKBnBqJVjYtoFr520d4=";
         };
       in
       {
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index 5d547223f2..e34fe14525 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,2 +1,2 @@
 [toolchain]
-channel = "nightly-2025-04-15"
+channel = "nightly-2025-05-19"
diff --git a/src/common/function/src/aggr/uddsketch_state.rs b/src/common/function/src/aggr/uddsketch_state.rs
index 1c5741c4a9..020a71b799 100644
--- a/src/common/function/src/aggr/uddsketch_state.rs
+++ b/src/common/function/src/aggr/uddsketch_state.rs
@@ -163,7 +163,7 @@ impl DfAccumulator for UddSketchState {
                 }
             }
             // meaning instantiate as `uddsketch_merge`
-            DataType::Binary => self.merge_batch(&[array.clone()])?,
+            DataType::Binary => self.merge_batch(std::slice::from_ref(array))?,
             _ => {
                 return not_impl_err!(
                     "UDDSketch functions do not support data type: {}",
diff --git a/src/common/function/src/scalars/ip/cidr.rs b/src/common/function/src/scalars/ip/cidr.rs
index 79b6e46f99..a0c2611348 100644
--- a/src/common/function/src/scalars/ip/cidr.rs
+++ b/src/common/function/src/scalars/ip/cidr.rs
@@ -468,8 +468,8 @@ mod tests {
         let empty_values = vec![""];
         let empty_input = Arc::new(StringVector::from_slice(&empty_values)) as VectorRef;
 
-        let ipv4_result = ipv4_func.eval(&ctx, &[empty_input.clone()]);
-        let ipv6_result = ipv6_func.eval(&ctx, &[empty_input.clone()]);
+        let ipv4_result = ipv4_func.eval(&ctx, std::slice::from_ref(&empty_input));
+        let ipv6_result = ipv6_func.eval(&ctx, std::slice::from_ref(&empty_input));
 
         assert!(ipv4_result.is_err());
         assert!(ipv6_result.is_err());
@@ -478,7 +478,7 @@
         let invalid_values = vec!["not an ip", "192.168.1.256", "zzzz::ffff"];
         let invalid_input = Arc::new(StringVector::from_slice(&invalid_values)) as VectorRef;
 
-        let ipv4_result = ipv4_func.eval(&ctx, &[invalid_input.clone()]);
+        let ipv4_result = ipv4_func.eval(&ctx, std::slice::from_ref(&invalid_input));
         assert!(ipv4_result.is_err());
     }
 
diff --git a/src/common/function/src/scalars/ip/ipv6.rs b/src/common/function/src/scalars/ip/ipv6.rs
index e818600d91..b24c658368 100644
--- a/src/common/function/src/scalars/ip/ipv6.rs
+++ b/src/common/function/src/scalars/ip/ipv6.rs
@@ -294,7 +294,7 @@ mod tests {
         let input = Arc::new(StringVector::from_slice(&values)) as VectorRef;
 
         // Convert IPv6 addresses to binary
-        let binary_result = to_num.eval(&ctx, &[input.clone()]).unwrap();
+        let binary_result = to_num.eval(&ctx, std::slice::from_ref(&input)).unwrap();
 
         // Convert binary to hex string representation (for ipv6_num_to_string)
         let mut hex_strings = Vec::new();
diff --git a/src/common/meta/src/key/flow/flow_route.rs b/src/common/meta/src/key/flow/flow_route.rs
index c1e43957f0..9aead7a9f9 100644
--- a/src/common/meta/src/key/flow/flow_route.rs
+++ b/src/common/meta/src/key/flow/flow_route.rs
@@ -217,13 +217,10 @@ impl FlowRouteManager {
         current_flow_info: &DeserializedValueWithBytes,
         flow_routes: I,
     ) -> Result {
-        let del_txns = current_flow_info
-            .flownode_ids()
-            .iter()
-            .map(|(partition_id, _)| {
-                let key = FlowRouteKey::new(flow_id, *partition_id).to_bytes();
-                Ok(TxnOp::Delete(key))
-            });
+        let del_txns = current_flow_info.flownode_ids().keys().map(|partition_id| {
+            let key = FlowRouteKey::new(flow_id, *partition_id).to_bytes();
+            Ok(TxnOp::Delete(key))
+        });
 
         let put_txns = flow_routes.into_iter().map(|(partition_id, route)| {
             let key = FlowRouteKey::new(flow_id, partition_id).to_bytes();
diff --git a/src/flow/src/compute/state.rs b/src/flow/src/compute/state.rs
index 4728503e4e..82d2b908d7 100644
--- a/src/flow/src/compute/state.rs
+++ b/src/flow/src/compute/state.rs
@@ -65,6 +65,7 @@ impl DataflowState {
     /// schedule all subgraph that need to run with time <= `as_of` and run_available()
     ///
     /// return true if any subgraph actually executed
+    #[allow(clippy::swap_with_temporary)]
     pub fn run_available_with_schedule(&mut self, df: &mut Dfir) -> bool {
         // first split keys <= as_of into another map
         let mut before = self
diff --git a/src/meta-srv/src/lease.rs b/src/meta-srv/src/lease.rs
index e9254c2d4f..26a194c035 100644
--- a/src/meta-srv/src/lease.rs
+++ b/src/meta-srv/src/lease.rs
@@ -34,7 +34,7 @@ use crate::key::{DatanodeLeaseKey, FlownodeLeaseKey, LeaseValue};
 fn build_lease_filter(lease_secs: u64) -> impl Fn(&LeaseValue) -> bool {
     move |v: &LeaseValue| {
         ((time_util::current_time_millis() - v.timestamp_millis) as u64)
-            < lease_secs.checked_mul(1000).unwrap_or(u64::MAX)
+            < lease_secs.saturating_mul(1000)
     }
 }
 
diff --git a/src/operator/src/req_convert/common.rs b/src/operator/src/req_convert/common.rs
index 700aa1384d..79a661be7f 100644
--- a/src/operator/src/req_convert/common.rs
+++ b/src/operator/src/req_convert/common.rs
@@ -319,8 +319,8 @@ pub fn column_schema(
     columns: &HashMap,
 ) -> Result> {
     columns
-        .iter()
-        .map(|(column_name, _vector)| {
+        .keys()
+        .map(|column_name| {
            let column_schema = table_info
                .meta
                .schema
diff --git a/src/operator/src/statement/ddl.rs b/src/operator/src/statement/ddl.rs
index c995999672..692919ca73 100644
--- a/src/operator/src/statement/ddl.rs
+++ b/src/operator/src/statement/ddl.rs
@@ -185,7 +185,7 @@ impl StatementExecutor {
             .contains_key(LOGICAL_TABLE_METADATA_KEY)
         {
             return self
-                .create_logical_tables(&[create_table.clone()], query_ctx)
+                .create_logical_tables(std::slice::from_ref(create_table), query_ctx)
                 .await?
                 .into_iter()
                 .next()
diff --git a/src/promql/src/functions.rs b/src/promql/src/functions.rs
index fee6387d20..12841dee09 100644
--- a/src/promql/src/functions.rs
+++ b/src/promql/src/functions.rs
@@ -243,7 +243,7 @@ mod test {
     // From prometheus `promql/functions_test.go` case `TestKahanSum`
     #[test]
    fn test_kahan_sum() {
-        let inputs = vec![1.0, 10.0f64.powf(100.0), 1.0, -1.0 * 10.0f64.powf(100.0)];
+        let inputs = vec![1.0, 10.0f64.powf(100.0), 1.0, -10.0f64.powf(100.0)];
         let mut sum = 0.0;
         let mut c = 0f64;
 
diff --git a/src/query/src/window_sort.rs b/src/query/src/window_sort.rs
index 2e367c5ea0..c8b9ece3a1 100644
--- a/src/query/src/window_sort.rs
+++ b/src/query/src/window_sort.rs
@@ -18,6 +18,7 @@
 use std::any::Any;
 use std::collections::{BTreeMap, BTreeSet, VecDeque};
 use std::pin::Pin;
+use std::slice::from_ref;
 use std::sync::Arc;
 use std::task::{Context, Poll};
 
@@ -801,18 +802,18 @@ fn find_slice_from_range(
         // note that `data < max_val`
         // i,e, for max_val = 4, array = [5,3,2] should be start=1
         // max_val = 4, array = [5, 4, 3, 2] should be start= 2
-        let start = bisect::(&[array.clone()], &[max_val.clone()], &[*opt])?;
+        let start = bisect::(from_ref(array), from_ref(&max_val), &[*opt])?;
         // min_val = 1, array = [3, 2, 1, 0], end = 3
         // min_val = 1, array = [3, 2, 0], end = 2
-        let end = bisect::(&[array.clone()], &[min_val.clone()], &[*opt])?;
+        let end = bisect::(from_ref(array), from_ref(&min_val), &[*opt])?;
         (start, end)
     } else {
         // min_val = 1, array = [1, 2, 3], start = 0
         // min_val = 1, array = [0, 2, 3], start = 1
-        let start = bisect::(&[array.clone()], &[min_val.clone()], &[*opt])?;
+        let start = bisect::(from_ref(array), from_ref(&min_val), &[*opt])?;
         // max_val = 3, array = [1, 3, 4], end = 1
         // max_val = 3, array = [1, 2, 4], end = 2
-        let end = bisect::(&[array.clone()], &[max_val.clone()], &[*opt])?;
+        let end = bisect::(from_ref(array), from_ref(&max_val), &[*opt])?;
         (start, end)
     };
 
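Most of the Rust changes above follow one pattern: where a call site only needs a read-only one-element slice, `&[value.clone()]` is replaced by `std::slice::from_ref(&value)`, which borrows the existing value as a `&[T]` instead of allocating a temporary array and cloning the element (for the `Arc`-backed `VectorRef`/`ArrayRef` arguments this avoids a refcount bump). The `lease.rs` change is likewise behavior-preserving, since `saturating_mul` yields `u64::MAX` in exactly the case where `checked_mul(1000).unwrap_or(u64::MAX)` did. Below is a minimal sketch of both patterns; `total_len` is a hypothetical helper standing in for slice-taking APIs such as `eval` or `merge_batch`, not a GreptimeDB function.

use std::sync::Arc;

// Hypothetical stand-in for an API that only reads a slice of shared vectors.
fn total_len(slice: &[Arc<Vec<u8>>]) -> usize {
    slice.iter().map(|v| v.len()).sum()
}

fn main() {
    let vector = Arc::new(vec![1u8, 2, 3]);

    // Before: build a temporary one-element array, cloning the Arc (refcount bump).
    let a = total_len(&[vector.clone()]);

    // After: borrow the existing value as a one-element slice, no clone at all.
    let b = total_len(std::slice::from_ref(&vector));
    assert_eq!(a, b);

    // The lease filter change is equivalent for u64: on overflow, checked_mul
    // returns None and unwrap_or gives u64::MAX, which is exactly what
    // saturating_mul produces; otherwise both return the same product.
    let lease_secs = u64::MAX / 500;
    assert_eq!(
        lease_secs.checked_mul(1000).unwrap_or(u64::MAX),
        lease_secs.saturating_mul(1000)
    );
}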