chore: update toolchain to 2025-05-19 (#6124)

* chore: update toolchain to 2025-05-19

* chore: update nix sha

* chore: rebase main and fix
shuiyisong committed 2025-05-20 12:29:40 +08:00 (committed by GitHub)
parent 8d36ffb4e1
commit b2377d4b87
13 changed files with 23 additions and 23 deletions

@@ -79,6 +79,7 @@ clippy.implicit_clone = "warn"
 clippy.result_large_err = "allow"
 clippy.large_enum_variant = "allow"
 clippy.doc_overindented_list_items = "allow"
+clippy.uninlined_format_args = "allow"
 rust.unknown_lints = "deny"
 rust.unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tokio_unstable)'] }
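
The added `clippy.uninlined_format_args = "allow"` opts the workspace out of a lint that the newer nightly's clippy appears to fire by default; it asks for format arguments to be inlined into the format string. A minimal sketch of the two spellings (illustrative, not from the repo):

fn main() {
    let name = "greptime";
    println!("hello, {}", name); // form the lint flags
    println!("hello, {name}");   // inlined capture the lint suggests
}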

@@ -21,7 +21,7 @@
   lib = nixpkgs.lib;
   rustToolchain = fenix.packages.${system}.fromToolchainName {
     name = (lib.importTOML ./rust-toolchain.toml).toolchain.channel;
-    sha256 = "sha256-arzEYlWLGGYeOhECHpBxQd2joZ4rPKV3qLNnZ+eql6A=";
+    sha256 = "sha256-tJJr8oqX3YD+ohhPK7jlt/7kvKBnBqJVjYtoFr520d4=";
   };
 in
 {

@@ -1,2 +1,2 @@
 [toolchain]
-channel = "nightly-2025-04-15"
+channel = "nightly-2025-05-19"

@@ -163,7 +163,7 @@ impl DfAccumulator for UddSketchState {
             }
         }
         // meaning instantiate as `uddsketch_merge`
-        DataType::Binary => self.merge_batch(&[array.clone()])?,
+        DataType::Binary => self.merge_batch(std::slice::from_ref(array))?,
         _ => {
             return not_impl_err!(
                 "UDDSketch functions do not support data type: {}",

@@ -468,8 +468,8 @@ mod tests {
         let empty_values = vec![""];
         let empty_input = Arc::new(StringVector::from_slice(&empty_values)) as VectorRef;
-        let ipv4_result = ipv4_func.eval(&ctx, &[empty_input.clone()]);
-        let ipv6_result = ipv6_func.eval(&ctx, &[empty_input.clone()]);
+        let ipv4_result = ipv4_func.eval(&ctx, std::slice::from_ref(&empty_input));
+        let ipv6_result = ipv6_func.eval(&ctx, std::slice::from_ref(&empty_input));
         assert!(ipv4_result.is_err());
         assert!(ipv6_result.is_err());
@@ -478,7 +478,7 @@ mod tests {
         let invalid_values = vec!["not an ip", "192.168.1.256", "zzzz::ffff"];
         let invalid_input = Arc::new(StringVector::from_slice(&invalid_values)) as VectorRef;
-        let ipv4_result = ipv4_func.eval(&ctx, &[invalid_input.clone()]);
+        let ipv4_result = ipv4_func.eval(&ctx, std::slice::from_ref(&invalid_input));
         assert!(ipv4_result.is_err());
     }

@@ -294,7 +294,7 @@ mod tests {
         let input = Arc::new(StringVector::from_slice(&values)) as VectorRef;
         // Convert IPv6 addresses to binary
-        let binary_result = to_num.eval(&ctx, &[input.clone()]).unwrap();
+        let binary_result = to_num.eval(&ctx, std::slice::from_ref(&input)).unwrap();
         // Convert binary to hex string representation (for ipv6_num_to_string)
         let mut hex_strings = Vec::new();

@@ -217,13 +217,10 @@ impl FlowRouteManager {
         current_flow_info: &DeserializedValueWithBytes<FlowInfoValue>,
         flow_routes: I,
     ) -> Result<Txn> {
-        let del_txns = current_flow_info
-            .flownode_ids()
-            .iter()
-            .map(|(partition_id, _)| {
-                let key = FlowRouteKey::new(flow_id, *partition_id).to_bytes();
-                Ok(TxnOp::Delete(key))
-            });
+        let del_txns = current_flow_info.flownode_ids().keys().map(|partition_id| {
+            let key = FlowRouteKey::new(flow_id, *partition_id).to_bytes();
+            Ok(TxnOp::Delete(key))
+        });
         let put_txns = flow_routes.into_iter().map(|(partition_id, route)| {
             let key = FlowRouteKey::new(flow_id, partition_id).to_bytes();
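
Beyond collapsing the builder chain, the rewrite iterates `.keys()` rather than `.iter().map(|(k, _)| …)`, which is what clippy suggests for key-only iteration (likely its `iter_kv_map` lint). A small sketch of the equivalence:

use std::collections::BTreeMap;

fn main() {
    let m = BTreeMap::from([(1u32, "a"), (2, "b")]);
    let via_map: Vec<u32> = m.iter().map(|(k, _)| *k).collect(); // flagged form
    let via_keys: Vec<u32> = m.keys().copied().collect();        // suggested form
    assert_eq!(via_map, via_keys);
}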

@@ -65,6 +65,7 @@ impl DataflowState {
     /// schedule all subgraph that need to run with time <= `as_of` and run_available()
     ///
     /// return true if any subgraph actually executed
+    #[allow(clippy::swap_with_temporary)]
     pub fn run_available_with_schedule(&mut self, df: &mut Dfir) -> bool {
         // first split keys <= as_of into another map
         let mut before = self
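
`swap_with_temporary` is a recent clippy lint that flags `mem::swap` calls where one operand is a freshly created temporary; the `allow` keeps the existing swap-based split here. A hedged sketch of the pattern the lint targets and its usual replacement:

use std::mem;

fn main() {
    let mut pending = vec![1, 2, 3];
    // Flagged form (commented out): swapping with a temporary default value.
    // mem::swap(&mut pending, &mut Vec::new());
    // Suggested form: mem::take moves the value out, leaving Default::default().
    let drained = mem::take(&mut pending);
    assert!(pending.is_empty());
    assert_eq!(drained, vec![1, 2, 3]);
}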

@@ -34,7 +34,7 @@ use crate::key::{DatanodeLeaseKey, FlownodeLeaseKey, LeaseValue};
 fn build_lease_filter(lease_secs: u64) -> impl Fn(&LeaseValue) -> bool {
     move |v: &LeaseValue| {
         ((time_util::current_time_millis() - v.timestamp_millis) as u64)
-            < lease_secs.checked_mul(1000).unwrap_or(u64::MAX)
+            < lease_secs.saturating_mul(1000)
     }
 }
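
`saturating_mul` clamps to `u64::MAX` on overflow, which is exactly what `checked_mul(1000).unwrap_or(u64::MAX)` spelled out by hand (clippy's `manual_saturating_arithmetic` lint covers this pattern). For example:

fn main() {
    let big = u64::MAX / 2;
    // Overflowing case: both spellings clamp to u64::MAX.
    assert_eq!(big.checked_mul(1000).unwrap_or(u64::MAX), u64::MAX);
    assert_eq!(big.saturating_mul(1000), u64::MAX);
    // Non-overflowing case: the plain product.
    assert_eq!(30u64.saturating_mul(1000), 30_000);
}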

@@ -319,8 +319,8 @@ pub fn column_schema(
     columns: &HashMap<String, VectorRef>,
 ) -> Result<Vec<ColumnSchema>> {
     columns
-        .iter()
-        .map(|(column_name, _vector)| {
+        .keys()
+        .map(|column_name| {
             let column_schema = table_info
                 .meta
                 .schema

@@ -185,7 +185,7 @@ impl StatementExecutor {
         .contains_key(LOGICAL_TABLE_METADATA_KEY)
     {
         return self
-            .create_logical_tables(&[create_table.clone()], query_ctx)
+            .create_logical_tables(std::slice::from_ref(create_table), query_ctx)
             .await?
             .into_iter()
             .next()

@@ -243,7 +243,7 @@ mod test {
     // From prometheus `promql/functions_test.go` case `TestKahanSum`
     #[test]
     fn test_kahan_sum() {
-        let inputs = vec![1.0, 10.0f64.powf(100.0), 1.0, -1.0 * 10.0f64.powf(100.0)];
+        let inputs = vec![1.0, 10.0f64.powf(100.0), 1.0, -10.0f64.powf(100.0)];
         let mut sum = 0.0;
         let mut c = 0f64;
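
The new literal is the same value: `-10.0f64.powf(100.0)` parses as `-(10.0f64.powf(100.0))` because unary minus binds more loosely than a method call, so only the redundant `-1.0 *` goes away (likely flagged by clippy's `neg_multiply`). For context, a minimal sketch of the compensated (Kahan–Neumaier) summation such a test exercises; `kahan_sum` is illustrative, not the crate's implementation:

fn kahan_sum(inputs: &[f64]) -> f64 {
    let (mut sum, mut c) = (0.0f64, 0.0f64);
    for &x in inputs {
        let t = sum + x;
        c += if sum.abs() >= x.abs() {
            (sum - t) + x // recover the low bits of x lost in sum + x
        } else {
            (x - t) + sum // recover the low bits of sum lost in sum + x
        };
        sum = t;
    }
    sum + c
}

fn main() {
    let inputs = [1.0, 10.0f64.powf(100.0), 1.0, -10.0f64.powf(100.0)];
    let naive: f64 = inputs.iter().sum(); // both 1.0s are absorbed by 1e100
    assert_eq!(naive, 0.0);
    assert_eq!(kahan_sum(&inputs), 2.0); // compensation recovers them
}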

@@ -18,6 +18,7 @@
 use std::any::Any;
 use std::collections::{BTreeMap, BTreeSet, VecDeque};
 use std::pin::Pin;
+use std::slice::from_ref;
 use std::sync::Arc;
 use std::task::{Context, Poll};
@@ -801,18 +802,18 @@ fn find_slice_from_range(
         // note that `data < max_val`
         // i.e., for max_val = 4, array = [5,3,2] should be start=1
         // max_val = 4, array = [5, 4, 3, 2] should be start= 2
-        let start = bisect::<false>(&[array.clone()], &[max_val.clone()], &[*opt])?;
+        let start = bisect::<false>(from_ref(array), from_ref(&max_val), &[*opt])?;
         // min_val = 1, array = [3, 2, 1, 0], end = 3
         // min_val = 1, array = [3, 2, 0], end = 2
-        let end = bisect::<false>(&[array.clone()], &[min_val.clone()], &[*opt])?;
+        let end = bisect::<false>(from_ref(array), from_ref(&min_val), &[*opt])?;
         (start, end)
     } else {
         // min_val = 1, array = [1, 2, 3], start = 0
         // min_val = 1, array = [0, 2, 3], start = 1
-        let start = bisect::<true>(&[array.clone()], &[min_val.clone()], &[*opt])?;
+        let start = bisect::<true>(from_ref(array), from_ref(&min_val), &[*opt])?;
         // max_val = 3, array = [1, 3, 4], end = 1
         // max_val = 3, array = [1, 2, 4], end = 2
-        let end = bisect::<true>(&[array.clone()], &[max_val.clone()], &[*opt])?;
+        let end = bisect::<true>(from_ref(array), from_ref(&max_val), &[*opt])?;
         (start, end)
     };