diff --git a/src/aggregation/bucket/histogram/histogram.rs b/src/aggregation/bucket/histogram/histogram.rs
index 128cea783..45aae8a5e 100644
--- a/src/aggregation/bucket/histogram/histogram.rs
+++ b/src/aggregation/bucket/histogram/histogram.rs
@@ -351,6 +351,7 @@ impl SegmentHistogramCollector {
         let buckets_mem = self.buckets.memory_consumption();
         self_mem + sub_aggs_mem + buckets_mem
     }
+    /// Converts the collector result into an intermediate bucket result.
     pub fn into_intermediate_bucket_result(
         self,
         agg_with_accessor: &AggregationWithAccessor,
diff --git a/src/aggregation/bucket/mod.rs b/src/aggregation/bucket/mod.rs
index 0c1b90dbc..5404935a9 100644
--- a/src/aggregation/bucket/mod.rs
+++ b/src/aggregation/bucket/mod.rs
@@ -28,9 +28,7 @@ mod term_agg;
 
 use std::collections::HashMap;
 
-pub(crate) use histogram::SegmentHistogramCollector;
 pub use histogram::*;
-pub(crate) use range::SegmentRangeCollector;
 pub use range::*;
 use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
 pub use term_agg::*;
diff --git a/src/aggregation/metric/percentiles.rs b/src/aggregation/metric/percentiles.rs
index 87c6d547b..7b66b1273 100644
--- a/src/aggregation/metric/percentiles.rs
+++ b/src/aggregation/metric/percentiles.rs
@@ -499,7 +499,7 @@ mod tests {
     fn test_aggregation_percentiles(merge_segments: bool) -> crate::Result<()> {
         use rand_distr::Distribution;
 
-        let num_values_in_segment = vec![100, 30_000, 8000];
+        let num_values_in_segment = [100, 30_000, 8000];
         let lg_norm = rand_distr::LogNormal::new(2.996f64, 0.979f64).unwrap();
 
         let mut rng = StdRng::from_seed([1u8; 32]);
diff --git a/src/core/json_utils.rs b/src/core/json_utils.rs
index e7c85d1ab..30ba5c27d 100644
--- a/src/core/json_utils.rs
+++ b/src/core/json_utils.rs
@@ -619,21 +619,21 @@ mod tests {
 
     #[test]
     fn test_split_json_path_escaped_dot() {
-        let json_path = split_json_path(r#"toto\.titi"#);
+        let json_path = split_json_path(r"toto\.titi");
         assert_eq!(&json_path, &["toto.titi"]);
-        let json_path_2 = split_json_path(r#"k8s\.container\.name"#);
+        let json_path_2 = split_json_path(r"k8s\.container\.name");
         assert_eq!(&json_path_2, &["k8s.container.name"]);
     }
 
     #[test]
     fn test_split_json_path_escaped_backslash() {
-        let json_path = split_json_path(r#"toto\\titi"#);
-        assert_eq!(&json_path, &[r#"toto\titi"#]);
+        let json_path = split_json_path(r"toto\\titi");
+        assert_eq!(&json_path, &[r"toto\titi"]);
     }
 
     #[test]
     fn test_split_json_path_escaped_normal_letter() {
-        let json_path = split_json_path(r#"toto\titi"#);
+        let json_path = split_json_path(r"toto\titi");
         assert_eq!(&json_path, &[r#"tototiti"#]);
     }
 }
diff --git a/src/fastfield/mod.rs b/src/fastfield/mod.rs
index d450e3e59..8dd1bd501 100644
--- a/src/fastfield/mod.rs
+++ b/src/fastfield/mod.rs
@@ -939,7 +939,7 @@ mod tests {
             .unwrap()
             .first_or_default_col(0);
 
-        let numbers = vec![100, 200, 300];
+        let numbers = [100, 200, 300];
         let test_range = |range: RangeInclusive| {
             let expexted_count = numbers.iter().filter(|num| range.contains(num)).count();
             let mut vec = vec![];
@@ -1013,7 +1013,7 @@
             .unwrap()
             .first_or_default_col(0);
 
-        let numbers = vec![1000, 1001, 1003];
+        let numbers = [1000, 1001, 1003];
         let test_range = |range: RangeInclusive| {
             let expexted_count = numbers.iter().filter(|num| range.contains(num)).count();
             let mut vec = vec![];
@@ -1098,7 +1098,7 @@
             .unwrap()
             .is_none());
         let column = fast_field_reader
-            .column_opt::(r#"json.attr\.age"#)
+            .column_opt::(r"json.attr\.age")
             .unwrap()
             .unwrap();
         let vals: Vec = column.values_for_doc(0u32).collect();
diff --git a/src/indexer/mod.rs b/src/indexer/mod.rs
index 53cdcdaf5..d8b570192 100644
--- a/src/indexer/mod.rs
+++ b/src/indexer/mod.rs
@@ -89,7 +89,7 @@ mod tests_mmap {
         let parse_query = QueryParser::for_index(&index, Vec::new());
         {
             let query = parse_query
-                .parse_query(r#"json.k8s\.container\.name:prometheus"#)
+                .parse_query(r"json.k8s\.container\.name:prometheus")
                 .unwrap();
             let num_docs = searcher.search(&query, &Count).unwrap();
             assert_eq!(num_docs, 1);
@@ -127,7 +127,7 @@
         }
         {
             let query = parse_query
-                .parse_query(r#"json.k8s\.container\.name:prometheus"#)
+                .parse_query(r"json.k8s\.container\.name:prometheus")
                 .unwrap();
             let num_docs = searcher.search(&query, &Count).unwrap();
             assert_eq!(num_docs, 1);
diff --git a/src/positions/mod.rs b/src/positions/mod.rs
index f0d5d5550..cd98eb69d 100644
--- a/src/positions/mod.rs
+++ b/src/positions/mod.rs
@@ -119,7 +119,7 @@ pub mod tests {
         serializer.close_term()?;
         serializer.close()?;
         let position_delta = OwnedBytes::new(positions_buffer);
-        let mut output_delta_pos_buffer = vec![0u32; 5];
+        let mut output_delta_pos_buffer = [0u32; 5];
         let mut position_reader = PositionReader::open(position_delta)?;
         position_reader.read(0, &mut output_delta_pos_buffer[..]);
         assert_eq!(
diff --git a/src/query/phrase_query/mod.rs b/src/query/phrase_query/mod.rs
index 4ad1ac4aa..8abac7ddf 100644
--- a/src/query/phrase_query/mod.rs
+++ b/src/query/phrase_query/mod.rs
@@ -75,7 +75,7 @@ pub mod tests {
         let index = create_index(&["a b b d c g c", "a b a b c"])?;
         let text_field = index.schema().get_field("text").unwrap();
         let searcher = index.reader()?.searcher();
-        let terms: Vec<Term> = vec!["a", "b", "c"]
+        let terms: Vec<Term> = ["a", "b", "c"]
             .iter()
             .map(|text| Term::from_field_text(text_field, text))
             .collect();
diff --git a/src/query/query_parser/query_parser.rs b/src/query/query_parser/query_parser.rs
index 172b1fc6a..49e0cf91b 100644
--- a/src/query/query_parser/query_parser.rs
+++ b/src/query/query_parser/query_parser.rs
@@ -1305,7 +1305,7 @@ mod test {
             "k8s\u{1}node\u{1}name\0shello"
         );
         assert_eq!(
-            extract_query_term_json_path(r#"json.k8s\.node\.name:hello"#),
+            extract_query_term_json_path(r"json.k8s\.node\.name:hello"),
             "k8s.node.name\0shello"
         );
     }
@@ -1731,10 +1731,10 @@
     #[test]
     fn test_escaped_field() {
         let mut schema_builder = Schema::builder();
-        schema_builder.add_text_field(r#"a\.b"#, STRING);
+        schema_builder.add_text_field(r"a\.b", STRING);
         let schema = schema_builder.build();
         let query_parser = QueryParser::new(schema, Vec::new(), TokenizerManager::default());
-        let query = query_parser.parse_query(r#"a\.b:hello"#).unwrap();
+        let query = query_parser.parse_query(r"a\.b:hello").unwrap();
         assert_eq!(
             format!("{query:?}"),
             "TermQuery(Term(field=0, type=Str, \"hello\"))"
diff --git a/src/schema/schema.rs b/src/schema/schema.rs
index b37a3af4a..4d6aae8d8 100644
--- a/src/schema/schema.rs
+++ b/src/schema/schema.rs
@@ -514,8 +514,8 @@ mod tests {
     #[test]
     fn test_locate_splitting_dots() {
         assert_eq!(&super::locate_splitting_dots("a.b.c"), &[1, 3]);
-        assert_eq!(&super::locate_splitting_dots(r#"a\.b.c"#), &[4]);
-        assert_eq!(&super::locate_splitting_dots(r#"a\..b.c"#), &[3, 5]);
+        assert_eq!(&super::locate_splitting_dots(r"a\.b.c"), &[4]);
+        assert_eq!(&super::locate_splitting_dots(r"a\..b.c"), &[3, 5]);
     }
 
     #[test]
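The changes above are mechanical Rust idiom cleanups: raw strings drop their # guards when the literal contains no quote character, and vec![...] becomes a plain array wherever the collection is never resized. Below is a minimal standalone sketch of both idioms; it is not part of the patch and the values are illustrative only.

fn main() {
    // r"..." keeps backslashes literal; the # guards are only needed when the
    // literal itself contains a '"' character.
    let escaped_path = r"k8s\.container\.name";
    assert_eq!(escaped_path, "k8s\\.container\\.name");

    // A fixed-size array avoids the heap allocation that vec![...] would
    // perform for a collection that is never grown.
    let numbers = [100u64, 200, 300];
    let in_range = numbers.iter().filter(|&&n| (150..=350).contains(&n)).count();
    assert_eq!(in_range, 2);
}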