diff --git a/columnar/src/columnar/column_type.rs b/columnar/src/columnar/column_type.rs
index 36763089d..835efdd8b 100644
--- a/columnar/src/columnar/column_type.rs
+++ b/columnar/src/columnar/column_type.rs
@@ -34,7 +34,7 @@ impl fmt::Display for ColumnType {
             ColumnType::IpAddr => "ip",
             ColumnType::DateTime => "datetime",
         };
-        write!(f, "{}", short_str)
+        write!(f, "{short_str}")
     }
 }
diff --git a/columnar/src/dynamic_column.rs b/columnar/src/dynamic_column.rs
index cfb31d0bf..ef7aaa5e9 100644
--- a/columnar/src/dynamic_column.rs
+++ b/columnar/src/dynamic_column.rs
@@ -26,14 +26,14 @@ impl fmt::Debug for DynamicColumn {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "[{} {} |", self.get_cardinality(), self.column_type())?;
         match self {
-            DynamicColumn::Bool(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::I64(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::U64(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::F64(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::IpAddr(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::DateTime(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::Bytes(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::Str(col) => write!(f, "{:?}", col)?,
+            DynamicColumn::Bool(col) => write!(f, " {col:?}")?,
+            DynamicColumn::I64(col) => write!(f, " {col:?}")?,
+            DynamicColumn::U64(col) => write!(f, " {col:?}")?,
+            DynamicColumn::F64(col) => write!(f, "{col:?}")?,
+            DynamicColumn::IpAddr(col) => write!(f, "{col:?}")?,
+            DynamicColumn::DateTime(col) => write!(f, "{col:?}")?,
+            DynamicColumn::Bytes(col) => write!(f, "{col:?}")?,
+            DynamicColumn::Str(col) => write!(f, "{col:?}")?,
         }
         write!(f, "]")
     }
diff --git a/common/src/byte_count.rs b/common/src/byte_count.rs
index 4b346f8cb..559618796 100644
--- a/common/src/byte_count.rs
+++ b/common/src/byte_count.rs
@@ -37,7 +37,7 @@ impl ByteCount {
         for (suffix, threshold) in SUFFIX_AND_THRESHOLD.iter().rev() {
             if self.get_bytes() >= *threshold {
                 let unit_num = self.get_bytes() as f64 / *threshold as f64;
-                return format!("{:.2} {}", unit_num, suffix);
+                return format!("{unit_num:.2} {suffix}");
             }
         }
         format!("{:.2} B", self.get_bytes())
diff --git a/common/src/vint.rs b/common/src/vint.rs
index ee8e0aa6f..64d4f7e14 100644
--- a/common/src/vint.rs
+++ b/common/src/vint.rs
@@ -261,7 +261,7 @@ mod tests {
         let mut buffer2 = [0u8; 8];
         let len_vint = VInt(val as u64).serialize_into(&mut buffer);
         let res2 = serialize_vint_u32(val, &mut buffer2);
-        assert_eq!(&buffer[..len_vint], res2, "array wrong for {}", val);
+        assert_eq!(&buffer[..len_vint], res2, "array wrong for {val}");
     }
 
     #[test]
diff --git a/examples/index_from_multiple_threads.rs b/examples/index_from_multiple_threads.rs
index 02807698f..9b8195ebf 100644
--- a/examples/index_from_multiple_threads.rs
+++ b/examples/index_from_multiple_threads.rs
@@ -96,7 +96,7 @@ fn main() -> tantivy::Result<()> {
             let mut index_writer_wlock = index_writer.write().unwrap();
             index_writer_wlock.commit()?
         };
-        println!("committed with opstamp {}", opstamp);
+        println!("committed with opstamp {opstamp}");
         thread::sleep(Duration::from_millis(500));
     }
diff --git a/examples/iterating_docs_and_positions.rs b/examples/iterating_docs_and_positions.rs
index 8f718cf97..99a8e74e5 100644
--- a/examples/iterating_docs_and_positions.rs
+++ b/examples/iterating_docs_and_positions.rs
@@ -84,7 +84,7 @@ fn main() -> tantivy::Result<()> {
                 // Doc 0: TermFreq 2: [0, 4]
                 // Doc 2: TermFreq 1: [0]
                 // ```
-                println!("Doc {}: TermFreq {}: {:?}", doc_id, term_freq, positions);
+                println!("Doc {doc_id}: TermFreq {term_freq}: {positions:?}");
                 doc_id = segment_postings.advance();
             }
         }
@@ -125,7 +125,7 @@ fn main() -> tantivy::Result<()> {
             // Once again these docs MAY contains deleted documents as well.
             let docs = block_segment_postings.docs();
             // Prints `Docs [0, 2].`
-            println!("Docs {:?}", docs);
+            println!("Docs {docs:?}");
             block_segment_postings.advance();
         }
     }
diff --git a/examples/snippet.rs b/examples/snippet.rs
index 4d38ade70..34ba3c8c9 100644
--- a/examples/snippet.rs
+++ b/examples/snippet.rs
@@ -56,7 +56,7 @@ fn main() -> tantivy::Result<()> {
     for (score, doc_address) in top_docs {
         let doc = searcher.doc(doc_address)?;
         let snippet = snippet_generator.snippet_from_doc(&doc);
-        println!("Document score {}:", score);
+        println!("Document score {score}:");
         println!(
             "title: {}",
             doc.get_first(title).unwrap().as_text().unwrap()
diff --git a/examples/stop_words.rs b/examples/stop_words.rs
index 3a4a0651e..b1c8d7fbb 100644
--- a/examples/stop_words.rs
+++ b/examples/stop_words.rs
@@ -106,7 +106,7 @@ fn main() -> tantivy::Result<()> {
 
     for (score, doc_address) in top_docs {
         let retrieved_doc = searcher.doc(doc_address)?;
-        println!("\n==\nDocument score {}:", score);
+        println!("\n==\nDocument score {score}:");
         println!("{}", schema.to_json(&retrieved_doc));
     }
 
diff --git a/ownedbytes/src/lib.rs b/ownedbytes/src/lib.rs
index 1b6046e2e..12dbd14c1 100644
--- a/ownedbytes/src/lib.rs
+++ b/ownedbytes/src/lib.rs
@@ -160,7 +160,7 @@ impl fmt::Debug for OwnedBytes {
         } else {
             self.as_slice()
         };
-        write!(f, "OwnedBytes({:?}, len={})", bytes_truncated, self.len())
+        write!(f, "OwnedBytes({bytes_truncated:?}, len={})", self.len())
     }
 }
 
@@ -259,12 +259,12 @@ mod tests {
     fn test_owned_bytes_debug() {
         let short_bytes = OwnedBytes::new(b"abcd".as_ref());
         assert_eq!(
-            format!("{:?}", short_bytes),
+            format!("{short_bytes:?}"),
             "OwnedBytes([97, 98, 99, 100], len=4)"
         );
         let long_bytes = OwnedBytes::new(b"abcdefghijklmnopq".as_ref());
         assert_eq!(
-            format!("{:?}", long_bytes),
+            format!("{long_bytes:?}"),
             "OwnedBytes([97, 98, 99, 100, 101, 102, 103, 104, 105, 106], len=17)"
         );
     }
diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
index baf460e62..76160a295 100644
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -56,7 +56,7 @@ fn word<'a>() -> impl Parser<&'a str, Output = String> {
             !c.is_whitespace() && ![':', '^', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
         })),
     )
-    .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
+    .map(|(s1, s2): (char, String)| format!("{s1}{s2}"))
    .and_then(|s: String| match s.as_str() {
         "OR" | "AND " | "NOT" => Err(StringStreamError::UnexpectedParse),
         _ => Ok(s),
@@ -74,7 +74,7 @@ fn relaxed_word<'a>() -> impl Parser<&'a str, Output = String> {
             !c.is_whitespace() && !['{', '}', '"', '[', ']', '(', ')'].contains(&c)
         })),
     )
-    .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
+    .map(|(s1, s2): (char, String)| format!("{s1}{s2}"))
 }
 
 /// Parses a date time according to rfc3339
@@ -178,9 +178,9 @@ fn negative_number<'a>() -> impl Parser<&'a str, Output = String> {
     )
     .map(|(s1, s2, s3): (char, String, Option<(char, String)>)| {
         if let Some(('.', s3)) = s3 {
-            format!("{}{}.{}", s1, s2, s3)
+            format!("{s1}{s2}.{s3}")
         } else {
-            format!("{}{}", s1, s2)
+            format!("{s1}{s2}")
         }
     })
 }
@@ -419,9 +419,7 @@ mod test {
     fn assert_nearly_equals(expected: f64, val: f64) {
         assert!(
             nearly_equals(val, expected),
-            "Got {}, expected {}.",
-            val,
-            expected
+            "Got {val}, expected {expected}."
         );
     }
 
@@ -468,7 +466,7 @@ mod test {
 
     fn test_parse_query_to_ast_helper(query: &str, expected: &str) {
         let query = parse_to_ast().parse(query).unwrap().0;
-        let query_str = format!("{:?}", query);
+        let query_str = format!("{query:?}");
         assert_eq!(query_str, expected);
     }
 
@@ -554,7 +552,7 @@ mod test {
     fn test_occur_leaf() {
         let ((occur, ast), _) = super::occur_leaf().parse("+abc").unwrap();
         assert_eq!(occur, Some(Occur::Must));
-        assert_eq!(format!("{:?}", ast), "\"abc\"");
+        assert_eq!(format!("{ast:?}"), "\"abc\"");
     }
 
     #[test]
@@ -613,7 +611,7 @@ mod test {
         let escaped_special_chars_re = Regex::new(ESCAPED_SPECIAL_CHARS_PATTERN).unwrap();
         for special_char in SPECIAL_CHARS.iter() {
             assert_eq!(
-                escaped_special_chars_re.replace_all(&format!("\\{}", special_char), "$1"),
+                escaped_special_chars_re.replace_all(&format!("\\{special_char}"), "$1"),
                 special_char.to_string()
             );
         }
diff --git a/query-grammar/src/user_input_ast.rs b/query-grammar/src/user_input_ast.rs
index c020b1d6a..a1f565fd1 100644
--- a/query-grammar/src/user_input_ast.rs
+++ b/query-grammar/src/user_input_ast.rs
@@ -28,7 +28,7 @@ impl Debug for UserInputLeaf {
                 ref upper,
             } => {
                 if let Some(ref field) = field {
-                    write!(formatter, "\"{}\":", field)?;
+                    write!(formatter, "\"{field}\":")?;
                 }
                 lower.display_lower(formatter)?;
                 write!(formatter, " TO ")?;
@@ -37,14 +37,14 @@ impl Debug for UserInputLeaf {
             }
             UserInputLeaf::Set { field, elements } => {
                 if let Some(ref field) = field {
-                    write!(formatter, "\"{}\": ", field)?;
+                    write!(formatter, "\"{field}\": ")?;
                 }
                 write!(formatter, "IN [")?;
                 for (i, element) in elements.iter().enumerate() {
                     if i != 0 {
                         write!(formatter, " ")?;
                     }
-                    write!(formatter, "\"{}\"", element)?;
+                    write!(formatter, "\"{element}\"")?;
                 }
                 write!(formatter, "]")
             }
@@ -63,7 +63,7 @@ pub struct UserInputLiteral {
 impl fmt::Debug for UserInputLiteral {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         if let Some(ref field) = self.field_name {
-            write!(formatter, "\"{}\":", field)?;
+            write!(formatter, "\"{field}\":")?;
         }
         write!(formatter, "\"{}\"", self.phrase)?;
         if self.slop > 0 {
@@ -83,16 +83,16 @@ pub enum UserInputBound {
 impl UserInputBound {
     fn display_lower(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            UserInputBound::Inclusive(ref word) => write!(formatter, "[\"{}\"", word),
-            UserInputBound::Exclusive(ref word) => write!(formatter, "{{\"{}\"", word),
+            UserInputBound::Inclusive(ref word) => write!(formatter, "[\"{word}\""),
+            UserInputBound::Exclusive(ref word) => write!(formatter, "{{\"{word}\""),
             UserInputBound::Unbounded => write!(formatter, "{{\"*\""),
         }
     }
 
     fn display_upper(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            UserInputBound::Inclusive(ref word) => write!(formatter, "\"{}\"]", word),
-            UserInputBound::Exclusive(ref word) => write!(formatter, "\"{}\"}}", word),
+            UserInputBound::Inclusive(ref word) => write!(formatter, "\"{word}\"]"),
+            UserInputBound::Exclusive(ref word) => write!(formatter, "\"{word}\"}}"),
             UserInputBound::Unbounded => write!(formatter, "\"*\"}}"),
         }
     }
@@ -163,9 +163,9 @@ fn print_occur_ast(
     formatter: &mut fmt::Formatter,
 ) -> fmt::Result {
     if let Some(occur) = occur_opt {
-        write!(formatter, "{}{:?}", occur, ast)?;
+        write!(formatter, "{occur}{ast:?}")?;
     } else {
-        write!(formatter, "*{:?}", ast)?;
+        write!(formatter, "*{ast:?}")?;
     }
     Ok(())
 }
@@ -187,8 +187,8 @@ impl fmt::Debug for UserInputAst {
                 }
                 Ok(())
             }
-            UserInputAst::Leaf(ref subquery) => write!(formatter, "{:?}", subquery),
-            UserInputAst::Boost(ref leaf, boost) => write!(formatter, "({:?})^{}", leaf, boost),
+            UserInputAst::Leaf(ref subquery) => write!(formatter, "{subquery:?}"),
+            UserInputAst::Boost(ref leaf, boost) => write!(formatter, "({leaf:?})^{boost}"),
         }
     }
 }
diff --git a/src/aggregation/agg_result.rs b/src/aggregation/agg_result.rs
index bb95858ba..ff9e7716f 100644
--- a/src/aggregation/agg_result.rs
+++ b/src/aggregation/agg_result.rs
@@ -34,8 +34,7 @@ impl AggregationResults {
         } else {
             // Validation is be done during request parsing, so we can't reach this state.
             Err(TantivyError::InternalError(format!(
-                "Can't find aggregation {:?} in sub-aggregations",
-                name
+                "Can't find aggregation {name:?} in sub-aggregations"
             )))
         }
     }
diff --git a/src/aggregation/agg_tests.rs b/src/aggregation/agg_tests.rs
index a57126566..4ce745849 100644
--- a/src/aggregation/agg_tests.rs
+++ b/src/aggregation/agg_tests.rs
@@ -533,7 +533,7 @@ fn test_aggregation_invalid_requests() -> crate::Result<()> {
     let agg_res = avg_on_field("dummy_text").unwrap_err();
     assert_eq!(
-        format!("{:?}", agg_res),
+        format!("{agg_res:?}"),
         r#"InvalidArgument("Field \"dummy_text\" is not configured as fast field")"#
     );
 
diff --git a/src/aggregation/bucket/histogram/date_histogram.rs b/src/aggregation/bucket/histogram/date_histogram.rs
index a61723c91..6188973c2 100644
--- a/src/aggregation/bucket/histogram/date_histogram.rs
+++ b/src/aggregation/bucket/histogram/date_histogram.rs
@@ -131,16 +131,14 @@ impl DateHistogramAggregationReq {
     fn validate(&self) -> crate::Result<()> {
         if let Some(interval) = self.interval.as_ref() {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "`interval` parameter {:?} in date histogram is unsupported, only \
-                 `fixed_interval` is supported",
-                interval
+                "`interval` parameter {interval:?} in date histogram is unsupported, only \
+                 `fixed_interval` is supported"
             )));
         }
         if let Some(interval) = self.calendar_interval.as_ref() {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "`calendar_interval` parameter {:?} in date histogram is unsupported, only \
-                 `fixed_interval` is supported",
-                interval
+                "`calendar_interval` parameter {interval:?} in date histogram is unsupported, \
+                 only `fixed_interval` is supported"
             )));
         }
         if self.format.is_some() {
diff --git a/src/aggregation/bucket/histogram/histogram.rs b/src/aggregation/bucket/histogram/histogram.rs
index 010187a0b..f3c0d13cc 100644
--- a/src/aggregation/bucket/histogram/histogram.rs
+++ b/src/aggregation/bucket/histogram/histogram.rs
@@ -142,9 +142,8 @@ impl HistogramAggregation {
         {
             if extended_bounds.min < hard_bounds.min || extended_bounds.max > hard_bounds.max {
                 return Err(TantivyError::InvalidArgument(format!(
-                    "extended_bounds have to be inside hard_bounds, extended_bounds: {}, \
-                     hard_bounds {}",
-                    extended_bounds, hard_bounds
+                    "extended_bounds have to be inside hard_bounds, extended_bounds: \
+                     {extended_bounds}, hard_bounds {hard_bounds}"
                 )));
             }
         }
diff --git a/src/aggregation/bucket/term_agg.rs b/src/aggregation/bucket/term_agg.rs
index 80f2ce30e..fa6344d2d 100644
--- a/src/aggregation/bucket/term_agg.rs
+++ b/src/aggregation/bucket/term_agg.rs
@@ -333,8 +333,8 @@ impl SegmentTermCollector {
             sub_aggregations.aggs.get(agg_name).ok_or_else(|| {
                 TantivyError::InvalidArgument(format!(
-                    "could not find aggregation with name {} in metric sub_aggregations",
-                    agg_name
+                    "could not find aggregation with name {agg_name} in metric \
+                     sub_aggregations"
                 ))
             })?;
         }
@@ -409,10 +409,7 @@ impl SegmentTermCollector {
                     .sub_aggs
                     .remove(&id)
                     .unwrap_or_else(|| {
-                        panic!(
-                            "Internal Error: could not find subaggregation for id {}",
-                            id
-                        )
+                        panic!("Internal Error: could not find subaggregation for id {id}")
                     })
                     .add_intermediate_aggregation_result(
                         &agg_with_accessor.sub_aggregation,
@@ -442,8 +439,7 @@ impl SegmentTermCollector {
         for (term_id, doc_count) in entries {
             if !term_dict.ord_to_str(term_id, &mut buffer)? {
                 return Err(TantivyError::InternalError(format!(
-                    "Couldn't find term_id {} in dict",
-                    term_id
+                    "Couldn't find term_id {term_id} in dict"
                 )));
             }
diff --git a/src/aggregation/date.rs b/src/aggregation/date.rs
index 1b5e21838..97befe7b9 100644
--- a/src/aggregation/date.rs
+++ b/src/aggregation/date.rs
@@ -6,8 +6,7 @@ use crate::TantivyError;
 pub(crate) fn format_date(val: i64) -> crate::Result<String> {
     let datetime = OffsetDateTime::from_unix_timestamp_nanos(val as i128).map_err(|err| {
         TantivyError::InvalidArgument(format!(
-            "Could not convert {:?} to OffsetDateTime, err {:?}",
-            val, err
+            "Could not convert {val:?} to OffsetDateTime, err {err:?}"
         ))
     })?;
     let key_as_string = datetime
diff --git a/src/aggregation/metric/percentiles.rs b/src/aggregation/metric/percentiles.rs
index db496a7dc..befda0b0c 100644
--- a/src/aggregation/metric/percentiles.rs
+++ b/src/aggregation/metric/percentiles.rs
@@ -213,8 +213,7 @@ impl PercentilesCollector {
     pub(crate) fn merge_fruits(&mut self, right: PercentilesCollector) -> crate::Result<()> {
         self.sketch.merge(&right.sketch).map_err(|err| {
             TantivyError::AggregationError(AggregationError::InternalError(format!(
-                "Error while merging percentiles {:?}",
-                err
+                "Error while merging percentiles {err:?}"
             )))
         })?;
diff --git a/src/aggregation/metric/stats.rs b/src/aggregation/metric/stats.rs
index bd63f08dd..f53d6c745 100644
--- a/src/aggregation/metric/stats.rs
+++ b/src/aggregation/metric/stats.rs
@@ -66,8 +66,7 @@ impl Stats {
             "max" => Ok(self.max),
             "avg" => Ok(self.avg),
             _ => Err(TantivyError::InvalidArgument(format!(
-                "Unknown property {} on stats metric aggregation",
-                agg_property
+                "Unknown property {agg_property} on stats metric aggregation"
             ))),
         }
     }
diff --git a/src/aggregation/mod.rs b/src/aggregation/mod.rs
index 32741a29c..f6d6aeefe 100644
--- a/src/aggregation/mod.rs
+++ b/src/aggregation/mod.rs
@@ -292,7 +292,7 @@ pub(crate) fn f64_from_fastfield_u64(val: u64, field_type: &ColumnType) -> f64 {
         ColumnType::I64 | ColumnType::DateTime => i64::from_u64(val) as f64,
         ColumnType::F64 => f64::from_u64(val),
         _ => {
-            panic!("unexpected type {:?}. This should not happen", field_type)
+            panic!("unexpected type {field_type:?}. This should not happen")
         }
     }
 }
diff --git a/src/collector/facet_collector.rs b/src/collector/facet_collector.rs
index 22de5c3bc..6861a9faa 100644
--- a/src/collector/facet_collector.rs
+++ b/src/collector/facet_collector.rs
@@ -812,7 +812,7 @@ mod bench {
         let mut docs = vec![];
         for val in 0..50 {
-            let facet = Facet::from(&format!("/facet_{}", val));
+            let facet = Facet::from(&format!("/facet_{val}"));
             for _ in 0..val * val {
                 docs.push(doc!(facet_field=>facet.clone()));
             }
diff --git a/src/collector/top_score_collector.rs b/src/collector/top_score_collector.rs
index a8b792b66..7b6e2c350 100644
--- a/src/collector/top_score_collector.rs
+++ b/src/collector/top_score_collector.rs
@@ -52,10 +52,8 @@ where
         let requested_type = field_entry.field_type().value_type();
         if schema_type != requested_type {
             return Err(TantivyError::SchemaError(format!(
-                "Field {:?} is of type {:?}!={:?}",
-                field_entry.name(),
-                schema_type,
-                requested_type
+                "Field {:?} is of type {schema_type:?}!={requested_type:?}",
+                field_entry.name()
             )));
         }
         self.collector.for_segment(segment_local_id, segment)
diff --git a/src/core/executor.rs b/src/core/executor.rs
index b05fcd3b2..f4d7d2a13 100644
--- a/src/core/executor.rs
+++ b/src/core/executor.rs
@@ -26,7 +26,7 @@ impl Executor {
     pub fn multi_thread(num_threads: usize, prefix: &'static str) -> crate::Result<Executor> {
         let pool = ThreadPoolBuilder::new()
             .num_threads(num_threads)
-            .thread_name(move |num| format!("{}{}", prefix, num))
+            .thread_name(move |num| format!("{prefix}{num}"))
             .build()?;
         Ok(Executor::ThreadPool(pool))
     }
diff --git a/src/core/index.rs b/src/core/index.rs
index 4ac0c3cdc..37a47bf72 100644
--- a/src/core/index.rs
+++ b/src/core/index.rs
@@ -39,10 +39,7 @@ fn load_metas(
         .map_err(|e| {
             DataCorruption::new(
                 META_FILEPATH.to_path_buf(),
-                format!(
-                    "Meta file cannot be deserialized. {:?}. Content: {:?}",
-                    e, meta_string
-                ),
+                format!("Meta file cannot be deserialized. {e:?}. Content: {meta_string:?}"),
             )
         })
         .map_err(From::from)
@@ -438,8 +435,7 @@ impl Index {
         };
         let indexing_options = indexing_options_opt.ok_or_else(|| {
             TantivyError::InvalidArgument(format!(
-                "No indexing options set for field {:?}",
-                field_entry
+                "No indexing options set for field {field_entry:?}"
             ))
         })?;
@@ -447,8 +443,7 @@ impl Index {
             .get(indexing_options.tokenizer())
             .ok_or_else(|| {
                 TantivyError::InvalidArgument(format!(
-                    "No Tokenizer found for field {:?}",
-                    field_entry
+                    "No Tokenizer found for field {field_entry:?}"
                 ))
             })
     }
diff --git a/src/core/searcher.rs b/src/core/searcher.rs
index f0a43268b..804b0f956 100644
--- a/src/core/searcher.rs
+++ b/src/core/searcher.rs
@@ -296,6 +296,6 @@ impl fmt::Debug for Searcher {
             .iter()
             .map(SegmentReader::segment_id)
             .collect::<Vec<_>>();
-        write!(f, "Searcher({:?})", segment_ids)
+        write!(f, "Searcher({segment_ids:?})")
     }
 }
diff --git a/src/core/tests.rs b/src/core/tests.rs
index b59d33502..d8af5e7d0 100644
--- a/src/core/tests.rs
+++ b/src/core/tests.rs
@@ -269,9 +269,8 @@ fn garbage_collect_works_as_intended() -> crate::Result<()> {
     assert_eq!(searcher.num_docs(), 8_000);
     assert!(
         mem_right_after_merge_finished < mem_right_after_commit,
-        "(mem after merge){} is expected < (mem before merge){}",
-        mem_right_after_merge_finished,
-        mem_right_after_commit
+        "(mem after merge){mem_right_after_merge_finished} is expected < (mem before \
+         merge){mem_right_after_commit}"
     );
     Ok(())
 }
diff --git a/src/directory/error.rs b/src/directory/error.rs
index 9f8e6ad51..264fa177c 100644
--- a/src/directory/error.rs
+++ b/src/directory/error.rs
@@ -116,14 +116,14 @@ impl fmt::Debug for Incompatibility {
                 index_compression_format,
             } => {
                 let err = format!(
-                    "Library was compiled with {:?} compression, index was compressed with {:?}",
-                    library_compression_format, index_compression_format
+                    "Library was compiled with {library_compression_format:?} compression, index \
+                     was compressed with {index_compression_format:?}"
                 );
                 let advice = format!(
-                    "Change the feature flag to {:?} and rebuild the library",
-                    index_compression_format
+                    "Change the feature flag to {index_compression_format:?} and rebuild the \
+                     library"
                 );
-                write!(f, "{}. {}", err, advice)?;
+                write!(f, "{err}. {advice}")?;
             }
             Incompatibility::IndexMismatch {
                 library_version,
@@ -140,7 +140,7 @@ impl fmt::Debug for Incompatibility {
                     and rebuild your project.",
                     index_version.index_format_version, index_version.major, index_version.minor
                 );
-                write!(f, "{}. {}", err, advice)?;
+                write!(f, "{err}. {advice}")?;
             }
         }
diff --git a/src/directory/footer.rs b/src/directory/footer.rs
index 80a38392b..4695c5f69 100644
--- a/src/directory/footer.rs
+++ b/src/directory/footer.rs
@@ -73,9 +73,9 @@ impl Footer {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
                 format!(
-                    "Footer seems invalid as it suggests a footer len of {}. File is corrupted, \
-                     or the index was created with a different & old version of tantivy.",
-                    footer_len
+                    "Footer seems invalid as it suggests a footer len of {footer_len}. File is \
+                     corrupted, or the index was created with a different & old version of \
+                     tantivy."
                 ),
             ));
         }
@@ -84,8 +84,8 @@ impl Footer {
             return Err(io::Error::new(
                 io::ErrorKind::UnexpectedEof,
                 format!(
-                    "File corrupted. The file is smaller than it's footer bytes (len={}).",
-                    total_footer_size
+                    "File corrupted. The file is smaller than its footer bytes \
+                     (len={total_footer_size})."
                 ),
             ));
         }
diff --git a/src/directory/managed_directory.rs b/src/directory/managed_directory.rs
index c9ae9f4ec..c24ec5534 100644
--- a/src/directory/managed_directory.rs
+++ b/src/directory/managed_directory.rs
@@ -69,7 +69,7 @@ impl ManagedDirectory {
             .map_err(|e| {
                 DataCorruption::new(
                     MANAGED_FILEPATH.to_path_buf(),
-                    format!("Managed file cannot be deserialized: {:?}. ", e),
+                    format!("Managed file cannot be deserialized: {e:?}. "),
                 )
             })?;
         Ok(ManagedDirectory {
diff --git a/src/directory/mmap_directory.rs b/src/directory/mmap_directory.rs
index 537eb95e3..847e818cd 100644
--- a/src/directory/mmap_directory.rs
+++ b/src/directory/mmap_directory.rs
@@ -331,10 +331,7 @@ impl Directory for MmapDirectory {
         let full_path = self.resolve_path(path);
 
         let mut mmap_cache = self.inner.mmap_cache.write().map_err(|_| {
-            let msg = format!(
-                "Failed to acquired write lock on mmap cache while reading {:?}",
-                path
-            );
+            let msg = format!("Failed to acquire write lock on mmap cache while reading {path:?}");
             let io_err = make_io_err(msg);
             OpenReadError::wrap_io_error(io_err, path.to_path_buf())
         })?;
diff --git a/src/error.rs b/src/error.rs
index 0089e550c..316b80f86 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -44,7 +44,7 @@ impl fmt::Debug for DataCorruption {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         write!(f, "Data corruption")?;
         if let Some(ref filepath) = &self.filepath {
-            write!(f, " (in file `{:?}`)", filepath)?;
+            write!(f, " (in file `{filepath:?}`)")?;
         }
         write!(f, ": {}.", self.comment)?;
         Ok(())
@@ -120,7 +120,7 @@ impl From for TantivyError {
 }
 
 impl From<FastFieldNotAvailableError> for TantivyError {
     fn from(fastfield_error: FastFieldNotAvailableError) -> TantivyError {
-        TantivyError::SchemaError(format!("{}", fastfield_error))
+        TantivyError::SchemaError(format!("{fastfield_error}"))
     }
 }
 
 impl From for TantivyError {
@@ -131,7 +131,7 @@ impl From for TantivyError {
 
 impl From<query::QueryParserError> for TantivyError {
     fn from(parsing_error: query::QueryParserError) -> TantivyError {
-        TantivyError::InvalidArgument(format!("Query is invalid. {:?}", parsing_error))
+        TantivyError::InvalidArgument(format!("Query is invalid. {parsing_error:?}"))
     }
 }
@@ -161,7 +161,7 @@ impl From for TantivyError {
 
 impl From<schema::DocParsingError> for TantivyError {
     fn from(error: schema::DocParsingError) -> TantivyError {
-        TantivyError::InvalidArgument(format!("Failed to parse document {:?}", error))
+        TantivyError::InvalidArgument(format!("Failed to parse document {error:?}"))
     }
 }
diff --git a/src/fastfield/writer.rs b/src/fastfield/writer.rs
index a2fe8cb75..e478ff90d 100644
--- a/src/fastfield/writer.rs
+++ b/src/fastfield/writer.rs
@@ -65,8 +65,7 @@ impl FastFieldsWriter {
             if let Some(tokenizer_name) = text_options.get_fast_field_tokenizer_name() {
                 let text_analyzer = tokenizer_manager.get(tokenizer_name).ok_or_else(|| {
                     TantivyError::InvalidArgument(format!(
-                        "Tokenizer {:?} not found",
-                        tokenizer_name
+                        "Tokenizer {tokenizer_name:?} not found"
                     ))
                 })?;
                 per_field_tokenizer[field_id.field_id() as usize] = Some(text_analyzer);
diff --git a/src/indexer/index_writer.rs b/src/indexer/index_writer.rs
index 5333badbf..c44ad39b6 100644
--- a/src/indexer/index_writer.rs
+++ b/src/indexer/index_writer.rs
@@ -40,8 +40,7 @@ const PIPELINE_MAX_SIZE_IN_DOCS: usize = 10_000;
 
 fn error_in_index_worker_thread(context: &str) -> TantivyError {
     TantivyError::ErrorInThread(format!(
-        "{}. A worker thread encountered an error (io::Error most likely) or panicked.",
-        context
+        "{context}. A worker thread encountered an error (io::Error most likely) or panicked."
     ))
 }
@@ -270,15 +269,14 @@ impl IndexWriter {
     ) -> crate::Result<IndexWriter> {
         if memory_arena_in_bytes_per_thread < MEMORY_ARENA_NUM_BYTES_MIN {
             let err_msg = format!(
-                "The memory arena in bytes per thread needs to be at least {}.",
-                MEMORY_ARENA_NUM_BYTES_MIN
+                "The memory arena in bytes per thread needs to be at least \
+                 {MEMORY_ARENA_NUM_BYTES_MIN}."
             );
             return Err(TantivyError::InvalidArgument(err_msg));
         }
         if memory_arena_in_bytes_per_thread >= MEMORY_ARENA_NUM_BYTES_MAX {
             let err_msg = format!(
-                "The memory arena in bytes per thread cannot exceed {}",
-                MEMORY_ARENA_NUM_BYTES_MAX
+                "The memory arena in bytes per thread cannot exceed {MEMORY_ARENA_NUM_BYTES_MAX}"
             );
             return Err(TantivyError::InvalidArgument(err_msg));
         }
@@ -621,7 +619,7 @@ impl IndexWriter {
         for worker_handle in former_workers_join_handle {
             let indexing_worker_result = worker_handle
                 .join()
-                .map_err(|e| TantivyError::ErrorInThread(format!("{:?}", e)))?;
+                .map_err(|e| TantivyError::ErrorInThread(format!("{e:?}")))?;
             indexing_worker_result?;
             self.add_indexing_worker()?;
         }
@@ -2077,14 +2075,14 @@ mod tests {
         let do_search_ip_field = |term: &str| do_search(term, ip_field).len() as u64;
         let ip_addr = Ipv6Addr::from_u128(existing_id as u128);
 
         // Test incoming ip as ipv6
-        assert_eq!(do_search_ip_field(&format!("\"{}\"", ip_addr)), count);
+        assert_eq!(do_search_ip_field(&format!("\"{ip_addr}\"")), count);
 
         let term = Term::from_field_ip_addr(ip_field, ip_addr);
         assert_eq!(do_search2(term).len() as u64, count);
 
         // Test incoming ip as ipv4
         if let Some(ip_addr) = ip_addr.to_ipv4_mapped() {
-            assert_eq!(do_search_ip_field(&format!("\"{}\"", ip_addr)), count);
+            assert_eq!(do_search_ip_field(&format!("\"{ip_addr}\"")), count);
         }
     }
diff --git a/src/indexer/merger.rs b/src/indexer/merger.rs
index d889b1274..7ed5e42d9 100644
--- a/src/indexer/merger.rs
+++ b/src/indexer/merger.rs
@@ -193,9 +193,8 @@ impl IndexMerger {
         // sort segments by their natural sort setting
         if max_doc >= MAX_DOC_LIMIT {
             let err_msg = format!(
-                "The segment resulting from this merge would have {} docs,which exceeds the limit \
-                 {}.",
-                max_doc, MAX_DOC_LIMIT
+                "The segment resulting from this merge would have {max_doc} docs, which exceeds \
+                 the limit {MAX_DOC_LIMIT}."
             );
             return Err(crate::TantivyError::InvalidArgument(err_msg));
         }
diff --git a/src/indexer/segment_updater.rs b/src/indexer/segment_updater.rs
index 1a74611b8..5fa027530 100644
--- a/src/indexer/segment_updater.rs
+++ b/src/indexer/segment_updater.rs
@@ -238,8 +238,7 @@ pub fn merge_filtered_segments>>(
         segments
             .iter()
             .fold(String::new(), |sum, current| format!(
-                "{}{} ",
-                sum,
+                "{sum}{} ",
                 current.meta().id().uuid_string()
             ))
             .trim_end()
@@ -533,7 +532,7 @@ impl SegmentUpdater {
                         merge_error
                     );
                     if cfg!(test) {
-                        panic!("{:?}", merge_error);
+                        panic!("{merge_error:?}");
                     }
                     let _send_result = merging_future_send.send(Err(merge_error));
                 }
diff --git a/src/postings/mod.rs b/src/postings/mod.rs
index e323a29f8..dd3397445 100644
--- a/src/postings/mod.rs
+++ b/src/postings/mod.rs
@@ -544,8 +544,7 @@ pub mod tests {
             let skip_result_unopt = postings_unopt.seek(target);
             assert_eq!(
                 skip_result_unopt, skip_result_opt,
-                "Failed while skipping to {}",
-                target
+                "Failed while skipping to {target}"
             );
             assert!(skip_result_opt >= target);
             assert_eq!(skip_result_opt, postings_opt.doc());
diff --git a/src/postings/serializer.rs b/src/postings/serializer.rs
index 572e4284c..ddd681c05 100644
--- a/src/postings/serializer.rs
+++ b/src/postings/serializer.rs
@@ -206,7 +206,7 @@ impl<'a> FieldSerializer<'a> {
     /// using `VInt` encoding.
     pub fn close_term(&mut self) -> io::Result<()> {
         fail_point!("FieldSerializer::close_term", |msg: Option<String>| {
-            Err(io::Error::new(io::ErrorKind::Other, format!("{:?}", msg)))
+            Err(io::Error::new(io::ErrorKind::Other, format!("{msg:?}")))
         });
         if self.term_open {
             self.postings_serializer
diff --git a/src/query/bm25.rs b/src/query/bm25.rs
index b071920e6..e06cf28d6 100644
--- a/src/query/bm25.rs
+++ b/src/query/bm25.rs
@@ -50,7 +50,7 @@ impl Bm25StatisticsProvider for Searcher {
 }
 
 pub(crate) fn idf(doc_freq: u64, doc_count: u64) -> Score {
-    assert!(doc_count >= doc_freq, "{} >= {}", doc_count, doc_freq);
+    assert!(doc_count >= doc_freq, "{doc_count} >= {doc_freq}");
     let x = ((doc_count - doc_freq) as Score + 0.5) / (doc_freq as Score + 0.5);
     (1.0 + x).ln()
 }
diff --git a/src/query/const_score_query.rs b/src/query/const_score_query.rs
index 17e4135a8..a267de8b2 100644
--- a/src/query/const_score_query.rs
+++ b/src/query/const_score_query.rs
@@ -72,8 +72,7 @@ impl Weight for ConstWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let mut explanation = Explanation::new("Const", self.score);
diff --git a/src/query/explanation.rs b/src/query/explanation.rs
index 8b08b3242..df9818e0c 100644
--- a/src/query/explanation.rs
+++ b/src/query/explanation.rs
@@ -5,7 +5,7 @@ use serde::Serialize;
 use crate::{DocId, Score, TantivyError};
 
 pub(crate) fn does_not_match(doc: DocId) -> TantivyError {
-    TantivyError::InvalidArgument(format!("Document #({}) does not match", doc))
+    TantivyError::InvalidArgument(format!("Document #({doc}) does not match"))
 }
 
 /// Object describing the score of a given document.
diff --git a/src/query/phrase_prefix_query/phrase_prefix_query.rs b/src/query/phrase_prefix_query/phrase_prefix_query.rs
index 4c0e7b9f3..2bb2d4cc2 100644
--- a/src/query/phrase_prefix_query/phrase_prefix_query.rs
+++ b/src/query/phrase_prefix_query/phrase_prefix_query.rs
@@ -108,8 +108,8 @@ impl PhrasePrefixQuery {
             if !has_positions {
                 let field_name = field_entry.name();
                 return Err(crate::TantivyError::SchemaError(format!(
-                    "Applied phrase query on field {:?}, which does not have positions indexed",
-                    field_name
+                    "Applied phrase query on field {field_name:?}, which does not have positions \
+                     indexed"
                 )));
             }
             let terms = self.phrase_terms();
diff --git a/src/query/phrase_query/phrase_query.rs b/src/query/phrase_query/phrase_query.rs
index 86bcd8ef9..1c608e8a9 100644
--- a/src/query/phrase_query/phrase_query.rs
+++ b/src/query/phrase_query/phrase_query.rs
@@ -102,8 +102,8 @@ impl PhraseQuery {
         if !has_positions {
             let field_name = field_entry.name();
             return Err(crate::TantivyError::SchemaError(format!(
-                "Applied phrase query on field {:?}, which does not have positions indexed",
-                field_name
+                "Applied phrase query on field {field_name:?}, which does not have positions \
+                 indexed"
             )));
         }
         let terms = self.phrase_terms();
diff --git a/src/query/query_parser/logical_ast.rs b/src/query/query_parser/logical_ast.rs
index 47ba87bd7..311377f18 100644
--- a/src/query/query_parser/logical_ast.rs
+++ b/src/query/query_parser/logical_ast.rs
@@ -55,16 +55,16 @@ impl fmt::Debug for LogicalAst {
                     write!(formatter, "<emptyclause>")?;
                 } else {
                     let (occur, subquery) = &clause[0];
-                    write!(formatter, "({}{:?}", occur_letter(*occur), subquery)?;
+                    write!(formatter, "({}{subquery:?}", occur_letter(*occur))?;
                     for (occur, subquery) in &clause[1..] {
-                        write!(formatter, " {}{:?}", occur_letter(*occur), subquery)?;
+                        write!(formatter, " {}{subquery:?}", occur_letter(*occur))?;
                     }
                     formatter.write_str(")")?;
                 }
                 Ok(())
             }
-            LogicalAst::Boost(ref ast, boost) => write!(formatter, "{:?}^{}", ast, boost),
-            LogicalAst::Leaf(ref literal) => write!(formatter, "{:?}", literal),
+            LogicalAst::Boost(ref ast, boost) => write!(formatter, "{ast:?}^{boost}"),
+            LogicalAst::Leaf(ref literal) => write!(formatter, "{literal:?}"),
         }
     }
 }
@@ -78,11 +78,11 @@ impl From<LogicalLiteral> for LogicalAst {
 impl fmt::Debug for LogicalLiteral {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         match *self {
-            LogicalLiteral::Term(ref term) => write!(formatter, "{:?}", term),
+            LogicalLiteral::Term(ref term) => write!(formatter, "{term:?}"),
             LogicalLiteral::Phrase(ref terms, slop) => {
-                write!(formatter, "\"{:?}\"", terms)?;
+                write!(formatter, "\"{terms:?}\"")?;
                 if slop > 0 {
-                    write!(formatter, "~{:?}", slop)
+                    write!(formatter, "~{slop:?}")
                 } else {
                     Ok(())
                 }
@@ -91,24 +91,23 @@ impl fmt::Debug for LogicalLiteral {
                 ref lower,
                 ref upper,
                 ..
-            } => write!(formatter, "({:?} TO {:?})", lower, upper),
+            } => write!(formatter, "({lower:?} TO {upper:?})"),
             LogicalLiteral::Set { ref elements, .. } => {
                 const MAX_DISPLAYED: usize = 10;
 
                 write!(formatter, "IN [")?;
                 for (i, element) in elements.iter().enumerate() {
                     if i == 0 {
-                        write!(formatter, "{:?}", element)?;
+                        write!(formatter, "{element:?}")?;
                     } else if i == MAX_DISPLAYED - 1 {
                         write!(
                             formatter,
-                            ", {:?}, ... ({} more)",
-                            element,
+                            ", {element:?}, ... ({} more)",
                             elements.len() - i - 1
                         )?;
                         break;
                     } else {
-                        write!(formatter, ", {:?}", element)?;
+                        write!(formatter, ", {element:?}")?;
                     }
                 }
                 write!(formatter, "]")
diff --git a/src/query/query_parser/query_parser.rs b/src/query/query_parser/query_parser.rs
index 6b66f93aa..47dc21390 100644
--- a/src/query/query_parser/query_parser.rs
+++ b/src/query/query_parser/query_parser.rs
@@ -942,7 +942,7 @@ mod test {
         default_conjunction: bool,
     ) {
         let query = parse_query_to_logical_ast(query, default_conjunction).unwrap();
-        let query_str = format!("{:?}", query);
+        let query_str = format!("{query:?}");
         assert_eq!(query_str, expected);
     }
 
@@ -951,7 +951,7 @@ mod test {
         let query_parser = make_query_parser();
         let query = query_parser.parse_query("facet:/root/branch/leaf").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"TermQuery(Term(field=11, type=Facet, Facet(/root/branch/leaf)))"#
         );
     }
@@ -964,7 +964,7 @@ mod test {
         query_parser.set_field_boost(text_field, 2.0);
         let query = query_parser.parse_query("text:hello").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"Boost(query=TermQuery(Term(field=1, type=Str, "hello")), boost=2)"#
         );
     }
 
@@ -973,7 +973,7 @@ mod test {
     pub fn test_parse_query_range_with_boost() {
         let query = make_query_parser().parse_query("title:[A TO B]").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "RangeQuery { field: \"title\", value_type: Str, lower_bound: Included([97]), \
              upper_bound: Included([98]), limit: None }"
         );
     }
 
@@ -987,7 +987,7 @@ mod test {
         query_parser.set_field_boost(text_field, 2.0);
         let query = query_parser.parse_query("text:hello^2").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"Boost(query=Boost(query=TermQuery(Term(field=1, type=Str, "hello")), boost=2), boost=2)"#
         );
     }
@@ -1039,7 +1039,7 @@ mod test {
         let query_parser = make_query_parser();
         let query_result = query_parser.parse_query("");
         let query = query_result.unwrap();
-        assert_eq!(format!("{:?}", query), "EmptyQuery");
+        assert_eq!(format!("{query:?}"), "EmptyQuery");
     }
 
     #[test]
@@ -1481,7 +1481,7 @@ mod test {
             Ok(_) => panic!("should never succeed"),
             Err(e) => assert_eq!(
                 "The facet field is malformed: Failed to parse the facet string: 'INVALID'",
-                format!("{}", e)
+                format!("{e}")
             ),
         }
         assert!(query_parser.parse_query("facet:\"/foo/bar\"").is_ok());
@@ -1574,7 +1574,7 @@ mod test {
         let query_parser = QueryParser::new(schema, Vec::new(), TokenizerManager::default());
         let query = query_parser.parse_query(r#"a\.b:hello"#).unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "TermQuery(Term(field=0, type=Str, \"hello\"))"
         );
     }
@@ -1668,7 +1668,7 @@ mod test {
         );
         let query = query_parser.parse_query("abc").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "BooleanQuery { subqueries: [(Should, FuzzyTermQuery { term: Term(field=0, \
              type=Str, \"abc\"), distance: 1, transposition_cost_one: true, prefix: false }), \
              (Should, TermQuery(Term(field=1, type=Str, \"abc\")))] }"
         );
@@ -1685,7 +1685,7 @@ mod test {
         );
         let query = query_parser.parse_query("abc").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "BooleanQuery { subqueries: [(Should, TermQuery(Term(field=0, type=Str, \
              \"abc\"))), (Should, FuzzyTermQuery { term: Term(field=1, type=Str, \"abc\"), \
              distance: 2, transposition_cost_one: false, prefix: true })] }"
         );
diff --git a/src/query/range_query/range_query.rs b/src/query/range_query/range_query.rs
index 09bf735d8..3c47bdef4 100644
--- a/src/query/range_query/range_query.rs
+++ b/src/query/range_query/range_query.rs
@@ -337,8 +337,9 @@ impl Query for RangeQuery {
         let value_type = field_type.value_type();
         if value_type != self.value_type {
             let err_msg = format!(
-                "Create a range query of the type {:?}, when the field given was of type {:?}",
-                self.value_type, value_type
+                "Create a range query of the type {:?}, when the field given was of type \
+                 {value_type:?}",
+                self.value_type
             );
             return Err(TantivyError::SchemaError(err_msg));
         }
diff --git a/src/query/range_query/range_query_ip_fastfield.rs b/src/query/range_query/range_query_ip_fastfield.rs
index c8e303e3e..2607ea77b 100644
--- a/src/query/range_query/range_query_ip_fastfield.rs
+++ b/src/query/range_query/range_query_ip_fastfield.rs
@@ -49,8 +49,7 @@ impl Weight for IPFastFieldRangeWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let explanation = Explanation::new("Const", scorer.score());
diff --git a/src/query/range_query/range_query_u64_fastfield.rs b/src/query/range_query/range_query_u64_fastfield.rs
index a508bd258..f07faf703 100644
--- a/src/query/range_query/range_query_u64_fastfield.rs
+++ b/src/query/range_query/range_query_u64_fastfield.rs
@@ -91,8 +91,7 @@ impl Weight for FastFieldRangeWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let explanation = Explanation::new("Const", scorer.score());
diff --git a/src/query/term_query/mod.rs b/src/query/term_query/mod.rs
index a017b6035..f3dcd5792 100644
--- a/src/query/term_query/mod.rs
+++ b/src/query/term_query/mod.rs
@@ -174,7 +174,7 @@ mod tests {
             IndexRecordOption::WithFreqs,
         );
         assert_eq!(
-            format!("{:?}", term_query),
+            format!("{term_query:?}"),
             r#"TermQuery(Term(field=1, type=Str, "hello"))"#
         );
     }
diff --git a/src/query/term_query/term_query.rs b/src/query/term_query/term_query.rs
index e382b988a..423d2f69a 100644
--- a/src/query/term_query/term_query.rs
+++ b/src/query/term_query/term_query.rs
@@ -191,7 +191,7 @@ mod tests {
         assert_single_hit(query_from_ip(ip_addr_2));
         assert_single_hit(query_from_text("127.0.0.1".to_string()));
         assert_single_hit(query_from_text("\"127.0.0.1\"".to_string()));
-        assert_single_hit(query_from_text(format!("\"{}\"", ip_addr_1)));
-        assert_single_hit(query_from_text(format!("\"{}\"", ip_addr_2)));
+        assert_single_hit(query_from_text(format!("\"{ip_addr_1}\"")));
+        assert_single_hit(query_from_text(format!("\"{ip_addr_2}\"")));
     }
 }
diff --git a/src/schema/facet.rs b/src/schema/facet.rs
index 2981d523f..845829b30 100644
--- a/src/schema/facet.rs
+++ b/src/schema/facet.rs
@@ -174,7 +174,7 @@ impl Facet {
     /// This function is the inverse of Facet::from(&str).
     pub fn to_path_string(&self) -> String {
-        format!("{}", self)
+        format!("{self}")
     }
 }
@@ -233,7 +233,7 @@ impl<'de> Deserialize<'de> for Facet {
 
 impl Debug for Facet {
     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        write!(f, "Facet({})", self)?;
+        write!(f, "Facet({self})")?;
         Ok(())
     }
 }
@@ -264,12 +264,12 @@ mod tests {
         {
             let v = ["first", "second", "third"];
             let facet = Facet::from_path(v.iter());
-            assert_eq!(format!("{}", facet), "/first/second/third");
+            assert_eq!(format!("{facet}"), "/first/second/third");
         }
         {
            let v = ["first", "sec/ond", "third"];
            let facet = Facet::from_path(v.iter());
-            assert_eq!(format!("{}", facet), "/first/sec\\/ond/third");
+            assert_eq!(format!("{facet}"), "/first/sec\\/ond/third");
         }
     }
 
@@ -277,7 +277,7 @@ mod tests {
     fn test_facet_debug() {
         let v = ["first", "second", "third"];
         let facet = Facet::from_path(v.iter());
-        assert_eq!(format!("{:?}", facet), "Facet(/first/second/third)");
+        assert_eq!(format!("{facet:?}"), "Facet(/first/second/third)");
     }
 
     #[test]
diff --git a/src/schema/field_type.rs b/src/schema/field_type.rs
index 7e8dc1dbd..a209ca78f 100644
--- a/src/schema/field_type.rs
+++ b/src/schema/field_type.rs
@@ -628,7 +628,7 @@ mod tests {
         let doc = schema.parse_document(doc_json).unwrap();
         let date = doc.get_first(date_field).unwrap();
         // Time zone is converted to UTC
-        assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{:?}", date));
+        assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{date:?}"));
     }
 
     #[test]
diff --git a/src/schema/term.rs b/src/schema/term.rs
index 2722a4bf6..9f9b9c6e8 100644
--- a/src/schema/term.rs
+++ b/src/schema/term.rs
@@ -519,7 +519,7 @@ where B: AsRef<[u8]>
 fn write_opt<T: std::fmt::Debug>(f: &mut fmt::Formatter, val_opt: Option<T>) -> fmt::Result {
     if let Some(val) = val_opt {
-        write!(f, "{:?}", val)?;
+        write!(f, "{val:?}")?;
     }
     Ok(())
 }
diff --git a/src/schema/value.rs b/src/schema/value.rs
index 504114b3d..0544f20af 100644
--- a/src/schema/value.rs
+++ b/src/schema/value.rs
@@ -452,8 +452,7 @@ mod binary_serialize {
             _ => Err(io::Error::new(
                 io::ErrorKind::InvalidData,
                 format!(
-                    "No extended field type is associated with code {:?}",
-                    ext_type_code
+                    "No extended field type is associated with code {ext_type_code:?}"
                 ),
             )),
         }
@@ -477,7 +476,7 @@ mod binary_serialize {
 
                 _ => Err(io::Error::new(
                     io::ErrorKind::InvalidData,
-                    format!("No field type is associated with code {:?}", type_code),
+                    format!("No field type is associated with code {type_code:?}"),
                 )),
             }
         }
diff --git a/src/store/compressors.rs b/src/store/compressors.rs
index 1fdf4479f..a8de3ea38 100644
--- a/src/store/compressors.rs
+++ b/src/store/compressors.rs
@@ -83,7 +83,7 @@ pub struct ZstdCompressor {
 impl ZstdCompressor {
     fn deser_from_str(val: &str) -> Result<ZstdCompressor, String> {
         if !val.starts_with("zstd") {
-            return Err(format!("needs to start with zstd, but got {}", val));
+            return Err(format!("needs to start with zstd, but got {val}"));
         }
         if val == "zstd" {
             return Ok(ZstdCompressor::default());
@@ -94,15 +94,12 @@ impl ZstdCompressor {
         for option in options.split(',') {
             let (opt_name, value) = options
                 .split_once('=')
-                .ok_or_else(|| format!("no '=' found in option {:?}", option))?;
+                .ok_or_else(|| format!("no '=' found in option {option:?}"))?;
             match opt_name {
                 "compression_level" => {
                     let value = value.parse::<i32>().map_err(|err| {
-                        format!(
-                            "Could not parse value {} of option {}, e: {}",
-                            value, opt_name, err
-                        )
+                        format!("Could not parse value {value} of option {opt_name}, e: {err}")
                     })?;
                     if value >= 15 {
                         warn!(
                     compressor.compression_level = Some(value);
                 }
                 _ => {
-                    return Err(format!("unknown zstd option {:?}", opt_name));
+                    return Err(format!("unknown zstd option {opt_name:?}"));
                 }
             }
         }
@@ -122,7 +119,7 @@
     }
     fn ser_to_string(&self) -> String {
         if let Some(compression_level) = self.compression_level {
-            format!("zstd(compression_level={})", compression_level)
+            format!("zstd(compression_level={compression_level})")
         } else {
             "zstd".to_string()
         }
diff --git a/src/store/decompressors.rs b/src/store/decompressors.rs
index 233db0889..7e842a618 100644
--- a/src/store/decompressors.rs
+++ b/src/store/decompressors.rs
@@ -45,7 +45,7 @@ impl Decompressor {
             2 => Decompressor::Brotli,
             3 => Decompressor::Snappy,
             4 => Decompressor::Zstd,
-            _ => panic!("unknown compressor id {:?}", id),
+            _ => panic!("unknown compressor id {id:?}"),
         }
     }
diff --git a/src/store/footer.rs b/src/store/footer.rs
index 1b8ee9b91..3505a55e0 100644
--- a/src/store/footer.rs
+++ b/src/store/footer.rs
@@ -27,10 +27,7 @@ impl BinarySerializable for DocStoreFooter {
     fn deserialize<R: Read>(reader: &mut R) -> io::Result<DocStoreFooter> {
         let doc_store_version = u32::deserialize(reader)?;
         if doc_store_version != DOC_STORE_VERSION {
-            panic!(
-                "actual doc store version: {}, expected: {}",
-                doc_store_version, DOC_STORE_VERSION
-            );
+            panic!("actual doc store version: {doc_store_version}, expected: {DOC_STORE_VERSION}");
         }
         let offset = u64::deserialize(reader)?;
         let compressor_id = u8::deserialize(reader)?;
diff --git a/src/store/index/mod.rs b/src/store/index/mod.rs
index 5e39d8b2d..b06885e99 100644
--- a/src/store/index/mod.rs
+++ b/src/store/index/mod.rs
@@ -221,7 +221,7 @@ mod tests {
         if let Some(last_checkpoint) = checkpoints.last() {
             for doc in 0u32..last_checkpoint.doc_range.end {
                 let expected = seek_manual(skip_index.checkpoints(), doc);
-                assert_eq!(expected, skip_index.seek(doc), "Doc {}", doc);
+                assert_eq!(expected, skip_index.seek(doc), "Doc {doc}");
             }
             assert!(skip_index.seek(last_checkpoint.doc_range.end).is_none());
         }
diff --git a/src/store/mod.rs b/src/store/mod.rs
index 4bc2bb5f0..d64776f13 100644
--- a/src/store/mod.rs
+++ b/src/store/mod.rs
@@ -128,7 +128,7 @@ pub mod tests {
                     .unwrap()
                     .as_text()
                     .unwrap(),
-                format!("Doc {}", i)
+                format!("Doc {i}")
             );
         }
 
@@ -136,7 +136,7 @@ pub mod tests {
             let doc = doc?;
             let title_content = doc.get_first(field_title).unwrap().as_text().unwrap();
             if !title_content.starts_with("Doc ") {
-                panic!("unexpected title_content {}", title_content);
+                panic!("unexpected title_content {title_content}");
             }
 
             let id = title_content
                 .parse::<u32>()
                 .unwrap();
             if alive_bitset.is_deleted(id) {
-                panic!("unexpected deleted document {}", id);
+                panic!("unexpected deleted document {id}");
             }
         }
 
@@ -173,13 +173,13 @@ pub mod tests {
                     .unwrap()
                     .as_text()
                     .unwrap(),
-                format!("Doc {}", i)
+                format!("Doc {i}")
             );
         }
         for (i, doc) in store.iter(None).enumerate() {
             assert_eq!(
                 *doc?.get_first(field_title).unwrap().as_text().unwrap(),
-                format!("Doc {}", i)
+                format!("Doc {i}")
             );
         }
         Ok(())
diff --git a/src/store/reader.rs b/src/store/reader.rs
index 03a277dd8..f688359b9 100644
--- a/src/store/reader.rs
+++ b/src/store/reader.rs
@@ -158,7 +158,7 @@ impl StoreReader {
     /// Advanced API. In most cases use [`get`](Self::get).
     fn block_checkpoint(&self, doc_id: DocId) -> crate::Result<Checkpoint> {
         self.skip_index.seek(doc_id).ok_or_else(|| {
-            crate::TantivyError::InvalidArgument(format!("Failed to lookup Doc #{}.", doc_id))
+            crate::TantivyError::InvalidArgument(format!("Failed to lookup Doc #{doc_id}."))
         })
     }
diff --git a/src/termdict/fst_termdict/term_info_store.rs b/src/termdict/fst_termdict/term_info_store.rs
index 5232cb01a..837136ff1 100644
--- a/src/termdict/fst_termdict/term_info_store.rs
+++ b/src/termdict/fst_termdict/term_info_store.rs
@@ -359,8 +359,7 @@ mod tests {
             assert_eq!(
                 term_info_store.get(i as u64),
                 term_infos[i],
-                "term info {}",
-                i
+                "term info {i}"
             );
         }
         Ok(())
diff --git a/src/termdict/fst_termdict/termdict.rs b/src/termdict/fst_termdict/termdict.rs
index a2553b4d3..8e5db2536 100644
--- a/src/termdict/fst_termdict/termdict.rs
+++ b/src/termdict/fst_termdict/termdict.rs
@@ -128,10 +128,7 @@ impl TermDictionary {
         if version != FST_VERSION {
             return Err(io::Error::new(
                 io::ErrorKind::Other,
-                format!(
-                    "Unsuported fst version, expected {}, found {}",
-                    version, FST_VERSION,
-                ),
+                format!("Unsupported fst version, expected {version}, found {FST_VERSION}"),
             ));
         }
diff --git a/src/termdict/mod.rs b/src/termdict/mod.rs
index 541fa4e93..f0d2a3b9c 100644
--- a/src/termdict/mod.rs
+++ b/src/termdict/mod.rs
@@ -76,8 +76,8 @@ impl TermDictionary {
             return Err(io::Error::new(
                 io::ErrorKind::Other,
                 format!(
-                    "Unsuported dictionary type, expected {}, found {}",
-                    CURRENT_TYPE as u32, dict_type,
+                    "Unsupported dictionary type, expected {}, found {dict_type}",
+                    CURRENT_TYPE as u32,
                 ),
             ));
         }
diff --git a/src/tokenizer/ascii_folding_filter.rs b/src/tokenizer/ascii_folding_filter.rs
index 6a9de4875..beef3ff31 100644
--- a/src/tokenizer/ascii_folding_filter.rs
+++ b/src/tokenizer/ascii_folding_filter.rs
@@ -4052,9 +4052,7 @@ mod tests {
             assert_eq!(
                 folding_using_raw_tokenizer_helper(c),
                 folded,
-                "testing that character \"{}\" becomes \"{}\"",
-                c,
-                folded
+                "testing that character \"{c}\" becomes \"{folded}\""
             );
         }
     }
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index d8cb2bcae..8bd3fd465 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -177,20 +177,14 @@ pub mod tests {
     pub fn assert_token(token: &Token, position: usize, text: &str, from: usize, to: usize) {
         assert_eq!(
             token.position, position,
-            "expected position {} but {:?}",
-            position, token
+            "expected position {position} but {token:?}"
         );
-        assert_eq!(token.text, text, "expected text {} but {:?}", text, token);
+        assert_eq!(token.text, text, "expected text {text} but {token:?}");
         assert_eq!(
             token.offset_from, from,
-            "expected offset_from {} but {:?}",
-            from, token
-        );
-        assert_eq!(
-            token.offset_to, to,
-            "expected offset_to {} but {:?}",
-            to, token
+            "expected offset_from {from} but {token:?}"
         );
+        assert_eq!(token.offset_to, to, "expected offset_to {to} but {token:?}");
     }
 
     #[test]
diff --git a/sstable/src/dictionary.rs b/sstable/src/dictionary.rs
index bbc733eb4..0eb5822d9 100644
--- a/sstable/src/dictionary.rs
+++ b/sstable/src/dictionary.rs
@@ -188,8 +188,7 @@ impl Dictionary {
             return Err(io::Error::new(
                 io::ErrorKind::Other,
                 format!(
-                    "Unsuported sstable version, expected {}, found {}",
-                    version,
+                    "Unsupported sstable version, expected {version}, found {}",
                     crate::SSTABLE_VERSION,
                 ),
             ));
@@ -499,7 +498,7 @@ mod tests {
         let new_range = dic.sstable_index.get_block_with_ord(ordinal).byte_range;
         slice.restrict(new_range);
         assert!(dic.ord_to_term(ordinal, &mut res).unwrap());
-        assert_eq!(res, format!("{:05X}", ordinal).into_bytes());
+        assert_eq!(res, format!("{ordinal:05X}").into_bytes());
         assert_eq!(dic.term_info_from_ord(ordinal).unwrap().unwrap(), ordinal);
         assert_eq!(dic.get(&res).unwrap().unwrap(), ordinal);
         assert_eq!(dic.term_ord(&res).unwrap().unwrap(), ordinal);
diff --git a/sstable/src/lib.rs b/sstable/src/lib.rs
index 28b6312fe..09014b794 100644
--- a/sstable/src/lib.rs
+++ b/sstable/src/lib.rs
@@ -256,8 +256,8 @@ where
             || self.previous_key[keep_len] < key[keep_len];
         assert!(
             increasing_keys,
-            "Keys should be increasing. ({:?} > {:?})",
-            self.previous_key, key
+            "Keys should be increasing. ({:?} > {key:?})",
+            self.previous_key
         );
         self.previous_key.resize(key.len(), 0u8);
         self.previous_key[keep_len..].copy_from_slice(&key[keep_len..]);
diff --git a/stacker/src/expull.rs b/stacker/src/expull.rs
index 2fc565fbc..03fd564b6 100644
--- a/stacker/src/expull.rs
+++ b/stacker/src/expull.rs
@@ -235,14 +235,13 @@ mod tests {
         for i in 0..10_000_000 {
             match len_to_capacity(i) {
                 CapacityResult::NeedAlloc(cap) => {
-                    assert_eq!(available, 0, "Failed len={}: Expected 0 got {}", i, cap);
+                    assert_eq!(available, 0, "Failed len={i}: Expected 0 got {cap}");
                     available = cap;
                }
                 CapacityResult::Available(cap) => {
                     assert_eq!(
                         available, cap,
-                        "Failed len={}: Expected {} Got {}",
-                        i, available, cap
+                        "Failed len={i}: Expected {available} Got {cap}"
                     );
                 }
             }
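
Every hunk above applies the same mechanical rewrite: Rust's inline format arguments (stabilized in Rust 1.58), which let a format string capture identifiers from the enclosing scope. A minimal, self-contained sketch of the rule follows; the variable names are illustrative, not taken from the codebase.

fn main() {
    let doc_id = 2;
    let positions = vec![0, 4];

    // Before: positional arguments, repeated after the format string.
    println!("Doc {}: positions {:?}", doc_id, positions);

    // After: identifiers are captured directly inside the braces.
    println!("Doc {doc_id}: positions {positions:?}");

    // Format specs (precision, debug, padding) combine with capture.
    let unit_num = 1536.0_f64 / 1024.0;
    println!("{unit_num:.2} KB");

    // Only plain identifiers (including consts like MAX_DOC_LIMIT) can be
    // captured. Expressions such as `bytes.len()` or `CURRENT_TYPE as u32`
    // must stay positional, which is why some call sites in the diff keep
    // a trailing argument after the format string.
    let bytes = vec![1u8, 2, 3];
    println!("OwnedBytes({bytes:?}, len={})", bytes.len());
}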