Mirror of https://github.com/quickwit-oss/tantivy.git
Inline format arguments where it makes sense (#2038)
Applied these commands to the code, making it a bit shorter and slightly more readable:

```
cargo +nightly clippy --all-features --benches --tests --workspace --fix -- -A clippy::all -W clippy::uninlined_format_args
cargo +nightly fmt --all
```
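For context, the `uninlined_format_args` lint rewrites `format!`-style macros whose arguments are plain identifiers into the inline-capture syntax stabilized in Rust 1.58. A minimal before/after sketch (hypothetical variable, not taken from this commit):

```rust
fn main() {
    let name = "tantivy";
    // Before: positional argument.
    println!("hello {}", name);
    // After: inlined format argument (what `cargo clippy --fix` produces here).
    println!("hello {name}");
}
```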
@@ -34,7 +34,7 @@ impl fmt::Display for ColumnType {
             ColumnType::IpAddr => "ip",
             ColumnType::DateTime => "datetime",
         };
-        write!(f, "{}", short_str)
+        write!(f, "{short_str}")
     }
 }

@@ -26,14 +26,14 @@ impl fmt::Debug for DynamicColumn {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "[{} {} |", self.get_cardinality(), self.column_type())?;
         match self {
-            DynamicColumn::Bool(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::I64(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::U64(col) => write!(f, " {:?}", col)?,
-            DynamicColumn::F64(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::IpAddr(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::DateTime(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::Bytes(col) => write!(f, "{:?}", col)?,
-            DynamicColumn::Str(col) => write!(f, "{:?}", col)?,
+            DynamicColumn::Bool(col) => write!(f, " {col:?}")?,
+            DynamicColumn::I64(col) => write!(f, " {col:?}")?,
+            DynamicColumn::U64(col) => write!(f, " {col:?}")?,
+            DynamicColumn::F64(col) => write!(f, "{col:?}")?,
+            DynamicColumn::IpAddr(col) => write!(f, "{col:?}")?,
+            DynamicColumn::DateTime(col) => write!(f, "{col:?}")?,
+            DynamicColumn::Bytes(col) => write!(f, "{col:?}")?,
+            DynamicColumn::Str(col) => write!(f, "{col:?}")?,
         }
         write!(f, "]")
     }

@@ -37,7 +37,7 @@ impl ByteCount {
         for (suffix, threshold) in SUFFIX_AND_THRESHOLD.iter().rev() {
             if self.get_bytes() >= *threshold {
                 let unit_num = self.get_bytes() as f64 / *threshold as f64;
-                return format!("{:.2} {}", unit_num, suffix);
+                return format!("{unit_num:.2} {suffix}");
             }
         }
         format!("{:.2} B", self.get_bytes())
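Note that format specifications carry over unchanged when an argument is inlined: `{:.2}` with a trailing argument becomes `{unit_num:.2}`, as in the `ByteCount` hunk above. A minimal sketch (hypothetical values, not from the diff):

```rust
fn main() {
    let unit_num = 1536.0_f64 / 1024.0;
    // The `:.2` precision spec applies to the inline capture exactly as it
    // did to the positional argument.
    assert_eq!(format!("{:.2} KB", unit_num), format!("{unit_num:.2} KB"));
    println!("{unit_num:.2} KB"); // prints "1.50 KB"
}
```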
@@ -261,7 +261,7 @@ mod tests {
         let mut buffer2 = [0u8; 8];
         let len_vint = VInt(val as u64).serialize_into(&mut buffer);
         let res2 = serialize_vint_u32(val, &mut buffer2);
-        assert_eq!(&buffer[..len_vint], res2, "array wrong for {}", val);
+        assert_eq!(&buffer[..len_vint], res2, "array wrong for {val}");
     }

     #[test]

@@ -96,7 +96,7 @@ fn main() -> tantivy::Result<()> {
         let mut index_writer_wlock = index_writer.write().unwrap();
         index_writer_wlock.commit()?
     };
-    println!("committed with opstamp {}", opstamp);
+    println!("committed with opstamp {opstamp}");
     thread::sleep(Duration::from_millis(500));
 }

@@ -84,7 +84,7 @@ fn main() -> tantivy::Result<()> {
         // Doc 0: TermFreq 2: [0, 4]
         // Doc 2: TermFreq 1: [0]
         // ```
-        println!("Doc {}: TermFreq {}: {:?}", doc_id, term_freq, positions);
+        println!("Doc {doc_id}: TermFreq {term_freq}: {positions:?}");
         doc_id = segment_postings.advance();
     }
 }

@@ -125,7 +125,7 @@ fn main() -> tantivy::Result<()> {
         // Once again these docs MAY contains deleted documents as well.
         let docs = block_segment_postings.docs();
         // Prints `Docs [0, 2].`
-        println!("Docs {:?}", docs);
+        println!("Docs {docs:?}");
         block_segment_postings.advance();
     }
 }

@@ -56,7 +56,7 @@ fn main() -> tantivy::Result<()> {
     for (score, doc_address) in top_docs {
         let doc = searcher.doc(doc_address)?;
         let snippet = snippet_generator.snippet_from_doc(&doc);
-        println!("Document score {}:", score);
+        println!("Document score {score}:");
         println!(
             "title: {}",
             doc.get_first(title).unwrap().as_text().unwrap()

@@ -106,7 +106,7 @@ fn main() -> tantivy::Result<()> {

     for (score, doc_address) in top_docs {
         let retrieved_doc = searcher.doc(doc_address)?;
-        println!("\n==\nDocument score {}:", score);
+        println!("\n==\nDocument score {score}:");
         println!("{}", schema.to_json(&retrieved_doc));
     }
@@ -160,7 +160,7 @@ impl fmt::Debug for OwnedBytes {
         } else {
             self.as_slice()
         };
-        write!(f, "OwnedBytes({:?}, len={})", bytes_truncated, self.len())
+        write!(f, "OwnedBytes({bytes_truncated:?}, len={})", self.len())
     }
 }
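This hunk also shows the lint's main limitation: inline captures accept bare identifiers only, so an expression such as `self.len()` stays a positional argument while `bytes_truncated` is inlined. A minimal sketch (hypothetical values):

```rust
fn main() {
    let items = vec![1, 2, 3];
    // OK: `items` is a plain identifier, so it can be captured inline.
    println!("{items:?}");
    // `{items.len()}` would not compile; expressions must remain
    // ordinary positional arguments.
    println!("len={}", items.len());
}
```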
@@ -259,12 +259,12 @@ mod tests {
     fn test_owned_bytes_debug() {
         let short_bytes = OwnedBytes::new(b"abcd".as_ref());
         assert_eq!(
-            format!("{:?}", short_bytes),
+            format!("{short_bytes:?}"),
             "OwnedBytes([97, 98, 99, 100], len=4)"
         );
         let long_bytes = OwnedBytes::new(b"abcdefghijklmnopq".as_ref());
         assert_eq!(
-            format!("{:?}", long_bytes),
+            format!("{long_bytes:?}"),
             "OwnedBytes([97, 98, 99, 100, 101, 102, 103, 104, 105, 106], len=17)"
         );
     }

@@ -56,7 +56,7 @@ fn word<'a>() -> impl Parser<&'a str, Output = String> {
             !c.is_whitespace() && ![':', '^', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
         })),
     )
-        .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
+        .map(|(s1, s2): (char, String)| format!("{s1}{s2}"))
         .and_then(|s: String| match s.as_str() {
             "OR" | "AND " | "NOT" => Err(StringStreamError::UnexpectedParse),
             _ => Ok(s),

@@ -74,7 +74,7 @@ fn relaxed_word<'a>() -> impl Parser<&'a str, Output = String> {
             !c.is_whitespace() && !['{', '}', '"', '[', ']', '(', ')'].contains(&c)
         })),
     )
-        .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
+        .map(|(s1, s2): (char, String)| format!("{s1}{s2}"))
 }

 /// Parses a date time according to rfc3339

@@ -178,9 +178,9 @@ fn negative_number<'a>() -> impl Parser<&'a str, Output = String> {
     )
         .map(|(s1, s2, s3): (char, String, Option<(char, String)>)| {
             if let Some(('.', s3)) = s3 {
-                format!("{}{}.{}", s1, s2, s3)
+                format!("{s1}{s2}.{s3}")
             } else {
-                format!("{}{}", s1, s2)
+                format!("{s1}{s2}")
             }
         })
 }

@@ -419,9 +419,7 @@ mod test {
     fn assert_nearly_equals(expected: f64, val: f64) {
         assert!(
             nearly_equals(val, expected),
-            "Got {}, expected {}.",
-            val,
-            expected
+            "Got {val}, expected {expected}."
         );
     }

@@ -468,7 +466,7 @@ mod test {

     fn test_parse_query_to_ast_helper(query: &str, expected: &str) {
         let query = parse_to_ast().parse(query).unwrap().0;
-        let query_str = format!("{:?}", query);
+        let query_str = format!("{query:?}");
         assert_eq!(query_str, expected);
     }

@@ -554,7 +552,7 @@ mod test {
     fn test_occur_leaf() {
         let ((occur, ast), _) = super::occur_leaf().parse("+abc").unwrap();
         assert_eq!(occur, Some(Occur::Must));
-        assert_eq!(format!("{:?}", ast), "\"abc\"");
+        assert_eq!(format!("{ast:?}"), "\"abc\"");
     }

     #[test]

@@ -613,7 +611,7 @@ mod test {
         let escaped_special_chars_re = Regex::new(ESCAPED_SPECIAL_CHARS_PATTERN).unwrap();
         for special_char in SPECIAL_CHARS.iter() {
             assert_eq!(
-                escaped_special_chars_re.replace_all(&format!("\\{}", special_char), "$1"),
+                escaped_special_chars_re.replace_all(&format!("\\{special_char}"), "$1"),
                 special_char.to_string()
             );
         }

@@ -28,7 +28,7 @@ impl Debug for UserInputLeaf {
             ref upper,
         } => {
             if let Some(ref field) = field {
-                write!(formatter, "\"{}\":", field)?;
+                write!(formatter, "\"{field}\":")?;
             }
             lower.display_lower(formatter)?;
             write!(formatter, " TO ")?;

@@ -37,14 +37,14 @@ impl Debug for UserInputLeaf {
         }
         UserInputLeaf::Set { field, elements } => {
             if let Some(ref field) = field {
-                write!(formatter, "\"{}\": ", field)?;
+                write!(formatter, "\"{field}\": ")?;
             }
             write!(formatter, "IN [")?;
             for (i, element) in elements.iter().enumerate() {
                 if i != 0 {
                     write!(formatter, " ")?;
                 }
-                write!(formatter, "\"{}\"", element)?;
+                write!(formatter, "\"{element}\"")?;
             }
             write!(formatter, "]")
         }

@@ -63,7 +63,7 @@ pub struct UserInputLiteral {
 impl fmt::Debug for UserInputLiteral {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         if let Some(ref field) = self.field_name {
-            write!(formatter, "\"{}\":", field)?;
+            write!(formatter, "\"{field}\":")?;
         }
         write!(formatter, "\"{}\"", self.phrase)?;
         if self.slop > 0 {

@@ -83,16 +83,16 @@ pub enum UserInputBound {
 impl UserInputBound {
     fn display_lower(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            UserInputBound::Inclusive(ref word) => write!(formatter, "[\"{}\"", word),
-            UserInputBound::Exclusive(ref word) => write!(formatter, "{{\"{}\"", word),
+            UserInputBound::Inclusive(ref word) => write!(formatter, "[\"{word}\""),
+            UserInputBound::Exclusive(ref word) => write!(formatter, "{{\"{word}\""),
             UserInputBound::Unbounded => write!(formatter, "{{\"*\""),
         }
     }

     fn display_upper(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            UserInputBound::Inclusive(ref word) => write!(formatter, "\"{}\"]", word),
-            UserInputBound::Exclusive(ref word) => write!(formatter, "\"{}\"}}", word),
+            UserInputBound::Inclusive(ref word) => write!(formatter, "\"{word}\"]"),
+            UserInputBound::Exclusive(ref word) => write!(formatter, "\"{word}\"}}"),
             UserInputBound::Unbounded => write!(formatter, "\"*\"}}"),
         }
     }
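The `UserInputBound` hunk above mixes inline captures with doubled braces, the escape for a literal brace in Rust format strings. A minimal sketch (hypothetical value):

```rust
fn main() {
    let word = "apple";
    // `{{` renders as a literal `{`; `{word}` is an inline capture.
    assert_eq!(format!("{{\"{word}\""), "{\"apple\"");
}
```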
@@ -163,9 +163,9 @@ fn print_occur_ast(
     formatter: &mut fmt::Formatter,
 ) -> fmt::Result {
     if let Some(occur) = occur_opt {
-        write!(formatter, "{}{:?}", occur, ast)?;
+        write!(formatter, "{occur}{ast:?}")?;
     } else {
-        write!(formatter, "*{:?}", ast)?;
+        write!(formatter, "*{ast:?}")?;
     }
     Ok(())
 }

@@ -187,8 +187,8 @@ impl fmt::Debug for UserInputAst {
                 }
                 Ok(())
             }
-            UserInputAst::Leaf(ref subquery) => write!(formatter, "{:?}", subquery),
-            UserInputAst::Boost(ref leaf, boost) => write!(formatter, "({:?})^{}", leaf, boost),
+            UserInputAst::Leaf(ref subquery) => write!(formatter, "{subquery:?}"),
+            UserInputAst::Boost(ref leaf, boost) => write!(formatter, "({leaf:?})^{boost}"),
         }
     }
 }

@@ -34,8 +34,7 @@ impl AggregationResults {
         } else {
             // Validation is be done during request parsing, so we can't reach this state.
             Err(TantivyError::InternalError(format!(
-                "Can't find aggregation {:?} in sub-aggregations",
-                name
+                "Can't find aggregation {name:?} in sub-aggregations"
             )))
         }
     }

@@ -533,7 +533,7 @@ fn test_aggregation_invalid_requests() -> crate::Result<()> {

     let agg_res = avg_on_field("dummy_text").unwrap_err();
     assert_eq!(
-        format!("{:?}", agg_res),
+        format!("{agg_res:?}"),
         r#"InvalidArgument("Field \"dummy_text\" is not configured as fast field")"#
     );

@@ -131,16 +131,14 @@ impl DateHistogramAggregationReq {
     fn validate(&self) -> crate::Result<()> {
         if let Some(interval) = self.interval.as_ref() {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "`interval` parameter {:?} in date histogram is unsupported, only \
-                 `fixed_interval` is supported",
-                interval
+                "`interval` parameter {interval:?} in date histogram is unsupported, only \
+                 `fixed_interval` is supported"
             )));
         }
         if let Some(interval) = self.calendar_interval.as_ref() {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "`calendar_interval` parameter {:?} in date histogram is unsupported, only \
-                 `fixed_interval` is supported",
-                interval
+                "`calendar_interval` parameter {interval:?} in date histogram is unsupported, \
+                 only `fixed_interval` is supported"
             )));
         }
         if self.format.is_some() {

@@ -142,9 +142,8 @@ impl HistogramAggregation {
     {
         if extended_bounds.min < hard_bounds.min || extended_bounds.max > hard_bounds.max {
             return Err(TantivyError::InvalidArgument(format!(
-                "extended_bounds have to be inside hard_bounds, extended_bounds: {}, \
-                 hard_bounds {}",
-                extended_bounds, hard_bounds
+                "extended_bounds have to be inside hard_bounds, extended_bounds: \
+                 {extended_bounds}, hard_bounds {hard_bounds}"
             )));
         }
     }

@@ -333,8 +333,8 @@ impl SegmentTermCollector {

         sub_aggregations.aggs.get(agg_name).ok_or_else(|| {
             TantivyError::InvalidArgument(format!(
-                "could not find aggregation with name {} in metric sub_aggregations",
-                agg_name
+                "could not find aggregation with name {agg_name} in metric \
                 sub_aggregations"
             ))
         })?;
     }

@@ -409,10 +409,7 @@ impl SegmentTermCollector {
             .sub_aggs
             .remove(&id)
             .unwrap_or_else(|| {
-                panic!(
-                    "Internal Error: could not find subaggregation for id {}",
-                    id
-                )
+                panic!("Internal Error: could not find subaggregation for id {id}")
             })
             .add_intermediate_aggregation_result(
                 &agg_with_accessor.sub_aggregation,

@@ -442,8 +439,7 @@ impl SegmentTermCollector {
         for (term_id, doc_count) in entries {
             if !term_dict.ord_to_str(term_id, &mut buffer)? {
                 return Err(TantivyError::InternalError(format!(
-                    "Couldn't find term_id {} in dict",
-                    term_id
+                    "Couldn't find term_id {term_id} in dict"
                 )));
             }

@@ -6,8 +6,7 @@ use crate::TantivyError;
 pub(crate) fn format_date(val: i64) -> crate::Result<String> {
     let datetime = OffsetDateTime::from_unix_timestamp_nanos(val as i128).map_err(|err| {
         TantivyError::InvalidArgument(format!(
-            "Could not convert {:?} to OffsetDateTime, err {:?}",
-            val, err
+            "Could not convert {val:?} to OffsetDateTime, err {err:?}"
         ))
     })?;
     let key_as_string = datetime

@@ -213,8 +213,7 @@ impl PercentilesCollector {
     pub(crate) fn merge_fruits(&mut self, right: PercentilesCollector) -> crate::Result<()> {
         self.sketch.merge(&right.sketch).map_err(|err| {
             TantivyError::AggregationError(AggregationError::InternalError(format!(
-                "Error while merging percentiles {:?}",
-                err
+                "Error while merging percentiles {err:?}"
             )))
         })?;

@@ -66,8 +66,7 @@ impl Stats {
             "max" => Ok(self.max),
             "avg" => Ok(self.avg),
             _ => Err(TantivyError::InvalidArgument(format!(
-                "Unknown property {} on stats metric aggregation",
-                agg_property
+                "Unknown property {agg_property} on stats metric aggregation"
             ))),
         }
     }

@@ -292,7 +292,7 @@ pub(crate) fn f64_from_fastfield_u64(val: u64, field_type: &ColumnType) -> f64 {
         ColumnType::I64 | ColumnType::DateTime => i64::from_u64(val) as f64,
         ColumnType::F64 => f64::from_u64(val),
         _ => {
-            panic!("unexpected type {:?}. This should not happen", field_type)
+            panic!("unexpected type {field_type:?}. This should not happen")
         }
     }
 }

@@ -812,7 +812,7 @@ mod bench {

         let mut docs = vec![];
         for val in 0..50 {
-            let facet = Facet::from(&format!("/facet_{}", val));
+            let facet = Facet::from(&format!("/facet_{val}"));
             for _ in 0..val * val {
                 docs.push(doc!(facet_field=>facet.clone()));
             }

@@ -52,10 +52,8 @@ where
         let requested_type = field_entry.field_type().value_type();
         if schema_type != requested_type {
             return Err(TantivyError::SchemaError(format!(
-                "Field {:?} is of type {:?}!={:?}",
-                field_entry.name(),
-                schema_type,
-                requested_type
+                "Field {:?} is of type {schema_type:?}!={requested_type:?}",
                field_entry.name()
             )));
         }
         self.collector.for_segment(segment_local_id, segment)
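Several hunks, like the one just above, end up with a mix of inline and positional placeholders: identifiers are inlined, while expressions such as `field_entry.name()` stay positional and are consumed in order by the remaining `{}`/`{:?}` slots. A minimal sketch (hypothetical values):

```rust
fn main() {
    let schema_type = "u64";
    let name = || "timestamp"; // stands in for a method call like field_entry.name()
    // `{:?}` takes the explicit argument; `{schema_type}` is captured inline.
    let msg = format!("Field {:?} is of type {schema_type}", name());
    assert_eq!(msg, "Field \"timestamp\" is of type u64");
}
```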
@@ -26,7 +26,7 @@ impl Executor {
     pub fn multi_thread(num_threads: usize, prefix: &'static str) -> crate::Result<Executor> {
         let pool = ThreadPoolBuilder::new()
             .num_threads(num_threads)
-            .thread_name(move |num| format!("{}{}", prefix, num))
+            .thread_name(move |num| format!("{prefix}{num}"))
             .build()?;
         Ok(Executor::ThreadPool(pool))
     }

@@ -39,10 +39,7 @@ fn load_metas(
         .map_err(|e| {
             DataCorruption::new(
                 META_FILEPATH.to_path_buf(),
-                format!(
-                    "Meta file cannot be deserialized. {:?}. Content: {:?}",
-                    e, meta_string
-                ),
+                format!("Meta file cannot be deserialized. {e:?}. Content: {meta_string:?}"),
             )
         })
         .map_err(From::from)

@@ -438,8 +435,7 @@ impl Index {
         };
         let indexing_options = indexing_options_opt.ok_or_else(|| {
             TantivyError::InvalidArgument(format!(
-                "No indexing options set for field {:?}",
-                field_entry
+                "No indexing options set for field {field_entry:?}"
             ))
         })?;

@@ -447,8 +443,7 @@ impl Index {
             .get(indexing_options.tokenizer())
             .ok_or_else(|| {
                 TantivyError::InvalidArgument(format!(
-                    "No Tokenizer found for field {:?}",
-                    field_entry
+                    "No Tokenizer found for field {field_entry:?}"
                 ))
             })
     }

@@ -296,6 +296,6 @@ impl fmt::Debug for Searcher {
             .iter()
             .map(SegmentReader::segment_id)
             .collect::<Vec<_>>();
-        write!(f, "Searcher({:?})", segment_ids)
+        write!(f, "Searcher({segment_ids:?})")
     }
 }

@@ -269,9 +269,8 @@ fn garbage_collect_works_as_intended() -> crate::Result<()> {
     assert_eq!(searcher.num_docs(), 8_000);
     assert!(
         mem_right_after_merge_finished < mem_right_after_commit,
-        "(mem after merge){} is expected < (mem before merge){}",
-        mem_right_after_merge_finished,
-        mem_right_after_commit
+        "(mem after merge){mem_right_after_merge_finished} is expected < (mem before \
         merge){mem_right_after_commit}"
     );
     Ok(())
 }

@@ -116,14 +116,14 @@ impl fmt::Debug for Incompatibility {
             index_compression_format,
         } => {
             let err = format!(
-                "Library was compiled with {:?} compression, index was compressed with {:?}",
-                library_compression_format, index_compression_format
+                "Library was compiled with {library_compression_format:?} compression, index \
                 was compressed with {index_compression_format:?}"
             );
             let advice = format!(
-                "Change the feature flag to {:?} and rebuild the library",
-                index_compression_format
+                "Change the feature flag to {index_compression_format:?} and rebuild the \
                 library"
             );
-            write!(f, "{}. {}", err, advice)?;
+            write!(f, "{err}. {advice}")?;
         }
         Incompatibility::IndexMismatch {
             library_version,

@@ -140,7 +140,7 @@ impl fmt::Debug for Incompatibility {
                 and rebuild your project.",
                 index_version.index_format_version, index_version.major, index_version.minor
             );
-            write!(f, "{}. {}", err, advice)?;
+            write!(f, "{err}. {advice}")?;
         }
     }

@@ -73,9 +73,9 @@ impl Footer {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
                 format!(
-                    "Footer seems invalid as it suggests a footer len of {}. File is corrupted, \
-                     or the index was created with a different & old version of tantivy.",
-                    footer_len
+                    "Footer seems invalid as it suggests a footer len of {footer_len}. File is \
                     corrupted, or the index was created with a different & old version of \
                     tantivy."
                 ),
             ));
         }

@@ -84,8 +84,8 @@ impl Footer {
             return Err(io::Error::new(
                 io::ErrorKind::UnexpectedEof,
                 format!(
-                    "File corrupted. The file is smaller than it's footer bytes (len={}).",
-                    total_footer_size
+                    "File corrupted. The file is smaller than it's footer bytes \
                     (len={total_footer_size})."
                 ),
             ));
         }
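The `Footer` hunks show why several of these strings get re-wrapped by rustfmt: a trailing backslash continues a Rust string literal onto the next line and swallows its leading whitespace, so it is still a single format string, and inline captures work across the visual break. A minimal sketch (hypothetical value):

```rust
fn main() {
    let footer_len = 42;
    // The `\` at the end of the line continues the literal and skips the
    // following indentation, producing one single-line string.
    let msg = format!(
        "Footer seems invalid as it suggests a footer len of {footer_len}. \
         File is corrupted."
    );
    assert_eq!(
        msg,
        "Footer seems invalid as it suggests a footer len of 42. File is corrupted."
    );
}
```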
@@ -69,7 +69,7 @@ impl ManagedDirectory {
             .map_err(|e| {
                 DataCorruption::new(
                     MANAGED_FILEPATH.to_path_buf(),
-                    format!("Managed file cannot be deserialized: {:?}. ", e),
+                    format!("Managed file cannot be deserialized: {e:?}. "),
                 )
             })?;
         Ok(ManagedDirectory {

@@ -331,10 +331,7 @@ impl Directory for MmapDirectory {
         let full_path = self.resolve_path(path);

         let mut mmap_cache = self.inner.mmap_cache.write().map_err(|_| {
-            let msg = format!(
-                "Failed to acquired write lock on mmap cache while reading {:?}",
-                path
-            );
+            let msg = format!("Failed to acquired write lock on mmap cache while reading {path:?}");
             let io_err = make_io_err(msg);
             OpenReadError::wrap_io_error(io_err, path.to_path_buf())
         })?;

@@ -44,7 +44,7 @@ impl fmt::Debug for DataCorruption {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         write!(f, "Data corruption")?;
         if let Some(ref filepath) = &self.filepath {
-            write!(f, " (in file `{:?}`)", filepath)?;
+            write!(f, " (in file `{filepath:?}`)")?;
         }
         write!(f, ": {}.", self.comment)?;
         Ok(())

@@ -120,7 +120,7 @@ impl From<DataCorruption> for TantivyError {
 }
 impl From<FastFieldNotAvailableError> for TantivyError {
     fn from(fastfield_error: FastFieldNotAvailableError) -> TantivyError {
-        TantivyError::SchemaError(format!("{}", fastfield_error))
+        TantivyError::SchemaError(format!("{fastfield_error}"))
     }
 }
 impl From<LockError> for TantivyError {

@@ -131,7 +131,7 @@ impl From<LockError> for TantivyError {

 impl From<query::QueryParserError> for TantivyError {
     fn from(parsing_error: query::QueryParserError) -> TantivyError {
-        TantivyError::InvalidArgument(format!("Query is invalid. {:?}", parsing_error))
+        TantivyError::InvalidArgument(format!("Query is invalid. {parsing_error:?}"))
     }
 }

@@ -161,7 +161,7 @@ impl From<time::error::ComponentRange> for TantivyError {

 impl From<schema::DocParsingError> for TantivyError {
     fn from(error: schema::DocParsingError) -> TantivyError {
-        TantivyError::InvalidArgument(format!("Failed to parse document {:?}", error))
+        TantivyError::InvalidArgument(format!("Failed to parse document {error:?}"))
     }
 }

@@ -65,8 +65,7 @@ impl FastFieldsWriter {
         if let Some(tokenizer_name) = text_options.get_fast_field_tokenizer_name() {
             let text_analyzer = tokenizer_manager.get(tokenizer_name).ok_or_else(|| {
                 TantivyError::InvalidArgument(format!(
-                    "Tokenizer {:?} not found",
-                    tokenizer_name
+                    "Tokenizer {tokenizer_name:?} not found"
                 ))
             })?;
             per_field_tokenizer[field_id.field_id() as usize] = Some(text_analyzer);

@@ -40,8 +40,7 @@ const PIPELINE_MAX_SIZE_IN_DOCS: usize = 10_000;

 fn error_in_index_worker_thread(context: &str) -> TantivyError {
     TantivyError::ErrorInThread(format!(
-        "{}. A worker thread encountered an error (io::Error most likely) or panicked.",
-        context
+        "{context}. A worker thread encountered an error (io::Error most likely) or panicked."
     ))
 }

@@ -270,15 +269,14 @@ impl IndexWriter {
     ) -> crate::Result<IndexWriter> {
         if memory_arena_in_bytes_per_thread < MEMORY_ARENA_NUM_BYTES_MIN {
             let err_msg = format!(
-                "The memory arena in bytes per thread needs to be at least {}.",
-                MEMORY_ARENA_NUM_BYTES_MIN
+                "The memory arena in bytes per thread needs to be at least \
                 {MEMORY_ARENA_NUM_BYTES_MIN}."
             );
             return Err(TantivyError::InvalidArgument(err_msg));
         }
         if memory_arena_in_bytes_per_thread >= MEMORY_ARENA_NUM_BYTES_MAX {
             let err_msg = format!(
-                "The memory arena in bytes per thread cannot exceed {}",
-                MEMORY_ARENA_NUM_BYTES_MAX
+                "The memory arena in bytes per thread cannot exceed {MEMORY_ARENA_NUM_BYTES_MAX}"
             );
             return Err(TantivyError::InvalidArgument(err_msg));
         }

@@ -621,7 +619,7 @@ impl IndexWriter {
         for worker_handle in former_workers_join_handle {
             let indexing_worker_result = worker_handle
                 .join()
-                .map_err(|e| TantivyError::ErrorInThread(format!("{:?}", e)))?;
+                .map_err(|e| TantivyError::ErrorInThread(format!("{e:?}")))?;
             indexing_worker_result?;
             self.add_indexing_worker()?;
         }

@@ -2077,14 +2075,14 @@ mod tests {
         let do_search_ip_field = |term: &str| do_search(term, ip_field).len() as u64;
         let ip_addr = Ipv6Addr::from_u128(existing_id as u128);
         // Test incoming ip as ipv6
-        assert_eq!(do_search_ip_field(&format!("\"{}\"", ip_addr)), count);
+        assert_eq!(do_search_ip_field(&format!("\"{ip_addr}\"")), count);

         let term = Term::from_field_ip_addr(ip_field, ip_addr);
         assert_eq!(do_search2(term).len() as u64, count);

         // Test incoming ip as ipv4
         if let Some(ip_addr) = ip_addr.to_ipv4_mapped() {
-            assert_eq!(do_search_ip_field(&format!("\"{}\"", ip_addr)), count);
+            assert_eq!(do_search_ip_field(&format!("\"{ip_addr}\"")), count);
         }
     }

@@ -193,9 +193,8 @@ impl IndexMerger {
         // sort segments by their natural sort setting
         if max_doc >= MAX_DOC_LIMIT {
             let err_msg = format!(
-                "The segment resulting from this merge would have {} docs,which exceeds the limit \
-                 {}.",
-                max_doc, MAX_DOC_LIMIT
+                "The segment resulting from this merge would have {max_doc} docs,which exceeds \
                 the limit {MAX_DOC_LIMIT}."
             );
             return Err(crate::TantivyError::InvalidArgument(err_msg));
         }

@@ -238,8 +238,7 @@ pub fn merge_filtered_segments<T: Into<Box<dyn Directory>>>(
         segments
             .iter()
             .fold(String::new(), |sum, current| format!(
-                "{}{} ",
-                sum,
+                "{sum}{} ",
                 current.meta().id().uuid_string()
             ))
             .trim_end()

@@ -533,7 +532,7 @@ impl SegmentUpdater {
             merge_error
         );
         if cfg!(test) {
-            panic!("{:?}", merge_error);
+            panic!("{merge_error:?}");
         }
         let _send_result = merging_future_send.send(Err(merge_error));
     }
@@ -544,8 +544,7 @@ pub mod tests {
         let skip_result_unopt = postings_unopt.seek(target);
         assert_eq!(
             skip_result_unopt, skip_result_opt,
-            "Failed while skipping to {}",
-            target
+            "Failed while skipping to {target}"
         );
         assert!(skip_result_opt >= target);
         assert_eq!(skip_result_opt, postings_opt.doc());

@@ -206,7 +206,7 @@ impl<'a> FieldSerializer<'a> {
     /// using `VInt` encoding.
     pub fn close_term(&mut self) -> io::Result<()> {
         fail_point!("FieldSerializer::close_term", |msg: Option<String>| {
-            Err(io::Error::new(io::ErrorKind::Other, format!("{:?}", msg)))
+            Err(io::Error::new(io::ErrorKind::Other, format!("{msg:?}")))
         });
         if self.term_open {
             self.postings_serializer

@@ -50,7 +50,7 @@ impl Bm25StatisticsProvider for Searcher {
 }

 pub(crate) fn idf(doc_freq: u64, doc_count: u64) -> Score {
-    assert!(doc_count >= doc_freq, "{} >= {}", doc_count, doc_freq);
+    assert!(doc_count >= doc_freq, "{doc_count} >= {doc_freq}");
     let x = ((doc_count - doc_freq) as Score + 0.5) / (doc_freq as Score + 0.5);
     (1.0 + x).ln()
 }

@@ -72,8 +72,7 @@ impl Weight for ConstWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let mut explanation = Explanation::new("Const", self.score);

@@ -5,7 +5,7 @@ use serde::Serialize;
 use crate::{DocId, Score, TantivyError};

 pub(crate) fn does_not_match(doc: DocId) -> TantivyError {
-    TantivyError::InvalidArgument(format!("Document #({}) does not match", doc))
+    TantivyError::InvalidArgument(format!("Document #({doc}) does not match"))
 }

 /// Object describing the score of a given document.

@@ -108,8 +108,8 @@ impl PhrasePrefixQuery {
         if !has_positions {
             let field_name = field_entry.name();
             return Err(crate::TantivyError::SchemaError(format!(
-                "Applied phrase query on field {:?}, which does not have positions indexed",
-                field_name
+                "Applied phrase query on field {field_name:?}, which does not have positions \
                 indexed"
             )));
         }
         let terms = self.phrase_terms();

@@ -102,8 +102,8 @@ impl PhraseQuery {
         if !has_positions {
             let field_name = field_entry.name();
             return Err(crate::TantivyError::SchemaError(format!(
-                "Applied phrase query on field {:?}, which does not have positions indexed",
-                field_name
+                "Applied phrase query on field {field_name:?}, which does not have positions \
                 indexed"
             )));
         }
         let terms = self.phrase_terms();

@@ -55,16 +55,16 @@ impl fmt::Debug for LogicalAst {
                 write!(formatter, "<emptyclause>")?;
             } else {
                 let (occur, subquery) = &clause[0];
-                write!(formatter, "({}{:?}", occur_letter(*occur), subquery)?;
+                write!(formatter, "({}{subquery:?}", occur_letter(*occur))?;
                 for (occur, subquery) in &clause[1..] {
-                    write!(formatter, " {}{:?}", occur_letter(*occur), subquery)?;
+                    write!(formatter, " {}{subquery:?}", occur_letter(*occur))?;
                 }
                 formatter.write_str(")")?;
             }
             Ok(())
         }
-        LogicalAst::Boost(ref ast, boost) => write!(formatter, "{:?}^{}", ast, boost),
-        LogicalAst::Leaf(ref literal) => write!(formatter, "{:?}", literal),
+        LogicalAst::Boost(ref ast, boost) => write!(formatter, "{ast:?}^{boost}"),
+        LogicalAst::Leaf(ref literal) => write!(formatter, "{literal:?}"),
         }
     }
 }

@@ -78,11 +78,11 @@ impl From<LogicalLiteral> for LogicalAst {
 impl fmt::Debug for LogicalLiteral {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         match *self {
-            LogicalLiteral::Term(ref term) => write!(formatter, "{:?}", term),
+            LogicalLiteral::Term(ref term) => write!(formatter, "{term:?}"),
             LogicalLiteral::Phrase(ref terms, slop) => {
-                write!(formatter, "\"{:?}\"", terms)?;
+                write!(formatter, "\"{terms:?}\"")?;
                 if slop > 0 {
-                    write!(formatter, "~{:?}", slop)
+                    write!(formatter, "~{slop:?}")
                 } else {
                     Ok(())
                 }

@@ -91,24 +91,23 @@ impl fmt::Debug for LogicalLiteral {
                 ref lower,
                 ref upper,
                 ..
-            } => write!(formatter, "({:?} TO {:?})", lower, upper),
+            } => write!(formatter, "({lower:?} TO {upper:?})"),
             LogicalLiteral::Set { ref elements, .. } => {
                 const MAX_DISPLAYED: usize = 10;

                 write!(formatter, "IN [")?;
                 for (i, element) in elements.iter().enumerate() {
                     if i == 0 {
-                        write!(formatter, "{:?}", element)?;
+                        write!(formatter, "{element:?}")?;
                     } else if i == MAX_DISPLAYED - 1 {
                         write!(
                             formatter,
-                            ", {:?}, ... ({} more)",
-                            element,
+                            ", {element:?}, ... ({} more)",
                             elements.len() - i - 1
                         )?;
                         break;
                     } else {
-                        write!(formatter, ", {:?}", element)?;
+                        write!(formatter, ", {element:?}")?;
                     }
                 }
                 write!(formatter, "]")

@@ -942,7 +942,7 @@ mod test {
         default_conjunction: bool,
     ) {
         let query = parse_query_to_logical_ast(query, default_conjunction).unwrap();
-        let query_str = format!("{:?}", query);
+        let query_str = format!("{query:?}");
         assert_eq!(query_str, expected);
     }

@@ -951,7 +951,7 @@ mod test {
         let query_parser = make_query_parser();
         let query = query_parser.parse_query("facet:/root/branch/leaf").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"TermQuery(Term(field=11, type=Facet, Facet(/root/branch/leaf)))"#
         );
     }

@@ -964,7 +964,7 @@ mod test {
         query_parser.set_field_boost(text_field, 2.0);
         let query = query_parser.parse_query("text:hello").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"Boost(query=TermQuery(Term(field=1, type=Str, "hello")), boost=2)"#
         );
     }

@@ -973,7 +973,7 @@ mod test {
     pub fn test_parse_query_range_with_boost() {
         let query = make_query_parser().parse_query("title:[A TO B]").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "RangeQuery { field: \"title\", value_type: Str, lower_bound: Included([97]), \
              upper_bound: Included([98]), limit: None }"
         );

@@ -987,7 +987,7 @@ mod test {
         query_parser.set_field_boost(text_field, 2.0);
         let query = query_parser.parse_query("text:hello^2").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             r#"Boost(query=Boost(query=TermQuery(Term(field=1, type=Str, "hello")), boost=2), boost=2)"#
         );
     }

@@ -1039,7 +1039,7 @@ mod test {
         let query_parser = make_query_parser();
         let query_result = query_parser.parse_query("");
         let query = query_result.unwrap();
-        assert_eq!(format!("{:?}", query), "EmptyQuery");
+        assert_eq!(format!("{query:?}"), "EmptyQuery");
     }

     #[test]

@@ -1481,7 +1481,7 @@ mod test {
         Ok(_) => panic!("should never succeed"),
         Err(e) => assert_eq!(
             "The facet field is malformed: Failed to parse the facet string: 'INVALID'",
-            format!("{}", e)
+            format!("{e}")
         ),
     }
     assert!(query_parser.parse_query("facet:\"/foo/bar\"").is_ok());

@@ -1574,7 +1574,7 @@ mod test {
         let query_parser = QueryParser::new(schema, Vec::new(), TokenizerManager::default());
         let query = query_parser.parse_query(r#"a\.b:hello"#).unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "TermQuery(Term(field=0, type=Str, \"hello\"))"
         );
     }

@@ -1668,7 +1668,7 @@ mod test {
         );
         let query = query_parser.parse_query("abc").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "BooleanQuery { subqueries: [(Should, FuzzyTermQuery { term: Term(field=0, \
              type=Str, \"abc\"), distance: 1, transposition_cost_one: true, prefix: false }), \
              (Should, TermQuery(Term(field=1, type=Str, \"abc\")))] }"

@@ -1685,7 +1685,7 @@ mod test {
         );
         let query = query_parser.parse_query("abc").unwrap();
         assert_eq!(
-            format!("{:?}", query),
+            format!("{query:?}"),
             "BooleanQuery { subqueries: [(Should, TermQuery(Term(field=0, type=Str, \
              \"abc\"))), (Should, FuzzyTermQuery { term: Term(field=1, type=Str, \"abc\"), \
              distance: 2, transposition_cost_one: false, prefix: true })] }"

@@ -337,8 +337,9 @@ impl Query for RangeQuery {
         let value_type = field_type.value_type();
         if value_type != self.value_type {
             let err_msg = format!(
-                "Create a range query of the type {:?}, when the field given was of type {:?}",
-                self.value_type, value_type
+                "Create a range query of the type {:?}, when the field given was of type \
                 {value_type:?}",
+                self.value_type
             );
             return Err(TantivyError::SchemaError(err_msg));
         }

@@ -49,8 +49,7 @@ impl Weight for IPFastFieldRangeWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let explanation = Explanation::new("Const", scorer.score());

@@ -91,8 +91,7 @@ impl Weight for FastFieldRangeWeight {
         let mut scorer = self.scorer(reader, 1.0)?;
         if scorer.seek(doc) != doc {
             return Err(TantivyError::InvalidArgument(format!(
-                "Document #({}) does not match",
-                doc
+                "Document #({doc}) does not match"
             )));
         }
         let explanation = Explanation::new("Const", scorer.score());

@@ -174,7 +174,7 @@ mod tests {
             IndexRecordOption::WithFreqs,
         );
         assert_eq!(
-            format!("{:?}", term_query),
+            format!("{term_query:?}"),
             r#"TermQuery(Term(field=1, type=Str, "hello"))"#
         );
     }

@@ -191,7 +191,7 @@ mod tests {
         assert_single_hit(query_from_ip(ip_addr_2));
         assert_single_hit(query_from_text("127.0.0.1".to_string()));
         assert_single_hit(query_from_text("\"127.0.0.1\"".to_string()));
-        assert_single_hit(query_from_text(format!("\"{}\"", ip_addr_1)));
-        assert_single_hit(query_from_text(format!("\"{}\"", ip_addr_2)));
+        assert_single_hit(query_from_text(format!("\"{ip_addr_1}\"")));
+        assert_single_hit(query_from_text(format!("\"{ip_addr_2}\"")));
     }
 }
@@ -174,7 +174,7 @@ impl Facet {

     /// This function is the inverse of Facet::from(&str).
     pub fn to_path_string(&self) -> String {
-        format!("{}", self)
+        format!("{self}")
     }
 }

@@ -233,7 +233,7 @@ impl<'de> Deserialize<'de> for Facet {

 impl Debug for Facet {
     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        write!(f, "Facet({})", self)?;
+        write!(f, "Facet({self})")?;
         Ok(())
     }
 }

@@ -264,12 +264,12 @@ mod tests {
     {
         let v = ["first", "second", "third"];
         let facet = Facet::from_path(v.iter());
-        assert_eq!(format!("{}", facet), "/first/second/third");
+        assert_eq!(format!("{facet}"), "/first/second/third");
     }
     {
         let v = ["first", "sec/ond", "third"];
         let facet = Facet::from_path(v.iter());
-        assert_eq!(format!("{}", facet), "/first/sec\\/ond/third");
+        assert_eq!(format!("{facet}"), "/first/sec\\/ond/third");
     }
 }

@@ -277,7 +277,7 @@ mod tests {
 fn test_facet_debug() {
     let v = ["first", "second", "third"];
     let facet = Facet::from_path(v.iter());
-    assert_eq!(format!("{:?}", facet), "Facet(/first/second/third)");
+    assert_eq!(format!("{facet:?}"), "Facet(/first/second/third)");
 }

 #[test]

@@ -628,7 +628,7 @@ mod tests {
     let doc = schema.parse_document(doc_json).unwrap();
     let date = doc.get_first(date_field).unwrap();
     // Time zone is converted to UTC
-    assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{:?}", date));
+    assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{date:?}"));
 }

 #[test]

@@ -519,7 +519,7 @@ where B: AsRef<[u8]>

 fn write_opt<T: std::fmt::Debug>(f: &mut fmt::Formatter, val_opt: Option<T>) -> fmt::Result {
     if let Some(val) = val_opt {
-        write!(f, "{:?}", val)?;
+        write!(f, "{val:?}")?;
     }
     Ok(())
 }

@@ -452,8 +452,7 @@ mod binary_serialize {
         _ => Err(io::Error::new(
             io::ErrorKind::InvalidData,
             format!(
-                "No extended field type is associated with code {:?}",
-                ext_type_code
+                "No extended field type is associated with code {ext_type_code:?}"
             ),
         )),
     }

@@ -477,7 +476,7 @@ mod binary_serialize {

         _ => Err(io::Error::new(
             io::ErrorKind::InvalidData,
-            format!("No field type is associated with code {:?}", type_code),
+            format!("No field type is associated with code {type_code:?}"),
         )),
     }
 }

@@ -83,7 +83,7 @@ pub struct ZstdCompressor {
 impl ZstdCompressor {
     fn deser_from_str(val: &str) -> Result<ZstdCompressor, String> {
         if !val.starts_with("zstd") {
-            return Err(format!("needs to start with zstd, but got {}", val));
+            return Err(format!("needs to start with zstd, but got {val}"));
         }
         if val == "zstd" {
             return Ok(ZstdCompressor::default());

@@ -94,15 +94,12 @@ impl ZstdCompressor {
         for option in options.split(',') {
             let (opt_name, value) = options
                 .split_once('=')
-                .ok_or_else(|| format!("no '=' found in option {:?}", option))?;
+                .ok_or_else(|| format!("no '=' found in option {option:?}"))?;

             match opt_name {
                 "compression_level" => {
                     let value = value.parse::<i32>().map_err(|err| {
-                        format!(
-                            "Could not parse value {} of option {}, e: {}",
-                            value, opt_name, err
-                        )
+                        format!("Could not parse value {value} of option {opt_name}, e: {err}")
                     })?;
                     if value >= 15 {
                         warn!(

@@ -114,7 +111,7 @@ impl ZstdCompressor {
                     compressor.compression_level = Some(value);
                 }
                 _ => {
-                    return Err(format!("unknown zstd option {:?}", opt_name));
+                    return Err(format!("unknown zstd option {opt_name:?}"));
                 }
             }
         }

@@ -122,7 +119,7 @@ impl ZstdCompressor {
     }
     fn ser_to_string(&self) -> String {
         if let Some(compression_level) = self.compression_level {
-            format!("zstd(compression_level={})", compression_level)
+            format!("zstd(compression_level={compression_level})")
         } else {
             "zstd".to_string()
         }

@@ -45,7 +45,7 @@ impl Decompressor {
             2 => Decompressor::Brotli,
             3 => Decompressor::Snappy,
             4 => Decompressor::Zstd,
-            _ => panic!("unknown compressor id {:?}", id),
+            _ => panic!("unknown compressor id {id:?}"),
         }
     }

@@ -27,10 +27,7 @@ impl BinarySerializable for DocStoreFooter {
     fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
         let doc_store_version = u32::deserialize(reader)?;
         if doc_store_version != DOC_STORE_VERSION {
-            panic!(
-                "actual doc store version: {}, expected: {}",
-                doc_store_version, DOC_STORE_VERSION
-            );
+            panic!("actual doc store version: {doc_store_version}, expected: {DOC_STORE_VERSION}");
         }
         let offset = u64::deserialize(reader)?;
         let compressor_id = u8::deserialize(reader)?;

@@ -221,7 +221,7 @@ mod tests {
     if let Some(last_checkpoint) = checkpoints.last() {
         for doc in 0u32..last_checkpoint.doc_range.end {
             let expected = seek_manual(skip_index.checkpoints(), doc);
-            assert_eq!(expected, skip_index.seek(doc), "Doc {}", doc);
+            assert_eq!(expected, skip_index.seek(doc), "Doc {doc}");
         }
         assert!(skip_index.seek(last_checkpoint.doc_range.end).is_none());
     }

@@ -128,7 +128,7 @@ pub mod tests {
                 .unwrap()
                 .as_text()
                 .unwrap(),
-            format!("Doc {}", i)
+            format!("Doc {i}")
         );
     }

@@ -136,7 +136,7 @@ pub mod tests {
         let doc = doc?;
         let title_content = doc.get_first(field_title).unwrap().as_text().unwrap();
         if !title_content.starts_with("Doc ") {
-            panic!("unexpected title_content {}", title_content);
+            panic!("unexpected title_content {title_content}");
         }

         let id = title_content

@@ -145,7 +145,7 @@ pub mod tests {
             .parse::<u32>()
             .unwrap();
         if alive_bitset.is_deleted(id) {
-            panic!("unexpected deleted document {}", id);
+            panic!("unexpected deleted document {id}");
         }
     }

@@ -173,13 +173,13 @@ pub mod tests {
                 .unwrap()
                 .as_text()
                 .unwrap(),
-            format!("Doc {}", i)
+            format!("Doc {i}")
         );
     }
     for (i, doc) in store.iter(None).enumerate() {
         assert_eq!(
             *doc?.get_first(field_title).unwrap().as_text().unwrap(),
-            format!("Doc {}", i)
+            format!("Doc {i}")
         );
     }
     Ok(())

@@ -158,7 +158,7 @@ impl StoreReader {
     /// Advanced API. In most cases use [`get`](Self::get).
     fn block_checkpoint(&self, doc_id: DocId) -> crate::Result<Checkpoint> {
         self.skip_index.seek(doc_id).ok_or_else(|| {
-            crate::TantivyError::InvalidArgument(format!("Failed to lookup Doc #{}.", doc_id))
+            crate::TantivyError::InvalidArgument(format!("Failed to lookup Doc #{doc_id}."))
         })
     }

@@ -359,8 +359,7 @@ mod tests {
         assert_eq!(
             term_info_store.get(i as u64),
             term_infos[i],
-            "term info {}",
-            i
+            "term info {i}"
         );
     }
     Ok(())

@@ -128,10 +128,7 @@ impl TermDictionary {
     if version != FST_VERSION {
         return Err(io::Error::new(
             io::ErrorKind::Other,
-            format!(
-                "Unsuported fst version, expected {}, found {}",
-                version, FST_VERSION,
-            ),
+            format!("Unsuported fst version, expected {version}, found {FST_VERSION}",),
         ));
     }

@@ -76,8 +76,8 @@ impl TermDictionary {
         return Err(io::Error::new(
             io::ErrorKind::Other,
             format!(
-                "Unsuported dictionary type, expected {}, found {}",
-                CURRENT_TYPE as u32, dict_type,
+                "Unsuported dictionary type, expected {}, found {dict_type}",
+                CURRENT_TYPE as u32,
             ),
         ));
     }

@@ -4052,9 +4052,7 @@ mod tests {
     assert_eq!(
         folding_using_raw_tokenizer_helper(c),
         folded,
-        "testing that character \"{}\" becomes \"{}\"",
-        c,
-        folded
+        "testing that character \"{c}\" becomes \"{folded}\""
     );
 }

@@ -177,20 +177,14 @@ pub mod tests {
 pub fn assert_token(token: &Token, position: usize, text: &str, from: usize, to: usize) {
     assert_eq!(
         token.position, position,
-        "expected position {} but {:?}",
-        position, token
+        "expected position {position} but {token:?}"
     );
-    assert_eq!(token.text, text, "expected text {} but {:?}", text, token);
+    assert_eq!(token.text, text, "expected text {text} but {token:?}");
     assert_eq!(
         token.offset_from, from,
-        "expected offset_from {} but {:?}",
-        from, token
-    );
-    assert_eq!(
-        token.offset_to, to,
-        "expected offset_to {} but {:?}",
-        to, token
+        "expected offset_from {from} but {token:?}"
     );
+    assert_eq!(token.offset_to, to, "expected offset_to {to} but {token:?}");
 }

 #[test]

@@ -188,8 +188,7 @@ impl<TSSTable: SSTable> Dictionary<TSSTable> {
     return Err(io::Error::new(
         io::ErrorKind::Other,
         format!(
-            "Unsuported sstable version, expected {}, found {}",
-            version,
+            "Unsuported sstable version, expected {version}, found {}",
             crate::SSTABLE_VERSION,
         ),
     ));

@@ -499,7 +498,7 @@ mod tests {
     let new_range = dic.sstable_index.get_block_with_ord(ordinal).byte_range;
     slice.restrict(new_range);
     assert!(dic.ord_to_term(ordinal, &mut res).unwrap());
-    assert_eq!(res, format!("{:05X}", ordinal).into_bytes());
+    assert_eq!(res, format!("{ordinal:05X}").into_bytes());
    assert_eq!(dic.term_info_from_ord(ordinal).unwrap().unwrap(), ordinal);
    assert_eq!(dic.get(&res).unwrap().unwrap(), ordinal);
    assert_eq!(dic.term_ord(&res).unwrap().unwrap(), ordinal);

@@ -256,8 +256,8 @@ where
         || self.previous_key[keep_len] < key[keep_len];
     assert!(
         increasing_keys,
-        "Keys should be increasing. ({:?} > {:?})",
-        self.previous_key, key
+        "Keys should be increasing. ({:?} > {key:?})",
+        self.previous_key
     );
     self.previous_key.resize(key.len(), 0u8);
     self.previous_key[keep_len..].copy_from_slice(&key[keep_len..]);

@@ -235,14 +235,13 @@ mod tests {
     for i in 0..10_000_000 {
         match len_to_capacity(i) {
             CapacityResult::NeedAlloc(cap) => {
-                assert_eq!(available, 0, "Failed len={}: Expected 0 got {}", i, cap);
+                assert_eq!(available, 0, "Failed len={i}: Expected 0 got {cap}");
                 available = cap;
             }
             CapacityResult::Available(cap) => {
                 assert_eq!(
                     available, cap,
-                    "Failed len={}: Expected {} Got {}",
-                    i, available, cap
+                    "Failed len={i}: Expected {available} Got {cap}"
                 );
             }
         }