* clippy

* use readable version

---------

Co-authored-by: Pascal Seitz <pascal.seitz@datadoghq.com>
Author: PSeitz
Date: 2025-07-02 11:25:03 +02:00 (committed by GitHub)
Commit: 945af922d1 (parent 295d07e55c)
31 changed files with 101 additions and 134 deletions
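
The changes below are mechanical cleanups of the kind recent clippy versions suggest: `io::Error::new(io::ErrorKind::Other, e)` becomes `io::Error::other(e)`, `iter::repeat(x).take(n)` becomes `iter::repeat_n(x, n)`, and format arguments are inlined into their format strings. A minimal before/after sketch of the three patterns (illustrative only, not lifted from any one file in the diff):

```rust
use std::io;

// Pattern 1: io::Error::other(e) replaces io::Error::new(ErrorKind::Other, e)
// (stable since Rust 1.74). Because `other` is a plain generic function, a
// closure like `|e| io::Error::new(io::ErrorKind::Other, e)` collapses to the
// point-free `.map_err(io::Error::other)` seen throughout the diff.
fn old_err(msg: String) -> io::Error {
    io::Error::new(io::ErrorKind::Other, msg)
}
fn new_err(msg: String) -> io::Error {
    io::Error::other(msg)
}

fn main() {
    assert_eq!(old_err("boom".into()).kind(), new_err("boom".into()).kind());

    // Pattern 2: iter::repeat(x).take(n) becomes iter::repeat_n(x, n)
    // (stable since Rust 1.82).
    let old: Vec<u32> = std::iter::repeat(7u32).take(3).collect();
    let new: Vec<u32> = std::iter::repeat_n(7u32, 3).collect();
    assert_eq!(old, new);

    // Pattern 3: format arguments are inlined into the format string.
    let path = "meta.json";
    assert_eq!(format!("{:?}", path), format!("{path:?}"));
}
```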

@@ -518,7 +518,7 @@ impl SegmentTermCollector {
|term| {
let entry = entries[idx];
let intermediate_entry = into_intermediate_bucket_entry(entry.0, entry.1)
- .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
+ .map_err(io::Error::other)?;
dict.insert(
IntermediateKey::Str(
String::from_utf8(term.to_vec()).expect("could not convert to String"),

@@ -739,7 +739,7 @@ mod tests {
.flat_map(|(c, count)| {
let facet = Facet::from(&format!("/facet/{c}"));
let doc = doc!(facet_field => facet);
- iter::repeat(doc).take(count)
+ std::iter::repeat_n(doc, count)
})
.map(|mut doc| {
doc.add_facet(
@@ -787,7 +787,7 @@ mod tests {
.flat_map(|(c, count)| {
let facet = Facet::from(&format!("/facet/{c}"));
let doc = doc!(facet_field => facet);
- iter::repeat(doc).take(count)
+ std::iter::repeat_n(doc, count)
})
.collect();

@@ -30,7 +30,7 @@ fn create_format() {
}
fn path_for_version(version: &str) -> String {
format!("./tests/compat_tests_data/index_v{}/", version)
format!("./tests/compat_tests_data/index_v{version}/")
}
/// feature flag quickwit uses a different dictionary type

@@ -65,8 +65,7 @@ impl Executor {
if let Err(err) = fruit_sender_ref.send((idx, fruit)) {
error!(
"Failed to send search task. It probably means all search \
threads have panicked. {:?}",
err
threads have panicked. {err:?}"
);
}
});

@@ -56,7 +56,7 @@ impl<T: Send + Sync + 'static> From<Box<T>> for DirectoryLock {
impl Drop for DirectoryLockGuard {
fn drop(&mut self) {
if let Err(e) = self.directory.delete(&self.path) {
error!("Failed to remove the lock file. {:?}", e);
error!("Failed to remove the lock file. {e:?}");
}
}
}

@@ -51,7 +51,7 @@ impl FileWatcher {
.map(|current_checksum| current_checksum != checksum)
.unwrap_or(true);
if metafile_has_changed {
info!("Meta file {:?} was modified", path);
info!("Meta file {path:?} was modified");
current_checksum_opt = Some(checksum);
// We actually ignore callbacks failing here.
// We just wait for the end of their execution.
@@ -75,7 +75,7 @@ impl FileWatcher {
let reader = match fs::File::open(path) {
Ok(f) => io::BufReader::new(f),
Err(e) => {
warn!("Failed to open meta file {:?}: {:?}", path, e);
warn!("Failed to open meta file {path:?}: {e:?}");
return Err(e);
}
};

@@ -157,7 +157,7 @@ impl ManagedDirectory {
for file_to_delete in files_to_delete {
match self.delete(&file_to_delete) {
Ok(_) => {
info!("Deleted {:?}", file_to_delete);
info!("Deleted {file_to_delete:?}");
deleted_files.push(file_to_delete);
}
Err(file_error) => {
@@ -170,7 +170,7 @@ impl ManagedDirectory {
if !cfg!(target_os = "windows") {
// On windows, delete is expected to fail if the file
// is mmapped.
error!("Failed to delete {:?}", file_to_delete);
error!("Failed to delete {file_to_delete:?}");
}
}
}

@@ -29,7 +29,7 @@ pub type WeakArcBytes = Weak<dyn Deref<Target = [u8]> + Send + Sync + 'static>;
/// Create a default io error given a string.
pub(crate) fn make_io_err(msg: String) -> io::Error {
- io::Error::new(io::ErrorKind::Other, msg)
+ io::Error::other(msg)
}
/// Returns `None` iff the file exists, can be read, but is empty (and hence
@@ -369,7 +369,7 @@ pub(crate) fn atomic_write(path: &Path, content: &[u8]) -> io::Result<()> {
impl Directory for MmapDirectory {
fn get_file_handle(&self, path: &Path) -> Result<Arc<dyn FileHandle>, OpenReadError> {
debug!("Open Read {:?}", path);
debug!("Open Read {path:?}");
let full_path = self.resolve_path(path);
let mut mmap_cache = self.inner.mmap_cache.write().map_err(|_| {
@@ -414,7 +414,7 @@ impl Directory for MmapDirectory {
}
fn open_write(&self, path: &Path) -> Result<WritePtr, OpenWriteError> {
debug!("Open Write {:?}", path);
debug!("Open Write {path:?}");
let full_path = self.resolve_path(path);
let open_res = OpenOptions::new()
@@ -467,7 +467,7 @@ impl Directory for MmapDirectory {
}
fn atomic_write(&self, path: &Path, content: &[u8]) -> io::Result<()> {
debug!("Atomic Write {:?}", path);
debug!("Atomic Write {path:?}");
let full_path = self.resolve_path(path);
atomic_write(&full_path, content)?;
Ok(())

@@ -191,7 +191,7 @@ impl Directory for RamDirectory {
.fs
.read()
.map_err(|e| OpenReadError::IoError {
- io_error: Arc::new(io::Error::new(io::ErrorKind::Other, e.to_string())),
+ io_error: Arc::new(io::Error::other(e.to_string())),
filepath: path.to_path_buf(),
})?
.exists(path))

@@ -90,10 +90,7 @@ impl WatchCallbackList {
let _ = sender.send(Ok(()));
});
if let Err(err) = spawn_res {
- error!(
-     "Failed to spawn thread to call watch callbacks. Cause: {:?}",
-     err
- );
+ error!("Failed to spawn thread to call watch callbacks. Cause: {err:?}");
}
result
}

@@ -370,7 +370,7 @@ impl<D: Document> IndexWriter<D> {
.map_err(|_| error_in_index_worker_thread("Failed to join merging thread."));
if let Err(ref e) = result {
error!("Some merging thread failed {:?}", e);
error!("Some merging thread failed {e:?}");
}
result
@@ -644,7 +644,7 @@ impl<D: Document> IndexWriter<D> {
let commit_opstamp = self.stamper.stamp();
let prepared_commit = PreparedCommit::new(self, commit_opstamp);
info!("Prepared commit {}", commit_opstamp);
info!("Prepared commit {commit_opstamp}");
Ok(prepared_commit)
}

@@ -501,8 +501,7 @@ impl SegmentUpdater {
Ok(segment_entries) => segment_entries,
Err(err) => {
warn!(
"Starting the merge failed for the following reason. This is not fatal. {}",
err
"Starting the merge failed for the following reason. This is not fatal. {err}"
);
return err.into();
}

@@ -873,8 +873,8 @@ mod tests {
fn assert_type(reader: &SegmentReader, field: &str, typ: ColumnType) {
let cols = reader.fast_fields().dynamic_column_handles(field).unwrap();
assert_eq!(cols.len(), 1, "{}", field);
assert_eq!(cols[0].column_type(), typ, "{}", field);
assert_eq!(cols.len(), 1, "{field}");
assert_eq!(cols[0].column_type(), typ, "{field}");
}
assert_type(segment_reader, "json.toto", ColumnType::Str);
assert_type(segment_reader, "json.float", ColumnType::F64);

@@ -206,7 +206,7 @@ pub(crate) mod tests {
#[test]
fn test_position() -> crate::Result<()> {
const CONST_VAL: u32 = 9u32;
- let positions_delta: Vec<u32> = iter::repeat(CONST_VAL).take(2_000_000).collect();
+ let positions_delta: Vec<u32> = std::iter::repeat_n(CONST_VAL, 2_000_000).collect();
let positions_data = create_positions_data(&positions_delta[..])?;
assert_eq!(positions_data.len(), 1_015_627);
let mut position_reader = PositionReader::open(positions_data)?;

@@ -436,7 +436,7 @@ mod tests {
let fieldnorms_expanded = fieldnorms
.iter()
.cloned()
- .flat_map(|fieldnorm| iter::repeat(fieldnorm).take(REPEAT))
+ .flat_map(|fieldnorm| std::iter::repeat_n(fieldnorm, REPEAT))
.collect::<Vec<u32>>();
let postings_lists_expanded: Vec<Vec<(DocId, u32)>> = posting_lists

@@ -384,7 +384,7 @@ mod tests {
.search(&ExistsQuery::new(field.to_string(), json_subpaths), &Count)
.unwrap_err()
.to_string(),
format!("The field does not exist: '{}'", field)
format!("The field does not exist: '{field}'")
);
}
}

@@ -124,8 +124,7 @@ impl RegexPhraseQuery {
if field_type != Type::Str {
return Err(crate::TantivyError::SchemaError(format!(
"RegexPhraseQuery can only be used with a field of type text currently, but got \
{:?}",
field_type
{field_type:?}"
)));
}

@@ -81,8 +81,7 @@ impl RegexPhraseWeight {
num_terms += term_infos.len();
if num_terms > self.max_expansions as usize {
return Err(crate::TantivyError::InvalidArgument(format!(
"Phrase query exceeded max expansions {}",
num_terms
"Phrase query exceeded max expansions {num_terms}"
)));
}
let union = Self::get_union_from_term_infos(&term_infos, reader, &inverted_index)?;

@@ -177,8 +177,7 @@ impl Weight for FastFieldRangeWeight {
} else {
assert!(
maps_to_u64_fastfield(field_type.value_type()),
"{:?}",
field_type
"{field_type:?}"
);
let bounds = self.bounds.map_bound_res(|term| {
@@ -193,8 +192,7 @@ impl Weight for FastFieldRangeWeight {
val.to_u64()
} else {
return Err(TantivyError::InvalidArgument(format!(
"Expected term with u64, i64, f64 or date, but got {:?}",
term
"Expected term with u64, i64, f64 or date, but got {term:?}"
)));
};
Ok(val)
@@ -254,12 +252,9 @@ fn search_on_json_numerical_field(
else {
return Ok(Box::new(EmptyScorer));
};
- let actual_column_type: NumericalType = col_type.numerical_type().unwrap_or_else(|| {
-     panic!(
-         "internal error: couldn't cast to numerical_type: {:?}",
-         col_type
-     )
- });
+ let actual_column_type: NumericalType = col_type
+     .numerical_type()
+     .unwrap_or_else(|| panic!("internal error: couldn't cast to numerical_type: {col_type:?}"));
let bounds = match typ.numerical_type().unwrap() {
NumericalType::I64 => {
@@ -1058,7 +1053,7 @@ mod tests {
fn doc_from_id_1(id: u64) -> Doc {
let id = id * 1000;
Doc {
id_name: format!("id_name{:010}", id),
id_name: format!("id_name{id:010}"),
id,
}
}
@@ -1242,7 +1237,7 @@ mod tests {
let field_path = |field: &str| {
if json {
format!("json.{}", field)
format!("json.{field}")
} else {
field.to_string()
}

@@ -52,7 +52,7 @@ impl TermSetQuery {
.iter()
.map(|key| (key.serialized_value_bytes(), 0)),
)
- .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
+ .map_err(std::io::Error::other)?;
sub_queries.push((
Occur::Should,

@@ -172,7 +172,7 @@ mod tests {
let doc = i * 10;
doc_and_tfs.push((doc, 1u32 + doc % 3u32));
}
- let fieldnorms: Vec<u32> = std::iter::repeat(10u32).take(3_000).collect();
+ let fieldnorms: Vec<u32> = std::iter::repeat_n(10u32, 3_000).collect();
let mut term_scorer = TermScorer::create_for_test(&doc_and_tfs, &fieldnorms, bm25_weight);
assert_eq!(term_scorer.doc(), 0u32);
term_scorer.shallow_seek(1289);
@@ -238,7 +238,7 @@ mod tests {
doc_tfs.push((257, 3u32));
doc_tfs.push((258, 1u32));
- let fieldnorms: Vec<u32> = std::iter::repeat(20u32).take(300).collect();
+ let fieldnorms: Vec<u32> = std::iter::repeat_n(20u32, 300).collect();
let bm25_weight = Bm25Weight::for_one_term(10, 129, 20.0);
let mut docs = TermScorer::create_for_test(&doc_tfs[..], &fieldnorms[..], bm25_weight);
assert_nearly_equals!(docs.block_max_score(), 2.5161593);
@@ -304,7 +304,7 @@ mod tests {
writer.set_merge_policy(Box::new(NoMergePolicy));
for _ in 0..3_000 {
let term_freq = rng.gen_range(1..10000);
let words: Vec<&str> = std::iter::repeat("bbbb").take(term_freq).collect();
let words: Vec<&str> = std::iter::repeat_n("bbbb", term_freq).collect();
let text = words.join(" ");
writer.add_document(doc!(text_field=>text))?;
}

@@ -86,10 +86,7 @@ impl IndexReaderBuilder {
let inner_reader_arc_clone = inner_reader_arc.clone();
let callback = move || {
if let Err(err) = inner_reader_arc_clone.reload() {
- error!(
-     "Error while loading searcher after commit was detected. {:?}",
-     err
- );
+ error!("Error while loading searcher after commit was detected. {err:?}");
}
};
let watch_handle = inner_reader_arc

@@ -152,7 +152,7 @@ impl WarmingStateInner {
// rely on deterministic gc in tests
#[cfg(not(test))]
if let Err(err) = std::panic::catch_unwind(|| inner.lock().unwrap().gc_maybe()) {
error!("Panic in Warmer GC {:?}", err);
error!("Panic in Warmer GC {err:?}");
}
// avoid unused var warning in tests
#[cfg(test)]

@@ -55,13 +55,10 @@ where W: Write
}
if num_field_values != actual_length {
- return Err(io::Error::new(
-     io::ErrorKind::Other,
-     format!(
-         "Unexpected number of entries written to serializer, expected \
-          {num_field_values} entries, got {actual_length} entries",
-     ),
- ));
+ return Err(io::Error::other(format!(
+     "Unexpected number of entries written to serializer, expected {num_field_values} \
+      entries, got {actual_length} entries",
+ )));
}
Ok(())
@@ -215,14 +212,11 @@ where W: Write
/// Finishes writing the array to the writer and validates it.
pub(crate) fn end(self) -> io::Result<()> {
if self.expected_length != self.actual_length {
- return Err(io::Error::new(
-     io::ErrorKind::Other,
-     format!(
-         "Unexpected number of entries written to serializer, expected {} entries, got \
-          {} entries",
-         self.expected_length, self.actual_length,
-     ),
- ));
+ return Err(io::Error::other(format!(
+     "Unexpected number of entries written to serializer, expected {} entries, got {} \
+      entries",
+     self.expected_length, self.actual_length,
+ )));
}
Ok(())
}
@@ -276,14 +270,11 @@ where W: Write
/// Finishes writing the array to the writer and validates it.
pub(crate) fn end(self) -> io::Result<()> {
if self.expected_length != self.actual_length {
- return Err(io::Error::new(
-     io::ErrorKind::Other,
-     format!(
-         "Unexpected number of entries written to serializer, expected {} entries, got \
-          {} entries",
-         self.expected_length, self.actual_length,
-     ),
- ));
+ return Err(io::Error::other(format!(
+     "Unexpected number of entries written to serializer, expected {} entries, got {} \
+      entries",
+     self.expected_length, self.actual_length,
+ )));
}
// This should never fail if the above statement is valid.

@@ -52,7 +52,7 @@ impl BinarySerializable for DocStoreVersion {
v => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Invalid doc store version {}", v),
format!("Invalid doc store version {v}"),
))
}
})
@@ -320,7 +320,9 @@ impl StoreReader {
doc_pos = 0;
}
- let alive = alive_bitset.map_or(true, |bitset| bitset.is_alive(doc_id));
+ let alive = alive_bitset
+     .map(|bitset| bitset.is_alive(doc_id))
+     .unwrap_or(true);
let res = if alive {
Some((curr_block.clone(), doc_pos))
} else {
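
The `alive_bitset` rewrite in the hunk above is presumably what the "use readable version" bullet in the commit message refers to: `map_or` takes the default value *before* the closure, so the expression reads back to front. A sketch of the readability argument, with a plain `Vec<bool>` as a hypothetical stand-in for the crate's alive-bitset type:

```rust
fn is_alive(alive_bitset: Option<&Vec<bool>>, doc_id: usize) -> bool {
    // Before: default first, logic second — reads out of order:
    //     alive_bitset.map_or(true, |bitset| bitset[doc_id])
    // After: the logic comes first and the fallback last.
    alive_bitset.map(|bitset| bitset[doc_id]).unwrap_or(true)
}
```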

@@ -215,7 +215,7 @@ impl DedicatedThreadBlockCompressorImpl {
fn send(&mut self, msg: BlockCompressorMessage) -> io::Result<()> {
if self.tx.send(msg).is_err() {
harvest_thread_result(self.join_handle.take())?;
return Err(io::Error::new(io::ErrorKind::Other, "Unidentified error."));
return Err(io::Error::other("Unidentified error."));
}
Ok(())
}
@@ -231,11 +231,10 @@ impl DedicatedThreadBlockCompressorImpl {
/// If the thread panicked, or if the result has already been harvested,
/// returns an explicit error.
fn harvest_thread_result(join_handle_opt: Option<JoinHandle<io::Result<()>>>) -> io::Result<()> {
- let join_handle = join_handle_opt
-     .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Thread already joined."))?;
+ let join_handle = join_handle_opt.ok_or_else(|| io::Error::other("Thread already joined."))?;
join_handle
.join()
- .map_err(|_err| io::Error::new(io::ErrorKind::Other, "Compressing thread panicked."))?
+ .map_err(|_err| io::Error::other("Compressing thread panicked."))?
}
#[cfg(test)]

@@ -13,7 +13,7 @@ use crate::postings::TermInfo;
use crate::termdict::TermOrdinal;
fn convert_fst_error(e: tantivy_fst::Error) -> io::Error {
- io::Error::new(io::ErrorKind::Other, e)
+ io::Error::other(e)
}
const FST_VERSION: u32 = 1;
@@ -128,10 +128,9 @@ impl TermDictionary {
let footer_size = u64::deserialize(&mut footer_len_bytes)?;
let version = u32::deserialize(&mut footer_len_bytes)?;
if version != FST_VERSION {
- return Err(io::Error::new(
-     io::ErrorKind::Other,
-     format!("Unsupported fst version, expected {version}, found {FST_VERSION}",),
- ));
+ return Err(io::Error::other(format!(
+     "Unsupported fst version, expected {version}, found {FST_VERSION}",
+ )));
}
let (fst_file_slice, values_file_slice) = main_slice.split_from_end(footer_size as usize);

@@ -84,20 +84,14 @@ impl TermDictionary {
let mut dict_type = dict_type.read_bytes()?;
let dict_type = u32::deserialize(&mut dict_type)?;
let dict_type = DictionaryType::try_from(dict_type).map_err(|_| {
- io::Error::new(
-     io::ErrorKind::Other,
-     format!("Unsupported dictionary type, found {dict_type}"),
- )
+ io::Error::other(format!("Unsupported dictionary type, found {dict_type}"))
})?;
if dict_type != CURRENT_TYPE {
- return Err(io::Error::new(
-     io::ErrorKind::Other,
-     format!(
-         "Unsupported dictionary type, compiled tantivy with {CURRENT_TYPE:?}, but got \
-          {dict_type:?}",
-     ),
- ));
+ return Err(io::Error::other(format!(
+     "Unsupported dictionary type, compiled tantivy with {CURRENT_TYPE:?}, but got \
+      {dict_type:?}",
+ )));
}
InnerTermDict::open(main_slice).map(TermDictionary)

@@ -1605,35 +1605,35 @@ mod tests {
Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij
ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl";
let mut vec: Vec<&str> = vec!["Des", "mot", "cles", "A", "LA", "CHAINE"];
vec.extend(iter::repeat("A").take(6));
vec.extend(iter::repeat("AE").take(1));
vec.extend(iter::repeat("C").take(1));
vec.extend(iter::repeat("E").take(4));
vec.extend(iter::repeat("I").take(4));
vec.extend(iter::repeat("IJ").take(1));
vec.extend(iter::repeat("D").take(1));
vec.extend(iter::repeat("N").take(1));
vec.extend(iter::repeat("O").take(6));
vec.extend(iter::repeat("OE").take(1));
vec.extend(iter::repeat("TH").take(1));
vec.extend(iter::repeat("U").take(4));
vec.extend(iter::repeat("Y").take(2));
vec.extend(iter::repeat("a").take(6));
vec.extend(iter::repeat("ae").take(1));
vec.extend(iter::repeat("c").take(1));
vec.extend(iter::repeat("e").take(4));
vec.extend(iter::repeat("i").take(4));
vec.extend(iter::repeat("ij").take(1));
vec.extend(iter::repeat("d").take(1));
vec.extend(iter::repeat("n").take(1));
vec.extend(iter::repeat("o").take(6));
vec.extend(iter::repeat("oe").take(1));
vec.extend(iter::repeat("ss").take(1));
vec.extend(iter::repeat("th").take(1));
vec.extend(iter::repeat("u").take(4));
vec.extend(iter::repeat("y").take(2));
vec.extend(iter::repeat("fi").take(1));
vec.extend(iter::repeat("fl").take(1));
vec.extend(std::iter::repeat_n("A", 6));
vec.extend(std::iter::repeat_n("AE", 1));
vec.extend(std::iter::repeat_n("C", 1));
vec.extend(std::iter::repeat_n("E", 4));
vec.extend(std::iter::repeat_n("I", 4));
vec.extend(std::iter::repeat_n("IJ", 1));
vec.extend(std::iter::repeat_n("D", 1));
vec.extend(std::iter::repeat_n("N", 1));
vec.extend(std::iter::repeat_n("O", 6));
vec.extend(std::iter::repeat_n("OE", 1));
vec.extend(std::iter::repeat_n("TH", 1));
vec.extend(std::iter::repeat_n("U", 4));
vec.extend(std::iter::repeat_n("Y", 2));
vec.extend(std::iter::repeat_n("a", 6));
vec.extend(std::iter::repeat_n("ae", 1));
vec.extend(std::iter::repeat_n("c", 1));
vec.extend(std::iter::repeat_n("e", 4));
vec.extend(std::iter::repeat_n("i", 4));
vec.extend(std::iter::repeat_n("ij", 1));
vec.extend(std::iter::repeat_n("d", 1));
vec.extend(std::iter::repeat_n("n", 1));
vec.extend(std::iter::repeat_n("o", 6));
vec.extend(std::iter::repeat_n("oe", 1));
vec.extend(std::iter::repeat_n("ss", 1));
vec.extend(std::iter::repeat_n("th", 1));
vec.extend(std::iter::repeat_n("u", 4));
vec.extend(std::iter::repeat_n("y", 2));
vec.extend(std::iter::repeat_n("fi", 1));
vec.extend(std::iter::repeat_n("fl", 1));
assert_eq!(folding_helper(latin1_string), vec);
}

@@ -287,7 +287,7 @@ impl Iterator for CodepointFrontiers<'_> {
type Item = usize;
fn next(&mut self) -> Option<usize> {
- self.next_el.map(|offset| {
+ self.next_el.inspect(|&offset| {
if self.s.is_empty() {
self.next_el = None;
} else {
@@ -295,7 +295,6 @@ impl Iterator for CodepointFrontiers<'_> {
self.s = &self.s[first_codepoint_width..];
self.next_el = Some(offset + first_codepoint_width);
}
-     offset
})
}
}
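
The `map` → `inspect` change above is worth a note: `Option::inspect` (stable since Rust 1.76) runs a closure on the contained value by reference and returns the option unchanged, so a `map` whose closure only performs side effects and ends by returning its input untouched — hence the deleted trailing `offset` line — can become an `inspect`. A standalone sketch, with a `println!` standing in for the real closure body:

```rust
fn log_frontier(next_el: Option<usize>) -> Option<usize> {
    // Before: the closure had to end by returning `offset` unchanged:
    //     next_el.map(|offset| { println!("frontier at {offset}"); offset })
    // After: `inspect` passes the value through for us.
    next_el.inspect(|&offset| println!("frontier at {offset}"))
}
```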

@@ -32,8 +32,7 @@ impl BinarySerializable for PreTokenizedString {
if let Ok(text) = serde_json::to_string(self) {
<String as BinarySerializable>::serialize(&text, writer)
} else {
- Err(io::Error::new(
-     io::ErrorKind::Other,
+ Err(io::Error::other(
"Failed to dump PreTokenizedString to json.",
))
}
@@ -45,8 +44,7 @@ impl BinarySerializable for PreTokenizedString {
if let Ok(value) = serde_json::from_str(&json_text) {
Ok(value)
} else {
- Err(io::Error::new(
-     io::ErrorKind::Other,
+ Err(io::Error::other(
"Failed to parse string data as PreTokenizedString.",
))
}