Mirror of https://github.com/quickwit-oss/tantivy.git, synced 2025-12-23 02:29:57 +00:00
Pleasing clippy (#1253)
@@ -62,29 +62,30 @@ impl TinySet {
         self.0 = 0u64;
     }
 
-    #[inline]
     /// Returns the complement of the set in `[0, 64[`.
     ///
     /// Careful on making this function public, as it will break the padding handling in the last
     /// bucket.
+    #[inline]
     fn complement(self) -> TinySet {
         TinySet(!self.0)
     }
 
-    #[inline]
     /// Returns true iff the `TinySet` contains the element `el`.
+    #[inline]
     pub fn contains(self, el: u32) -> bool {
         !self.intersect(TinySet::singleton(el)).is_empty()
     }
 
-    #[inline]
     /// Returns the number of elements in the TinySet.
+    #[inline]
     pub fn len(self) -> u32 {
         self.0.count_ones()
     }
 
-    #[inline]
     /// Returns the intersection of `self` and `other`
+    #[inline]
+    #[must_use]
     pub fn intersect(self, other: TinySet) -> TinySet {
         TinySet(self.0 & other.0)
     }
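The TinySet hunks above fix two recurring patterns: `#[inline]` is moved below the doc comments so the attributes sit directly on the function, and combinators that take `self` by value and return a new `TinySet` gain `#[must_use]`, making a silently discarded result a compiler warning. A minimal sketch of the `#[must_use]` effect, using a hypothetical `Bits` newtype rather than tantivy's actual type:

    /// A tiny immutable-style bitset whose combinators return a new
    /// value instead of mutating in place.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Bits(u64);

    impl Bits {
        /// Doc comment first, then the attributes, as in the diff above.
        #[inline]
        #[must_use]
        fn insert(self, el: u32) -> Bits {
            Bits(self.0 | (1u64 << el))
        }
    }

    fn main() {
        let b = Bits(0);
        b.insert(3); // warning: unused return value that must be used
        let b = b.insert(3); // correct: rebind the returned set
        assert_eq!(b, Bits(8));
    }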
@@ -98,12 +99,14 @@ impl TinySet {
 
     /// Insert a new element within [0..64)
     #[inline]
+    #[must_use]
     pub fn insert(self, el: u32) -> TinySet {
         self.union(TinySet::singleton(el))
     }
 
     /// Removes an element within [0..64)
     #[inline]
+    #[must_use]
     pub fn remove(self, el: u32) -> TinySet {
         self.intersect(TinySet::singleton(el).complement())
     }
@@ -130,6 +133,7 @@ impl TinySet {
 
     /// Returns the union of two tinysets
     #[inline]
+    #[must_use]
     pub fn union(self, other: TinySet) -> TinySet {
         TinySet(self.0 | other.0)
     }
@@ -1,3 +1,5 @@
+#![allow(clippy::return_self_not_must_use)]
+
 use stable_deref_trait::StableDeref;
 use std::convert::TryInto;
 use std::mem;
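The crate-level `#![allow(clippy::return_self_not_must_use)]` silences a lint that fires on every method taking `self` and returning `Self` without a `#[must_use]` marker. A hedged illustration of what the lint complains about (a hypothetical builder, not tantivy code):

    #![warn(clippy::return_self_not_must_use)]

    #[derive(Default)]
    pub struct Builder {
        verbose: bool,
    }

    impl Builder {
        // clippy: missing `#[must_use]` attribute on a method returning `Self`
        pub fn verbose(mut self, yes: bool) -> Self {
            self.verbose = yes;
            self
        }
    }

    fn main() {
        let _b = Builder::default().verbose(true);
    }

Allowing it wholesale here, while adding `#[must_use]` case by case elsewhere in the commit, keeps the warning noise down without annotating every builder-style method.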
@@ -35,6 +37,8 @@ impl OwnedBytes {
     }
 
     /// creates a fileslice that is just a view over a slice of the data.
+    #[must_use]
     #[inline]
     pub fn slice(&self, range: Range<usize>) -> Self {
         OwnedBytes {
             data: &self.data[range],
@@ -63,6 +67,8 @@ impl OwnedBytes {
     /// On the other hand, both `left` and `right` retain a handle over
     /// the entire slice of memory. In other words, the memory will only
     /// be released when both left and right are dropped.
     #[inline]
+    #[must_use]
     pub fn split(self, split_len: usize) -> (OwnedBytes, OwnedBytes) {
         let right_box_stable_deref = self.box_stable_deref.clone();
         let left = OwnedBytes {
@@ -91,6 +91,7 @@ pub enum UserInputAst {
 }
 
 impl UserInputAst {
+    #[must_use]
     pub fn unary(self, occur: Occur) -> UserInputAst {
         UserInputAst::Clause(vec![(Some(occur), self)])
     }
@@ -210,6 +210,7 @@ impl TopDocs {
     /// Ok(())
     /// # }
     /// ```
+    #[must_use]
     pub fn and_offset(self, offset: usize) -> TopDocs {
         TopDocs(self.0.and_offset(offset))
     }
@@ -66,6 +66,7 @@ impl FileSlice {
 
     /// Wraps a FileHandle.
     #[doc(hidden)]
+    #[must_use]
     pub fn new_with_num_bytes(file_handle: Box<dyn FileHandle>, num_bytes: usize) -> Self {
         FileSlice {
             data: Arc::from(file_handle),
@@ -43,14 +43,16 @@ impl FileWatcher {
         thread::Builder::new()
             .name("thread-tantivy-meta-file-watcher".to_string())
             .spawn(move || {
-                let mut current_checksum = None;
+                let mut current_checksum_opt = None;
 
                 while state.load(Ordering::SeqCst) == 1 {
                     if let Ok(checksum) = FileWatcher::compute_checksum(&path) {
-                        // `None.unwrap_or_else(|| !checksum) != checksum` evaluates to `true`
-                        if current_checksum.unwrap_or_else(|| !checksum) != checksum {
+                        let metafile_has_changed = current_checksum_opt
+                            .map(|current_checksum| current_checksum != checksum)
+                            .unwrap_or(true);
+                        if metafile_has_changed {
                             info!("Meta file {:?} was modified", path);
-                            current_checksum = Some(checksum);
+                            current_checksum_opt = Some(checksum);
                             futures::executor::block_on(callbacks.broadcast());
                         }
                     }
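The watcher previously relied on `None.unwrap_or_else(|| !checksum) != checksum` evaluating to `true` to force a broadcast on the first iteration. The rewrite states the intent directly: no recorded checksum counts as a change. A minimal sketch of the same logic, assuming u64 checksums:

    fn has_changed(current: Option<u64>, new_checksum: u64) -> bool {
        // `None` means no checksum has been recorded yet,
        // which is treated as a change.
        current.map(|c| c != new_checksum).unwrap_or(true)
    }

    fn main() {
        assert!(has_changed(None, 42)); // first observation always "changes"
        assert!(has_changed(Some(1), 42));
        assert!(!has_changed(Some(42), 42));
    }

`current != Some(new_checksum)` would also work; the map/unwrap_or form keeps the comparison explicit.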
@@ -76,8 +76,7 @@ impl FieldNormsWriter {
         if let Some(fieldnorm_buffer) = self
             .fieldnorms_buffers
             .get_mut(field.field_id() as usize)
-            .map(Option::as_mut)
-            .flatten()
+            .and_then(Option::as_mut)
         {
             match fieldnorm_buffer.len().cmp(&(doc as usize)) {
                 Ordering::Less => {
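This is `clippy::map_flatten` on `Option`: `.map(f).flatten()` builds an `Option<Option<_>>` only to collapse it, which is exactly `.and_then(f)`. A minimal standalone sketch:

    fn main() {
        let mut slots: Vec<Option<String>> = vec![Some("a".to_string()), None];

        // `get_mut` yields Option<&mut Option<String>>; `and_then(Option::as_mut)`
        // collapses it to Option<&mut String> in one step.
        if let Some(s) = slots.get_mut(0).and_then(Option::as_mut) {
            s.push('!');
        }
        assert_eq!(slots[0].as_deref(), Some("a!"));
    }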
@@ -99,17 +98,13 @@ impl FieldNormsWriter {
         mut fieldnorms_serializer: FieldNormsSerializer,
         doc_id_map: Option<&DocIdMapping>,
     ) -> io::Result<()> {
-        for (field, fieldnorms_buffer) in self
-            .fieldnorms_buffers
-            .iter()
-            .enumerate()
-            .map(|(field_id, fieldnorms_buffer_opt)| {
+        for (field, fieldnorms_buffer) in self.fieldnorms_buffers.iter().enumerate().filter_map(
+            |(field_id, fieldnorms_buffer_opt)| {
                 fieldnorms_buffer_opt.as_ref().map(|fieldnorms_buffer| {
                     (Field::from_field_id(field_id as u32), fieldnorms_buffer)
                 })
-            })
-            .flatten()
-        {
+            },
+        ) {
             if let Some(doc_id_map) = doc_id_map {
                 let remapped_fieldnorm_buffer = doc_id_map.remap(fieldnorms_buffer);
                 fieldnorms_serializer.serialize_field(field, &remapped_fieldnorm_buffer)?;
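The same lint on iterators: mapping each element to an `Option` and then flattening is `.filter_map(...)`. A minimal sketch of keeping only the occupied buffers together with their index, mirroring the shape of the loop above with hypothetical data:

    fn main() {
        let buffers: Vec<Option<Vec<u8>>> = vec![Some(vec![1, 2]), None, Some(vec![3])];

        let occupied: Vec<(usize, &Vec<u8>)> = buffers
            .iter()
            .enumerate()
            .filter_map(|(id, buf_opt)| buf_opt.as_ref().map(|buf| (id, buf)))
            .collect();

        // The `None` slot at index 1 is dropped.
        assert_eq!(occupied.len(), 2);
        assert_eq!(occupied[0].0, 0);
        assert_eq!(occupied[1].0, 2);
    }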
@@ -340,14 +340,13 @@ impl IndexMerger {
         fast_field_serializer: &mut CompositeFastFieldSerializer,
         doc_id_mapping: &SegmentDocIdMapping,
     ) -> crate::Result<()> {
-        let (min_value, max_value) = self.readers.iter().map(|reader|{
+        let (min_value, max_value) = self.readers.iter().filter_map(|reader|{
             let u64_reader: DynamicFastFieldReader<u64> = reader
                 .fast_fields()
                 .typed_fast_field_reader(field)
                 .expect("Failed to find a reader for single fast field. This is a tantivy bug and it should never happen.");
             compute_min_max_val(&u64_reader, reader)
         })
-        .flatten()
         .reduce(|a, b| {
             (a.0.min(b.0), a.1.max(b.1))
         }).expect("Unexpected error, empty readers in IndexMerger");
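Beyond the `filter_map` fix, the hunk shows a compact way to fold per-segment `(min, max)` pairs into global bounds with `Iterator::reduce`. A minimal sketch, with hypothetical per-segment results standing in for `compute_min_max_val` (which yields `None` for an empty segment):

    fn main() {
        let per_segment: Vec<Option<(u64, u64)>> = vec![Some((3, 9)), None, Some((1, 7))];

        let (min_value, max_value) = per_segment
            .iter()
            .filter_map(|min_max| *min_max) // skip empty segments
            .reduce(|a, b| (a.0.min(b.0), a.1.max(b.1)))
            .expect("at least one non-empty segment");

        assert_eq!((min_value, max_value), (1, 9));
    }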
@@ -657,12 +656,11 @@ impl IndexMerger {
             self.readers
                 .iter()
                 .enumerate()
-                .map(|(reader_ordinal, reader)| {
+                .flat_map(|(reader_ordinal, reader)| {
                     reader
                         .doc_ids_alive()
                         .map(move |doc_id| (doc_id, reader_ordinal as SegmentOrdinal))
-                })
-                .flatten(),
+                }),
         );
         Ok(SegmentDocIdMapping::new(mapping, true))
     }
@@ -760,24 +758,18 @@ impl IndexMerger {
             fast_field_readers: &ff_readers,
             offsets,
         };
-        let iter1 = doc_id_mapping
-            .iter()
-            .map(|(doc_id, reader_ordinal)| {
-                let ff_reader = &ff_readers[*reader_ordinal as usize];
-                let mut vals = vec![];
-                ff_reader.get_vals(*doc_id, &mut vals);
-                vals.into_iter()
-            })
-            .flatten();
-        let iter2 = doc_id_mapping
-            .iter()
-            .map(|(doc_id, reader_ordinal)| {
-                let ff_reader = &ff_readers[*reader_ordinal as usize];
-                let mut vals = vec![];
-                ff_reader.get_vals(*doc_id, &mut vals);
-                vals.into_iter()
-            })
-            .flatten();
+        let iter1 = doc_id_mapping.iter().flat_map(|(doc_id, reader_ordinal)| {
+            let ff_reader = &ff_readers[*reader_ordinal as usize];
+            let mut vals = vec![];
+            ff_reader.get_vals(*doc_id, &mut vals);
+            vals.into_iter()
+        });
+        let iter2 = doc_id_mapping.iter().flat_map(|(doc_id, reader_ordinal)| {
+            let ff_reader = &ff_readers[*reader_ordinal as usize];
+            let mut vals = vec![];
+            ff_reader.get_vals(*doc_id, &mut vals);
+            vals.into_iter()
+        });
         fast_field_serializer.create_auto_detect_u64_fast_field_with_idx(
             field,
             stats,
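When the closure itself returns an iterator, `.map(...).flatten()` is `.flat_map(...)`; both hunks above are that rewrite, applied once per duplicated iterator. A minimal sketch with hypothetical per-document value lists:

    fn main() {
        let per_doc_vals: Vec<Vec<u64>> = vec![vec![1, 2], vec![], vec![3]];

        let flattened: Vec<u64> = per_doc_vals
            .iter()
            .flat_map(|vals| vals.iter().copied())
            .collect();

        assert_eq!(flattened, vec![1, 2, 3]);
    }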
@@ -11,6 +11,7 @@
 #![doc(test(attr(allow(unused_variables), deny(warnings))))]
 #![warn(missing_docs)]
 #![allow(clippy::len_without_is_empty)]
+#![allow(clippy::return_self_not_must_use)]
 
 //! # `tantivy`
 //!
@@ -186,7 +186,6 @@ mod tests {
     use super::*;
     use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
 
-    #[test]
     #[test]
     fn test_stack() {
         let mut heap = MemoryArena::new();
@@ -61,6 +61,7 @@ impl IndexReaderBuilder {
     /// Building the reader is a non-trivial operation that requires
     /// to open different segment readers. It may take hundreds of milliseconds
     /// of time and it may return an error.
+    #[allow(clippy::needless_late_init)]
     pub fn try_into(self) -> crate::Result<IndexReader> {
         let inner_reader = InnerIndexReader {
             index: self.index,
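Here the lint is suppressed rather than fixed: `clippy::needless_late_init` flags a binding declared on one line and assigned later when a single initializing `let` would do. A hedged illustration of the pattern the lint targets (not the body of `try_into` itself):

    fn main() {
        let ready = true;

        let label; // clippy: unneeded late initialization
        if ready {
            label = "ready";
        } else {
            label = "waiting";
        }

        // The suggested form initializes at the declaration:
        let label2 = if ready { "ready" } else { "waiting" };

        assert_eq!(label, label2);
    }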
@@ -131,18 +131,17 @@ impl Document {
         let mut field_values: Vec<&FieldValue> = self.field_values().iter().collect();
         field_values.sort_by_key(|field_value| field_value.field());
 
-        let mut grouped_field_values = vec![];
-
-        let mut current_field;
-        let mut current_group;
-
         let mut field_values_it = field_values.into_iter();
-        if let Some(field_value) = field_values_it.next() {
-            current_field = field_value.field();
-            current_group = vec![field_value]
-        } else {
-            return grouped_field_values;
-        }
+
+        let first_field_value = if let Some(first_field_value) = field_values_it.next() {
+            first_field_value
+        } else {
+            return Vec::new();
+        };
+
+        let mut grouped_field_values = vec![];
+        let mut current_field = first_field_value.field();
+        let mut current_group = vec![first_field_value];
 
         for field_value in field_values_it {
             if field_value.field() == current_field {
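This rewrite is the fix that the previous hunk's `#[allow]` sidesteps: instead of declaring `current_field` and `current_group` and assigning them inside an `if let`, the first element is pulled out of the iterator up front, the empty case returns early, and every binding is initialized at its declaration. A standalone sketch of the same grouping pattern over sorted key/value pairs (hypothetical names):

    fn group_by_key(pairs: Vec<(u32, &str)>) -> Vec<(u32, Vec<&str>)> {
        let mut it = pairs.into_iter();

        // Pull out the first element; an empty input ends the function early.
        let (first_key, first_val) = match it.next() {
            Some(first) => first,
            None => return Vec::new(),
        };

        let mut grouped = vec![];
        let mut current_key = first_key;
        let mut current_group = vec![first_val];

        for (key, val) in it {
            if key == current_key {
                current_group.push(val);
            } else {
                // Key changed: flush the finished group and start a new one.
                grouped.push((current_key, std::mem::replace(&mut current_group, vec![val])));
                current_key = key;
            }
        }
        grouped.push((current_key, current_group));
        grouped
    }

    fn main() {
        let grouped = group_by_key(vec![(1, "a"), (1, "b"), (2, "c")]);
        assert_eq!(grouped, vec![(1, vec!["a", "b"]), (2, vec!["c"])]);
    }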
@@ -234,12 +234,12 @@ impl TermInfoStoreWriter {
         };
 
         term_info_block_meta.serialize(&mut self.buffer_block_metas)?;
-        for term_info in self.term_infos[1..].iter().cloned() {
+        for term_info in &self.term_infos[1..] {
             bitpack_serialize(
                 &mut self.buffer_term_infos,
                 &mut bit_packer,
                 &term_info_block_meta,
-                &term_info,
+                term_info,
             )?;
         }
 
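Iterating `&self.term_infos[1..]` borrows each element, where `.iter().cloned()` deep-copied every `TermInfo` only to pass a reference onward. A minimal sketch with a stand-in struct (not tantivy's real `TermInfo`):

    #[derive(Clone)]
    struct TermInfo {
        doc_freq: u32,
    }

    fn serialize(out: &mut Vec<u32>, term_info: &TermInfo) {
        out.push(term_info.doc_freq);
    }

    fn main() {
        let term_infos = vec![TermInfo { doc_freq: 2 }, TermInfo { doc_freq: 5 }];
        let mut out = Vec::new();

        // `&term_infos[1..]` yields `&TermInfo` directly; no clone needed.
        for term_info in &term_infos[1..] {
            serialize(&mut out, term_info);
        }
        assert_eq!(out, vec![5]);
    }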
@@ -29,7 +29,7 @@ impl<'a> SimpleTokenStream<'a> {
             .filter(|&(_, ref c)| !c.is_alphanumeric())
             .map(|(offset, _)| offset)
             .next()
-            .unwrap_or_else(|| self.text.len())
+            .unwrap_or(self.text.len())
     }
 }
@@ -29,7 +29,7 @@ impl<'a> WhitespaceTokenStream<'a> {
             .filter(|&(_, ref c)| c.is_ascii_whitespace())
             .map(|(offset, _)| offset)
             .next()
-            .unwrap_or_else(|| self.text.len())
+            .unwrap_or(self.text.len())
     }
 }
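Both tokenizer hunks are `clippy::unnecessary_lazy_evaluations`: when the fallback is an already-available value such as `self.text.len()`, the closure in `unwrap_or_else` buys nothing, so the eager `unwrap_or` is simpler. A simplified sketch of the token-boundary search:

    fn token_end(text: &str) -> usize {
        text.char_indices()
            .filter(|&(_, c)| !c.is_alphanumeric())
            .map(|(offset, _)| offset)
            .next()
            // the fallback is a ready value, so eager `unwrap_or` suffices
            .unwrap_or(text.len())
    }

    fn main() {
        assert_eq!(token_end("hello world"), 5);
        assert_eq!(token_end("hello"), 5); // no boundary found: end of text
    }

The lazy form stays the right choice when computing the default is expensive or has side effects.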