mirror of
https://github.com/quickwit-oss/tantivy.git
synced 2026-01-10 11:02:55 +00:00
Cargo clippy
@@ -51,7 +51,7 @@ impl TinySet {
     /// within `[0; 64[`
     #[inline(always)]
     pub fn singleton(el: u32) -> TinySet {
-        TinySet(1u64 << (el as u64))
+        TinySet(1u64 << u64::from(el))
     }
 
     /// Insert a new element within [0..64[

@@ -109,7 +109,7 @@ impl TinySet {
     ///
     /// The limit is assumed to be strictly lower than 64.
    pub fn range_lower(upper_bound: u32) -> TinySet {
-        TinySet((1u64 << ((upper_bound % 64u32) as u64)) - 1u64)
+        TinySet((1u64 << u64::from(upper_bound % 64u32)) - 1u64)
    }
 
     /// Returns a `TinySet` that contains all values greater

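Note: both TinySet hunks are instances of clippy's `cast_lossless` lint. `u64::from(x)` expresses the same widening as `x as u64`, but only compiles while the conversion is actually lossless. A minimal sketch (function name is illustrative, not tantivy's):

    // Widening u32 -> u64 can never truncate, so `From` is implemented
    // and preferred over an `as` cast, which would also silently accept
    // a narrowing conversion.
    fn singleton_bit(el: u32) -> u64 {
        1u64 << u64::from(el)
    }

    fn main() {
        assert_eq!(singleton_bit(3), 0b1000);
    }
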
@@ -37,7 +37,7 @@ impl Searcher {
         self.segment_readers
             .iter()
             .map(|segment_reader| segment_reader.num_docs())
-            .fold(0u32, |acc, val| acc + val)
+            .sum::<u32>()
     }
 
     /// Return the overall number of documents containing

@@ -46,7 +46,7 @@ impl Searcher {
         self.segment_readers
             .iter()
             .map(|segment_reader| segment_reader.inverted_index(term.field()).doc_freq(term))
-            .fold(0u32, |acc, val| acc + val)
+            .sum::<u32>()
     }
 
     /// Return the list of segment readers

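Note: the two Searcher hunks replace a hand-rolled `fold` with `Iterator::sum`, as clippy's `unnecessary_fold` lint suggests. A standalone sketch with made-up per-segment counts:

    // `.fold(0u32, |acc, val| acc + val)` and `.sum::<u32>()` compute
    // the same total; `sum` states the intent, and the turbofish pins
    // the accumulator type.
    fn total_docs(per_segment: &[u32]) -> u32 {
        per_segment.iter().copied().sum::<u32>()
    }

    fn main() {
        assert_eq!(total_docs(&[10, 20, 12]), 42);
    }
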
@@ -12,6 +12,7 @@
 #![doc(test(attr(allow(unused_variables), deny(warnings))))]
 #![allow(unknown_lints)]
 #![allow(new_without_default)]
+#![allow(decimal_literal_representation)]
 #![warn(missing_docs)]
 
 //! # `tantivy`

@@ -27,6 +27,7 @@ pub use common::HasLen;
 
 pub(crate) type UnorderedTermId = usize;
 
+#[allow(enum_variant_names)]
 pub(crate) enum FreqReadingOption {
     NoFreq,
     SkipFreq,

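Note: `enum_variant_names` fires because every variant repeats the `Freq` suffix; since the suffix is meaningful here, the commit opts out locally rather than renaming. The bare lint name (no `clippy::` prefix, as was usual at the time) is also why lib.rs carries `#![allow(unknown_lints)]`: plain rustc would otherwise reject it. A sketch of the pattern:

    #[allow(enum_variant_names)] // on current toolchains: clippy::enum_variant_names
    pub(crate) enum FreqReadingOption {
        NoFreq,
        SkipFreq,
        ReadFreq, // assumed third variant; the hunk truncates before it
    }
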
@@ -74,13 +74,11 @@ impl DocSet for BitSetDocSet {
                 self.cursor_tinybitset = self.cursor_tinybitset.intersect(greater_filter);
                 if !self.advance() {
                     SkipResult::End
+                } else if self.doc() == target {
+                    SkipResult::Reached
                 } else {
-                    if self.doc() == target {
-                        SkipResult::Reached
-                    } else {
-                        debug_assert!(self.doc() > target);
-                        SkipResult::OverStep
-                    }
+                    debug_assert!(self.doc() > target);
+                    SkipResult::OverStep
                 }
             }
             Ordering::Equal => loop {

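Note: this hunk flattens `else { if … else … }` into `else if …`, the shape clippy's `collapsible_if` family warns about; control flow is unchanged, one level of nesting disappears. A self-contained sketch:

    // `else { if c { a } else { b } }` is exactly `else if c { a } else { b }`.
    fn classify(advanced: bool, at_target: bool) -> &'static str {
        if !advanced {
            "end"
        } else if at_target {
            "reached"
        } else {
            "overstep"
        }
    }

    fn main() {
        assert_eq!(classify(true, false), "overstep");
    }
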
@@ -21,18 +21,14 @@ where
     if scorers.len() == 1 {
         scorers.into_iter().next().unwrap() //< we checked the size beforehands
     } else {
-        if scorers.iter().all(|scorer| {
+        let is_all_term_queries = scorers.iter().all(|scorer| {
             let scorer_ref: &Scorer = scorer.borrow();
             Downcast::<TermScorer>::is_type(scorer_ref)
-        }) {
+        });
+        if is_all_term_queries {
             let scorers: Vec<TermScorer> = scorers
                 .into_iter()
-                .map(|scorer| {
-                    *Downcast::<TermScorer>::downcast(scorer).expect(
-                        "downcasting should not have failed, we\
-                         checked in advance that the type were correct.",
-                    )
-                })
+                .map(|scorer| *Downcast::<TermScorer>::downcast(scorer).unwrap())
                 .collect();
             let scorer: Box<Scorer> = box Union::<TermScorer, TScoreCombiner>::from(scorers);
             scorer

@@ -61,7 +57,7 @@ impl BooleanWeight {
         reader: &SegmentReader,
     ) -> Result<Box<Scorer>> {
         let mut per_occur_scorers: HashMap<Occur, Vec<Box<Scorer>>> = HashMap::new();
-        for &(ref occur, ref subweight) in self.weights.iter() {
+        for &(ref occur, ref subweight) in &self.weights {
             let sub_scorer: Box<Scorer> = subweight.scorer(reader)?;
             per_occur_scorers
                 .entry(*occur)

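Note: iterating `&self.weights` instead of `self.weights.iter()` is clippy's `explicit_iter_loop`: a `for` loop already goes through `IntoIterator`, and a borrowed collection iterates by reference. Sketch with a stand-in list of pairs:

    // `for x in &v` desugars through IntoIterator and yields `&T`,
    // so the explicit `.iter()` adds nothing.
    fn sum_pairs(pairs: &[(u32, u32)]) -> u32 {
        let mut total = 0;
        for &(a, b) in pairs {
            total += a + b;
        }
        total
    }

    fn main() {
        assert_eq!(sum_pairs(&[(1, 2), (3, 4)]), 10);
    }
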
@@ -82,18 +78,14 @@ impl BooleanWeight {
     if scorers.len() == 1 {
         scorers.into_iter().next().unwrap()
     } else {
-        if scorers.iter().all(|scorer| {
+        let is_all_term_queries = scorers.iter().all(|scorer| {
             let scorer_ref: &Scorer = scorer.borrow();
             Downcast::<TermScorer>::is_type(scorer_ref)
-        }) {
+        });
+        if is_all_term_queries {
             let scorers: Vec<TermScorer> = scorers
                 .into_iter()
-                .map(|scorer| {
-                    *Downcast::<TermScorer>::downcast(scorer).expect(
-                        "downcasting should not have failed, we\
-                         checked in advance that the type were correct.",
-                    )
-                })
+                .map(|scorer| *Downcast::<TermScorer>::downcast(scorer).unwrap())
                 .collect();
             let scorer: Box<Scorer> = box Intersection::from(scorers);
             scorer

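Note: both BooleanWeight hunks hoist a multi-line closure out of the `if` condition into a named binding, the pattern behind clippy's block-in-if-condition lints (later renamed `blocks_in_if_conditions`). A sketch of the same move:

    // Naming the predicate keeps the `if` condition to one line and
    // gives the intermediate result a readable name.
    fn describe(values: &[i64]) -> &'static str {
        let is_all_even = values.iter().all(|v| {
            let halved = v / 2;
            halved * 2 == *v
        });
        if is_all_even {
            "all even"
        } else {
            "mixed"
        }
    }

    fn main() {
        assert_eq!(describe(&[2, 4, 6]), "all even");
        assert_eq!(describe(&[2, 3]), "mixed");
    }
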
@@ -121,19 +121,19 @@ impl DocSet for PhraseScorer {
 
     fn skip_next(&mut self, target: DocId) -> SkipResult {
         if self.intersection_docset.skip_next(target) == SkipResult::End {
-            SkipResult::End
-        } else if self.phrase_match() {
+            return SkipResult::End;
+        }
+        if self.phrase_match() {
             if self.doc() == target {
-                SkipResult::Reached
+                return SkipResult::Reached;
             } else {
-                SkipResult::OverStep
+                return SkipResult::OverStep;
             }
             }
+        }
+        if self.advance() {
+            SkipResult::OverStep
         } else {
-            if self.advance() {
-                SkipResult::OverStep
-            } else {
-                SkipResult::End
-            }
+            SkipResult::End
         }
     }

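Note: the PhraseScorer rewrite keeps the same three outcomes but dispatches the terminal cases with early `return`s, so the final `if self.advance()` no longer sits inside an `else` arm. A compressed sketch of the shape:

    // Handle terminal cases first; the common path falls through
    // without extra nesting.
    fn skip(hit_end: bool, matched: bool, at_target: bool) -> &'static str {
        if hit_end {
            return "end";
        }
        if matched {
            return if at_target { "reached" } else { "overstep" };
        }
        "advanced"
    }
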
@@ -150,15 +150,15 @@ impl RangeWeight {
     {
         use std::collections::Bound::*;
         let mut term_stream_builder = term_dict.range();
-        term_stream_builder = match &self.left_bound {
-            &Included(ref term_val) => term_stream_builder.ge(term_val),
-            &Excluded(ref term_val) => term_stream_builder.gt(term_val),
-            &Unbounded => term_stream_builder,
+        term_stream_builder = match self.left_bound {
+            Included(ref term_val) => term_stream_builder.ge(term_val),
+            Excluded(ref term_val) => term_stream_builder.gt(term_val),
+            Unbounded => term_stream_builder,
         };
-        term_stream_builder = match &self.right_bound {
-            &Included(ref term_val) => term_stream_builder.le(term_val),
-            &Excluded(ref term_val) => term_stream_builder.lt(term_val),
-            &Unbounded => term_stream_builder,
+        term_stream_builder = match self.right_bound {
+            Included(ref term_val) => term_stream_builder.le(term_val),
+            Excluded(ref term_val) => term_stream_builder.lt(term_val),
+            Unbounded => term_stream_builder,
         };
         term_stream_builder.into_stream()
     }

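Note: matching on the field itself with `Included(ref v)` patterns, rather than on `&self.left_bound` with `&Included(ref v)` arms, is clippy's `match_ref_pats`: when every arm pattern starts with `&`, drop it once on the scrutinee. A sketch with `std::ops::Bound` (the code above uses the older `std::collections::Bound` re-export):

    use std::ops::Bound;

    fn lower_limit(bound: &Bound<u32>) -> Option<u32> {
        // One dereference on the scrutinee replaces an `&` in every arm.
        match *bound {
            Bound::Included(v) => Some(v),
            Bound::Excluded(v) => Some(v + 1),
            Bound::Unbounded => None,
        }
    }

    fn main() {
        assert_eq!(lower_limit(&Bound::Excluded(3)), Some(4));
    }
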
@@ -50,11 +50,7 @@ where
 {
     fn advance(&mut self) -> bool {
         self.score_cache = None;
-        if self.req_scorer.advance() {
-            true
-        } else {
-            false
-        }
+        self.req_scorer.advance()
     }
 
     fn doc(&self) -> DocId {

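Note: `if cond { true } else { false }` is clippy's `needless_bool`: the condition already is the answer. With a stand-in scorer type:

    // Returning the condition directly replaces the four-line branch.
    struct Scorer { pos: usize, len: usize }

    impl Scorer {
        fn advance(&mut self) -> bool {
            self.pos += 1;
            self.pos < self.len
        }
    }

    fn main() {
        let mut s = Scorer { pos: 0, len: 2 };
        assert!(s.advance());
        assert!(!s.advance());
    }
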
@@ -40,7 +40,7 @@ impl StoreReader {
 
     fn block_offset(&self, doc_id: DocId) -> (DocId, u64) {
         self.block_index()
-            .seek(doc_id as u64 + 1)
+            .seek(u64::from(doc_id) + 1)
             .map(|(doc, offset)| (doc as DocId, offset))
             .unwrap_or((0u32, 0u64))
     }

@@ -67,7 +67,7 @@ impl StoreWriter {
         if !self.current_block.is_empty() {
             self.write_and_compress_block()?;
             self.offset_index_writer
-                .insert(self.doc as u64, &(self.writer.written_bytes() as u64))?;
+                .insert(u64::from(self.doc), &(self.writer.written_bytes() as u64))?;
         }
         let doc_offset = self.doc;
         let start_offset = self.writer.written_bytes() as u64;

@@ -80,7 +80,7 @@ impl StoreWriter {
         for (next_doc_id, block_addr) in store_reader.block_index() {
             self.doc = doc_offset + next_doc_id as u32;
             self.offset_index_writer
-                .insert(self.doc as u64, &(start_offset + block_addr))?;
+                .insert(u64::from(self.doc), &(start_offset + block_addr))?;
         }
         Ok(())
     }

@@ -96,7 +96,7 @@ impl StoreWriter {
         (self.intermediary_buffer.len() as u32).serialize(&mut self.writer)?;
         self.writer.write_all(&self.intermediary_buffer)?;
         self.offset_index_writer
-            .insert(self.doc as u64, &(self.writer.written_bytes() as u64))?;
+            .insert(u64::from(self.doc), &(self.writer.written_bytes() as u64))?;
         self.current_block.clear();
         Ok(())
     }

@@ -1,3 +1,4 @@
 use std::io;
+use std::cmp;
 use std::io::{Read, Write};
 

@@ -94,12 +95,12 @@ fn extract_bits(data: &[u8], addr_bits: usize, num_bits: u8) -> u64 {
     let bit_shift = (addr_bits % 8) as u64;
     let val_unshifted_unmasked: u64 = unsafe { *(data[addr_byte..].as_ptr() as *const u64) };
     let val_shifted_unmasked = val_unshifted_unmasked >> bit_shift;
-    let mask = (1u64 << (num_bits as u64)) - 1;
+    let mask = (1u64 << u64::from(num_bits)) - 1;
     val_shifted_unmasked & mask
 }
 
 impl TermInfoStore {
-    pub fn open(data: ReadOnlySource) -> TermInfoStore {
+    pub fn open(data: &ReadOnlySource) -> TermInfoStore {
         let buffer = data.as_slice();
         let len = Endianness::read_u64(&buffer[0..8]) as usize;
         let num_terms = Endianness::read_u64(&buffer[8..16]) as usize;

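Note: `TermInfoStore::open` now borrows its source; together with the two call-site hunks further down, this is the fix for clippy's `needless_pass_by_value`: an argument that is only read should be taken as `&T`. Sketch with a stand-in `Source` type:

    // Borrowing leaves ownership with the caller; the function only reads.
    struct Source(Vec<u8>);

    impl Source {
        fn as_slice(&self) -> &[u8] {
            &self.0
        }
    }

    fn open_len(data: &Source) -> usize {
        data.as_slice().len()
    }

    fn main() {
        let src = Source(vec![1, 2, 3]);
        assert_eq!(open_len(&src), 3);
        assert_eq!(src.0.len(), 3); // still usable after the call
    }
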
@@ -149,7 +150,7 @@ fn bitpack_serialize<W: Write>(
     term_info: &TermInfo,
 ) -> io::Result<()> {
     bit_packer.write(
-        term_info.doc_freq as u64,
+        u64::from(term_info.doc_freq),
         term_info_block_meta.doc_freq_nbits,
         write,
     )?;

@@ -163,7 +164,7 @@ fn bitpack_serialize<W: Write>(
         term_info_block_meta.positions_offset_nbits,
         write,
     )?;
-    bit_packer.write(term_info.positions_inner_offset as u64, 7, write)?;
+    bit_packer.write(u64::from(term_info.positions_inner_offset), 7, write)?;
     Ok(())
 }

@@ -197,7 +198,7 @@ impl TermInfoStoreWriter {
             max_positions_offset = cmp::max(max_positions_offset, term_info.positions_offset);
         }
 
-        let max_doc_freq_nbits: u8 = compute_num_bits(max_doc_freq as u64);
+        let max_doc_freq_nbits: u8 = compute_num_bits(u64::from(max_doc_freq));
         let max_postings_offset_nbits = compute_num_bits(max_postings_offset);
         let max_positions_offset_nbits = compute_num_bits(max_positions_offset);

@@ -321,7 +322,7 @@ mod tests {
         }
         let mut buffer = Vec::new();
         store_writer.serialize(&mut buffer).unwrap();
-        let term_info_store = TermInfoStore::open(ReadOnlySource::from(buffer));
+        let term_info_store = TermInfoStore::open(&ReadOnlySource::from(buffer));
         for i in 0..1000 {
             assert_eq!(term_info_store.get(i as u64), term_infos[i]);
         }

@@ -116,7 +116,7 @@ impl<'a> TermDictionary<'a> for TermDictionaryImpl {
         let fst_index = open_fst_index(fst_source);
         TermDictionaryImpl {
             fst_index,
-            term_info_store: TermInfoStore::open(values_source),
+            term_info_store: TermInfoStore::open(&values_source),
         }
     }