diff --git a/examples/simple_search.rs b/examples/simple_search.rs
index e90304131..d93b72c6f 100644
--- a/examples/simple_search.rs
+++ b/examples/simple_search.rs
@@ -185,7 +185,7 @@ fn run_example(index_path: &Path) -> tantivy::Result<()> {
     // Here, if the user does not specify which
     // field they want to search, tantivy will search
     // in both title and body.
-    let mut query_parser = QueryParser::for_index(index, vec![title, body]);
+    let mut query_parser = QueryParser::for_index(&index, vec![title, body]);

     // QueryParser may fail if the query is not in the right
     // format. For user facing applications, this can be a problem.
diff --git a/src/collector/facet_collector.rs b/src/collector/facet_collector.rs
index c9f921492..b158dbe70 100644
--- a/src/collector/facet_collector.rs
+++ b/src/collector/facet_collector.rs
@@ -108,7 +108,7 @@ mod tests {
         {
             // perform the query
             let mut facet_collectors = chain().push(&mut ffvf_i64).push(&mut ffvf_u64);
-            let mut query_parser = QueryParser::for_index(index, vec![text_field]);
+            let mut query_parser = QueryParser::for_index(&index, vec![text_field]);
             let query = query_parser.parse_query("text:text").unwrap();
             query.search(&searcher, &mut facet_collectors).unwrap();
         }
diff --git a/src/common/bitpacker.rs b/src/common/bitpacker.rs
index ab3d59d18..456328bbf 100644
--- a/src/common/bitpacker.rs
+++ b/src/common/bitpacker.rs
@@ -135,16 +135,16 @@ where
             let val_shifted = (val_unshifted_unmasked >> bit_shift) as u64;
             (val_shifted & mask)
         } else {
-            let val_unshifted_unmasked: u64;
-            if addr + 8 <= data.len() {
-                val_unshifted_unmasked = unsafe { *(data[addr..].as_ptr() as *const u64) };
-            } else {
-                let mut buffer = [0u8; 8];
-                for i in addr..data.len() {
-                    buffer[i - addr] += data[i];
-                }
-                val_unshifted_unmasked = unsafe { *(buffer[..].as_ptr() as *const u64) };
-            }
+            let val_unshifted_unmasked: u64 =
+                if addr + 8 <= data.len() {
+                    unsafe { *(data[addr..].as_ptr() as *const u64) }
+                } else {
+                    let mut buffer = [0u8; 8];
+                    for i in addr..data.len() {
+                        buffer[i - addr] += data[i];
+                    }
+                    unsafe { *(buffer[..].as_ptr() as *const u64) }
+                };
             let val_shifted = (val_unshifted_unmasked >> bit_shift) as u64;
             (val_shifted & mask)
         }
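The bitpacker hunk above only restructures the fallback path: when fewer than 8 bytes remain past `addr`, the bytes are copied into a zeroed buffer before the unaligned read, so the `*const u64` dereference never runs off the end of the slice. A safe sketch of the same idea (illustrative names; `u64::from_le_bytes` is newer than the toolchain this patch targets):

```rust
/// Read up to 8 bytes starting at `addr`, zero-padding past the end of
/// `data`, and decode them as a little-endian u64 — the safe equivalent
/// of the unsafe pointer cast in the hunk above.
fn read_u64_padded(data: &[u8], addr: usize) -> u64 {
    let mut buffer = [0u8; 8];
    let n = (data.len() - addr).min(8);
    buffer[..n].copy_from_slice(&data[addr..addr + n]);
    u64::from_le_bytes(buffer)
}
```

Note that `buffer[i - addr] += data[i]` in the patched code relies on the buffer being freshly zeroed; a plain assignment would say the same thing more directly.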
diff --git a/src/common/composite_file.rs b/src/common/composite_file.rs
index 4ab843d38..1c74c92b7 100644
--- a/src/common/composite_file.rs
+++ b/src/common/composite_file.rs
@@ -81,7 +81,7 @@ pub struct CompositeFile {
 impl CompositeFile {
     /// Opens a composite file stored in a given
     /// `ReadOnlySource`.
-    pub fn open(data: ReadOnlySource) -> io::Result<CompositeFile> {
+    pub fn open(data: &ReadOnlySource) -> io::Result<CompositeFile> {
         let end = data.len();
         let footer_len_data = data.slice_from(end - 4);
         let footer_len = u32::deserialize(&mut footer_len_data.as_slice())? as usize;
@@ -169,7 +169,7 @@ mod test {
         }
         {
             let r = directory.open_read(path).unwrap();
-            let composite_file = CompositeFile::open(r).unwrap();
+            let composite_file = CompositeFile::open(&r).unwrap();
             {
                 let file0 = composite_file.open_read(Field(0u32)).unwrap();
                 let mut file0_buf = file0.as_slice();
diff --git a/src/common/vint.rs b/src/common/vint.rs
index 07cdfa24c..c86849b1a 100644
--- a/src/common/vint.rs
+++ b/src/common/vint.rs
@@ -41,7 +41,7 @@ impl BinarySerializable for VInt {
         loop {
             match bytes.next() {
                 Some(Ok(b)) => {
-                    result += ((b % 128u8) as u64) << shift;
+                    result += u64::from(b % 128u8) << shift;
                     if b & 128u8 != 0u8 {
                         break;
                     }
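The `u64::from` change above is purely a lossless-cast cleanup, but the surrounding loop is worth spelling out: this VInt scheme stores 7 payload bits per byte and, unlike LEB128, a *set* high bit marks the final byte. A standalone sketch of the decoder (hypothetical helper, mirroring the loop above):

```rust
/// Decode one VInt: each byte contributes 7 low bits, least-significant
/// group first; a set high bit terminates the value.
/// Returns the value and the number of bytes consumed.
fn decode_vint(bytes: &[u8]) -> Option<(u64, usize)> {
    let mut result = 0u64;
    let mut shift = 0u32;
    for (i, &b) in bytes.iter().enumerate() {
        result += u64::from(b % 128u8) << shift;
        if b & 128u8 != 0u8 {
            return Some((result, i + 1));
        }
        shift += 7;
    }
    None // input ended before the terminating byte
}

fn main() {
    // 0x02 (continue) then 0x81 (terminate): 2 + (1 << 7) = 130.
    assert_eq!(decode_vint(&[0x02, 0x81]), Some((130, 2)));
}
```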
diff --git a/src/compression/mod.rs b/src/compression/mod.rs
index cd40e4f1a..16bc5854b 100644
--- a/src/compression/mod.rs
+++ b/src/compression/mod.rs
@@ -31,7 +31,7 @@ mod vint {
     pub(crate) use self::compression_vint_simd::*;
 }

-/// Returns the size in bytes of a compressed block, given num_bits.
+/// Returns the size in bytes of a compressed block, given `num_bits`.
 pub fn compressed_block_size(num_bits: u8) -> usize {
     1 + (num_bits as usize) * 16
 }
diff --git a/src/compression/stream.rs b/src/compression/stream.rs
index 9071d0fff..5593b0477 100644
--- a/src/compression/stream.rs
+++ b/src/compression/stream.rs
@@ -36,7 +36,7 @@ impl CompressedIntStream {
             if available > 0 {
                 let uncompressed_block = &self.block_decoder.output_array()
                     [self.inner_offset..];
-                &mut output[start..start + available].clone_from_slice(uncompressed_block);
+                output[start..][..available].clone_from_slice(uncompressed_block);
             }
             num_els -= available;
             start += available;
@@ -49,7 +49,7 @@ impl CompressedIntStream {
                 let uncompressed_block = &self.block_decoder.output_array()
                     [self.inner_offset..self.inner_offset + num_els];
-                &output[start..start + num_els].clone_from_slice(uncompressed_block);
+                output[start..][..num_els].clone_from_slice(uncompressed_block);
                 self.inner_offset += num_els;
                 break;
             }
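The doc-comment fix on `compressed_block_size` is also a chance to make the layout explicit: postings are packed in blocks of 128 integers, so `num_bits` bits per value comes to `128 * num_bits / 8 = 16 * num_bits` payload bytes, plus one leading byte that records `num_bits` itself (the 128-value block size is inferred here from the ×16 factor). In `stream.rs`, the `output[start..][..available]` re-slicing is equivalent to `output[start..start + available]` but drops the stray leading `&mut`/`&`, which clippy flags as a no-op borrow of an expression.

```rust
// Worked check of the block-size formula, assuming 128-integer blocks:
fn compressed_block_size(num_bits: u8) -> usize {
    1 + (num_bits as usize) * 16
}

fn main() {
    assert_eq!(compressed_block_size(7), 113);  // 112 payload bytes + 1 header byte
    assert_eq!(compressed_block_size(32), 513); // worst case: full 32-bit values
}
```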
diff --git a/src/core/pool.rs b/src/core/pool.rs
index 285b25072..6fffc7524 100644
--- a/src/core/pool.rs
+++ b/src/core/pool.rs
@@ -44,7 +44,7 @@ impl<T> Pool<T> {
     /// At the exit of this method,
     /// - freshest_generation has a value greater or equal than generation
     /// - freshest_generation has a value that has been advertised
-    /// - freshest_generation has
+    /// - freshest_generation has)
     fn advertise_generation(&self, generation: usize) {
         // not optimal at all but the easiest to read proof.
         loop {
@@ -71,7 +71,7 @@ impl<T> Pool<T> {
             if gen_item.generation >= generation {
                 return LeasedItem {
                     gen_item: Some(gen_item),
-                    recycle_queue: self.queue.clone(),
+                    recycle_queue: Arc::clone(&self.queue),
                 };
             } else {
                 // this searcher is obsolete,
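This is the first of many hunks in the patch that replace `.clone()` on a reference-counted pointer with the explicit `Arc::clone(&...)` form (clippy's `clone_on_ref_ptr` style). The two are identical in behavior; the explicit form documents at the call site that only a reference count is bumped and nothing is deep-copied:

```rust
use std::sync::Arc;

fn main() {
    let queue: Arc<Vec<u32>> = Arc::new(vec![1, 2, 3]);
    let a = queue.clone();      // method syntax: easy to misread as a deep copy
    let b = Arc::clone(&queue); // explicit: clearly just a refcount increment
    assert_eq!(Arc::strong_count(&queue), 3);
    assert!(Arc::ptr_eq(&a, &b));
}
```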
diff --git a/src/core/segment_reader.rs b/src/core/segment_reader.rs
index 8cd59f271..28b9e8ec3 100644
--- a/src/core/segment_reader.rs
+++ b/src/core/segment_reader.rs
@@ -123,17 +123,17 @@ impl SegmentReader {
     pub fn open(segment: Segment) -> Result<SegmentReader> {
         let termdict_source = segment.open_read(SegmentComponent::TERMS)?;
-        let termdict_composite = CompositeFile::open(termdict_source)?;
+        let termdict_composite = CompositeFile::open(&termdict_source)?;

         let store_source = segment.open_read(SegmentComponent::STORE)?;
         let store_reader = StoreReader::from_source(store_source);

         let postings_source = segment.open_read(SegmentComponent::POSTINGS)?;
-        let postings_composite = CompositeFile::open(postings_source)?;
+        let postings_composite = CompositeFile::open(&postings_source)?;

         let positions_composite = {
             if let Ok(source) = segment.open_read(SegmentComponent::POSITIONS) {
-                CompositeFile::open(source)?
+                CompositeFile::open(&source)?
             } else {
                 CompositeFile::empty()
             }
@@ -141,10 +141,10 @@ impl SegmentReader {

         let fast_fields_data = segment.open_read(SegmentComponent::FASTFIELDS)?;
-        let fast_fields_composite = CompositeFile::open(fast_fields_data)?;
+        let fast_fields_composite = CompositeFile::open(&fast_fields_data)?;

         let fieldnorms_data = segment.open_read(SegmentComponent::FIELDNORMS)?;
-        let fieldnorms_composite = CompositeFile::open(fieldnorms_data)?;
+        let fieldnorms_composite = CompositeFile::open(&fieldnorms_data)?;

         let delete_bitset = if segment.meta().has_deletes() {
@@ -183,7 +183,7 @@ impl SegmentReader {
             .expect("Lock poisoned. This should never happen")
             .get(&field)
         {
-            inv_idx_reader.clone();
+            Arc::clone(inv_idx_reader);
         }
         let termdict_source: ReadOnlySource = self.termdict_composite.open_read(field).expect(
@@ -213,7 +213,7 @@ impl SegmentReader {
             .expect(
                 "Field reader cache lock poisoned. This should never happen.",
             )
-            .insert(field, inv_idx_reader.clone());
+            .insert(field, Arc::clone(&inv_idx_reader));

         inv_idx_reader
     }
diff --git a/src/datastruct/stacker/hashmap.rs b/src/datastruct/stacker/hashmap.rs
index 6af8d6ee4..6067871e4 100644
--- a/src/datastruct/stacker/hashmap.rs
+++ b/src/datastruct/stacker/hashmap.rs
@@ -9,7 +9,7 @@ mod murmurhash2 {
     #[inline(always)]
     pub fn murmurhash2(key: &[u8]) -> u32 {
         let mut key_ptr: *const u32 = key.as_ptr() as *const u32;
-        let m: u32 = 0x5bd1e995;
+        let m: u32 = 0x5bd1_e995;
         let r = 24;
         let len = key.len() as u32;
@@ -30,18 +30,18 @@ mod murmurhash2 {
         let key_ptr_u8: *const u8 = key_ptr as *const u8;
         match remaining {
             3 => {
-                h ^= unsafe { *key_ptr_u8.wrapping_offset(2) as u32 } << 16;
-                h ^= unsafe { *key_ptr_u8.wrapping_offset(1) as u32 } << 8;
-                h ^= unsafe { *key_ptr_u8 as u32 };
+                h ^= unsafe { u32::from(*key_ptr_u8.wrapping_offset(2)) } << 16;
+                h ^= unsafe { u32::from(*key_ptr_u8.wrapping_offset(1)) } << 8;
+                h ^= unsafe { u32::from(*key_ptr_u8) };
                 h = h.wrapping_mul(m);
             }
             2 => {
-                h ^= unsafe { *key_ptr_u8.wrapping_offset(1) as u32 } << 8;
-                h ^= unsafe { *key_ptr_u8 as u32 };
+                h ^= unsafe { u32::from(*key_ptr_u8.wrapping_offset(1)) } << 8;
+                h ^= unsafe { u32::from(*key_ptr_u8) };
                 h = h.wrapping_mul(m);
             }
             1 => {
-                h ^= unsafe { *key_ptr_u8 as u32 };
+                h ^= unsafe { u32::from(*key_ptr_u8) };
                 h = h.wrapping_mul(m);
             }
             _ => {}
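The murmurhash2 hunk swaps `as u32` casts of the tail bytes for `u32::from`, which only compiles for lossless widening conversions (clippy's `cast_lossless`); the generated code is the same. A safe sketch of what that tail handling computes, assuming the same mixing constant:

```rust
/// Fold the trailing 1..=3 bytes into the hash state in little-endian
/// order, then apply one multiply round — equivalent to the
/// `match remaining` arms above, without raw pointers.
fn mix_tail(mut h: u32, tail: &[u8]) -> u32 {
    let m: u32 = 0x5bd1_e995;
    for (i, &b) in tail.iter().enumerate() {
        h ^= u32::from(b) << (8 * i);
    }
    if !tail.is_empty() {
        h = h.wrapping_mul(m);
    }
    h
}
```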
diff --git a/src/directory/managed_directory.rs b/src/directory/managed_directory.rs
index 8005c62b4..b65bf1df6 100644
--- a/src/directory/managed_directory.rs
+++ b/src/directory/managed_directory.rs
@@ -282,7 +282,7 @@ impl Clone for ManagedDirectory {
     fn clone(&self) -> ManagedDirectory {
         ManagedDirectory {
             directory: self.directory.box_clone(),
-            meta_informations: self.meta_informations.clone(),
+            meta_informations: Arc::clone(&self.meta_informations),
         }
     }
 }
diff --git a/src/directory/mmap_directory.rs b/src/directory/mmap_directory.rs
index d79ee3244..7644aaeed 100644
--- a/src/directory/mmap_directory.rs
+++ b/src/directory/mmap_directory.rs
@@ -108,7 +108,7 @@ impl MmapCache {
         }
     }

-    fn get_mmap(&mut self, full_path: PathBuf) -> Result<Option<Arc<Mmap>>, OpenReadError> {
+    fn get_mmap(&mut self, full_path: &PathBuf) -> Result<Option<Arc<Mmap>>, OpenReadError> {
         // if we exceed this limit, then we go through the weak
         // and remove those that are obsolete.
         if self.cache.len() > self.purge_weak_limit {
@@ -118,11 +118,11 @@ impl MmapCache {
             HashMapEntry::Occupied(mut occupied_entry) => {
                 if let Some(mmap_arc) = occupied_entry.get().upgrade() {
                     self.counters.hit += 1;
-                    Some(mmap_arc.clone())
+                    Some(Arc::clone(&mmap_arc))
                 } else {
                     // The entry exists but the weak ref has been destroyed.
                     self.counters.miss_weak += 1;
-                    if let Some(mmap_arc) = open_mmap(&full_path)? {
+                    if let Some(mmap_arc) = open_mmap(full_path)? {
                         occupied_entry.insert(Arc::downgrade(&mmap_arc));
                         Some(mmap_arc)
                     } else {
@@ -132,7 +132,7 @@ impl MmapCache {
             }
             HashMapEntry::Vacant(vacant_entry) => {
                 self.counters.miss_empty += 1;
-                if let Some(mmap_arc) = open_mmap(&full_path)? {
+                if let Some(mmap_arc) = open_mmap(full_path)? {
                     vacant_entry.insert(Arc::downgrade(&mmap_arc));
                     Some(mmap_arc)
                 } else {
@@ -169,7 +169,7 @@ impl MmapDirectory {
         let tempdir = TempDir::new("index")?;
         let tempdir_path = PathBuf::from(tempdir.path());
         let directory = MmapDirectory {
-            root_path: PathBuf::from(tempdir_path),
+            root_path: tempdir_path,
             mmap_cache: Arc::new(RwLock::new(MmapCache::default())),
             _temp_directory: Arc::new(Some(tempdir)),
         };
@@ -288,7 +288,7 @@ impl Directory for MmapDirectory {
         Ok(
             mmap_cache
-                .get_mmap(full_path)?
+                .get_mmap(&full_path)?
                 .map(MmapReadOnly::from)
                 .map(ReadOnlySource::Mmap)
                 .unwrap_or_else(|| ReadOnlySource::Anonymous(SharedVecSlice::empty())),
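`get_mmap` now borrows the path, but the interesting part of this hunk is the cache it lives in: the map stores `Weak` handles, so a mapping that no reader holds any more can be reclaimed, and a hit simply upgrades the `Weak` back to an `Arc`. A simplified, hypothetical version of the pattern:

```rust
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::{Arc, Weak};

/// Cache that never keeps an entry alive on its own: it remembers Weak
/// handles and re-creates the value once the last Arc has been dropped.
struct WeakCache<K, V> {
    cache: HashMap<K, Weak<V>>,
}

impl<K: Hash + Eq, V> WeakCache<K, V> {
    fn get_or_create<F: FnOnce() -> Arc<V>>(&mut self, key: K, create: F) -> Arc<V> {
        if let Some(strong) = self.cache.get(&key).and_then(Weak::upgrade) {
            return strong; // hit: the value is still alive somewhere
        }
        let strong = create(); // miss (or dead weak ref): rebuild it
        self.cache.insert(key, Arc::downgrade(&strong));
        strong
    }
}
```

The real `MmapCache` additionally counts hits and misses and purges dead `Weak` entries once the map grows past `purge_weak_limit`.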
diff --git a/src/directory/ram_directory.rs b/src/directory/ram_directory.rs
index 7d2a38375..f961c8638 100644
--- a/src/directory/ram_directory.rs
+++ b/src/directory/ram_directory.rs
@@ -110,8 +110,9 @@ impl InnerDirectory {
             readable_map
                 .get(path)
                 .ok_or_else(|| OpenReadError::FileDoesNotExist(PathBuf::from(path)))
+                .map(Arc::clone)
                 .map(|data| {
-                    ReadOnlySource::Anonymous(SharedVecSlice::new(data.clone()))
+                    ReadOnlySource::Anonymous(SharedVecSlice::new(data))
                 })
         })
     }
diff --git a/src/directory/read_only_source.rs b/src/directory/read_only_source.rs
index 9b1506217..a2b85a9b7 100644
--- a/src/directory/read_only_source.rs
+++ b/src/directory/read_only_source.rs
@@ -111,7 +111,7 @@ impl From<Vec<u8>> for ReadOnlySource {
     }
 }

-/// Acts as a owning cursor over the data backed up by a ReadOnlySource
+/// Acts as an owning cursor over the data backed up by a `ReadOnlySource`
 pub(crate) struct SourceRead {
     _data_owner: ReadOnlySource,
     cursor: &'static [u8],
diff --git a/src/directory/shared_vec_slice.rs b/src/directory/shared_vec_slice.rs
index 52bb5aa86..76bc7ad80 100644
--- a/src/directory/shared_vec_slice.rs
+++ b/src/directory/shared_vec_slice.rs
@@ -28,7 +28,7 @@ impl SharedVecSlice {
     pub fn slice(&self, from_offset: usize, to_offset: usize) -> SharedVecSlice {
         SharedVecSlice {
-            data: self.data.clone(),
+            data: Arc::clone(&self.data),
             start: self.start + from_offset,
             len: to_offset - from_offset,
         }
diff --git a/src/fastfield/mod.rs b/src/fastfield/mod.rs
index 175970eb2..6be9b1d64 100644
--- a/src/fastfield/mod.rs
+++ b/src/fastfield/mod.rs
@@ -98,7 +98,7 @@ mod tests {
             assert_eq!(source.len(), 35 as usize);
         }
         {
-            let composite_file = CompositeFile::open(source).unwrap();
+            let composite_file = CompositeFile::open(&source).unwrap();
             let field_source = composite_file.open_read(*FIELD).unwrap();
             let fast_field_reader: U64FastFieldReader = U64FastFieldReader::open(field_source);
             assert_eq!(fast_field_reader.get(0), 13u64);
@@ -132,7 +132,7 @@ mod tests {
             assert_eq!(source.len(), 60 as usize);
         }
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
             assert_eq!(fast_field_reader.get(0), 4u64);
@@ -168,7 +168,7 @@ mod tests {
             assert_eq!(source.len(), 33 as usize);
         }
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
             for doc in 0..10_000 {
@@ -203,7 +203,7 @@ mod tests {
             assert_eq!(source.len(), 80041 as usize);
         }
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
@@ -242,7 +242,7 @@ mod tests {
             assert_eq!(source.len(), 17708 as usize);
         }
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: I64FastFieldReader =
                 I64FastFieldReader::open(fast_fields_composite.open_read(i64_field).unwrap());
@@ -280,7 +280,7 @@ mod tests {
         let source = directory.open_read(&path).unwrap();
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: I64FastFieldReader =
                 I64FastFieldReader::open(fast_fields_composite.open_read(i64_field).unwrap());
             assert_eq!(fast_field_reader.get(0u32), 0i64);
@@ -313,7 +313,7 @@ mod tests {
         }
         let source = directory.open_read(&path).unwrap();
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
@@ -368,7 +368,7 @@ mod tests {
         }
         let source = directory.open_read(&path).unwrap();
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
@@ -401,7 +401,7 @@ mod tests {
         }
         let source = directory.open_read(&path).unwrap();
         {
-            let fast_fields_composite = CompositeFile::open(source).unwrap();
+            let fast_fields_composite = CompositeFile::open(&source).unwrap();
             let fast_field_reader: U64FastFieldReader =
                 U64FastFieldReader::open(fast_fields_composite.open_read(*FIELD).unwrap());
diff --git a/src/fastfield/reader.rs b/src/fastfield/reader.rs
index d7544b28c..e18c09b5d 100644
--- a/src/fastfield/reader.rs
+++ b/src/fastfield/reader.rs
@@ -154,7 +154,7 @@ impl From<Vec<u64>> for U64FastFieldReader {
         let source = directory.open_read(path).expect("Failed to open the file");
         let composite_file =
-            CompositeFile::open(source).expect("Failed to read the composite file");
+            CompositeFile::open(&source).expect("Failed to read the composite file");
         let field_source = composite_file.open_read(field).expect(
             "File component not found",
diff --git a/src/indexer/delete_queue.rs b/src/indexer/delete_queue.rs
index 3a940616e..88ad45f5e 100644
--- a/src/indexer/delete_queue.rs
+++ b/src/indexer/delete_queue.rs
@@ -138,7 +138,7 @@ impl NextBlock {
                 "Failed to acquire write lock in delete queue",
             );
             if let InnerNextBlock::Closed(ref block) = *next_read_lock {
-                return Some(block.clone());
+                return Some(Arc::clone(block));
             }
         }
         let next_block;
@@ -148,7 +148,7 @@ impl NextBlock {
         );
         match *next_write_lock {
             InnerNextBlock::Closed(ref block) => {
-                return Some(block.clone());
+                return Some(Arc::clone(block));
             }
             InnerNextBlock::Writer(ref writer) => {
                 match writer.flush() {
@@ -161,7 +161,7 @@ impl NextBlock {
                 }
             }
         }
-        *next_write_lock.deref_mut() = InnerNextBlock::Closed(next_block.clone());
+        *next_write_lock.deref_mut() = InnerNextBlock::Closed(Arc::clone(&next_block));
         Some(next_block)
     }
 }
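The delete-queue hunks are mechanical `Arc::clone` rewrites, but the function they sit in uses a pattern worth naming: double-checked locking with an `RwLock`. It checks under the cheap read lock first, and only if the block is still open takes the write lock and checks *again*, because another thread may have closed the block in the window between the two locks. A condensed sketch (hypothetical types, `Vec<u8>` standing in for a block):

```rust
use std::sync::{Arc, RwLock};

fn get_or_close<F: FnOnce() -> Vec<u8>>(
    slot: &RwLock<Option<Arc<Vec<u8>>>>,
    build: F,
) -> Arc<Vec<u8>> {
    // Fast path: many readers can check concurrently.
    if let Some(ref block) = *slot.read().expect("poisoned") {
        return Arc::clone(block);
    }
    // Slow path: re-check under the exclusive lock before building.
    let mut guard = slot.write().expect("poisoned");
    if let Some(ref block) = *guard {
        return Arc::clone(block); // another writer won the race
    }
    let block = Arc::new(build());
    *guard = Some(Arc::clone(&block));
    block
}
```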
diff --git a/src/indexer/segment_writer.rs b/src/indexer/segment_writer.rs
index 5a029a5b0..edc880d31 100644
--- a/src/indexer/segment_writer.rs
+++ b/src/indexer/segment_writer.rs
@@ -164,7 +164,7 @@ impl<'a> SegmentWriter<'a> {
                         0
                     };
                     self.fieldnorms_writer.get_field_writer(field).map(
-                        |field_norms_writer| field_norms_writer.add_val(num_tokens as u64),
+                        |field_norms_writer| field_norms_writer.add_val(u64::from(num_tokens)),
                     );
                 }
                 FieldType::U64(ref int_option) => {
@@ -174,7 +174,7 @@ impl<'a> SegmentWriter<'a> {
                             field_value.field(),
                             field_value.value().u64_value(),
                         );
-                        self.multifield_postings.suscribe(doc_id, &term);
+                        self.multifield_postings.subscribe(doc_id, &term);
                     }
                 }
             }
@@ -185,7 +185,7 @@ impl<'a> SegmentWriter<'a> {
                             field_value.field(),
                             field_value.value().i64_value(),
                         );
-                        self.multifield_postings.suscribe(doc_id, &term);
+                        self.multifield_postings.subscribe(doc_id, &term);
                     }
                 }
             }
diff --git a/src/lib.rs b/src/lib.rs
index faffade18..18d5cf00c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,6 +14,7 @@
 #![doc(test(attr(allow(unused_variables), deny(warnings))))]
 #![allow(unknown_lints)]
+#![allow(new_without_default)]
 #![warn(missing_docs)]
diff --git a/src/postings/docset.rs b/src/postings/docset.rs
index 8aa665f53..9d84ddfc1 100644
--- a/src/postings/docset.rs
+++ b/src/postings/docset.rs
@@ -76,7 +76,7 @@ pub trait DocSet {
                 return i;
             }
         }
-        return buffer.len();
+        buffer.len()
     }

     /// Returns the current document
diff --git a/src/postings/postings_writer.rs b/src/postings/postings_writer.rs
index 2ebe65519..b554cd00e 100644
--- a/src/postings/postings_writer.rs
+++ b/src/postings/postings_writer.rs
@@ -73,7 +73,7 @@ impl<'a> MultiFieldPostingsWriter<'a> {
         postings_writer.index_text(&mut self.term_index, doc, field, token_stream, self.heap)
     }

-    pub fn suscribe(&mut self, doc: DocId, term: &Term) {
+    pub fn subscribe(&mut self, doc: DocId, term: &Term) {
         let postings_writer = self.per_field_postings_writers[term.field().0 as usize].deref_mut();
         postings_writer.suscribe(&mut self.term_index, doc, 0u32, term, self.heap)
     }
@@ -155,7 +155,7 @@ pub trait PostingsWriter {
     ) -> io::Result<()>;

     /// Tokenize a text and suscribe all of its token.
-    fn index_text<'a>(
+    fn index_text(
         &mut self,
         term_index: &mut HashMap,
         doc_id: DocId,
diff --git a/src/postings/recorder.rs b/src/postings/recorder.rs
index 4aa2f33fa..684d43213 100644
--- a/src/postings/recorder.rs
+++ b/src/postings/recorder.rs
@@ -1,10 +1,10 @@
 use DocId;
-use std::io;
+use std::{self, io};
 use postings::FieldSerializer;
 use datastruct::stacker::{ExpUnrolledLinkedList, Heap, HeapAllocable};

 const EMPTY_ARRAY: [u32; 0] = [0u32; 0];
-const POSITION_END: u32 = 4294967295;
+const POSITION_END: u32 = std::u32::MAX;

 /// Recorder is in charge of recording relevant information about
 /// the presence of a term in a document.
diff --git a/src/postings/segment_postings.rs b/src/postings/segment_postings.rs
index e1dcccf45..7db7f6dd8 100644
--- a/src/postings/segment_postings.rs
+++ b/src/postings/segment_postings.rs
@@ -108,7 +108,7 @@ impl SegmentPostings {
     fn position_add_skip<F: FnOnce() -> usize>(&self, num_skips_fn: F) {
-        if let Some(ref position_computer) = self.position_computer.as_ref() {
+        if let Some(position_computer) = self.position_computer.as_ref() {
             let num_skips = num_skips_fn();
             unsafe {
                 (*position_computer.get()).add_skip(num_skips);
diff --git a/src/postings/serializer.rs b/src/postings/serializer.rs
index b9a42d809..877a6cf52 100644
--- a/src/postings/serializer.rs
+++ b/src/postings/serializer.rs
@@ -131,7 +131,7 @@ impl<'a> FieldSerializer<'a> {
         let (term_freq_enabled, position_enabled): (bool, bool) = match field_type {
             FieldType::Str(ref text_options) => {
-                if let Some(ref text_indexing_options) = text_options.get_indexing_options() {
+                if let Some(text_indexing_options) = text_options.get_indexing_options() {
                     let index_option = text_indexing_options.index_option();
                     (
                         index_option.is_termfreq_enabled(),
@@ -373,7 +373,7 @@ impl PositionSerializer {
             vals = &vals[len_to_completion..];
             buffer_len = self.buffer.len();
         }
-        self.buffer.extend_from_slice(&vals);
+        self.buffer.extend_from_slice(vals);
         Ok(())
     }
diff --git a/src/query/query_parser/query_parser.rs b/src/query/query_parser/query_parser.rs
index b939d2d3f..3135c4502 100644
--- a/src/query/query_parser/query_parser.rs
+++ b/src/query/query_parser/query_parser.rs
@@ -102,7 +102,7 @@ impl QueryParser {
     /// * an index
     /// * a set of default - fields used to search if no field is specifically defined
     ///   in the query.
-    pub fn for_index(index: Index, default_fields: Vec<Field>) -> QueryParser {
+    pub fn for_index(index: &Index, default_fields: Vec<Field>) -> QueryParser {
         QueryParser::new(index.schema(), default_fields, index.tokenizers().clone())
     }
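Borrowing the index in `for_index` is the API fix that motivates the two call-site changes at the top of this patch: building a parser no longer consumes the `Index`, so the caller can keep using it. Roughly, at a call site (schema and field setup elided; paths as of this version of the crate):

```rust
use tantivy::Index;
use tantivy::query::QueryParser;
use tantivy::schema::Field;

fn build_parser(index: &Index, title: Field, body: Field) {
    // With `index: &Index`, constructing the parser is non-consuming...
    let query_parser = QueryParser::for_index(index, vec![title, body]);
    let _query = query_parser.parse_query("sea whale").expect("invalid query");
    // ...so the same index can still be used afterwards.
    let _schema = index.schema();
}
```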
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
index 09a3fd2b2..177e496d3 100644
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -127,7 +127,7 @@ impl<'de> Deserialize<'de> for FieldEntry {
             Options,
         };

-        const FIELDS: &'static [&'static str] = &["name", "type", "options"];
+        const FIELDS: &[&str] = &["name", "type", "options"];

         struct FieldEntryVisitor;
diff --git a/src/schema/index_record_option.rs b/src/schema/index_record_option.rs
index e43063b7e..331530f63 100644
--- a/src/schema/index_record_option.rs
+++ b/src/schema/index_record_option.rs
@@ -6,11 +6,11 @@
 ///
 /// * describe in the schema the amount of information
 ///   that should be retained during indexing (See
-///   [TextFieldIndexing.html.set_index_option](
+///   [`TextFieldIndexing.html.set_index_option`](
 ///   ../schema/struct.TextFieldIndexing.html#method.set_index_option))
 /// * to request for a given
 ///   amount of information to be decoded as one goes through a posting list.
-///   (See [InvertedIndexReader.read_postings](
+///   (See [`InvertedIndexReader.read_postings`](
 ///   ../struct.InvertedIndexReader.html#method.read_postings))
 ///
 #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Serialize, Deserialize)]
diff --git a/src/tokenizer/japanese_tokenizer.rs b/src/tokenizer/japanese_tokenizer.rs
index 3dfb3ad5c..95e30bf2a 100644
--- a/src/tokenizer/japanese_tokenizer.rs
+++ b/src/tokenizer/japanese_tokenizer.rs
@@ -48,10 +48,10 @@ impl<'a> TokenStream for JapaneseTokenizerStream {
     fn advance(&mut self) -> bool {
         let new_cursor = match self.cursor {
             Cursor::HasNotStarted => {
-                if self.tokens.len() > 0 {
-                    Cursor::Cursor(0)
-                } else {
+                if self.tokens.is_empty() {
                     Cursor::Terminated
+                } else {
+                    Cursor::Cursor(0)
                 }
             }
             Cursor::Cursor(pos) => {
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index ac906488a..f4360d41b 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -78,7 +78,7 @@
 //! ```
 //!
 //! Once your tokenizer is defined, you need to
-//! register it with a name in your index's [TokenizerManager](./struct.TokenizerManager.html).
+//! register it with a name in your index's [`TokenizerManager`](./struct.TokenizerManager.html).
 //!
 //! ```
 //! # extern crate tantivy;
diff --git a/src/tokenizer/simple_tokenizer.rs b/src/tokenizer/simple_tokenizer.rs
index 8850c5f45..40d7583f9 100644
--- a/src/tokenizer/simple_tokenizer.rs
+++ b/src/tokenizer/simple_tokenizer.rs
@@ -32,7 +32,7 @@ impl<'a> SimpleTokenStream<'a> {
             .filter(|&(_, ref c)| !c.is_alphanumeric())
             .map(|(offset, _)| offset)
             .next()
-            .unwrap_or(self.text.len())
+            .unwrap_or_else(|| self.text.len())
     }
 }
diff --git a/src/tokenizer/tokenizer.rs b/src/tokenizer/tokenizer.rs
index 9302b5877..62bf21be7 100644
--- a/src/tokenizer/tokenizer.rs
+++ b/src/tokenizer/tokenizer.rs
@@ -110,7 +110,7 @@ where
     }

     fn token_stream_texts<'b>(&self, texts: &'b [&'b str]) -> Box<TokenStream + 'b> {
-        assert!(texts.len() > 0);
+        assert!(!texts.is_empty());
         if texts.len() == 1 {
             box self.0.token_stream(texts[0])
         } else {
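Last, the `unwrap_or(self.text.len())` → `unwrap_or_else(|| self.text.len())` change in `simple_tokenizer.rs` is clippy's `or_fun_call` lint: `unwrap_or` evaluates its argument even when the `Option` is `Some`, while `unwrap_or_else` defers the computation to the `None` case. With a cheap `len()` call the difference is cosmetic, but the lazy form is the consistent idiom. A standalone illustration (assumed example string):

```rust
fn main() {
    let text = "hello world";
    // Find where the current token ends: the first non-alphanumeric
    // character, or the end of the text if there is none.
    let token_end = text
        .char_indices()
        .find(|&(_, c)| !c.is_alphanumeric())
        .map(|(offset, _)| offset)
        .unwrap_or_else(|| text.len());
    assert_eq!(token_end, 5); // stops at the space
}
```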