diff --git a/common/src/writer.rs b/common/src/writer.rs
index 473896e9a..9b8b86908 100644
--- a/common/src/writer.rs
+++ b/common/src/writer.rs
@@ -65,9 +65,7 @@ pub struct AntiCallToken(());
 pub trait TerminatingWrite: Write + Send {
     /// Indicate that the writer will no longer be used. Internally call terminate_ref.
     fn terminate(mut self) -> io::Result<()>
-    where
-        Self: Sized,
-    {
+    where Self: Sized {
         self.terminate_ref(AntiCallToken(()))
     }
 
diff --git a/ownedbytes/src/lib.rs b/ownedbytes/src/lib.rs
index 7e3f10d47..b7e5dfd88 100644
--- a/ownedbytes/src/lib.rs
+++ b/ownedbytes/src/lib.rs
@@ -160,8 +160,7 @@ impl PartialEq for OwnedBytes {
 }
 
 impl<'a, T: ?Sized> PartialEq<&'a T> for OwnedBytes
-where
-    OwnedBytes: PartialEq<T>,
+where OwnedBytes: PartialEq<T>
 {
     fn eq(&self, other: &&'a T) -> bool {
         *self == **other
diff --git a/src/aggregation/agg_result.rs b/src/aggregation/agg_result.rs
index 5be6852cd..8bc9cf60c 100644
--- a/src/aggregation/agg_result.rs
+++ b/src/aggregation/agg_result.rs
@@ -57,8 +57,7 @@ impl AggregationResult {
         match self {
             AggregationResult::BucketResult(_bucket) => Err(TantivyError::InternalError(
                 "Tried to retrieve value from bucket aggregation. This is not supported and \
-                 should not happen during collection phase, but should be caught during \
-                 validation"
+                 should not happen during collection phase, but should be caught during validation"
                     .to_string(),
             )),
             AggregationResult::MetricResult(metric) => metric.get_value(agg_property),
diff --git a/src/aggregation/bucket/mod.rs b/src/aggregation/bucket/mod.rs
index bb0242f22..a47437952 100644
--- a/src/aggregation/bucket/mod.rs
+++ b/src/aggregation/bucket/mod.rs
@@ -94,9 +94,7 @@ pub struct CustomOrder {
 
 impl Serialize for CustomOrder {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         let map: HashMap<String, Order> =
             std::iter::once((self.target.to_string(), self.order)).collect();
         map.serialize(serializer)
@@ -105,9 +103,7 @@ impl Serialize for CustomOrder {
 
 impl<'de> Deserialize<'de> for CustomOrder {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         HashMap::<String, Order>::deserialize(deserializer).and_then(|map| {
             if let Some((key, value)) = map.into_iter().next() {
                 Ok(CustomOrder {
diff --git a/src/collector/custom_score_top_collector.rs b/src/collector/custom_score_top_collector.rs
index 0a804cf5c..d645004ad 100644
--- a/src/collector/custom_score_top_collector.rs
+++ b/src/collector/custom_score_top_collector.rs
@@ -8,8 +8,7 @@ pub(crate) struct CustomScoreTopCollector<TCustomScorer, TScore> {
 }
 
 impl<TCustomScorer, TScore> CustomScoreTopCollector<TCustomScorer, TScore>
-where
-    TScore: Clone + PartialOrd,
+where TScore: Clone + PartialOrd
 {
     pub(crate) fn new(
         custom_scorer: TCustomScorer,
@@ -114,8 +113,7 @@ where
 }
 
 impl<F, TScore> CustomSegmentScorer<TScore> for F
-where
-    F: 'static + FnMut(DocId) -> TScore,
+where F: 'static + FnMut(DocId) -> TScore
 {
     fn score(&mut self, doc: DocId) -> TScore {
         (self)(doc)
diff --git a/src/collector/facet_collector.rs b/src/collector/facet_collector.rs
index e861351b7..fc514c816 100644
--- a/src/collector/facet_collector.rs
+++ b/src/collector/facet_collector.rs
@@ -233,9 +233,7 @@ impl FacetCollector {
     /// If you need the correct number of unique documents for two such facets,
     /// just add them in separate `FacetCollector`.
     pub fn add_facet<T>(&mut self, facet_from: T)
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let facet = Facet::from(facet_from);
         for old_facet in &self.facets {
             assert!(
@@ -395,9 +393,7 @@ impl FacetCounts {
     /// Returns an iterator over all of the facet count pairs inside this result.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn get<T>(&self, facet_from: T) -> FacetChildIterator<'_>
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let facet = Facet::from(facet_from);
         let left_bound = Bound::Excluded(facet.clone());
         let right_bound = if facet.is_root() {
@@ -416,9 +412,7 @@ impl FacetCounts {
     /// Returns a vector of top `k` facets with their counts, sorted highest-to-lowest by counts.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn top_k<T>(&self, facet: T, k: usize) -> Vec<(&Facet, u64)>
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let mut heap = BinaryHeap::with_capacity(k);
         let mut it = self.get(facet);
 
diff --git a/src/collector/filter_collector_wrapper.rs b/src/collector/filter_collector_wrapper.rs
index 4094a572b..b1dbaaa20 100644
--- a/src/collector/filter_collector_wrapper.rs
+++ b/src/collector/filter_collector_wrapper.rs
@@ -59,8 +59,7 @@ use crate::{Score, SegmentReader, TantivyError};
 /// # }
 /// ```
 pub struct FilterCollector<TCollector, TPredicate, TPredicateValue: FastValue>
-where
-    TPredicate: 'static + Clone,
+where TPredicate: 'static + Clone
 {
     field: Field,
     collector: TCollector,
diff --git a/src/collector/top_collector.rs b/src/collector/top_collector.rs
index 846ed0591..691ef324b 100644
--- a/src/collector/top_collector.rs
+++ b/src/collector/top_collector.rs
@@ -60,8 +60,7 @@ pub(crate) struct TopCollector<T> {
 }
 
 impl<T> TopCollector<T>
-where
-    T: PartialOrd + Clone,
+where T: PartialOrd + Clone
 {
     /// Creates a top collector, with a number of documents equal to "limit".
     ///
diff --git a/src/collector/tweak_score_top_collector.rs b/src/collector/tweak_score_top_collector.rs
index 4ae748e16..1a81e7361 100644
--- a/src/collector/tweak_score_top_collector.rs
+++ b/src/collector/tweak_score_top_collector.rs
@@ -8,8 +8,7 @@ pub(crate) struct TweakedScoreTopCollector<TScoreTweaker, TScore> {
 }
 
 impl<TScoreTweaker, TScore> TweakedScoreTopCollector<TScoreTweaker, TScore>
-where
-    TScore: Clone + PartialOrd,
+where TScore: Clone + PartialOrd
 {
     pub fn new(
         score_tweaker: TScoreTweaker,
@@ -117,8 +116,7 @@ where
 }
 
 impl<F, TScore> ScoreSegmentTweaker<TScore> for F
-where
-    F: 'static + FnMut(DocId, Score) -> TScore,
+where F: 'static + FnMut(DocId, Score) -> TScore
 {
     fn score(&mut self, doc: DocId, score: Score) -> TScore {
         (self)(doc, score)
diff --git a/src/directory/directory.rs b/src/directory/directory.rs
index 7f04990e2..43d6ce5f4 100644
--- a/src/directory/directory.rs
+++ b/src/directory/directory.rs
@@ -233,8 +233,7 @@ pub trait DirectoryClone {
 }
 
 impl<T> DirectoryClone for T
-where
-    T: 'static + Directory + Clone,
+where T: 'static + Directory + Clone
 {
     fn box_clone(&self) -> Box<dyn Directory> {
         Box::new(self.clone())
diff --git a/src/directory/file_slice.rs b/src/directory/file_slice.rs
index 26672cff9..42ba07bb5 100644
--- a/src/directory/file_slice.rs
+++ b/src/directory/file_slice.rs
@@ -51,8 +51,7 @@ impl FileHandle for &'static [u8] {
 }
 
 impl<B> From<B> for FileSlice
-where
-    B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync,
+where B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync
 {
     fn from(bytes: B) -> FileSlice {
         FileSlice::new(Arc::new(OwnedBytes::new(bytes)))
diff --git a/src/postings/block_search.rs b/src/postings/block_search.rs
index eac587d0c..3a53bd78f 100644
--- a/src/postings/block_search.rs
+++ b/src/postings/block_search.rs
@@ -10,7 +10,7 @@ use crate::postings::compression::COMPRESSION_BLOCK_SIZE;
 ///     .take_while(|&&val| val < target)
 ///     .count()
 /// ```
-/// 
+///
 /// the `start` argument is just used to hint that the response is
 /// greater than beyond `start`. the implementation may or may not use
 /// it for optimization.
diff --git a/src/query/boolean_query/boolean_weight.rs b/src/query/boolean_query/boolean_weight.rs
index 82d97ca08..6a32f53f8 100644
--- a/src/query/boolean_query/boolean_weight.rs
+++ b/src/query/boolean_query/boolean_weight.rs
@@ -18,9 +18,7 @@ enum SpecializedScorer {
 }
 
 fn scorer_union<TScoreCombiner>(scorers: Vec<Box<dyn Scorer>>) -> SpecializedScorer
-where
-    TScoreCombiner: ScoreCombiner,
-{
+where TScoreCombiner: ScoreCombiner {
     assert!(!scorers.is_empty());
     if scorers.len() == 1 {
         return SpecializedScorer::Other(scorers.into_iter().next().unwrap()); //< we checked the size beforehand
diff --git a/src/query/query.rs b/src/query/query.rs
index fd95a5a71..48ca4e7fd 100644
--- a/src/query/query.rs
+++ b/src/query/query.rs
@@ -82,8 +82,7 @@ pub trait QueryClone {
 }
 
 impl<T> QueryClone for T
-where
-    T: 'static + Query + Clone,
+where T: 'static + Query + Clone
 {
     fn box_clone(&self) -> Box<dyn Query> {
         Box::new(self.clone())
diff --git a/src/query/union.rs b/src/query/union.rs
index f25d653fd..06ffb0ab3 100644
--- a/src/query/union.rs
+++ b/src/query/union.rs
@@ -14,9 +14,7 @@ const HORIZON: u32 = 64u32 * HORIZON_NUM_TINYBITSETS as u32;
 //
 // Also, it does not "yield" any elements.
 fn unordered_drain_filter<T, P>(v: &mut Vec<T>, mut predicate: P)
-where
-    P: FnMut(&mut T) -> bool,
-{
+where P: FnMut(&mut T) -> bool {
     let mut i = 0;
     while i < v.len() {
         if predicate(&mut v[i]) {
diff --git a/src/schema/document.rs b/src/schema/document.rs
index e6aed8761..3bde526b1 100644
--- a/src/schema/document.rs
+++ b/src/schema/document.rs
@@ -75,9 +75,7 @@ impl Document {
 
     /// Adding a facet to the document.
     pub fn add_facet<F>(&mut self, field: Field, path: F)
-    where
-        Facet: From<F>,
-    {
+    where Facet: From<F> {
         let facet = Facet::from(path);
         let value = Value::Facet(facet);
         self.add_field_value(field, value);
diff --git a/src/schema/facet.rs b/src/schema/facet.rs
index cbcc68106..3e826e384 100644
--- a/src/schema/facet.rs
+++ b/src/schema/facet.rs
@@ -83,9 +83,7 @@ impl Facet {
     /// contains a `/`, it should be escaped
     /// using an anti-slash `\`.
     pub fn from_text<T>(path: &T) -> Result<Facet, FacetParseError>
-    where
-        T: ?Sized + AsRef<str>,
-    {
+    where T: ?Sized + AsRef<str> {
         #[derive(Copy, Clone)]
         enum State {
             Escaped,
@@ -211,18 +209,14 @@ fn escape_slashes(s: &str) -> Cow<'_, str> {
 
 impl Serialize for Facet {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         serializer.serialize_str(&self.to_string())
     }
 }
 
 impl<'de> Deserialize<'de> for Facet {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         <&'de str as Deserialize<'de>>::deserialize(deserializer).map(Facet::from)
     }
 }
diff --git a/src/schema/schema.rs b/src/schema/schema.rs
index e07b382e6..e91a31a62 100644
--- a/src/schema/schema.rs
+++ b/src/schema/schema.rs
@@ -367,9 +367,7 @@ impl Schema {
 
 impl Serialize for Schema {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         let mut seq = serializer.serialize_seq(Some(self.0.fields.len()))?;
         for e in &self.0.fields {
             seq.serialize_element(e)?;
@@ -380,9 +378,7 @@ impl Serialize for Schema {
 
 impl<'de> Deserialize<'de> for Schema {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         struct SchemaVisitor;
 
         impl<'de> Visitor<'de> for SchemaVisitor {
@@ -393,9 +389,7 @@ impl<'de> Deserialize<'de> for Schema {
         }
 
         fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-        where
-            A: SeqAccess<'de>,
-        {
+        where A: SeqAccess<'de> {
             let mut schema = SchemaBuilder {
                 fields: Vec::with_capacity(seq.size_hint().unwrap_or(0)),
                 fields_map: HashMap::with_capacity(seq.size_hint().unwrap_or(0)),
diff --git a/src/schema/term.rs b/src/schema/term.rs
index ed89cd33b..4b63c19c4 100644
--- a/src/schema/term.rs
+++ b/src/schema/term.rs
@@ -34,8 +34,7 @@ pub const JSON_END_OF_PATH: u8 = 0u8;
 /// It actually wraps a `Vec<u8>`.
 #[derive(Clone)]
 pub struct Term<B = Vec<u8>>(B)
-where
-    B: AsRef<[u8]>;
+where B: AsRef<[u8]>;
 
 impl AsMut<Vec<u8>> for Term {
     fn as_mut(&mut self) -> &mut Vec<u8> {
@@ -175,8 +174,7 @@ impl Term {
 }
 
 impl<B> Ord for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.as_slice().cmp(other.as_slice())
@@ -184,8 +182,7 @@ where
 }
 
 impl<B> PartialOrd for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
@@ -193,8 +190,7 @@ where
 }
 
 impl<B> PartialEq for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn eq(&self, other: &Self) -> bool {
         self.as_slice() == other.as_slice()
@@ -204,8 +200,7 @@ where
 impl<B> Eq for Term<B> where B: AsRef<[u8]> {}
 
 impl<B> Hash for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.0.as_ref().hash(state)
@@ -213,8 +208,7 @@ where
 }
 
 impl<B> Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     /// Wraps a object holding bytes
     pub fn wrap(data: B) -> Term<B> {
@@ -426,8 +420,7 @@ fn debug_value_bytes(typ: Type, bytes: &[u8], f: &mut fmt::Formatter) -> fmt::Result {
 }
 
 impl<B> fmt::Debug for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let field_id = self.field().field_id();
diff --git a/src/schema/value.rs b/src/schema/value.rs
index 8d1653cb0..5cd5d3894 100644
--- a/src/schema/value.rs
+++ b/src/schema/value.rs
@@ -38,9 +38,7 @@ impl Eq for Value {}
 
 impl Serialize for Value {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         match *self {
             Value::Str(ref v) => serializer.serialize_str(v),
             Value::PreTokStr(ref v) => v.serialize(serializer),
@@ -58,9 +56,7 @@ impl Serialize for Value {
 
 impl<'de> Deserialize<'de> for Value {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         struct ValueVisitor;
 
         impl<'de> Visitor<'de> for ValueVisitor {
diff --git a/src/store/compressors.rs b/src/store/compressors.rs
index ea2478015..764a09bec 100644
--- a/src/store/compressors.rs
+++ b/src/store/compressors.rs
@@ -28,9 +28,7 @@ pub enum Compressor {
 
 impl Serialize for Compressor {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
+    where S: serde::Serializer {
         match *self {
             Compressor::None => serializer.serialize_str("none"),
             Compressor::Lz4 => serializer.serialize_str("lz4"),
@@ -43,9 +41,7 @@ impl Serialize for Compressor {
 
 impl<'de> Deserialize<'de> for Compressor {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         let buf = String::deserialize(deserializer)?;
         let compressor = match buf.as_str() {
             "none" => Compressor::None,
diff --git a/src/termdict/fst_termdict/streamer.rs b/src/termdict/fst_termdict/streamer.rs
index 60136adab..978ab5e0d 100644
--- a/src/termdict/fst_termdict/streamer.rs
+++ b/src/termdict/fst_termdict/streamer.rs
@@ -11,16 +11,14 @@ use crate::termdict::TermOrdinal;
 /// `TermStreamerBuilder` is a helper object used to define
 /// a range of terms that should be streamed.
 pub struct TermStreamerBuilder<'a, A = AlwaysMatch>
-where
-    A: Automaton,
+where A: Automaton
 {
     fst_map: &'a TermDictionary,
     stream_builder: StreamBuilder<'a, A>,
 }
 
 impl<'a, A> TermStreamerBuilder<'a, A>
-where
-    A: Automaton,
+where A: Automaton
 {
     pub(crate) fn new(fst_map: &'a TermDictionary, stream_builder: StreamBuilder<'a, A>) -> Self {
         TermStreamerBuilder {
@@ -75,8 +73,7 @@ where
 /// `TermStreamer` acts as a cursor over a range of terms of a segment.
 /// Terms are guaranteed to be sorted.
 pub struct TermStreamer<'a, A = AlwaysMatch>
-where
-    A: Automaton,
+where A: Automaton
 {
     pub(crate) fst_map: &'a TermDictionary,
     pub(crate) stream: Stream<'a, A>,
@@ -86,8 +83,7 @@ where
 }
 
 impl<'a, A> TermStreamer<'a, A>
-where
-    A: Automaton,
+where A: Automaton
 {
     /// Advance position the stream on the next item.
     /// Before the first call to `.advance()`, the stream
diff --git a/src/termdict/fst_termdict/termdict.rs b/src/termdict/fst_termdict/termdict.rs
index 23a3d3aa3..937471733 100644
--- a/src/termdict/fst_termdict/termdict.rs
+++ b/src/termdict/fst_termdict/termdict.rs
@@ -26,8 +26,7 @@ pub struct TermDictionaryBuilder<W> {
 }
 
 impl<W> TermDictionaryBuilder<W>
-where
-    W: Write,
+where W: Write
 {
     /// Creates a new `TermDictionaryBuilder`
     pub fn create(w: W) -> io::Result<Self> {
diff --git a/src/termdict/sstable_termdict/sstable/delta.rs b/src/termdict/sstable_termdict/sstable/delta.rs
index 65c408f5d..3551891cd 100644
--- a/src/termdict/sstable_termdict/sstable/delta.rs
+++ b/src/termdict/sstable_termdict/sstable/delta.rs
@@ -11,8 +11,7 @@ const VINT_MODE: u8 = 1u8;
 const BLOCK_LEN: usize = 32_000;
 
 pub struct DeltaWriter<W, TValueWriter>
-where
-    W: io::Write,
+where W: io::Write
 {
     block: Vec<u8>,
     write: CountingWriter<BufWriter<W>>,
@@ -100,8 +99,7 @@ pub struct DeltaReader<'a, TValueReader> {
 }
 
 impl<'a, TValueReader> DeltaReader<'a, TValueReader>
-where
-    TValueReader: value::ValueReader,
+where TValueReader: value::ValueReader
 {
     pub fn new(reader: R) -> Self {
         DeltaReader {
diff --git a/src/termdict/sstable_termdict/sstable/mod.rs b/src/termdict/sstable_termdict/sstable/mod.rs
index a138bca6f..0afef761c 100644
--- a/src/termdict/sstable_termdict/sstable/mod.rs
+++ b/src/termdict/sstable_termdict/sstable/mod.rs
@@ -96,8 +96,7 @@ pub struct Reader<'a, TValueReader> {
 }
 
 impl<'a, TValueReader> Reader<'a, TValueReader>
-where
-    TValueReader: ValueReader,
+where TValueReader: ValueReader
 {
     pub fn advance(&mut self) -> io::Result<bool> {
         if !self.delta_reader.advance()? {
@@ -127,8 +126,7 @@ impl<'a, TValueReader> AsRef<[u8]> for Reader<'a, TValueReader> {
 }
 
 pub struct Writer<W, TValueWriter>
-where
-    W: io::Write,
+where W: io::Write
 {
     previous_key: Vec<u8>,
     index_builder: SSTableIndexBuilder,
diff --git a/src/termdict/sstable_termdict/termdict.rs b/src/termdict/sstable_termdict/termdict.rs
index 5f88df136..29e2ffbbe 100644
--- a/src/termdict/sstable_termdict/termdict.rs
+++ b/src/termdict/sstable_termdict/termdict.rs
@@ -243,9 +243,7 @@ impl TermDictionary {
     // Returns a search builder, to stream all of the terms
     // within the Automaton
     pub fn search<'a, A: Automaton + 'a>(&'a self, automaton: A) -> TermStreamerBuilder<'a, A>
-    where
-        A::State: Clone,
-    {
+    where A::State: Clone {
         TermStreamerBuilder::<A>::new(self, automaton)
     }
 
diff --git a/src/tokenizer/ngram_tokenizer.rs b/src/tokenizer/ngram_tokenizer.rs
index 57aefc386..b268d6105 100644
--- a/src/tokenizer/ngram_tokenizer.rs
+++ b/src/tokenizer/ngram_tokenizer.rs
@@ -192,8 +192,7 @@ struct StutteringIterator<T> {
 }
 
 impl<T> StutteringIterator<T>
-where
-    T: Iterator<Item = usize>,
+where T: Iterator<Item = usize>
 {
     pub fn new(mut underlying: T, min_gram: usize, max_gram: usize) -> StutteringIterator<T> {
         assert!(min_gram > 0);
@@ -222,8 +221,7 @@ where
 }
 
 impl<T> Iterator for StutteringIterator<T>
-where
-    T: Iterator<Item = usize>,
+where T: Iterator<Item = usize>
 {
     type Item = (usize, usize);
 
diff --git a/src/tokenizer/tokenizer.rs b/src/tokenizer/tokenizer.rs
index d916a3299..0965f004f 100644
--- a/src/tokenizer/tokenizer.rs
+++ b/src/tokenizer/tokenizer.rs
@@ -159,8 +159,7 @@ impl<'a> TokenStream for Box<dyn TokenStream + 'a> {
 pub struct BoxTokenStream<'a>(Box<dyn TokenStream + 'a>);
 
 impl<'a, T> From<T> for BoxTokenStream<'a>
-where
-    T: TokenStream + 'a,
+where T: TokenStream + 'a
 {
     fn from(token_stream: T) -> BoxTokenStream<'a> {
         BoxTokenStream(Box::new(token_stream))
diff --git a/src/tokenizer/tokenizer_manager.rs b/src/tokenizer/tokenizer_manager.rs
index f3e0d5d64..73dcba21a 100644
--- a/src/tokenizer/tokenizer_manager.rs
+++ b/src/tokenizer/tokenizer_manager.rs
@@ -35,9 +35,7 @@ impl TokenizerManager {
 
     /// Registers a new tokenizer associated with a given name.
     pub fn register<T>(&self, tokenizer_name: &str, tokenizer: T)
-    where
-        TextAnalyzer: From<T>,
-    {
+    where TextAnalyzer: From<T> {
         let boxed_tokenizer: TextAnalyzer = TextAnalyzer::from(tokenizer);
         self.tokenizers
             .write()