cargo fmt

Author: Kian-Meng Ang
Date:   2022-08-15 21:07:01 +08:00
Parent: 625bcb4877
Commit: 84295d5b35

28 changed files with 122 additions and 51 deletions
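For context: `cargo fmt` runs rustfmt over the whole workspace, and nearly all of this changeset is rustfmt's default `where`-clause style, which moves `where` onto its own line with one trailing-comma bound per indented line. A minimal sketch of the transformation, using a hypothetical function that is not part of this changeset:

    // before: the bound and opening brace share the `where` line
    fn collect_sorted<T>(items: Vec<T>) -> Vec<T>
    where T: Ord {
        let mut sorted = items;
        sorted.sort();
        sorted
    }

    // after `cargo fmt`: `where` alone, bound indented with a trailing comma
    fn collect_sorted<T>(items: Vec<T>) -> Vec<T>
    where
        T: Ord,
    {
        let mut sorted = items;
        sorted.sort();
        sorted
    }

To verify formatting without rewriting files (e.g. in CI), `cargo fmt -- --check` exits non-zero when any file would change.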


@@ -65,7 +65,9 @@ pub struct AntiCallToken(());
 pub trait TerminatingWrite: Write + Send {
     /// Indicate that the writer will no longer be used. Internally call terminate_ref.
     fn terminate(mut self) -> io::Result<()>
-    where Self: Sized {
+    where
+        Self: Sized,
+    {
         self.terminate_ref(AntiCallToken(()))
     }


@@ -160,7 +160,8 @@ impl PartialEq<str> for OwnedBytes {
 }
 impl<'a, T: ?Sized> PartialEq<&'a T> for OwnedBytes
-where OwnedBytes: PartialEq<T>
+where
+    OwnedBytes: PartialEq<T>,
 {
     fn eq(&self, other: &&'a T) -> bool {
         *self == **other


@@ -94,7 +94,9 @@ pub struct CustomOrder {
 impl Serialize for CustomOrder {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: Serializer {
+    where
+        S: Serializer,
+    {
         let map: HashMap<String, Order> =
             std::iter::once((self.target.to_string(), self.order)).collect();
         map.serialize(serializer)
@@ -103,7 +105,9 @@ impl Serialize for CustomOrder {
 impl<'de> Deserialize<'de> for CustomOrder {
     fn deserialize<D>(deserializer: D) -> Result<CustomOrder, D::Error>
-    where D: Deserializer<'de> {
+    where
+        D: Deserializer<'de>,
+    {
         HashMap::<String, Order>::deserialize(deserializer).and_then(|map| {
             if let Some((key, value)) = map.into_iter().next() {
                 Ok(CustomOrder {


@@ -8,7 +8,8 @@ pub(crate) struct CustomScoreTopCollector<TCustomScorer, TScore = Score> {
 }
 impl<TCustomScorer, TScore> CustomScoreTopCollector<TCustomScorer, TScore>
-where TScore: Clone + PartialOrd
+where
+    TScore: Clone + PartialOrd,
 {
     pub(crate) fn new(
         custom_scorer: TCustomScorer,
@@ -113,7 +114,8 @@ where
 }
 impl<F, TScore> CustomSegmentScorer<TScore> for F
-where F: 'static + FnMut(DocId) -> TScore
+where
+    F: 'static + FnMut(DocId) -> TScore,
 {
     fn score(&mut self, doc: DocId) -> TScore {
         (self)(doc)


@@ -233,7 +233,9 @@ impl FacetCollector {
     /// If you need the correct number of unique documents for two such facets,
     /// just add them in separate `FacetCollector`.
     pub fn add_facet<T>(&mut self, facet_from: T)
-    where Facet: From<T> {
+    where
+        Facet: From<T>,
+    {
         let facet = Facet::from(facet_from);
         for old_facet in &self.facets {
             assert!(
@@ -393,7 +395,9 @@ impl FacetCounts {
     /// Returns an iterator over all of the facet count pairs inside this result.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn get<T>(&self, facet_from: T) -> FacetChildIterator<'_>
-    where Facet: From<T> {
+    where
+        Facet: From<T>,
+    {
         let facet = Facet::from(facet_from);
         let left_bound = Bound::Excluded(facet.clone());
         let right_bound = if facet.is_root() {
@@ -412,7 +416,9 @@ impl FacetCounts {
     /// Returns a vector of top `k` facets with their counts, sorted highest-to-lowest by counts.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn top_k<T>(&self, facet: T, k: usize) -> Vec<(&Facet, u64)>
-    where Facet: From<T> {
+    where
+        Facet: From<T>,
+    {
         let mut heap = BinaryHeap::with_capacity(k);
         let mut it = self.get(facet);


@@ -59,7 +59,8 @@ use crate::{Score, SegmentReader, TantivyError};
 /// # }
 /// ```
 pub struct FilterCollector<TCollector, TPredicate, TPredicateValue: FastValue>
-where TPredicate: 'static + Clone
+where
+    TPredicate: 'static + Clone,
 {
     field: Field,
     collector: TCollector,


@@ -60,7 +60,8 @@ pub(crate) struct TopCollector<T> {
 }
 impl<T> TopCollector<T>
-where T: PartialOrd + Clone
+where
+    T: PartialOrd + Clone,
 {
     /// Creates a top collector, with a number of documents equal to "limit".
     ///


@@ -8,7 +8,8 @@ pub(crate) struct TweakedScoreTopCollector<TScoreTweaker, TScore = Score> {
 }
 impl<TScoreTweaker, TScore> TweakedScoreTopCollector<TScoreTweaker, TScore>
-where TScore: Clone + PartialOrd
+where
+    TScore: Clone + PartialOrd,
 {
     pub fn new(
         score_tweaker: TScoreTweaker,
@@ -116,7 +117,8 @@ where
 }
 impl<F, TScore> ScoreSegmentTweaker<TScore> for F
-where F: 'static + FnMut(DocId, Score) -> TScore
+where
+    F: 'static + FnMut(DocId, Score) -> TScore,
 {
     fn score(&mut self, doc: DocId, score: Score) -> TScore {
         (self)(doc, score)


@@ -233,7 +233,8 @@ pub trait DirectoryClone {
 }
 impl<T> DirectoryClone for T
-where T: 'static + Directory + Clone
+where
+    T: 'static + Directory + Clone,
 {
     fn box_clone(&self) -> Box<dyn Directory> {
         Box::new(self.clone())


@@ -51,7 +51,8 @@ impl FileHandle for &'static [u8] {
 }
 impl<B> From<B> for FileSlice
-where B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync
+where
+    B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync,
 {
     fn from(bytes: B) -> FileSlice {
         FileSlice::new(Arc::new(OwnedBytes::new(bytes)))


@@ -10,7 +10,7 @@ use crate::postings::compression::COMPRESSION_BLOCK_SIZE;
 // .take_while(|&&val| val < target)
 // .count()
 /// ```
-/// 
+///
 /// the `start` argument is just used to hint that the response is
 /// greater than beyond `start`. the implementation may or may not use
 /// it for optimization.


@@ -18,7 +18,9 @@ enum SpecializedScorer {
 }
 fn scorer_union<TScoreCombiner>(scorers: Vec<Box<dyn Scorer>>) -> SpecializedScorer
-where TScoreCombiner: ScoreCombiner {
+where
+    TScoreCombiner: ScoreCombiner,
+{
     assert!(!scorers.is_empty());
     if scorers.len() == 1 {
         return SpecializedScorer::Other(scorers.into_iter().next().unwrap()); //< we checked the size beforehand


@@ -82,7 +82,8 @@ pub trait QueryClone {
 }
 impl<T> QueryClone for T
-where T: 'static + Query + Clone
+where
+    T: 'static + Query + Clone,
 {
     fn box_clone(&self) -> Box<dyn Query> {
         Box::new(self.clone())


@@ -14,7 +14,9 @@ const HORIZON: u32 = 64u32 * HORIZON_NUM_TINYBITSETS as u32;
 //
 // Also, it does not "yield" any elements.
 fn unordered_drain_filter<T, P>(v: &mut Vec<T>, mut predicate: P)
-where P: FnMut(&mut T) -> bool {
+where
+    P: FnMut(&mut T) -> bool,
+{
     let mut i = 0;
     while i < v.len() {
         if predicate(&mut v[i]) {


@@ -75,7 +75,9 @@ impl Document {
     /// Adding a facet to the document.
     pub fn add_facet<F>(&mut self, field: Field, path: F)
-    where Facet: From<F> {
+    where
+        Facet: From<F>,
+    {
         let facet = Facet::from(path);
         let value = Value::Facet(facet);
         self.add_field_value(field, value);


@@ -83,7 +83,9 @@ impl Facet {
     /// contains a `/`, it should be escaped
     /// using an anti-slash `\`.
     pub fn from_text<T>(path: &T) -> Result<Facet, FacetParseError>
-    where T: ?Sized + AsRef<str> {
+    where
+        T: ?Sized + AsRef<str>,
+    {
         #[derive(Copy, Clone)]
         enum State {
             Escaped,
@@ -209,14 +211,18 @@ fn escape_slashes(s: &str) -> Cow<'_, str> {
 impl Serialize for Facet {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: Serializer {
+    where
+        S: Serializer,
+    {
         serializer.serialize_str(&self.to_string())
     }
 }
 impl<'de> Deserialize<'de> for Facet {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where D: Deserializer<'de> {
+    where
+        D: Deserializer<'de>,
+    {
         <&'de str as Deserialize<'de>>::deserialize(deserializer).map(Facet::from)
     }
 }


@@ -367,7 +367,9 @@ impl Schema {
 impl Serialize for Schema {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: Serializer {
+    where
+        S: Serializer,
+    {
         let mut seq = serializer.serialize_seq(Some(self.0.fields.len()))?;
         for e in &self.0.fields {
             seq.serialize_element(e)?;
@@ -378,7 +380,9 @@ impl Serialize for Schema {
 impl<'de> Deserialize<'de> for Schema {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where D: Deserializer<'de> {
+    where
+        D: Deserializer<'de>,
+    {
         struct SchemaVisitor;
         impl<'de> Visitor<'de> for SchemaVisitor {
@@ -389,7 +393,9 @@ impl<'de> Deserialize<'de> for Schema {
             }
             fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-            where A: SeqAccess<'de> {
+            where
+                A: SeqAccess<'de>,
+            {
                 let mut schema = SchemaBuilder {
                     fields: Vec::with_capacity(seq.size_hint().unwrap_or(0)),
                     fields_map: HashMap::with_capacity(seq.size_hint().unwrap_or(0)),


@@ -34,7 +34,8 @@ pub const JSON_END_OF_PATH: u8 = 0u8;
 /// It actually wraps a `Vec<u8>`.
 #[derive(Clone)]
 pub struct Term<B = Vec<u8>>(B)
-where B: AsRef<[u8]>;
+where
+    B: AsRef<[u8]>;
 impl AsMut<Vec<u8>> for Term {
     fn as_mut(&mut self) -> &mut Vec<u8> {
@@ -174,7 +175,8 @@ impl Term {
 }
 impl<B> Ord for Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.as_slice().cmp(other.as_slice())
@@ -182,7 +184,8 @@ where B: AsRef<[u8]>
 }
 impl<B> PartialOrd for Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
@@ -190,7 +193,8 @@ where B: AsRef<[u8]>
 }
 impl<B> PartialEq for Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     fn eq(&self, other: &Self) -> bool {
         self.as_slice() == other.as_slice()
@@ -200,7 +204,8 @@ where B: AsRef<[u8]>
 impl<B> Eq for Term<B> where B: AsRef<[u8]> {}
 impl<B> Hash for Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.0.as_ref().hash(state)
@@ -208,7 +213,8 @@ where B: AsRef<[u8]>
 }
 impl<B> Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     /// Wraps a object holding bytes
     pub fn wrap(data: B) -> Term<B> {
@@ -420,7 +426,8 @@ fn debug_value_bytes(typ: Type, bytes: &[u8], f: &mut fmt::Formatter) -> fmt::Re
 }
 impl<B> fmt::Debug for Term<B>
-where B: AsRef<[u8]>
+where
+    B: AsRef<[u8]>,
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let field_id = self.field().field_id();


@@ -38,7 +38,9 @@ impl Eq for Value {}
 impl Serialize for Value {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: Serializer {
+    where
+        S: Serializer,
+    {
         match *self {
             Value::Str(ref v) => serializer.serialize_str(v),
             Value::PreTokStr(ref v) => v.serialize(serializer),
@@ -56,7 +58,9 @@ impl Serialize for Value {
 impl<'de> Deserialize<'de> for Value {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where D: Deserializer<'de> {
+    where
+        D: Deserializer<'de>,
+    {
         struct ValueVisitor;
         impl<'de> Visitor<'de> for ValueVisitor {


@@ -28,7 +28,9 @@ pub enum Compressor {
 impl Serialize for Compressor {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: serde::Serializer {
+    where
+        S: serde::Serializer,
+    {
         match *self {
             Compressor::None => serializer.serialize_str("none"),
             Compressor::Lz4 => serializer.serialize_str("lz4"),
@@ -41,7 +43,9 @@ impl Serialize for Compressor {
 impl<'de> Deserialize<'de> for Compressor {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where D: Deserializer<'de> {
+    where
+        D: Deserializer<'de>,
+    {
         let buf = String::deserialize(deserializer)?;
         let compressor = match buf.as_str() {
             "none" => Compressor::None,


@@ -11,14 +11,16 @@ use crate::termdict::TermOrdinal;
 /// `TermStreamerBuilder` is a helper object used to define
 /// a range of terms that should be streamed.
 pub struct TermStreamerBuilder<'a, A = AlwaysMatch>
-where A: Automaton
+where
+    A: Automaton,
 {
     fst_map: &'a TermDictionary,
     stream_builder: StreamBuilder<'a, A>,
 }
 impl<'a, A> TermStreamerBuilder<'a, A>
-where A: Automaton
+where
+    A: Automaton,
 {
     pub(crate) fn new(fst_map: &'a TermDictionary, stream_builder: StreamBuilder<'a, A>) -> Self {
         TermStreamerBuilder {
@@ -73,7 +75,8 @@ where A: Automaton
 /// `TermStreamer` acts as a cursor over a range of terms of a segment.
 /// Terms are guaranteed to be sorted.
 pub struct TermStreamer<'a, A = AlwaysMatch>
-where A: Automaton
+where
+    A: Automaton,
 {
     pub(crate) fst_map: &'a TermDictionary,
     pub(crate) stream: Stream<'a, A>,
@@ -83,7 +86,8 @@ where A: Automaton
 }
 impl<'a, A> TermStreamer<'a, A>
-where A: Automaton
+where
+    A: Automaton,
 {
     /// Advance position the stream on the next item.
     /// Before the first call to `.advance()`, the stream


@@ -26,7 +26,8 @@ pub struct TermDictionaryBuilder<W> {
 }
 impl<W> TermDictionaryBuilder<W>
-where W: Write
+where
+    W: Write,
 {
     /// Creates a new `TermDictionaryBuilder`
     pub fn create(w: W) -> io::Result<Self> {


@@ -11,7 +11,8 @@ const VINT_MODE: u8 = 1u8;
 const BLOCK_LEN: usize = 32_000;
 pub struct DeltaWriter<W, TValueWriter>
-where W: io::Write
+where
+    W: io::Write,
 {
     block: Vec<u8>,
     write: CountingWriter<BufWriter<W>>,
@@ -99,7 +100,8 @@ pub struct DeltaReader<'a, TValueReader> {
 }
 impl<'a, TValueReader> DeltaReader<'a, TValueReader>
-where TValueReader: value::ValueReader
+where
+    TValueReader: value::ValueReader,
 {
     pub fn new<R: io::Read + 'a>(reader: R) -> Self {
         DeltaReader {


@@ -96,7 +96,8 @@ pub struct Reader<'a, TValueReader> {
 }
 impl<'a, TValueReader> Reader<'a, TValueReader>
-where TValueReader: ValueReader
+where
+    TValueReader: ValueReader,
 {
     pub fn advance(&mut self) -> io::Result<bool> {
         if !self.delta_reader.advance()? {
@@ -126,7 +127,8 @@ impl<'a, TValueReader> AsRef<[u8]> for Reader<'a, TValueReader> {
 }
 pub struct Writer<W, TValueWriter>
-where W: io::Write
+where
+    W: io::Write,
 {
     previous_key: Vec<u8>,
     index_builder: SSTableIndexBuilder,


@@ -243,7 +243,9 @@ impl TermDictionary {
     // Returns a search builder, to stream all of the terms
     // within the Automaton
     pub fn search<'a, A: Automaton + 'a>(&'a self, automaton: A) -> TermStreamerBuilder<'a, A>
-    where A::State: Clone {
+    where
+        A::State: Clone,
+    {
         TermStreamerBuilder::<A>::new(self, automaton)
     }


@@ -192,7 +192,8 @@ struct StutteringIterator<T> {
 }
 impl<T> StutteringIterator<T>
-where T: Iterator<Item = usize>
+where
+    T: Iterator<Item = usize>,
 {
     pub fn new(mut underlying: T, min_gram: usize, max_gram: usize) -> StutteringIterator<T> {
         assert!(min_gram > 0);
@@ -221,7 +222,8 @@ where T: Iterator<Item = usize>
 }
 impl<T> Iterator for StutteringIterator<T>
-where T: Iterator<Item = usize>
+where
+    T: Iterator<Item = usize>,
 {
     type Item = (usize, usize);


@@ -159,7 +159,8 @@ impl<'a> TokenStream for Box<dyn TokenStream + 'a> {
 pub struct BoxTokenStream<'a>(Box<dyn TokenStream + 'a>);
 impl<'a, T> From<T> for BoxTokenStream<'a>
-where T: TokenStream + 'a
+where
+    T: TokenStream + 'a,
 {
     fn from(token_stream: T) -> BoxTokenStream<'a> {
         BoxTokenStream(Box::new(token_stream))


@@ -35,7 +35,9 @@ impl TokenizerManager {
     /// Registers a new tokenizer associated with a given name.
     pub fn register<T>(&self, tokenizer_name: &str, tokenizer: T)
-    where TextAnalyzer: From<T> {
+    where
+        TextAnalyzer: From<T>,
+    {
         let boxed_tokenizer: TextAnalyzer = TextAnalyzer::from(tokenizer);
         self.tokenizers
             .write()