cargo +nightly fmt

Author: Kian-Meng Ang
Date: 2022-08-17 22:33:28 +08:00
parent 84295d5b35
commit 014b1adc3e
29 changed files with 52 additions and 124 deletions
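
Every hunk below is the same mechanical rewrite: nightly rustfmt collapses single-predicate `where` clauses onto one line (and, in one hunk, reflows a multi-line string literal to the column limit, presumably via the nightly-only `format_strings` option), which is why the commit deletes far more lines than it adds. A minimal before/after sketch of the pattern, assuming the repository's rustfmt.toml enables the nightly-only `where_single_line = true` option:

// Before: stable rustfmt's default layout for a single-bound `where` clause.
fn terminate(mut self) -> io::Result<()>
where
    Self: Sized,
{
    self.terminate_ref(AntiCallToken(()))
}

// After `cargo +nightly fmt` with `where_single_line = true` (assumed):
// the lone predicate joins the `where` keyword, and for functions the
// opening brace moves up as well.
fn terminate(mut self) -> io::Result<()>
where Self: Sized {
    self.terminate_ref(AntiCallToken(()))
}

Running `cargo +nightly fmt -- --check` afterwards should report no remaining diffs.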

View File

@@ -65,9 +65,7 @@ pub struct AntiCallToken(());
 pub trait TerminatingWrite: Write + Send {
     /// Indicate that the writer will no longer be used. Internally call terminate_ref.
     fn terminate(mut self) -> io::Result<()>
-    where
-        Self: Sized,
-    {
+    where Self: Sized {
         self.terminate_ref(AntiCallToken(()))
     }

View File

@@ -160,8 +160,7 @@ impl PartialEq<str> for OwnedBytes {
 }
 
 impl<'a, T: ?Sized> PartialEq<&'a T> for OwnedBytes
-where
-    OwnedBytes: PartialEq<T>,
+where OwnedBytes: PartialEq<T>
 {
     fn eq(&self, other: &&'a T) -> bool {
         *self == **other

View File

@@ -57,8 +57,7 @@ impl AggregationResult {
         match self {
             AggregationResult::BucketResult(_bucket) => Err(TantivyError::InternalError(
                 "Tried to retrieve value from bucket aggregation. This is not supported and \
-                 should not happen during collection phase, but should be caught during \
-                 validation"
+                 should not happen during collection phase, but should be caught during validation"
                     .to_string(),
             )),
             AggregationResult::MetricResult(metric) => metric.get_value(agg_property),

View File

@@ -94,9 +94,7 @@ pub struct CustomOrder {
 impl Serialize for CustomOrder {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         let map: HashMap<String, Order> =
             std::iter::once((self.target.to_string(), self.order)).collect();
         map.serialize(serializer)
@@ -105,9 +103,7 @@ impl Serialize for CustomOrder {
 impl<'de> Deserialize<'de> for CustomOrder {
     fn deserialize<D>(deserializer: D) -> Result<CustomOrder, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         HashMap::<String, Order>::deserialize(deserializer).and_then(|map| {
             if let Some((key, value)) = map.into_iter().next() {
                 Ok(CustomOrder {

View File

@@ -8,8 +8,7 @@ pub(crate) struct CustomScoreTopCollector<TCustomScorer, TScore = Score> {
 }
 
 impl<TCustomScorer, TScore> CustomScoreTopCollector<TCustomScorer, TScore>
-where
-    TScore: Clone + PartialOrd,
+where TScore: Clone + PartialOrd
 {
     pub(crate) fn new(
         custom_scorer: TCustomScorer,
@@ -114,8 +113,7 @@ where
 }
 
 impl<F, TScore> CustomSegmentScorer<TScore> for F
-where
-    F: 'static + FnMut(DocId) -> TScore,
+where F: 'static + FnMut(DocId) -> TScore
 {
     fn score(&mut self, doc: DocId) -> TScore {
         (self)(doc)

View File

@@ -233,9 +233,7 @@ impl FacetCollector {
     /// If you need the correct number of unique documents for two such facets,
     /// just add them in separate `FacetCollector`.
     pub fn add_facet<T>(&mut self, facet_from: T)
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let facet = Facet::from(facet_from);
         for old_facet in &self.facets {
             assert!(
@@ -395,9 +393,7 @@ impl FacetCounts {
     /// Returns an iterator over all of the facet count pairs inside this result.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn get<T>(&self, facet_from: T) -> FacetChildIterator<'_>
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let facet = Facet::from(facet_from);
         let left_bound = Bound::Excluded(facet.clone());
         let right_bound = if facet.is_root() {
@@ -416,9 +412,7 @@ impl FacetCounts {
     /// Returns a vector of top `k` facets with their counts, sorted highest-to-lowest by counts.
     /// See the documentation for [FacetCollector] for a usage example.
     pub fn top_k<T>(&self, facet: T, k: usize) -> Vec<(&Facet, u64)>
-    where
-        Facet: From<T>,
-    {
+    where Facet: From<T> {
         let mut heap = BinaryHeap::with_capacity(k);
         let mut it = self.get(facet);

View File

@@ -59,8 +59,7 @@ use crate::{Score, SegmentReader, TantivyError};
 /// # }
 /// ```
 pub struct FilterCollector<TCollector, TPredicate, TPredicateValue: FastValue>
-where
-    TPredicate: 'static + Clone,
+where TPredicate: 'static + Clone
 {
     field: Field,
     collector: TCollector,

View File

@@ -60,8 +60,7 @@ pub(crate) struct TopCollector<T> {
 }
 
 impl<T> TopCollector<T>
-where
-    T: PartialOrd + Clone,
+where T: PartialOrd + Clone
 {
     /// Creates a top collector, with a number of documents equal to "limit".
     ///

View File

@@ -8,8 +8,7 @@ pub(crate) struct TweakedScoreTopCollector<TScoreTweaker, TScore = Score> {
 }
 
 impl<TScoreTweaker, TScore> TweakedScoreTopCollector<TScoreTweaker, TScore>
-where
-    TScore: Clone + PartialOrd,
+where TScore: Clone + PartialOrd
 {
     pub fn new(
         score_tweaker: TScoreTweaker,
@@ -117,8 +116,7 @@ where
 }
 
 impl<F, TScore> ScoreSegmentTweaker<TScore> for F
-where
-    F: 'static + FnMut(DocId, Score) -> TScore,
+where F: 'static + FnMut(DocId, Score) -> TScore
 {
     fn score(&mut self, doc: DocId, score: Score) -> TScore {
         (self)(doc, score)

View File

@@ -233,8 +233,7 @@ pub trait DirectoryClone {
 }
 
 impl<T> DirectoryClone for T
-where
-    T: 'static + Directory + Clone,
+where T: 'static + Directory + Clone
 {
     fn box_clone(&self) -> Box<dyn Directory> {
         Box::new(self.clone())

View File

@@ -51,8 +51,7 @@ impl FileHandle for &'static [u8] {
 }
 
 impl<B> From<B> for FileSlice
-where
-    B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync,
+where B: StableDeref + Deref<Target = [u8]> + 'static + Send + Sync
 {
     fn from(bytes: B) -> FileSlice {
         FileSlice::new(Arc::new(OwnedBytes::new(bytes)))

View File

@@ -18,9 +18,7 @@ enum SpecializedScorer {
 }
 
 fn scorer_union<TScoreCombiner>(scorers: Vec<Box<dyn Scorer>>) -> SpecializedScorer
-where
-    TScoreCombiner: ScoreCombiner,
-{
+where TScoreCombiner: ScoreCombiner {
     assert!(!scorers.is_empty());
     if scorers.len() == 1 {
         return SpecializedScorer::Other(scorers.into_iter().next().unwrap()); //< we checked the size beforehand

View File

@@ -82,8 +82,7 @@ pub trait QueryClone {
 }
 
 impl<T> QueryClone for T
-where
-    T: 'static + Query + Clone,
+where T: 'static + Query + Clone
 {
     fn box_clone(&self) -> Box<dyn Query> {
         Box::new(self.clone())

View File

@@ -14,9 +14,7 @@ const HORIZON: u32 = 64u32 * HORIZON_NUM_TINYBITSETS as u32;
 //
 // Also, it does not "yield" any elements.
 fn unordered_drain_filter<T, P>(v: &mut Vec<T>, mut predicate: P)
-where
-    P: FnMut(&mut T) -> bool,
-{
+where P: FnMut(&mut T) -> bool {
     let mut i = 0;
     while i < v.len() {
         if predicate(&mut v[i]) {

View File

@@ -75,9 +75,7 @@ impl Document {
     /// Adding a facet to the document.
     pub fn add_facet<F>(&mut self, field: Field, path: F)
-    where
-        Facet: From<F>,
-    {
+    where Facet: From<F> {
         let facet = Facet::from(path);
         let value = Value::Facet(facet);
         self.add_field_value(field, value);

View File

@@ -83,9 +83,7 @@ impl Facet {
     /// contains a `/`, it should be escaped
     /// using an anti-slash `\`.
     pub fn from_text<T>(path: &T) -> Result<Facet, FacetParseError>
-    where
-        T: ?Sized + AsRef<str>,
-    {
+    where T: ?Sized + AsRef<str> {
         #[derive(Copy, Clone)]
         enum State {
             Escaped,
@@ -211,18 +209,14 @@ fn escape_slashes(s: &str) -> Cow<'_, str> {
 impl Serialize for Facet {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         serializer.serialize_str(&self.to_string())
     }
 }
 
 impl<'de> Deserialize<'de> for Facet {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         <&'de str as Deserialize<'de>>::deserialize(deserializer).map(Facet::from)
     }
 }

View File

@@ -367,9 +367,7 @@ impl Schema {
 impl Serialize for Schema {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         let mut seq = serializer.serialize_seq(Some(self.0.fields.len()))?;
         for e in &self.0.fields {
             seq.serialize_element(e)?;
@@ -380,9 +378,7 @@ impl Serialize for Schema {
 impl<'de> Deserialize<'de> for Schema {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         struct SchemaVisitor;
 
         impl<'de> Visitor<'de> for SchemaVisitor {
@@ -393,9 +389,7 @@ impl<'de> Deserialize<'de> for Schema {
             }
 
             fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-            where
-                A: SeqAccess<'de>,
-            {
+            where A: SeqAccess<'de> {
                 let mut schema = SchemaBuilder {
                     fields: Vec::with_capacity(seq.size_hint().unwrap_or(0)),
                     fields_map: HashMap::with_capacity(seq.size_hint().unwrap_or(0)),

View File

@@ -34,8 +34,7 @@ pub const JSON_END_OF_PATH: u8 = 0u8;
 /// It actually wraps a `Vec<u8>`.
 #[derive(Clone)]
 pub struct Term<B = Vec<u8>>(B)
-where
-    B: AsRef<[u8]>;
+where B: AsRef<[u8]>;
 
 impl AsMut<Vec<u8>> for Term {
     fn as_mut(&mut self) -> &mut Vec<u8> {
@@ -175,8 +174,7 @@ impl Term {
 }
 
 impl<B> Ord for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.as_slice().cmp(other.as_slice())
@@ -184,8 +182,7 @@ where
 }
 
 impl<B> PartialOrd for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
@@ -193,8 +190,7 @@ where
 }
 
 impl<B> PartialEq for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn eq(&self, other: &Self) -> bool {
         self.as_slice() == other.as_slice()
@@ -204,8 +200,7 @@ where
 impl<B> Eq for Term<B> where B: AsRef<[u8]> {}
 
 impl<B> Hash for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.0.as_ref().hash(state)
@@ -213,8 +208,7 @@ where
 }
 
 impl<B> Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     /// Wraps a object holding bytes
     pub fn wrap(data: B) -> Term<B> {
@@ -426,8 +420,7 @@ fn debug_value_bytes(typ: Type, bytes: &[u8], f: &mut fmt::Formatter) -> fmt::Re
 }
 
 impl<B> fmt::Debug for Term<B>
-where
-    B: AsRef<[u8]>,
+where B: AsRef<[u8]>
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let field_id = self.field().field_id();

View File

@@ -38,9 +38,7 @@ impl Eq for Value {}
 impl Serialize for Value {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
+    where S: Serializer {
         match *self {
             Value::Str(ref v) => serializer.serialize_str(v),
             Value::PreTokStr(ref v) => v.serialize(serializer),
@@ -58,9 +56,7 @@ impl Serialize for Value {
 impl<'de> Deserialize<'de> for Value {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         struct ValueVisitor;
 
         impl<'de> Visitor<'de> for ValueVisitor {

View File

@@ -28,9 +28,7 @@ pub enum Compressor {
 impl Serialize for Compressor {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
+    where S: serde::Serializer {
         match *self {
             Compressor::None => serializer.serialize_str("none"),
             Compressor::Lz4 => serializer.serialize_str("lz4"),
@@ -43,9 +41,7 @@ impl Serialize for Compressor {
 impl<'de> Deserialize<'de> for Compressor {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
+    where D: Deserializer<'de> {
         let buf = String::deserialize(deserializer)?;
         let compressor = match buf.as_str() {
             "none" => Compressor::None,

View File

@@ -11,16 +11,14 @@ use crate::termdict::TermOrdinal;
 /// `TermStreamerBuilder` is a helper object used to define
 /// a range of terms that should be streamed.
 pub struct TermStreamerBuilder<'a, A = AlwaysMatch>
-where
-    A: Automaton,
+where A: Automaton
 {
     fst_map: &'a TermDictionary,
     stream_builder: StreamBuilder<'a, A>,
 }
 
 impl<'a, A> TermStreamerBuilder<'a, A>
-where
-    A: Automaton,
+where A: Automaton
 {
     pub(crate) fn new(fst_map: &'a TermDictionary, stream_builder: StreamBuilder<'a, A>) -> Self {
         TermStreamerBuilder {
@@ -75,8 +73,7 @@ where
 /// `TermStreamer` acts as a cursor over a range of terms of a segment.
 /// Terms are guaranteed to be sorted.
 pub struct TermStreamer<'a, A = AlwaysMatch>
-where
-    A: Automaton,
+where A: Automaton
 {
     pub(crate) fst_map: &'a TermDictionary,
     pub(crate) stream: Stream<'a, A>,
@@ -86,8 +83,7 @@ where
 }
 
 impl<'a, A> TermStreamer<'a, A>
-where
-    A: Automaton,
+where A: Automaton
 {
     /// Advance position the stream on the next item.
     /// Before the first call to `.advance()`, the stream

View File

@@ -26,8 +26,7 @@ pub struct TermDictionaryBuilder<W> {
 }
 
 impl<W> TermDictionaryBuilder<W>
-where
-    W: Write,
+where W: Write
 {
     /// Creates a new `TermDictionaryBuilder`
     pub fn create(w: W) -> io::Result<Self> {

View File

@@ -11,8 +11,7 @@ const VINT_MODE: u8 = 1u8;
 const BLOCK_LEN: usize = 32_000;
 
 pub struct DeltaWriter<W, TValueWriter>
-where
-    W: io::Write,
+where W: io::Write
 {
     block: Vec<u8>,
     write: CountingWriter<BufWriter<W>>,
@@ -100,8 +99,7 @@ pub struct DeltaReader<'a, TValueReader> {
 }
 
 impl<'a, TValueReader> DeltaReader<'a, TValueReader>
-where
-    TValueReader: value::ValueReader,
+where TValueReader: value::ValueReader
 {
     pub fn new<R: io::Read + 'a>(reader: R) -> Self {
         DeltaReader {

View File

@@ -96,8 +96,7 @@ pub struct Reader<'a, TValueReader> {
 }
 
 impl<'a, TValueReader> Reader<'a, TValueReader>
-where
-    TValueReader: ValueReader,
+where TValueReader: ValueReader
 {
     pub fn advance(&mut self) -> io::Result<bool> {
         if !self.delta_reader.advance()? {
@@ -127,8 +126,7 @@ impl<'a, TValueReader> AsRef<[u8]> for Reader<'a, TValueReader> {
 }
 
 pub struct Writer<W, TValueWriter>
-where
-    W: io::Write,
+where W: io::Write
 {
     previous_key: Vec<u8>,
     index_builder: SSTableIndexBuilder,

View File

@@ -243,9 +243,7 @@ impl TermDictionary {
     // Returns a search builder, to stream all of the terms
     // within the Automaton
     pub fn search<'a, A: Automaton + 'a>(&'a self, automaton: A) -> TermStreamerBuilder<'a, A>
-    where
-        A::State: Clone,
-    {
+    where A::State: Clone {
         TermStreamerBuilder::<A>::new(self, automaton)
     }

View File

@@ -192,8 +192,7 @@ struct StutteringIterator<T> {
 }
 
 impl<T> StutteringIterator<T>
-where
-    T: Iterator<Item = usize>,
+where T: Iterator<Item = usize>
 {
     pub fn new(mut underlying: T, min_gram: usize, max_gram: usize) -> StutteringIterator<T> {
         assert!(min_gram > 0);
@@ -222,8 +221,7 @@ where
 }
 
 impl<T> Iterator for StutteringIterator<T>
-where
-    T: Iterator<Item = usize>,
+where T: Iterator<Item = usize>
 {
     type Item = (usize, usize);

View File

@@ -159,8 +159,7 @@ impl<'a> TokenStream for Box<dyn TokenStream + 'a> {
 pub struct BoxTokenStream<'a>(Box<dyn TokenStream + 'a>);
 
 impl<'a, T> From<T> for BoxTokenStream<'a>
-where
-    T: TokenStream + 'a,
+where T: TokenStream + 'a
 {
     fn from(token_stream: T) -> BoxTokenStream<'a> {
         BoxTokenStream(Box::new(token_stream))

View File

@@ -35,9 +35,7 @@ impl TokenizerManager {
     /// Registers a new tokenizer associated with a given name.
     pub fn register<T>(&self, tokenizer_name: &str, tokenizer: T)
-    where
-        TextAnalyzer: From<T>,
-    {
+    where TextAnalyzer: From<T> {
         let boxed_tokenizer: TextAnalyzer = TextAnalyzer::from(tokenizer);
         self.tokenizers
             .write()