Remove unused imports.

Author: dcraven
Date: 2021-01-04 13:12:50 +01:00
Committed by: Paul Masurel
Parent: 0356b7d779
Commit: de5a8bfab3
9 changed files with 8 additions and 12 deletions

@@ -20,8 +20,7 @@ use crate::reader::IndexReaderBuilder;
use crate::schema::Field;
use crate::schema::FieldType;
use crate::schema::Schema;
-use crate::tokenizer::Tokenizer;
-use crate::tokenizer::{TextAnalyzer, TextAnalyzerT, TokenizerManager};
+use crate::tokenizer::{TextAnalyzerT, TokenizerManager};
use crate::IndexWriter;
use std::collections::HashSet;
use std::fmt;

@@ -10,10 +10,9 @@ use crate::schema::FieldType;
use crate::schema::Schema;
use crate::schema::Term;
use crate::schema::Value;
use crate::schema::{Field, FieldEntry};
-use crate::tokenizer::PreTokenizedStream;
-use crate::tokenizer::{DynTokenStreamChain, TokenStreamChain, Tokenizer};
-use crate::tokenizer::{FacetTokenizer, TextAnalyzer, TextAnalyzerT, Token};
+use crate::tokenizer::{DynTokenStreamChain, Tokenizer};
+use crate::tokenizer::{FacetTokenizer, TextAnalyzerT, Token};
use crate::Opstamp;
use crate::{DocId, SegmentComponent};

@@ -3,7 +3,6 @@
#![cfg_attr(feature = "cargo-clippy", allow(clippy::module_inception))]
#![doc(test(attr(allow(unused_variables), deny(warnings))))]
#![warn(missing_docs)]
-#![allow(unused_imports)]
//! # `tantivy`
//!
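
Removing the crate-level #![allow(unused_imports)] re-enables rustc's warn-by-default unused_imports lint, which is presumably what flags the dead use statements dropped elsewhere in this commit. A minimal standalone sketch (not taken from the tantivy sources) of how that lint behaves:

// Sketch only, not tantivy code: without a crate-level allow(unused_imports),
// rustc's warn-by-default `unused_imports` lint flags dead `use` items.
#![warn(unused_imports)] // explicit here, but warn is already the default level

use std::collections::HashSet; // warning: unused import, nothing below uses it
use std::fmt::Write; // used by write!, so no warning

fn main() {
    let mut out = String::new();
    // write! needs the fmt::Write trait in scope, so that import counts as used.
    write!(out, "hello").unwrap();
    println!("{}", out);
}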

@@ -1,7 +1,7 @@
use crate::query::Query;
use crate::schema::Field;
use crate::schema::Value;
-use crate::tokenizer::{TextAnalyzerT, Token, Tokenizer};
+use crate::tokenizer::{TextAnalyzerT, Token};
use crate::Searcher;
use crate::{Document, Score};
use htmlescape::encode_minimal;

@@ -1,4 +1,4 @@
-use super::{analyzer_builder, Token, TokenFilter};
+use super::{Token, TokenFilter};
use std::mem;
/// This class converts alphabetic, numeric, and symbolic Unicode characters

@@ -1,4 +1,4 @@
-use super::{analyzer_builder, TextAnalyzerT, Token, TokenFilter};
+use super::{Token, TokenFilter};
use std::mem;
impl TokenFilter for LowerCaser {

@@ -1,5 +1,4 @@
use super::{Token, Tokenizer};
use std::str::CharIndices;
/// Tokenize the text by splitting on whitespaces and punctuation.
#[derive(Clone, Debug)]

@@ -1,4 +1,4 @@
-use crate::tokenizer::{Token, Tokenizer};
+use crate::tokenizer::Token;
const POSITION_GAP: usize = 2;

@@ -1,4 +1,4 @@
-use crate::tokenizer::{DynTokenStreamChain, TokenStreamChain};
+use crate::tokenizer::TokenStreamChain;
use serde::{Deserialize, Serialize};
/// The tokenizer module contains all of the tools used to process
/// text in `tantivy`.