diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 33f6191c7..ac906488a 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -144,7 +144,7 @@ pub use self::tokenizer::BoxedTokenizer;
 pub use self::tokenizer_manager::TokenizerManager;
 pub use self::simple_tokenizer::SimpleTokenizer;
 pub use self::raw_tokenizer::RawTokenizer;
-pub use self::token_stream_chain::TokenStreamChain;
+pub(crate) use self::token_stream_chain::TokenStreamChain;
 pub use self::japanese_tokenizer::JapaneseTokenizer;
 pub use self::remove_long::RemoveLongFilter;
 pub use self::lower_caser::LowerCaser;
diff --git a/src/tokenizer/token_stream_chain.rs b/src/tokenizer/token_stream_chain.rs
index 82fe06299..eaeccd420 100644
--- a/src/tokenizer/token_stream_chain.rs
+++ b/src/tokenizer/token_stream_chain.rs
@@ -1,6 +1,6 @@
 use tokenizer::{TokenStream, Token};
 
-pub struct TokenStreamChain<TTokenStream: TokenStream> {
+pub(crate) struct TokenStreamChain<TTokenStream: TokenStream> {
     offsets: Vec<usize>,
     token_streams: Vec<TTokenStream>,
     position_shift: usize,
diff --git a/src/tokenizer/tokenizer.rs b/src/tokenizer/tokenizer.rs
index 63dd21ee6..9302b5877 100644
--- a/src/tokenizer/tokenizer.rs
+++ b/src/tokenizer/tokenizer.rs
@@ -205,6 +205,7 @@ pub trait TokenStream {
     /// ```
     /// # extern crate tantivy;
     /// # use tantivy::tokenizer::*;
+    /// #
     /// # fn main() {
     /// # let tokenizer = SimpleTokenizer
     /// #     .filter(RemoveLongFilter::limit(40))
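
The substance of this patch is a visibility change: `TokenStreamChain` is narrowed from `pub` to `pub(crate)`, which removes it from the crate's public API while leaving it usable everywhere inside the crate. For readers less familiar with Rust's visibility modifiers, here is a minimal, self-contained sketch of the rule; the module and item names below are made up for illustration and are not tantivy's actual code:

```rust
// Illustrative sketch of `pub` vs `pub(crate)` visibility.
mod tokenizer {
    // `pub`: part of the crate's public API, visible to downstream crates.
    pub struct SimpleTokenizer;

    // `pub(crate)`: visible anywhere inside this crate, but downstream
    // crates cannot name it, so it can be refactored freely.
    pub(crate) struct TokenStreamChain {
        pub(crate) position_shift: usize,
    }

    pub(crate) fn chain_streams() -> TokenStreamChain {
        TokenStreamChain { position_shift: 0 }
    }
}

fn main() {
    // Inside the defining crate, both items are reachable.
    let _public = tokenizer::SimpleTokenizer;
    let chain = tokenizer::chain_streams();
    println!("position_shift = {}", chain.position_shift);
    // From another crate, only `SimpleTokenizer` would resolve;
    // `use this_crate::tokenizer::TokenStreamChain;` would fail to compile.
}
```

The practical payoff is API hygiene: once an item is `pub(crate)`, its fields and signature can change without a semver-breaking release, which is presumably the motivation for hiding an internal helper like `TokenStreamChain` here.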