From bf7ac960b3d560f12702e050dfbf8cd2624ed5dc Mon Sep 17 00:00:00 2001
From: dcraven
Date: Wed, 23 Dec 2020 09:40:01 +0100
Subject: [PATCH] Simplify control flow.

---
 src/tokenizer/tokenizer.rs | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/src/tokenizer/tokenizer.rs b/src/tokenizer/tokenizer.rs
index 32ee3279b..84853fe32 100644
--- a/src/tokenizer/tokenizer.rs
+++ b/src/tokenizer/tokenizer.rs
@@ -90,17 +90,13 @@ impl TextAnalyzer {
     /// to prevent accidental `PhraseQuery` to match accross two terms.
     pub fn token_stream_texts<'a>(&self, texts: &'a [&'a str]) -> Box<dyn TokenStream + 'a> {
         debug_assert!(!texts.is_empty());
-        if texts.len() == 1 {
-            self.token_stream(texts[0])
-        } else {
-            let mut streams_with_offsets = vec![];
-            let mut total_offset = 0;
-            for &text in texts {
-                streams_with_offsets.push((self.token_stream(text), total_offset));
-                total_offset += text.len();
-            }
-            Box::new(TokenStreamChain::new(streams_with_offsets))
+        let mut streams_with_offsets = vec![];
+        let mut total_offset = 0;
+        for &text in texts {
+            streams_with_offsets.push((self.token_stream(text), total_offset));
+            total_offset += text.len();
         }
+        Box::new(TokenStreamChain::new(streams_with_offsets))
     }
 
     /// Creates a token stream for a given `str`.
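
Note, not part of the patch: with the single-text fast path removed, every call now goes through the chain path, which pairs each text's stream with the running byte offset of the texts before it (presumably so TokenStreamChain can report offsets relative to the virtual concatenation of the inputs). The Rust sketch below is a standalone illustration of that bookkeeping only; Token, chain_token_streams, and the whitespace tokenizer are simplified stand-ins, not the crate's real API.

// Standalone sketch of the offset bookkeeping in token_stream_texts
// (simplified types; `Token` and `chain_token_streams` are hypothetical).

#[derive(Debug)]
struct Token {
    text: String,
    offset_from: usize, // byte offset into the concatenated inputs
    offset_to: usize,
}

// Tokenize each text on whitespace and shift its token offsets by the
// total length of the texts already consumed, mirroring the loop that
// feeds `TokenStreamChain::new(streams_with_offsets)` in the patch.
fn chain_token_streams(texts: &[&str]) -> Vec<Token> {
    let mut tokens = Vec::new();
    let mut total_offset = 0;
    for &text in texts {
        for word in text.split_whitespace() {
            // Byte offset of `word` within `text` (`word` borrows from `text`).
            let local_start = word.as_ptr() as usize - text.as_ptr() as usize;
            tokens.push(Token {
                text: word.to_string(),
                offset_from: total_offset + local_start,
                offset_to: total_offset + local_start + word.len(),
            });
        }
        // Same accumulation as the patch: the next stream starts where
        // this text ends in the virtual concatenation.
        total_offset += text.len();
    }
    tokens
}

fn main() {
    // Tokens from the second text start at offset 8, right after "red wine".
    for token in chain_token_streams(&["red wine", "white wine"]) {
        println!("{:?}", token);
    }
}

The trade-off the patch accepts is that the one-text case now also builds the Vec and the chain wrapper; the deleted branch existed only to skip that small cost, so dropping it buys a single code path for very little.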