Simple Implementation of NGram Tokenizer (#278)

* Simple implementation of the NGram tokenizer. It does not yet support edge
  n-grams, and it could probably be more idiomatic ("rusty") in many ways, but
  the test passes, so this is a good stopping point for the day.
* Remove Ngram from the manager: too many variations.
* Basic configuration model. Should the extensive tests live here?
* Add a sample to provide end-to-end testing.
* Basic edge-gram support.
* Cleanup.
* Address code review feedback.
* Process more code review feedback.
Committed by: Paul Masurel
Parent: 68ee18e4e8
Commit: ca74c14647
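For context, the new tokenizer is exercised exactly as the tests in the diff below do: register an NgramTokenizer with a TokenizerManager, fetch it by name, and collect Tokens from a token stream. The sketch below is only illustrative; the tantivy::tokenizer import paths, the "edge25" registration name, and the printing at the end are assumptions, while NgramTokenizer::new(min_gram, max_gram, edges_only), register/get, token_stream, process, and the Token fields mirror what the diff shows.

// Illustrative sketch: registers the NgramTokenizer added by this commit
// and collects its tokens, mirroring the tests in the diff below.
// The `tantivy::tokenizer` paths are assumptions about the public re-exports.
use tantivy::tokenizer::{LowerCaser, NgramTokenizer, Token, TokenStream, Tokenizer, TokenizerManager};

fn main() {
    let manager = TokenizerManager::default();
    // 3-grams, lowercased: "Hello" -> "hel", "ell", "llo".
    manager.register("ngram3", NgramTokenizer::new(3, 3, false).filter(LowerCaser));
    // Edge grams of length 2..=5: "Frankenstein" -> "fr", "fra", "fran", "frank".
    manager.register("edge25", NgramTokenizer::new(2, 5, true).filter(LowerCaser));

    let tokenizer = manager.get("ngram3").unwrap();
    let mut tokens: Vec<Token> = vec![];
    {
        // The closure is scoped so its mutable borrow of `tokens` ends
        // before the tokens are read back.
        let mut add_token = |token: &Token| tokens.push(token.clone());
        tokenizer.token_stream("Hello").process(&mut add_token);
    }
    for token in &tokens {
        println!("{:?} {}..{}", token.text, token.offset_from, token.offset_to);
    }
}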
@@ -132,6 +132,7 @@ mod alphanum_only;
 mod facet_tokenizer;
 mod japanese_tokenizer;
 mod lower_caser;
+mod ngram_tokenizer;
 mod raw_tokenizer;
 mod remove_long;
 mod simple_tokenizer;
@@ -144,6 +145,7 @@ pub use self::alphanum_only::AlphaNumOnlyFilter;
 pub use self::facet_tokenizer::FacetTokenizer;
 pub use self::japanese_tokenizer::JapaneseTokenizer;
 pub use self::lower_caser::LowerCaser;
+pub use self::ngram_tokenizer::NgramTokenizer;
 pub use self::raw_tokenizer::RawTokenizer;
 pub use self::remove_long::RemoveLongFilter;
 pub use self::simple_tokenizer::SimpleTokenizer;
@@ -153,8 +155,32 @@ pub use self::tokenizer::BoxedTokenizer;
 pub use self::tokenizer::{Token, TokenFilter, TokenStream, Tokenizer};
 pub use self::tokenizer_manager::TokenizerManager;
 
+/// This is a function that can be used in tests and doc tests
+/// to assert a token's correctness.
+/// TODO: can this be wrapped in #[cfg(test)] so as not to be in the
+/// public api?
+pub fn assert_token(token: &Token, position: usize, text: &str, from: usize, to: usize) {
+    assert_eq!(
+        token.position, position,
+        "expected position {} but {:?}",
+        position, token
+    );
+    assert_eq!(token.text, text, "expected text {} but {:?}", text, token);
+    assert_eq!(
+        token.offset_from, from,
+        "expected offset_from {} but {:?}",
+        from, token
+    );
+    assert_eq!(
+        token.offset_to, to,
+        "expected offset_to {} but {:?}",
+        to, token
+    );
+}
+
 #[cfg(test)]
-mod test {
+pub mod test {
+    use super::assert_token;
     use super::Token;
     use super::TokenizerManager;
 
@@ -162,17 +188,17 @@ mod test {
     fn test_raw_tokenizer() {
         let tokenizer_manager = TokenizerManager::default();
         let en_tokenizer = tokenizer_manager.get("raw").unwrap();
-        let mut tokens: Vec<String> = vec![];
+        let mut tokens: Vec<Token> = vec![];
         {
             let mut add_token = |token: &Token| {
-                tokens.push(token.text.clone());
+                tokens.push(token.clone());
             };
             en_tokenizer
                 .token_stream("Hello, happy tax payer!")
                 .process(&mut add_token);
         }
         assert_eq!(tokens.len(), 1);
-        assert_eq!(&tokens[0], "Hello, happy tax payer!");
+        assert_token(&tokens[0], 0, "Hello, happy tax payer!", 0, 23);
     }
 
     #[test]
@@ -180,20 +206,20 @@ mod test {
         let tokenizer_manager = TokenizerManager::default();
         assert!(tokenizer_manager.get("en_doesnotexist").is_none());
         let en_tokenizer = tokenizer_manager.get("en_stem").unwrap();
-        let mut tokens: Vec<String> = vec![];
+        let mut tokens: Vec<Token> = vec![];
         {
             let mut add_token = |token: &Token| {
-                tokens.push(token.text.clone());
+                tokens.push(token.clone());
             };
             en_tokenizer
                 .token_stream("Hello, happy tax payer!")
                 .process(&mut add_token);
         }
         assert_eq!(tokens.len(), 4);
-        assert_eq!(&tokens[0], "hello");
-        assert_eq!(&tokens[1], "happi");
-        assert_eq!(&tokens[2], "tax");
-        assert_eq!(&tokens[3], "payer");
+        assert_token(&tokens[0], 0, "hello", 0, 5);
+        assert_token(&tokens[1], 1, "happi", 7, 12);
+        assert_token(&tokens[2], 2, "tax", 13, 16);
+        assert_token(&tokens[3], 3, "payer", 17, 22);
     }
 
     #[test]
@@ -201,21 +227,87 @@ mod test {
         let tokenizer_manager = TokenizerManager::default();
         let en_tokenizer = tokenizer_manager.get("ja").unwrap();
 
-        let mut tokens: Vec<String> = vec![];
+        let mut tokens: Vec<Token> = vec![];
         {
             let mut add_token = |token: &Token| {
-                tokens.push(token.text.clone());
+                tokens.push(token.clone());
             };
             en_tokenizer
                 .token_stream("野菜食べないとやばい!")
                 .process(&mut add_token);
         }
         assert_eq!(tokens.len(), 5);
-        assert_eq!(&tokens[0], "野菜");
-        assert_eq!(&tokens[1], "食べ");
-        assert_eq!(&tokens[2], "ない");
-        assert_eq!(&tokens[3], "と");
-        assert_eq!(&tokens[4], "やばい");
+        assert_token(&tokens[0], 0, "野菜", 0, 6);
+        assert_token(&tokens[1], 1, "食べ", 6, 12);
+        assert_token(&tokens[2], 2, "ない", 12, 18);
+        assert_token(&tokens[3], 3, "と", 18, 21);
+        assert_token(&tokens[4], 4, "やばい", 21, 30);
     }
 
+    #[test]
+    fn test_ngram_tokenizer() {
+        use super::{LowerCaser, NgramTokenizer};
+        use tokenizer::tokenizer::TokenStream;
+        use tokenizer::tokenizer::Tokenizer;
+
+        let tokenizer_manager = TokenizerManager::default();
+        tokenizer_manager.register("ngram12", NgramTokenizer::new(1, 2, false));
+        tokenizer_manager.register(
+            "ngram3",
+            NgramTokenizer::new(3, 3, false).filter(LowerCaser),
+        );
+        tokenizer_manager.register(
+            "edgegram5",
+            NgramTokenizer::new(2, 5, true).filter(LowerCaser),
+        );
+
+        let tokenizer = NgramTokenizer::new(1, 2, false);
+        let mut tokens: Vec<Token> = vec![];
+        {
+            let mut add_token = |token: &Token| {
+                tokens.push(token.clone());
+            };
+            tokenizer.token_stream("hello").process(&mut add_token);
+        }
+        assert_eq!(tokens.len(), 9);
+        assert_token(&tokens[0], 0, "h", 0, 1);
+        assert_token(&tokens[1], 0, "he", 0, 2);
+        assert_token(&tokens[2], 1, "e", 1, 2);
+        assert_token(&tokens[3], 1, "el", 1, 3);
+        assert_token(&tokens[4], 2, "l", 2, 3);
+        assert_token(&tokens[5], 2, "ll", 2, 4);
+        assert_token(&tokens[6], 3, "l", 3, 4);
+        assert_token(&tokens[7], 3, "lo", 3, 5);
+        assert_token(&tokens[8], 4, "o", 4, 5);
+
+        let tokenizer = tokenizer_manager.get("ngram3").unwrap();
+        let mut tokens: Vec<Token> = vec![];
+        {
+            let mut add_token = |token: &Token| {
+                tokens.push(token.clone());
+            };
+            tokenizer.token_stream("Hello").process(&mut add_token);
+        }
+        assert_eq!(tokens.len(), 3);
+        assert_token(&tokens[0], 0, "hel", 0, 3);
+        assert_token(&tokens[1], 1, "ell", 1, 4);
+        assert_token(&tokens[2], 2, "llo", 2, 5);
+
+        let tokenizer = tokenizer_manager.get("edgegram5").unwrap();
+        let mut tokens: Vec<Token> = vec![];
+        {
+            let mut add_token = |token: &Token| {
+                tokens.push(token.clone());
+            };
+            tokenizer
+                .token_stream("Frankenstein")
+                .process(&mut add_token);
+        }
+        assert_eq!(tokens.len(), 4);
+        assert_token(&tokens[0], 0, "fr", 0, 2);
+        assert_token(&tokens[1], 0, "fra", 0, 3);
+        assert_token(&tokens[2], 0, "fran", 0, 4);
+        assert_token(&tokens[3], 0, "frank", 0, 5);
+    }
+
     #[test]
@@ -223,20 +315,20 @@ mod test {
         let tokenizer_manager = TokenizerManager::default();
         let en_tokenizer = tokenizer_manager.get("en_stem").unwrap();
         {
-            let mut tokens: Vec<String> = vec![];
+            let mut tokens: Vec<Token> = vec![];
             {
                 let mut add_token = |token: &Token| {
-                    tokens.push(token.text.clone());
+                    tokens.push(token.clone());
                 };
                 en_tokenizer.token_stream(" ").process(&mut add_token);
             }
             assert!(tokens.is_empty());
         }
         {
-            let mut tokens: Vec<String> = vec![];
+            let mut tokens: Vec<Token> = vec![];
             {
                 let mut add_token = |token: &Token| {
-                    tokens.push(token.text.clone());
+                    tokens.push(token.clone());
                 };
                 en_tokenizer.token_stream(" ").process(&mut add_token);
             }
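Since assert_token is exported as a public helper (see the TODO in the diff above), downstream tests can call it directly. The following is a minimal sketch under the same path assumptions as the earlier example; the module name and the "abc" input are made up for illustration, and the expected bigrams follow the byte-offset pattern shown in test_ngram_tokenizer.

// Hypothetical downstream test built on the public assert_token helper;
// paths are assumed, the signature matches the one added in this commit.
#[cfg(test)]
mod ngram_bigram_test {
    use tantivy::tokenizer::{assert_token, NgramTokenizer, Token, TokenStream, Tokenizer};

    #[test]
    fn bigrams_of_abc() {
        let tokenizer = NgramTokenizer::new(2, 2, false);
        let mut tokens: Vec<Token> = vec![];
        {
            let mut add_token = |token: &Token| tokens.push(token.clone());
            tokenizer.token_stream("abc").process(&mut add_token);
        }
        // 2-grams of "abc": "ab" (bytes 0..2) and "bc" (bytes 1..3).
        assert_eq!(tokens.len(), 2);
        assert_token(&tokens[0], 0, "ab", 0, 2);
        assert_token(&tokens[1], 1, "bc", 1, 3);
    }
}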