Mirror of https://github.com/quickwit-oss/tantivy.git, synced 2026-01-06 17:22:54 +00:00

Compare commits: `bundle` ... `segment_wr`

40 Commits
| SHA1 |
|---|
| 7428f6ccd3 |
| 8a488b8315 |
| 4448854f73 |
| b449749d63 |
| 2ba6a12ddc |
| bbbf95018b |
| 6d76a82fea |
| 444048d225 |
| 4c6bbca661 |
| c3d3b3113b |
| b591542c0b |
| a83fa00ac4 |
| 7ff5c7c797 |
| 1748602691 |
| 6542dd5337 |
| c64a44b9e1 |
| fccc5b3bed |
| 98b9d5c6c4 |
| afd2c1a8ad |
| 81f35a3ceb |
| 7e2e765f4a |
| 7d6cfa58e1 |
| 14735ce3aa |
| 72f7cc1569 |
| abef5c4e74 |
| ae14022bf0 |
| 55f5658d40 |
| 3ae6363462 |
| 9e20d7f8a5 |
| ab13ffe377 |
| 039138ed50 |
| 6227a0555a |
| f85d0a522a |
| 5795488ba7 |
| c3045dfb5c |
| 811fd0cb9e |
| f6847c46d7 |
| 92dac7af5c |
| 801905d77f |
| 8f5ac86f30 |
CHANGELOG.md (19 lines changed)
```diff
@@ -1,3 +1,22 @@
+Tantivy 0.12.1
+=====================
+- By default IndexReader are in `Manual` mode.
+
+Tantivy 0.12.0
+======================
+- Removing static dispatch in tokenizers for simplicity. (#762)
+- Added backward iteration for `TermDictionary` stream. (@halvorboe)
+- Fixed a performance issue when searching for the posting lists of a missing term (@audunhalland)
+- Added a configurable maximum number of docs (10M by default) for a segment to be considered for merge (@hntd187, landed by @halvorboe #713)
+- Important Bugfix #777, causing tantivy to retain memory mapping. (diagnosed by @poljar)
+- Added support for field boosting. (#547, @fulmicoton)
+
+## How to update?
+
+Crates relying on custom tokenizer, or registering tokenizer in the manager will require some
+minor changes. Check https://github.com/tantivy-search/tantivy/blob/master/examples/custom_tokenizer.rs
+to check for some code sample.
+
 Tantivy 0.11.3
 =======================
 - Fixed DateTime as a fast field (#735)
```
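The 0.12.1 entry above changes the default reload behavior of `IndexReader`. A minimal sketch of what that means for callers, and of opting back into automatic reloads; this is written against the 0.12-era `ReloadPolicy` API, and the schema setup is purely illustrative:

```rust
use tantivy::schema::{Schema, TEXT};
use tantivy::{Index, IndexReader, ReloadPolicy};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    schema_builder.add_text_field("body", TEXT);
    let index = Index::create_in_ram(schema_builder.build());

    // Per the changelog, `index.reader()?` now defaults to `ReloadPolicy::Manual`:
    // the searcher only sees new commits after an explicit `reader.reload()`.
    let manual_reader = index.reader()?;

    // To keep the previous behavior, request `OnCommit` explicitly.
    let on_commit_reader: IndexReader = index
        .reader_builder()
        .reload_policy(ReloadPolicy::OnCommit)
        .try_into()?;

    let _ = (manual_reader.searcher(), on_commit_reader.searcher());
    Ok(())
}
```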
Cargo.toml (13 lines changed)
```diff
@@ -1,11 +1,11 @@
 [package]
 name = "tantivy"
-version = "0.11.3"
+version = "0.12.0"
 authors = ["Paul Masurel <paul.masurel@gmail.com>"]
 license = "MIT"
 categories = ["database-implementations", "data-structures"]
 description = """Search engine library"""
-documentation = "https://tantivy-search.github.io/tantivy/tantivy/index.html"
+documentation = "https://docs.rs/tantivy/"
 homepage = "https://github.com/tantivy-search/tantivy"
 repository = "https://github.com/tantivy-search/tantivy"
 readme = "README.md"
@@ -18,10 +18,10 @@ byteorder = "1.0"
 crc32fast = "1.2.0"
 once_cell = "1.0"
 regex ={version = "1.3.0", default-features = false, features = ["std"]}
-tantivy-fst = "0.1"
+tantivy-fst = "0.2.1"
 memmap = {version = "0.7", optional=true}
 lz4 = {version="1.20", optional=true}
-snap = {version="0.2"}
+snap = "1"
 atomicwrites = {version="0.2.2", optional=true}
 tempfile = "3.0"
 log = "0.4"
@@ -31,7 +31,7 @@ serde_json = "1.0"
 num_cpus = "1.2"
 fs2={version="0.4", optional=true}
 itertools = "0.8"
-levenshtein_automata = {version="0.1", features=["fst_automaton"]}
+levenshtein_automata = "0.1"
 notify = {version="4", optional=true}
 uuid = { version = "0.8", features = ["v4", "serde"] }
 crossbeam = "0.7"
@@ -40,7 +40,7 @@ owning_ref = "0.4"
 stable_deref_trait = "1.0.0"
 rust-stemmers = "1.2"
 downcast-rs = { version="1.0" }
-tantivy-query-grammar = { version="0.11", path="./query-grammar" }
+tantivy-query-grammar = { version="0.12", path="./query-grammar" }
 bitpacking = {version="0.8", default-features = false, features=["bitpacker4x"]}
 census = "0.4"
 fnv = "1.0.6"
@@ -60,7 +60,6 @@ winapi = "0.3"
 rand = "0.7"
 maplit = "1"
 matches = "0.1.8"
-time = "0.1.42"

 [dev-dependencies.fail]
 version = "0.3"
```
In the README:

```diff
@@ -59,18 +59,17 @@ performance for different type of queries / collection.
 - Configurable indexing (optional term frequency and position indexing)
 - Cheesy logo with a horse

-# Non-features
+## Non-features

 - Distributed search is out of the scope of Tantivy. That being said, Tantivy is a
 library upon which one could build a distributed search. Serializable/mergeable collector state for instance,
 are within the scope of Tantivy.

-# Supported OS and compiler
-
-Tantivy works on stable Rust (>= 1.27) and supports Linux, MacOS, and Windows.
-
 # Getting started

+Tantivy works on stable Rust (>= 1.27) and supports Linux, MacOS, and Windows.
+
 - [Tantivy's simple search example](https://tantivy-search.github.io/examples/basic_search.html)
 - [tantivy-cli and its tutorial](https://github.com/tantivy-search/tantivy-cli) - `tantivy-cli` is an actual command line interface that makes it easy for you to create a search engine,
 index documents, and search via the CLI or a small server with a REST API.
```
In the examples:

```diff
@@ -9,7 +9,7 @@
 // - import tokenized text straight from json,
 // - perform a search on documents with pre-tokenized text

-use tantivy::tokenizer::{PreTokenizedString, SimpleTokenizer, Token, TokenStream, Tokenizer};
+use tantivy::tokenizer::{PreTokenizedString, SimpleTokenizer, Token, Tokenizer};

 use tantivy::collector::{Count, TopDocs};
 use tantivy::query::TermQuery;
@@ -50,7 +50,7 @@ fn main() -> tantivy::Result<()> {

     // This tokenizer lowers all of the text (to help with stop word matching)
     // then removes all instances of `the` and `and` from the corpus
-    let tokenizer = SimpleTokenizer
+    let tokenizer = TextAnalyzer::from(SimpleTokenizer)
         .filter(LowerCaser)
         .filter(StopWordFilter::remove(vec![
             "the".to_string(),
```
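These example changes follow from the 0.12 tokenizer rework noted in the changelog: filter chains are now built on a dynamically dispatched `TextAnalyzer` instead of statically on the tokenizer type. A hedged sketch of building and registering such a chain (the field and tokenizer names are illustrative, not from the sources):

```rust
use tantivy::schema::{Schema, TextFieldIndexing, TextOptions};
use tantivy::tokenizer::{LowerCaser, SimpleTokenizer, StopWordFilter, TextAnalyzer};
use tantivy::Index;

fn main() -> tantivy::Result<()> {
    // In 0.12, `filter` lives on `TextAnalyzer`, so the raw tokenizer
    // is wrapped first via `TextAnalyzer::from(...)`.
    let analyzer = TextAnalyzer::from(SimpleTokenizer)
        .filter(LowerCaser)
        .filter(StopWordFilter::remove(vec![
            "the".to_string(),
            "and".to_string(),
        ]));

    // Point a text field at the analyzer by name.
    let mut schema_builder = Schema::builder();
    let text_options = TextOptions::default()
        .set_indexing_options(TextFieldIndexing::default().set_tokenizer("stoppy"));
    schema_builder.add_text_field("body", text_options);
    let index = Index::create_in_ram(schema_builder.build());

    // Register the analyzer under the name the schema references.
    index.tokenizers().register("stoppy", analyzer);
    Ok(())
}
```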
query-grammar/Cargo.toml:

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "tantivy-query-grammar"
-version = "0.11.0"
+version = "0.12.0"
 authors = ["Paul Masurel <paul.masurel@gmail.com>"]
 license = "MIT"
 categories = ["database-implementations", "data-structures"]
@@ -13,4 +13,4 @@ keywords = ["search", "information", "retrieval"]
 edition = "2018"

 [dependencies]
-combine = ">=3.6.0,<4.0.0"
+combine = {version="4", default-features=false, features=[] }
```
query-grammar/src/lib.rs: with the `parser!` macros gone, the crate no longer needs a raised recursion limit:

```diff
@@ -1,5 +1,3 @@
-#![recursion_limit = "100"]
-
 mod occur;
 mod query_grammar;
 mod user_input_ast;
```
The bulk of the change is a rewrite of the query grammar (`@@ -1,150 +1,153 @@`) from combine 3, where every parser was declared through the `parser!` macro and was generic over the input stream, to combine 4, where each parser is a plain function returning `impl Parser<&'a str>`. Beyond the mechanical migration, `word` now also rejects `'^'` (reserved for the new boost operator), and the old inline `literal` body is split into `term_val`, `term_query`, and `literal`. Reconstructed from the side-by-side diff; `fn negate` and the other AST helpers that follow are unchanged context.

Before:

```rust
use super::user_input_ast::*;
use crate::Occur;
use combine::char::*;
use combine::error::StreamError;
use combine::stream::StreamErrorFor;
use combine::*;

parser! {
    fn field[I]()(I) -> String
    where [I: Stream<Item = char>] {
        (
            letter(),
            many(satisfy(|c: char| c.is_alphanumeric() || c == '_')),
        ).skip(char(':')).map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
    }
}

parser! {
    fn word[I]()(I) -> String
    where [I: Stream<Item = char>] {
        (
            satisfy(|c: char| !c.is_whitespace() && !['-', '`', ':', '{', '}', '"', '[', ']', '(',')'].contains(&c) ),
            many(satisfy(|c: char| !c.is_whitespace() && ![':', '{', '}', '"', '[', ']', '(',')'].contains(&c)))
        )
            .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
            .and_then(|s: String|
                match s.as_str() {
                    "OR" => Err(StreamErrorFor::<I>::unexpected_static_message("OR")),
                    "AND" => Err(StreamErrorFor::<I>::unexpected_static_message("AND")),
                    "NOT" => Err(StreamErrorFor::<I>::unexpected_static_message("NOT")),
                    _ => Ok(s)
                })
    }
}

parser! {
    fn literal[I]()(I) -> UserInputLeaf
    where [I: Stream<Item = char>]
    {
        let term_val = || {
            let phrase = char('"').with(many1(satisfy(|c| c != '"'))).skip(char('"'));
            phrase.or(word())
        };
        let term_val_with_field = negative_number().or(term_val());
        let term_query =
            (field(), term_val_with_field)
                .map(|(field_name, phrase)| UserInputLiteral {
                    field_name: Some(field_name),
                    phrase,
                });
        let term_default_field = term_val().map(|phrase| UserInputLiteral {
            field_name: None,
            phrase,
        });
        attempt(term_query)
            .or(term_default_field)
            .map(UserInputLeaf::from)
    }
}

parser! {
    fn negative_number[I]()(I) -> String
    where [I: Stream<Item = char>]
    {
        (char('-'), many1(satisfy(char::is_numeric)),
            optional((char('.'), many1(satisfy(char::is_numeric)))))
            .map(|(s1, s2, s3): (char, String, Option<(char, String)>)| {
                if let Some(('.', s3)) = s3 {
                    format!("{}{}.{}", s1, s2, s3)
                } else {
                    format!("{}{}", s1, s2)
                }
            })
    }
}

parser! {
    fn spaces1[I]()(I) -> ()
    where [I: Stream<Item = char>] {
        skip_many1(space())
    }
}

parser! {
    /// Function that parses a range out of a Stream
    /// Supports ranges like:
    /// [5 TO 10], {5 TO 10}, [* TO 10], [10 TO *], {10 TO *], >5, <=10
    /// [a TO *], [a TO c], [abc TO bcd}
    fn range[I]()(I) -> UserInputLeaf
    where [I: Stream<Item = char>] {
        let range_term_val = || {
            word().or(negative_number()).or(char('*').with(value("*".to_string())))
        };

        // check for unbounded range in the form of <5, <=10, >5, >=5
        let elastic_unbounded_range = (choice([attempt(string(">=")),
                                               attempt(string("<=")),
                                               attempt(string("<")),
                                               attempt(string(">"))])
                                           .skip(spaces()),
                                       range_term_val()).
            map(|(comparison_sign, bound): (&str, String)|
                match comparison_sign {
                    ">=" => (UserInputBound::Inclusive(bound), UserInputBound::Unbounded),
                    "<=" => (UserInputBound::Unbounded, UserInputBound::Inclusive(bound)),
                    "<" => (UserInputBound::Unbounded, UserInputBound::Exclusive(bound)),
                    ">" => (UserInputBound::Exclusive(bound), UserInputBound::Unbounded),
                    // default case
                    _ => (UserInputBound::Unbounded, UserInputBound::Unbounded)
                });
        let lower_bound = (one_of("{[".chars()), range_term_val())
            .map(|(boundary_char, lower_bound): (char, String)|
                if lower_bound == "*" {
                    UserInputBound::Unbounded
                } else if boundary_char == '{' {
                    UserInputBound::Exclusive(lower_bound)
                } else {
                    UserInputBound::Inclusive(lower_bound)
                });
        let upper_bound = (range_term_val(), one_of("}]".chars()))
            .map(|(higher_bound, boundary_char): (String, char)|
                if higher_bound == "*" {
                    UserInputBound::Unbounded
                } else if boundary_char == '}' {
                    UserInputBound::Exclusive(higher_bound)
                } else {
                    UserInputBound::Inclusive(higher_bound)
                });
        // return only lower and upper
        let lower_to_upper = (lower_bound.
            skip((spaces(),
                  string("TO"),
                  spaces())),
            upper_bound);

        (optional(field()).skip(spaces()),
         // try elastic first, if it matches, the range is unbounded
         attempt(elastic_unbounded_range).or(lower_to_upper))
            .map(|(field, (lower, upper))|
                // Construct the leaf from extracted field (optional)
                // and bounds
                UserInputLeaf::Range {
                    field,
                    lower,
                    upper
                })
    }
}
```

After:

```rust
use super::user_input_ast::{UserInputAST, UserInputBound, UserInputLeaf, UserInputLiteral};
use crate::Occur;
use combine::error::StringStreamError;
use combine::parser::char::{char, digit, letter, space, spaces, string};
use combine::parser::Parser;
use combine::{
    attempt, choice, eof, many, many1, one_of, optional, parser, satisfy, skip_many1, value,
};

fn field<'a>() -> impl Parser<&'a str, Output = String> {
    (
        letter(),
        many(satisfy(|c: char| c.is_alphanumeric() || c == '_')),
    )
        .skip(char(':'))
        .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
}

fn word<'a>() -> impl Parser<&'a str, Output = String> {
    (
        satisfy(|c: char| {
            !c.is_whitespace()
                && !['-', '^', '`', ':', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
        }),
        many(satisfy(|c: char| {
            !c.is_whitespace() && ![':', '^', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
        })),
    )
        .map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
        .and_then(|s: String| match s.as_str() {
            "OR" | "AND " | "NOT" => Err(StringStreamError::UnexpectedParse),
            _ => Ok(s),
        })
}

fn term_val<'a>() -> impl Parser<&'a str, Output = String> {
    let phrase = char('"').with(many1(satisfy(|c| c != '"'))).skip(char('"'));
    phrase.or(word())
}

fn term_query<'a>() -> impl Parser<&'a str, Output = UserInputLiteral> {
    let term_val_with_field = negative_number().or(term_val());
    (field(), term_val_with_field).map(|(field_name, phrase)| UserInputLiteral {
        field_name: Some(field_name),
        phrase,
    })
}

fn literal<'a>() -> impl Parser<&'a str, Output = UserInputLeaf> {
    let term_default_field = term_val().map(|phrase| UserInputLiteral {
        field_name: None,
        phrase,
    });
    attempt(term_query())
        .or(term_default_field)
        .map(UserInputLeaf::from)
}

fn negative_number<'a>() -> impl Parser<&'a str, Output = String> {
    (
        char('-'),
        many1(digit()),
        optional((char('.'), many1(digit()))),
    )
        .map(|(s1, s2, s3): (char, String, Option<(char, String)>)| {
            if let Some(('.', s3)) = s3 {
                format!("{}{}.{}", s1, s2, s3)
            } else {
                format!("{}{}", s1, s2)
            }
        })
}

fn spaces1<'a>() -> impl Parser<&'a str, Output = ()> {
    skip_many1(space())
}

/// Function that parses a range out of a Stream
/// Supports ranges like:
/// [5 TO 10], {5 TO 10}, [* TO 10], [10 TO *], {10 TO *], >5, <=10
/// [a TO *], [a TO c], [abc TO bcd}
fn range<'a>() -> impl Parser<&'a str, Output = UserInputLeaf> {
    let range_term_val = || {
        word()
            .or(negative_number())
            .or(char('*').with(value("*".to_string())))
    };

    // check for unbounded range in the form of <5, <=10, >5, >=5
    let elastic_unbounded_range = (
        choice([
            attempt(string(">=")),
            attempt(string("<=")),
            attempt(string("<")),
            attempt(string(">")),
        ])
        .skip(spaces()),
        range_term_val(),
    )
        .map(
            |(comparison_sign, bound): (&str, String)| match comparison_sign {
                ">=" => (UserInputBound::Inclusive(bound), UserInputBound::Unbounded),
                "<=" => (UserInputBound::Unbounded, UserInputBound::Inclusive(bound)),
                "<" => (UserInputBound::Unbounded, UserInputBound::Exclusive(bound)),
                ">" => (UserInputBound::Exclusive(bound), UserInputBound::Unbounded),
                // default case
                _ => (UserInputBound::Unbounded, UserInputBound::Unbounded),
            },
        );
    let lower_bound = (one_of("{[".chars()), range_term_val()).map(
        |(boundary_char, lower_bound): (char, String)| {
            if lower_bound == "*" {
                UserInputBound::Unbounded
            } else if boundary_char == '{' {
                UserInputBound::Exclusive(lower_bound)
            } else {
                UserInputBound::Inclusive(lower_bound)
            }
        },
    );
    let upper_bound = (range_term_val(), one_of("}]".chars())).map(
        |(higher_bound, boundary_char): (String, char)| {
            if higher_bound == "*" {
                UserInputBound::Unbounded
            } else if boundary_char == '}' {
                UserInputBound::Exclusive(higher_bound)
            } else {
                UserInputBound::Inclusive(higher_bound)
            }
        },
    );
    // return only lower and upper
    let lower_to_upper = (
        lower_bound.skip((spaces(), string("TO"), spaces())),
        upper_bound,
    );

    (
        optional(field()).skip(spaces()),
        // try elastic first, if it matches, the range is unbounded
        attempt(elastic_unbounded_range).or(lower_to_upper),
    )
        .map(|(field, (lower, upper))|
            // Construct the leaf from extracted field (optional)
            // and bounds
            UserInputLeaf::Range {
                field,
                lower,
                upper
            })
}
```
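The pattern is mechanical and worth isolating. Under combine 3, every parser had to be declared through the `parser!` macro, generic over any `Stream<Item = char>`; combine 4 lets an ordinary function return `impl Parser` over a concrete `&'a str`. A minimal self-contained sketch of the new style, on a toy `signed_int` parser that is not from the tantivy sources (assumes `combine = "4"`):

```rust
use combine::parser::char::{char, digit};
use combine::{many1, optional, Parser};

// combine 4 style: a plain function returning `impl Parser` over `&'a str`.
// Under combine 3 this would have been wrapped in
// `parser! { fn signed_int[I]()(I) -> String where [I: Stream<Item = char>] { ... } }`.
fn signed_int<'a>() -> impl Parser<&'a str, Output = String> {
    (optional(char('-')), many1(digit())).map(
        |(sign, digits): (Option<char>, String)| match sign {
            Some(minus) => format!("{}{}", minus, digits),
            None => digits,
        },
    )
}

fn main() {
    // `parse` returns the parsed value plus the unconsumed remainder.
    assert_eq!(
        signed_int().parse("-42 apples").unwrap(),
        ("-42".to_string(), " apples")
    );
}
```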
In the next hunk (`@@ -155,17 +158,48 @@`, after the unchanged `fn must`), `leaf` also drops its macro. Because it is mutually recursive with `ast()`, the new version routes through `combine::parser` with an explicit closure ending in `parse_stream(input).into_result()`. Three new functions implement the `^boost` syntax; note that a boost of exactly 1.0 is collapsed away.

Before:

```rust
parser! {
    fn leaf[I]()(I) -> UserInputAST
    where [I: Stream<Item = char>] {
        char('-').with(leaf()).map(negate)
            .or(char('+').with(leaf()).map(must))
            .or(char('(').with(ast()).skip(char(')')))
            .or(char('*').map(|_| UserInputAST::from(UserInputLeaf::All)))
            .or(attempt(string("NOT").skip(spaces1()).with(leaf()).map(negate)))
            .or(attempt(range().map(UserInputAST::from)))
            .or(literal().map(UserInputAST::from))
    }
}
```

After:

```rust
fn leaf<'a>() -> impl Parser<&'a str, Output = UserInputAST> {
    parser(|input| {
        char('-')
            .with(leaf())
            .map(negate)
            .or(char('+').with(leaf()).map(must))
            .or(char('(').with(ast()).skip(char(')')))
            .or(char('*').map(|_| UserInputAST::from(UserInputLeaf::All)))
            .or(attempt(
                string("NOT").skip(spaces1()).with(leaf()).map(negate),
            ))
            .or(attempt(range().map(UserInputAST::from)))
            .or(literal().map(UserInputAST::from))
            .parse_stream(input)
            .into_result()
    })
}

fn positive_float_number<'a>() -> impl Parser<&'a str, Output = f32> {
    (many1(digit()), optional((char('.'), many1(digit())))).map(
        |(int_part, decimal_part_opt): (String, Option<(char, String)>)| {
            let mut float_str = int_part;
            if let Some((chr, decimal_str)) = decimal_part_opt {
                float_str.push(chr);
                float_str.push_str(&decimal_str);
            }
            float_str.parse::<f32>().unwrap()
        },
    )
}

fn boost<'a>() -> impl Parser<&'a str, Output = f32> {
    (char('^'), positive_float_number()).map(|(_, boost)| boost)
}

fn boosted_leaf<'a>() -> impl Parser<&'a str, Output = UserInputAST> {
    (leaf(), optional(boost())).map(|(leaf, boost_opt)| match boost_opt {
        Some(boost) if (boost - 1.0).abs() > std::f32::EPSILON => {
            UserInputAST::Boost(Box::new(leaf), boost)
        }
        _ => leaf,
    })
}
```
`binary_operand` gets the same treatment (`@@ -174,13 +208,10 @@`, inside the unchanged `enum BinaryOperand { Or, And }` context).

Before:

```rust
parser! {
    fn binary_operand[I]()(I) -> BinaryOperand
    where [I: Stream<Item = char>]
    {
        string("AND").with(value(BinaryOperand::And))
            .or(string("OR").with(value(BinaryOperand::Or)))
    }
}
```

After:

```rust
fn binary_operand<'a>() -> impl Parser<&'a str, Output = BinaryOperand> {
    string("AND")
        .with(value(BinaryOperand::And))
        .or(string("OR").with(value(BinaryOperand::Or)))
}
```
In `@@ -208,37 +239,67 @@`, `ast` and `parse_to_ast` migrate as well; `ast` now combines `boosted_leaf` instead of `leaf`, which is what wires the boost syntax into the full grammar.

Before:

```rust
parser! {
    pub fn ast[I]()(I) -> UserInputAST
    where [I: Stream<Item = char>]
    {
        let operand_leaf = (binary_operand().skip(spaces()), leaf().skip(spaces()));
        let boolean_expr = (leaf().skip(spaces().silent()), many1(operand_leaf)).map(
            |(left, right)| aggregate_binary_expressions(left,right));
        let whitespace_separated_leaves = many1(leaf().skip(spaces().silent()))
            .map(|subqueries: Vec<UserInputAST>|
                if subqueries.len() == 1 {
                    subqueries.into_iter().next().unwrap()
                } else {
                    UserInputAST::Clause(subqueries.into_iter().collect())
                });
        let expr = attempt(boolean_expr).or(whitespace_separated_leaves);
        spaces().with(expr).skip(spaces())
    }
}

parser! {
    pub fn parse_to_ast[I]()(I) -> UserInputAST
    where [I: Stream<Item = char>]
    {
        spaces().with(optional(ast()).skip(eof())).map(|opt_ast| opt_ast.unwrap_or_else(UserInputAST::empty_query))
    }
}
```

After:

```rust
pub fn ast<'a>() -> impl Parser<&'a str, Output = UserInputAST> {
    let operand_leaf = (
        binary_operand().skip(spaces()),
        boosted_leaf().skip(spaces()),
    );
    let boolean_expr = (boosted_leaf().skip(spaces().silent()), many1(operand_leaf))
        .map(|(left, right)| aggregate_binary_expressions(left, right));
    let whitespace_separated_leaves =
        many1(boosted_leaf().skip(spaces().silent())).map(|subqueries: Vec<UserInputAST>| {
            if subqueries.len() == 1 {
                subqueries.into_iter().next().unwrap()
            } else {
                UserInputAST::Clause(subqueries.into_iter().collect())
            }
        });
    let expr = attempt(boolean_expr).or(whitespace_separated_leaves);
    spaces().with(expr).skip(spaces())
}

pub fn parse_to_ast<'a>() -> impl Parser<&'a str, Output = UserInputAST> {
    spaces()
        .with(optional(ast()).skip(eof()))
        .map(|opt_ast| opt_ast.unwrap_or_else(UserInputAST::empty_query))
}
```

In the test module, `use combine::parser::Parser;` is added along with helpers and a test for the new float parser:

```rust
pub fn nearly_equals(a: f32, b: f32) -> bool {
    (a - b).abs() < 0.0005 * (a + b).abs()
}

fn assert_nearly_equals(expected: f32, val: f32) {
    assert!(
        nearly_equals(val, expected),
        "Got {}, expected {}.",
        val,
        expected
    );
}

#[test]
fn test_positive_float_number() {
    fn valid_parse(float_str: &str, expected_val: f32, expected_remaining: &str) {
        let (val, remaining) = positive_float_number().parse(float_str).unwrap();
        assert_eq!(remaining, expected_remaining);
        assert_nearly_equals(val, expected_val);
    }
    fn error_parse(float_str: &str) {
        assert!(positive_float_number().parse(float_str).is_err());
    }
    valid_parse("1.0", 1.0f32, "");
    valid_parse("1", 1.0f32, "");
    valid_parse("0.234234 aaa", 0.234234f32, " aaa");
    error_parse(".3332");
    error_parse("1.");
    error_parse("-1.");
}
```
A new test covers the boost syntax end to end:

```diff
@@ -272,6 +333,15 @@ mod test {
         test_parse_query_to_ast_helper("NOT a", "-(\"a\")");
     }

+    #[test]
+    fn test_boosting() {
+        assert!(parse_to_ast().parse("a^2^3").is_err());
+        assert!(parse_to_ast().parse("a^2^").is_err());
+        test_parse_query_to_ast_helper("a^3", "(\"a\")^3");
+        test_parse_query_to_ast_helper("a^3 b^2", "((\"a\")^3 (\"b\")^2)");
+        test_parse_query_to_ast_helper("a^1", "\"a\"");
+    }
+
     #[test]
     fn test_parse_query_to_ast_binary_op() {
         test_parse_query_to_ast_helper("a AND b", "(+(\"a\") +(\"b\"))");
```
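Downstream, this grammar feeds tantivy's `QueryParser`, so the boost operator should be usable directly in user query strings. A hedged sketch (written against the 0.12-era `QueryParser` API; whether `parse_query` accepts `^` boosts in a given release depends on this change set having landed):

```rust
use tantivy::query::QueryParser;
use tantivy::schema::{Schema, TEXT};
use tantivy::Index;

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let title = schema_builder.add_text_field("title", TEXT);
    let index = Index::create_in_ram(schema_builder.build());

    let query_parser = QueryParser::for_index(&index, vec![title]);
    // `^3` boosts the first clause's weight, per the grammar additions above.
    let query = query_parser.parse_query("title:diary^3 girl")?;
    let _ = query;
    Ok(())
}
```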
The user-input AST gains a `Boost` node and a matching `Debug` arm:

```diff
@@ -88,6 +88,7 @@ pub enum UserInputAST {
     Clause(Vec<UserInputAST>),
     Unary(Occur, Box<UserInputAST>),
     Leaf(Box<UserInputLeaf>),
+    Boost(Box<UserInputAST>, f32),
 }

 impl UserInputAST {
@@ -154,6 +155,7 @@ impl fmt::Debug for UserInputAST {
                 write!(formatter, "{}({:?})", occur, subquery)
             }
             UserInputAST::Leaf(ref subquery) => write!(formatter, "{:?}", subquery),
+            UserInputAST::Boost(ref leaf, boost) => write!(formatter, "({:?})^{}", leaf, boost),
         }
     }
 }
```
The remaining commits are a sweep replacing the bare `use crate::Result;` import with fully qualified `crate::Result` across the collectors. In the `Count` collector:

```diff
@@ -1,7 +1,6 @@
 use super::Collector;
 use crate::collector::SegmentCollector;
 use crate::DocId;
-use crate::Result;
 use crate::Score;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
@@ -44,7 +43,11 @@ impl Collector for Count {

     type Child = SegmentCountCollector;

-    fn for_segment(&self, _: SegmentLocalId, _: &SegmentReader) -> Result<SegmentCountCollector> {
+    fn for_segment(
+        &self,
+        _: SegmentLocalId,
+        _: &SegmentReader,
+    ) -> crate::Result<SegmentCountCollector> {
         Ok(SegmentCountCollector::default())
     }

@@ -52,7 +55,7 @@ impl Collector for Count {
         false
     }

-    fn merge_fruits(&self, segment_counts: Vec<usize>) -> Result<usize> {
+    fn merge_fruits(&self, segment_counts: Vec<usize>) -> crate::Result<usize> {
         Ok(segment_counts.into_iter().sum())
     }
 }
```
The same cleanup in `CustomScoreTopCollector` and the `CustomScorer` trait:

```diff
@@ -1,6 +1,5 @@
 use crate::collector::top_collector::{TopCollector, TopSegmentCollector};
 use crate::collector::{Collector, SegmentCollector};
-use crate::Result;
 use crate::{DocAddress, DocId, Score, SegmentReader};

 pub(crate) struct CustomScoreTopCollector<TCustomScorer, TScore = Score> {
@@ -42,7 +41,7 @@ pub trait CustomScorer<TScore>: Sync {
     type Child: CustomSegmentScorer<TScore>;
     /// Builds a child scorer for a specific segment. The child scorer is associated to
     /// a specific segment.
-    fn segment_scorer(&self, segment_reader: &SegmentReader) -> Result<Self::Child>;
+    fn segment_scorer(&self, segment_reader: &SegmentReader) -> crate::Result<Self::Child>;
 }

 impl<TCustomScorer, TScore> Collector for CustomScoreTopCollector<TCustomScorer, TScore>
@@ -58,7 +57,7 @@ where
         &self,
         segment_local_id: u32,
         segment_reader: &SegmentReader,
-    ) -> Result<Self::Child> {
+    ) -> crate::Result<Self::Child> {
         let segment_scorer = self.custom_scorer.segment_scorer(segment_reader)?;
         let segment_collector = self
             .collector
@@ -73,7 +72,7 @@ where
         false
     }

-    fn merge_fruits(&self, segment_fruits: Vec<Self::Fruit>) -> Result<Self::Fruit> {
+    fn merge_fruits(&self, segment_fruits: Vec<Self::Fruit>) -> crate::Result<Self::Fruit> {
         self.collector.merge_fruits(segment_fruits)
     }
 }
@@ -111,7 +110,7 @@ where
 {
     type Child = T;

-    fn segment_scorer(&self, segment_reader: &SegmentReader) -> Result<Self::Child> {
+    fn segment_scorer(&self, segment_reader: &SegmentReader) -> crate::Result<Self::Child> {
         Ok((self)(segment_reader))
     }
 }
```
In `FacetCollector`, whose doc example now returns `tantivy::Result<()>`:

```diff
@@ -5,7 +5,6 @@ use crate::fastfield::FacetReader;
 use crate::schema::Facet;
 use crate::schema::Field;
 use crate::DocId;
-use crate::Result;
 use crate::Score;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
@@ -84,9 +83,9 @@ fn facet_depth(facet_bytes: &[u8]) -> usize {
 /// use tantivy::collector::FacetCollector;
 /// use tantivy::query::AllQuery;
 /// use tantivy::schema::{Facet, Schema, TEXT};
-/// use tantivy::{doc, Index, Result};
+/// use tantivy::{doc, Index};
 ///
-/// fn example() -> Result<()> {
+/// fn example() -> tantivy::Result<()> {
 ///     let mut schema_builder = Schema::builder();
 ///
 ///     // Facet have their own specific type.
@@ -262,7 +261,7 @@ impl Collector for FacetCollector {
         &self,
         _: SegmentLocalId,
         reader: &SegmentReader,
-    ) -> Result<FacetSegmentCollector> {
+    ) -> crate::Result<FacetSegmentCollector> {
         let field_name = reader.schema().get_field_name(self.field);
         let facet_reader = reader.facet_reader(self.field).ok_or_else(|| {
             TantivyError::SchemaError(format!("Field {:?} is not a facet field.", field_name))
@@ -328,7 +327,7 @@ impl Collector for FacetCollector {
         false
     }

-    fn merge_fruits(&self, segments_facet_counts: Vec<FacetCounts>) -> Result<FacetCounts> {
+    fn merge_fruits(&self, segments_facet_counts: Vec<FacetCounts>) -> crate::Result<FacetCounts> {
         let mut facet_counts: BTreeMap<Facet, u64> = BTreeMap::new();
         for segment_facet_counts in segments_facet_counts {
             for (facet, count) in segment_facet_counts.facet_counts {
```
In the collector module itself, the `Collector` trait and its tuple implementations:

```diff
@@ -85,7 +85,6 @@ See the `custom_collector` example.
 */

 use crate::DocId;
-use crate::Result;
 use crate::Score;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
@@ -147,14 +146,14 @@ pub trait Collector: Sync {
         &self,
         segment_local_id: SegmentLocalId,
         segment: &SegmentReader,
-    ) -> Result<Self::Child>;
+    ) -> crate::Result<Self::Child>;

     /// Returns true iff the collector requires to compute scores for documents.
     fn requires_scoring(&self) -> bool;

     /// Combines the fruit associated to the collection of each segments
     /// into one fruit.
-    fn merge_fruits(&self, segment_fruits: Vec<Self::Fruit>) -> Result<Self::Fruit>;
+    fn merge_fruits(&self, segment_fruits: Vec<Self::Fruit>) -> crate::Result<Self::Fruit>;
 }

 /// The `SegmentCollector` is the trait in charge of defining the
@@ -185,7 +184,11 @@ where
     type Fruit = (Left::Fruit, Right::Fruit);
     type Child = (Left::Child, Right::Child);

-    fn for_segment(&self, segment_local_id: u32, segment: &SegmentReader) -> Result<Self::Child> {
+    fn for_segment(
+        &self,
+        segment_local_id: u32,
+        segment: &SegmentReader,
+    ) -> crate::Result<Self::Child> {
         let left = self.0.for_segment(segment_local_id, segment)?;
         let right = self.1.for_segment(segment_local_id, segment)?;
         Ok((left, right))
@@ -198,7 +201,7 @@ where
     fn merge_fruits(
         &self,
         children: Vec<(Left::Fruit, Right::Fruit)>,
-    ) -> Result<(Left::Fruit, Right::Fruit)> {
+    ) -> crate::Result<(Left::Fruit, Right::Fruit)> {
         let mut left_fruits = vec![];
         let mut right_fruits = vec![];
         for (left_fruit, right_fruit) in children {
@@ -240,7 +243,11 @@ where
     type Fruit = (One::Fruit, Two::Fruit, Three::Fruit);
     type Child = (One::Child, Two::Child, Three::Child);

-    fn for_segment(&self, segment_local_id: u32, segment: &SegmentReader) -> Result<Self::Child> {
+    fn for_segment(
+        &self,
+        segment_local_id: u32,
+        segment: &SegmentReader,
+    ) -> crate::Result<Self::Child> {
         let one = self.0.for_segment(segment_local_id, segment)?;
         let two = self.1.for_segment(segment_local_id, segment)?;
         let three = self.2.for_segment(segment_local_id, segment)?;
@@ -251,7 +258,7 @@ where
         self.0.requires_scoring() || self.1.requires_scoring() || self.2.requires_scoring()
     }

-    fn merge_fruits(&self, children: Vec<Self::Fruit>) -> Result<Self::Fruit> {
+    fn merge_fruits(&self, children: Vec<Self::Fruit>) -> crate::Result<Self::Fruit> {
         let mut one_fruits = vec![];
         let mut two_fruits = vec![];
         let mut three_fruits = vec![];
@@ -299,7 +306,11 @@ where
     type Fruit = (One::Fruit, Two::Fruit, Three::Fruit, Four::Fruit);
     type Child = (One::Child, Two::Child, Three::Child, Four::Child);

-    fn for_segment(&self, segment_local_id: u32, segment: &SegmentReader) -> Result<Self::Child> {
+    fn for_segment(
+        &self,
+        segment_local_id: u32,
+        segment: &SegmentReader,
+    ) -> crate::Result<Self::Child> {
         let one = self.0.for_segment(segment_local_id, segment)?;
         let two = self.1.for_segment(segment_local_id, segment)?;
         let three = self.2.for_segment(segment_local_id, segment)?;
@@ -314,7 +325,7 @@ where
         || self.3.requires_scoring()
     }

-    fn merge_fruits(&self, children: Vec<Self::Fruit>) -> Result<Self::Fruit> {
+    fn merge_fruits(&self, children: Vec<Self::Fruit>) -> crate::Result<Self::Fruit> {
         let mut one_fruits = vec![];
         let mut two_fruits = vec![];
         let mut three_fruits = vec![];
```
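From outside the crate, `crate::Result` is just the `tantivy::Result` alias, so implementors keep the same shape. A hedged sketch of a custom collector against the trait signatures shown above (0.12-era API; `DocCounter` is illustrative, mirroring the built-in `Count`):

```rust
use tantivy::collector::{Collector, SegmentCollector};
use tantivy::{DocId, Score, SegmentLocalId, SegmentReader};

/// Counts matching documents, like the built-in `Count` collector.
struct DocCounter;

struct SegmentDocCounter {
    count: usize,
}

impl Collector for DocCounter {
    type Fruit = usize;
    type Child = SegmentDocCounter;

    // Returns `tantivy::Result` (`crate::Result` seen from inside the crate).
    fn for_segment(
        &self,
        _segment_local_id: SegmentLocalId,
        _segment: &SegmentReader,
    ) -> tantivy::Result<SegmentDocCounter> {
        Ok(SegmentDocCounter { count: 0 })
    }

    fn requires_scoring(&self) -> bool {
        false
    }

    fn merge_fruits(&self, segment_counts: Vec<usize>) -> tantivy::Result<usize> {
        Ok(segment_counts.into_iter().sum())
    }
}

impl SegmentCollector for SegmentDocCounter {
    type Fruit = usize;

    fn collect(&mut self, _doc: DocId, _score: Score) {
        self.count += 1;
    }

    fn harvest(self) -> usize {
        self.count
    }
}
```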
In `MultiCollector` and its `CollectorWrapper`:

```diff
@@ -2,7 +2,6 @@ use super::Collector;
 use super::SegmentCollector;
 use crate::collector::Fruit;
 use crate::DocId;
-use crate::Result;
 use crate::Score;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
@@ -24,7 +23,7 @@ impl<TCollector: Collector> Collector for CollectorWrapper<TCollector> {
         &self,
         segment_local_id: u32,
         reader: &SegmentReader,
-    ) -> Result<Box<dyn BoxableSegmentCollector>> {
+    ) -> crate::Result<Box<dyn BoxableSegmentCollector>> {
         let child = self.0.for_segment(segment_local_id, reader)?;
         Ok(Box::new(SegmentCollectorWrapper(child)))
     }
@@ -33,7 +32,10 @@ impl<TCollector: Collector> Collector for CollectorWrapper<TCollector> {
         self.0.requires_scoring()
     }

-    fn merge_fruits(&self, children: Vec<<Self as Collector>::Fruit>) -> Result<Box<dyn Fruit>> {
+    fn merge_fruits(
+        &self,
+        children: Vec<<Self as Collector>::Fruit>,
+    ) -> crate::Result<Box<dyn Fruit>> {
         let typed_fruit: Vec<TCollector::Fruit> = children
             .into_iter()
             .map(|untyped_fruit| {
@@ -44,7 +46,7 @@ impl<TCollector: Collector> Collector for CollectorWrapper<TCollector> {
                     TantivyError::InvalidArgument("Failed to cast child fruit.".to_string())
                 })
             })
-            .collect::<Result<_>>()?;
+            .collect::<crate::Result<_>>()?;
         let merged_fruit = self.0.merge_fruits(typed_fruit)?;
         Ok(Box::new(merged_fruit))
     }
@@ -175,12 +177,12 @@ impl<'a> Collector for MultiCollector<'a> {
         &self,
         segment_local_id: SegmentLocalId,
         segment: &SegmentReader,
-    ) -> Result<MultiCollectorChild> {
+    ) -> crate::Result<MultiCollectorChild> {
         let children = self
             .collector_wrappers
             .iter()
             .map(|collector_wrapper| collector_wrapper.for_segment(segment_local_id, segment))
-            .collect::<Result<Vec<_>>>()?;
+            .collect::<crate::Result<Vec<_>>>()?;
         Ok(MultiCollectorChild { children })
     }

@@ -191,7 +193,7 @@ impl<'a> Collector for MultiCollector<'a> {
             .any(Collector::requires_scoring)
     }

-    fn merge_fruits(&self, segments_multifruits: Vec<MultiFruit>) -> Result<MultiFruit> {
+    fn merge_fruits(&self, segments_multifruits: Vec<MultiFruit>) -> crate::Result<MultiFruit> {
         let mut segment_fruits_list: Vec<Vec<Box<dyn Fruit>>> = (0..self.collector_wrappers.len())
             .map(|_| Vec::with_capacity(segments_multifruits.len()))
             .collect::<Vec<_>>();
@@ -209,7 +211,7 @@ impl<'a> Collector for MultiCollector<'a> {
             .map(|(child_collector, segment_fruits)| {
                 Ok(Some(child_collector.merge_fruits(segment_fruits)?))
             })
-            .collect::<Result<_>>()?;
+            .collect::<crate::Result<_>>()?;
         Ok(MultiFruit { sub_fruits })
     }
 }
```
In the test collectors (`TestCollector`, `FastFieldTestCollector`, `BytesFastFieldTestCollector`):

```diff
@@ -55,7 +55,7 @@ impl Collector for TestCollector {
         &self,
         segment_id: SegmentLocalId,
         _reader: &SegmentReader,
-    ) -> Result<TestSegmentCollector> {
+    ) -> crate::Result<TestSegmentCollector> {
         Ok(TestSegmentCollector {
             segment_id,
             fruit: TestFruit::default(),
@@ -66,7 +66,7 @@ impl Collector for TestCollector {
         self.compute_score
     }

-    fn merge_fruits(&self, mut children: Vec<TestFruit>) -> Result<TestFruit> {
+    fn merge_fruits(&self, mut children: Vec<TestFruit>) -> crate::Result<TestFruit> {
         children.sort_by_key(|fruit| {
             if fruit.docs().is_empty() {
                 0
@@ -124,7 +124,7 @@ impl Collector for FastFieldTestCollector {
         &self,
         _: SegmentLocalId,
         segment_reader: &SegmentReader,
-    ) -> Result<FastFieldSegmentCollector> {
+    ) -> crate::Result<FastFieldSegmentCollector> {
         let reader = segment_reader
             .fast_fields()
             .u64(self.field)
@@ -139,7 +139,7 @@ impl Collector for FastFieldTestCollector {
         false
     }

-    fn merge_fruits(&self, children: Vec<Vec<u64>>) -> Result<Vec<u64>> {
+    fn merge_fruits(&self, children: Vec<Vec<u64>>) -> crate::Result<Vec<u64>> {
         Ok(children.into_iter().flat_map(|v| v.into_iter()).collect())
     }
 }
@@ -184,7 +184,7 @@ impl Collector for BytesFastFieldTestCollector {
         &self,
         _segment_local_id: u32,
         segment_reader: &SegmentReader,
-    ) -> Result<BytesFastFieldSegmentCollector> {
+    ) -> crate::Result<BytesFastFieldSegmentCollector> {
         Ok(BytesFastFieldSegmentCollector {
             vals: Vec::new(),
             reader: segment_reader
@@ -198,7 +198,7 @@ impl Collector for BytesFastFieldTestCollector {
         false
     }

-    fn merge_fruits(&self, children: Vec<Vec<u8>>) -> Result<Vec<u8>> {
+    fn merge_fruits(&self, children: Vec<Vec<u8>>) -> crate::Result<Vec<u8>> {
         Ok(children.into_iter().flat_map(|c| c.into_iter()).collect())
     }
 }
```
In `TopCollector`:

```diff
@@ -1,6 +1,5 @@
 use crate::DocAddress;
 use crate::DocId;
-use crate::Result;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
 use serde::export::PhantomData;
@@ -86,7 +85,7 @@ where
     pub fn merge_fruits(
         &self,
         children: Vec<Vec<(T, DocAddress)>>,
-    ) -> Result<Vec<(T, DocAddress)>> {
+    ) -> crate::Result<Vec<(T, DocAddress)>> {
         if self.limit == 0 {
             return Ok(Vec::new());
         }
@@ -113,7 +112,7 @@ where
         &self,
         segment_id: SegmentLocalId,
         _: &SegmentReader,
-    ) -> Result<TopSegmentCollector<F>> {
+    ) -> crate::Result<TopSegmentCollector<F>> {
         Ok(TopSegmentCollector::new(segment_id, self.limit))
     }
 }
```
`TopDocs` gets a behavioral fix on top of the `crate::Result` sweep: `order_by_u64_field` no longer panics on a non-fast field. The inline closure scorer is replaced by explicit `ScorerByField` / `ScorerByFastFieldReader` types that surface a `SchemaError`, and the test drops its `#[should_panic]`:

```diff
@@ -6,10 +6,10 @@ use crate::collector::tweak_score_top_collector::TweakedScoreTopCollector;
 use crate::collector::{
     CustomScorer, CustomSegmentScorer, ScoreSegmentTweaker, ScoreTweaker, SegmentCollector,
 };
+use crate::fastfield::FastFieldReader;
 use crate::schema::Field;
 use crate::DocAddress;
 use crate::DocId;
-use crate::Result;
 use crate::Score;
 use crate::SegmentLocalId;
 use crate::SegmentReader;
@@ -61,6 +61,37 @@ impl fmt::Debug for TopDocs {
     }
 }

+struct ScorerByFastFieldReader {
+    ff_reader: FastFieldReader<u64>,
+}
+
+impl CustomSegmentScorer<u64> for ScorerByFastFieldReader {
+    fn score(&self, doc: DocId) -> u64 {
+        self.ff_reader.get_u64(u64::from(doc))
+    }
+}
+
+struct ScorerByField {
+    field: Field,
+}
+
+impl CustomScorer<u64> for ScorerByField {
+    type Child = ScorerByFastFieldReader;
+
+    fn segment_scorer(&self, segment_reader: &SegmentReader) -> crate::Result<Self::Child> {
+        let ff_reader = segment_reader
+            .fast_fields()
+            .u64(self.field)
+            .ok_or_else(|| {
+                crate::TantivyError::SchemaError(format!(
+                    "Field requested ({:?}) is not a i64/u64 fast field.",
+                    self.field
+                ))
+            })?;
+        Ok(ScorerByFastFieldReader { ff_reader })
+    }
+}
+
 impl TopDocs {
     /// Creates a top score collector, with a number of documents equal to "limit".
     ///
@@ -74,7 +105,7 @@ impl TopDocs {
     ///
     /// ```rust
     /// # use tantivy::schema::{Schema, FAST, TEXT};
-    /// # use tantivy::{doc, Index, Result, DocAddress};
+    /// # use tantivy::{doc, Index, DocAddress};
     /// # use tantivy::query::{Query, QueryParser};
     /// use tantivy::Searcher;
     /// use tantivy::collector::TopDocs;
@@ -111,7 +142,7 @@ impl TopDocs {
     /// fn docs_sorted_by_rating(searcher: &Searcher,
     ///                          query: &dyn Query,
     ///                          sort_by_field: Field)
-    ///     -> Result<Vec<(u64, DocAddress)>> {
+    ///     -> tantivy::Result<Vec<(u64, DocAddress)>> {
     ///
     ///     // This is where we build our topdocs collector
     ///     //
@@ -143,14 +174,7 @@ impl TopDocs {
         self,
         field: Field,
     ) -> impl Collector<Fruit = Vec<(u64, DocAddress)>> {
-        self.custom_score(move |segment_reader: &SegmentReader| {
-            let ff_reader = segment_reader
-                .fast_fields()
-                .u64(field)
-                .expect("Field requested is not a i64/u64 fast field.");
-            //TODO error message missmatch actual behavior for i64
-            move |doc: DocId| ff_reader.get(doc)
-        })
+        self.custom_score(ScorerByField { field })
     }

     /// Ranks the documents using a custom score.
@@ -384,7 +408,7 @@ impl Collector for TopDocs {
         &self,
         segment_local_id: SegmentLocalId,
         reader: &SegmentReader,
-    ) -> Result<Self::Child> {
+    ) -> crate::Result<Self::Child> {
         let collector = self.0.for_segment(segment_local_id, reader)?;
         Ok(TopScoreSegmentCollector(collector))
     }
@@ -393,7 +417,10 @@ impl Collector for TopDocs {
         true
     }

-    fn merge_fruits(&self, child_fruits: Vec<Vec<(Score, DocAddress)>>) -> Result<Self::Fruit> {
+    fn merge_fruits(
+        &self,
+        child_fruits: Vec<Vec<(Score, DocAddress)>>,
+    ) -> crate::Result<Self::Fruit> {
         self.0.merge_fruits(child_fruits)
     }
 }
@@ -572,7 +599,6 @@ mod tests {
     }

     #[test]
-    #[should_panic(expected = "Field requested is not a i64/u64 fast field")]
     fn test_field_not_fast_field() {
         let mut schema_builder = Schema::builder();
         let title = schema_builder.add_text_field(TITLE, TEXT);
@@ -587,7 +613,15 @@ mod tests {
         let searcher = index.reader().unwrap().searcher();
         let segment = searcher.segment_reader(0);
         let top_collector = TopDocs::with_limit(4).order_by_u64_field(size);
-        assert!(top_collector.for_segment(0, segment).is_ok());
+        let err = top_collector.for_segment(0, segment);
+        if let Err(crate::TantivyError::SchemaError(msg)) = err {
+            assert_eq!(
+                msg,
+                "Field requested (Field(1)) is not a i64/u64 fast field."
+            );
+        } else {
+            assert!(false);
+        }
     }

     fn index(
```
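A hedged usage sketch of the reworked `order_by_u64_field` (field names and writer sizing are illustrative); the practical difference after this change is that pointing it at a non-fast field surfaces as a `SchemaError` from `search` rather than a panic:

```rust
use tantivy::collector::TopDocs;
use tantivy::query::AllQuery;
use tantivy::schema::{Schema, FAST, INDEXED};
use tantivy::{doc, DocAddress, Index};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    // `FAST` is what makes the field usable by `order_by_u64_field`.
    let rating = schema_builder.add_u64_field("rating", FAST | INDEXED);
    let index = Index::create_in_ram(schema_builder.build());

    let mut writer = index.writer_with_num_threads(1, 10_000_000)?;
    writer.add_document(doc!(rating => 3u64));
    writer.add_document(doc!(rating => 7u64));
    writer.commit()?;

    let searcher = index.reader()?.searcher();

    // Top documents ordered by the `rating` fast field, highest first.
    let top: Vec<(u64, DocAddress)> =
        searcher.search(&AllQuery, &TopDocs::with_limit(10).order_by_u64_field(rating))?;
    assert_eq!(top[0].0, 7);
    Ok(())
}
```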
An unused `ReadOnlyDirectory` import is dropped from the `CompositeFile` tests:

```diff
@@ -186,7 +186,7 @@ mod test {
     use super::{CompositeFile, CompositeWrite};
     use crate::common::BinarySerializable;
     use crate::common::VInt;
-    use crate::directory::{Directory, RAMDirectory, ReadOnlyDirectory};
+    use crate::directory::{Directory, RAMDirectory};
     use crate::schema::Field;
     use std::io::Write;
     use std::path::Path;
```
@@ -1,4 +1,3 @@
-use crate::Result;
 use crossbeam::channel;
 use rayon::{ThreadPool, ThreadPoolBuilder};
 
@@ -10,7 +9,9 @@ use rayon::{ThreadPool, ThreadPoolBuilder};
 /// API of a dependency, knowing it might conflict with a different version
 /// used by the client. Second, we may stop using rayon in the future.
 pub enum Executor {
+    /// Single thread variant of an Executor
     SingleThread,
+    /// Thread pool variant of an Executor
     ThreadPool(ThreadPool),
 }
 
@@ -20,8 +21,8 @@ impl Executor {
         Executor::SingleThread
     }
 
-    // Creates an Executor that dispatches the tasks in a thread pool.
-    pub fn multi_thread(num_threads: usize, prefix: &'static str) -> Result<Executor> {
+    /// Creates an Executor that dispatches the tasks in a thread pool.
+    pub fn multi_thread(num_threads: usize, prefix: &'static str) -> crate::Result<Executor> {
         let pool = ThreadPoolBuilder::new()
             .num_threads(num_threads)
             .thread_name(move |num| format!("{}{}", prefix, num))
@@ -29,22 +30,22 @@ impl Executor {
         Ok(Executor::ThreadPool(pool))
     }
 
-    // Perform a map in the thread pool.
-    //
-    // Regardless of the executor (`SingleThread` or `ThreadPool`), panics in the task
-    // will propagate to the caller.
+    /// Perform a map in the thread pool.
+    ///
+    /// Regardless of the executor (`SingleThread` or `ThreadPool`), panics in the task
+    /// will propagate to the caller.
     pub fn map<
         A: Send,
        R: Send,
        AIterator: Iterator<Item = A>,
-        F: Sized + Sync + Fn(A) -> Result<R>,
+        F: Sized + Sync + Fn(A) -> crate::Result<R>,
    >(
        &self,
        f: F,
        args: AIterator,
-    ) -> Result<Vec<R>> {
+    ) -> crate::Result<Vec<R>> {
         match self {
-            Executor::SingleThread => args.map(f).collect::<Result<_>>(),
+            Executor::SingleThread => args.map(f).collect::<crate::Result<_>>(),
             Executor::ThreadPool(pool) => {
                 let args_with_indices: Vec<(usize, A)> = args.enumerate().collect();
                 let num_fruits = args_with_indices.len();
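For orientation: `Executor::map` preserves the input order of `args` even when the work is fanned out on the pool, and a panic inside any task resurfaces on the calling thread. A sketch of the call shape, assuming the `tantivy::core::Executor` path is reachable (it may effectively be crate-internal on this branch, so treat the import as hypothetical):

```rust
use tantivy::core::Executor; // assumed path; may be crate-internal in 0.12

fn main() -> tantivy::Result<()> {
    // Worker threads are named "exec-0", "exec-1", ... for debugging.
    let executor = Executor::multi_thread(4, "exec-")?;
    // Fruits come back in input order, not completion order.
    let squares = executor.map(|n: u64| Ok(n * n), 0u64..8u64)?;
    assert_eq!(squares, vec![0, 1, 4, 9, 16, 25, 36, 49]);
    Ok(())
}
```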
@@ -1,4 +1,3 @@
-use super::segment::create_segment;
 use super::segment::Segment;
 use crate::core::Executor;
 use crate::core::IndexMeta;
@@ -20,10 +19,8 @@ use crate::reader::IndexReaderBuilder;
 use crate::schema::Field;
 use crate::schema::FieldType;
 use crate::schema::Schema;
-use crate::tokenizer::BoxedTokenizer;
-use crate::tokenizer::TokenizerManager;
+use crate::tokenizer::{TextAnalyzer, TokenizerManager};
 use crate::IndexWriter;
-use crate::Result;
 use num_cpus;
 use std::borrow::BorrowMut;
 use std::collections::HashSet;
@@ -32,7 +29,10 @@ use std::fmt;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
-fn load_metas(directory: &dyn Directory, inventory: &SegmentMetaInventory) -> Result<IndexMeta> {
+fn load_metas(
+    directory: &dyn Directory,
+    inventory: &SegmentMetaInventory,
+) -> crate::Result<IndexMeta> {
     let meta_data = directory.atomic_read(&META_FILEPATH)?;
     let meta_string = String::from_utf8_lossy(&meta_data);
     IndexMeta::deserialize(&meta_string, &inventory)
@@ -73,14 +73,14 @@ impl Index {
 
     /// Replace the default single thread search executor pool
     /// by a thread pool with a given number of threads.
-    pub fn set_multithread_executor(&mut self, num_threads: usize) -> Result<()> {
+    pub fn set_multithread_executor(&mut self, num_threads: usize) -> crate::Result<()> {
         self.executor = Arc::new(Executor::multi_thread(num_threads, "thrd-tantivy-search-")?);
         Ok(())
     }
 
     /// Replace the default single thread search executor pool
     /// by a thread pool with a given number of threads.
-    pub fn set_default_multithread_executor(&mut self) -> Result<()> {
+    pub fn set_default_multithread_executor(&mut self) -> crate::Result<()> {
         let default_num_threads = num_cpus::get();
         self.set_multithread_executor(default_num_threads)
     }
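Both setters now return `crate::Result`, since building the rayon pool can fail. A short sketch of wiring a search thread pool onto an index (in-RAM index and empty schema used only for brevity):

```rust
use tantivy::schema::Schema;
use tantivy::Index;

fn main() -> tantivy::Result<()> {
    let mut index = Index::create_in_ram(Schema::builder().build());
    // One search thread per logical CPU...
    index.set_default_multithread_executor()?;
    // ...or an explicit pool size.
    index.set_multithread_executor(2)?;
    Ok(())
}
```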
@@ -99,7 +99,10 @@ impl Index {
     ///
     /// If a previous index was in this directory, then its meta file will be destroyed.
     #[cfg(feature = "mmap")]
-    pub fn create_in_dir<P: AsRef<Path>>(directory_path: P, schema: Schema) -> Result<Index> {
+    pub fn create_in_dir<P: AsRef<Path>>(
+        directory_path: P,
+        schema: Schema,
+    ) -> crate::Result<Index> {
         let mmap_directory = MmapDirectory::open(directory_path)?;
         if Index::exists(&mmap_directory) {
             return Err(TantivyError::IndexAlreadyExists);
@@ -108,7 +111,7 @@ impl Index {
     }
 
     /// Opens or creates a new index in the provided directory
-    pub fn open_or_create<Dir: Directory>(dir: Dir, schema: Schema) -> Result<Index> {
+    pub fn open_or_create<Dir: Directory>(dir: Dir, schema: Schema) -> crate::Result<Index> {
         if !Index::exists(&dir) {
             return Index::create(dir, schema);
         }
@@ -131,13 +134,13 @@ impl Index {
     /// The temp directory is only used for testing the `MmapDirectory`.
     /// For other unit tests, prefer the `RAMDirectory`, see: `create_in_ram`.
     #[cfg(feature = "mmap")]
-    pub fn create_from_tempdir(schema: Schema) -> Result<Index> {
+    pub fn create_from_tempdir(schema: Schema) -> crate::Result<Index> {
         let mmap_directory = MmapDirectory::create_from_tempdir()?;
         Index::create(mmap_directory, schema)
     }
 
     /// Creates a new index given an implementation of the trait `Directory`
-    pub fn create<Dir: Directory>(dir: Dir, schema: Schema) -> Result<Index> {
+    pub fn create<Dir: Directory>(dir: Dir, schema: Schema) -> crate::Result<Index> {
         let directory = ManagedDirectory::wrap(dir)?;
         Index::from_directory(directory, schema)
     }
@@ -145,7 +148,7 @@ impl Index {
     /// Create a new index from a directory.
     ///
     /// This will overwrite existing meta.json
-    fn from_directory(mut directory: ManagedDirectory, schema: Schema) -> Result<Index> {
+    fn from_directory(mut directory: ManagedDirectory, schema: Schema) -> crate::Result<Index> {
         save_new_metas(schema.clone(), directory.borrow_mut())?;
         let metas = IndexMeta::with_schema(schema);
         Index::create_from_metas(directory, &metas, SegmentMetaInventory::default())
@@ -156,7 +159,7 @@ impl Index {
         directory: ManagedDirectory,
         metas: &IndexMeta,
         inventory: SegmentMetaInventory,
-    ) -> Result<Index> {
+    ) -> crate::Result<Index> {
         let schema = metas.schema.clone();
         Ok(Index {
             directory,
@@ -173,11 +176,11 @@ impl Index {
     }
 
     /// Helper to access the tokenizer associated to a specific field.
-    pub fn tokenizer_for_field(&self, field: Field) -> Result<BoxedTokenizer> {
+    pub fn tokenizer_for_field(&self, field: Field) -> crate::Result<TextAnalyzer> {
         let field_entry = self.schema.get_field_entry(field);
         let field_type = field_entry.field_type();
         let tokenizer_manager: &TokenizerManager = self.tokenizers();
-        let tokenizer_name_opt: Option<BoxedTokenizer> = match field_type {
+        let tokenizer_name_opt: Option<TextAnalyzer> = match field_type {
             FieldType::Str(text_options) => text_options
                 .get_indexing_options()
                 .map(|text_indexing_options| text_indexing_options.tokenizer().to_string())
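With `tokenizer_for_field` now returning a `TextAnalyzer` instead of a `BoxedTokenizer` (see the 0.12 changelog note about tokenizer changes), callers get the field's full analyzer chain. A sketch, assuming the 0.12 `TextAnalyzer::token_stream` API; the `body` field name is made up:

```rust
use tantivy::schema::{Schema, TEXT};
use tantivy::Index;

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let body = schema_builder.add_text_field("body", TEXT); // hypothetical field
    let index = Index::create_in_ram(schema_builder.build());

    // Errors if the field is not a tokenized text field.
    let analyzer = index.tokenizer_for_field(body)?;
    let mut stream = analyzer.token_stream("Hello, happy tax payer!");
    while stream.advance() {
        println!("{:?}", stream.token().text);
    }
    Ok(())
}
```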
@@ -196,7 +199,7 @@ impl Index {
     /// Create a default `IndexReader` for the given index.
     ///
     /// See [`Index.reader_builder()`](#method.reader_builder).
-    pub fn reader(&self) -> Result<IndexReader> {
+    pub fn reader(&self) -> crate::Result<IndexReader> {
         self.reader_builder().try_into()
     }
 
@@ -211,7 +214,7 @@ impl Index {
 
     /// Opens a new directory from an index path.
     #[cfg(feature = "mmap")]
-    pub fn open_in_dir<P: AsRef<Path>>(directory_path: P) -> Result<Index> {
+    pub fn open_in_dir<P: AsRef<Path>>(directory_path: P) -> crate::Result<Index> {
         let mmap_directory = MmapDirectory::open(directory_path)?;
         Index::open(mmap_directory)
     }
@@ -235,7 +238,7 @@ impl Index {
     }
 
     /// Open the index using the provided directory
-    pub fn open<D: Directory>(directory: D) -> Result<Index> {
+    pub fn open<D: Directory>(directory: D) -> crate::Result<Index> {
         let directory = ManagedDirectory::wrap(directory)?;
         let inventory = SegmentMetaInventory::default();
         let metas = load_metas(&directory, &inventory)?;
@@ -243,7 +246,7 @@ impl Index {
     }
 
     /// Reads the index meta file from the directory.
-    pub fn load_metas(&self) -> Result<IndexMeta> {
+    pub fn load_metas(&self) -> crate::Result<IndexMeta> {
         load_metas(self.directory(), &self.inventory)
     }
 
@@ -271,7 +274,7 @@ impl Index {
         &self,
         num_threads: usize,
         overall_heap_size_in_bytes: usize,
-    ) -> Result<IndexWriter> {
+    ) -> crate::Result<IndexWriter> {
         let directory_lock = self
             .directory
             .acquire_lock(&INDEX_WRITER_LOCK)
@@ -279,7 +282,7 @@ impl Index {
                 TantivyError::LockFailure(
                     err,
                     Some(
-                        "Failed to acquire index lock. If you are using\
+                        "Failed to acquire index lock. If you are using \
                          a regular directory, this means there is already an \
                          `IndexWriter` working on this `Directory`, in this process \
                          or in a different process."
@@ -306,7 +309,7 @@ impl Index {
     /// If the lockfile already exists, returns `Error::FileAlreadyExists`.
     /// # Panics
     /// If the heap size per thread is too small, panics.
-    pub fn writer(&self, overall_heap_size_in_bytes: usize) -> Result<IndexWriter> {
+    pub fn writer(&self, overall_heap_size_in_bytes: usize) -> crate::Result<IndexWriter> {
         let mut num_threads = num_cpus::get();
         let heap_size_in_bytes_per_thread = overall_heap_size_in_bytes / num_threads;
         if heap_size_in_bytes_per_thread < HEAP_SIZE_MIN {
@@ -323,7 +326,7 @@ impl Index {
     }
 
     /// Returns the list of segments that are searchable
-    pub fn searchable_segments(&self) -> Result<Vec<Segment>> {
+    pub fn searchable_segments(&self) -> crate::Result<Vec<Segment>> {
         Ok(self
             .searchable_segment_metas()?
             .into_iter()
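As the hunks above show, `Index::writer` divides the overall heap budget across one indexing thread per logical CPU and panics if each thread's share falls under `HEAP_SIZE_MIN`; the sibling constructor taking an explicit thread count (its signature appears in the `@@ -271` hunk) makes the split explicit. A sketch, assuming the 0.12 `writer_with_num_threads` name:

```rust
use tantivy::schema::{Schema, TEXT};
use tantivy::{doc, Index};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let title = schema_builder.add_text_field("title", TEXT);
    let index = Index::create_in_ram(schema_builder.build());

    // 50MB overall, split across num_cpus indexing threads.
    // Explicit variant: index.writer_with_num_threads(1, 10_000_000)?
    let mut writer = index.writer(50_000_000)?;
    writer.add_document(doc!(title => "of mice and men"));
    writer.commit()?;
    Ok(())
}
```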
@@ -331,19 +334,25 @@ impl Index {
             .collect())
     }
 
-    #[doc(hidden)]
-    pub fn segment(&self, segment_meta: SegmentMeta) -> Segment {
-        create_segment(self.clone(), segment_meta)
+    pub(crate) fn segment(&self, segment_meta: SegmentMeta) -> Segment {
+        Segment::for_index(self.clone(), segment_meta)
     }
 
     /// Creates a new segment.
     pub fn new_segment(&self) -> Segment {
-        let mut segment_meta = self
+        let segment_meta = self
             .inventory
             .new_segment_meta(SegmentId::generate_random(), 0);
         self.segment(segment_meta)
     }
 
+    /// Creates a new segment.
+    pub(crate) fn new_segment_unpersisted(&self) -> Segment {
+        let meta = self
+            .inventory
+            .new_segment_meta(SegmentId::generate_random(), 0);
+        Segment::new_volatile(meta, self.schema())
+    }
     /// Return a reference to the index directory.
     pub fn directory(&self) -> &ManagedDirectory {
         &self.directory
@@ -356,12 +365,12 @@ impl Index {
 
     /// Reads the meta.json and returns the list of
     /// `SegmentMeta` from the last commit.
-    pub fn searchable_segment_metas(&self) -> Result<Vec<SegmentMeta>> {
+    pub fn searchable_segment_metas(&self) -> crate::Result<Vec<SegmentMeta>> {
         Ok(self.load_metas()?.segments)
     }
 
     /// Returns the list of segment ids that are searchable.
-    pub fn searchable_segment_ids(&self) -> Result<Vec<SegmentId>> {
+    pub fn searchable_segment_ids(&self) -> crate::Result<Vec<SegmentId>> {
         Ok(self
             .searchable_segment_metas()?
             .iter()
@@ -370,7 +379,7 @@ impl Index {
     }
 
     /// Returns the set of corrupted files
-    pub fn validate_checksum(&self) -> Result<HashSet<PathBuf>> {
+    pub fn validate_checksum(&self) -> crate::Result<HashSet<PathBuf>> {
         self.directory.list_damaged().map_err(Into::into)
     }
 }
@@ -429,7 +438,7 @@ mod tests {
     }
 
     #[test]
-    fn create_should_wipeoff_existing() {
+    fn create_should_wipe_off_existing() {
         let directory = RAMDirectory::create();
         assert!(Index::create(directory.clone(), throw_away_schema()).is_ok());
         assert!(Index::exists(&directory));
@@ -466,7 +475,7 @@ mod tests {
             .try_into()
             .unwrap();
         assert_eq!(reader.searcher().num_docs(), 0);
-        test_index_on_commit_reload_policy_aux(field, &index, &reader);
+        test_index_on_commit_reload_policy_aux(field, index.clone(), &index, &reader);
     }
 
     #[cfg(feature = "mmap")]
@@ -490,7 +499,7 @@ mod tests {
             .try_into()
             .unwrap();
         assert_eq!(reader.searcher().num_docs(), 0);
-        test_index_on_commit_reload_policy_aux(field, &index, &reader);
+        test_index_on_commit_reload_policy_aux(field, index.clone(), &index, &reader);
     }
 
     #[test]
@@ -532,12 +541,16 @@ mod tests {
             .try_into()
             .unwrap();
         assert_eq!(reader.searcher().num_docs(), 0);
-            test_index_on_commit_reload_policy_aux(field, &write_index, &reader);
+            test_index_on_commit_reload_policy_aux(field, read_index, &write_index, &reader);
         }
     }
 
-    fn test_index_on_commit_reload_policy_aux(field: Field, index: &Index, reader: &IndexReader) {
-        let mut reader_index = reader.index();
+    fn test_index_on_commit_reload_policy_aux(
+        field: Field,
+        mut reader_index: Index,
+        index: &Index,
+        reader: &IndexReader,
+    ) {
         let (sender, receiver) = crossbeam::channel::unbounded();
         let _watch_handle = reader_index.directory_mut().watch(Box::new(move || {
             let _ = sender.send(());
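The reworked test helper now takes the reader's `Index` by value so that `directory_mut()` can register a watch callback on it; the channel then serializes "meta.json changed" notifications. The pattern in isolation (crossbeam is assumed as a dependency, as in tantivy itself):

```rust
use tantivy::schema::Schema;
use tantivy::Index;

fn main() -> tantivy::Result<()> {
    let mut index = Index::create_in_ram(Schema::builder().build());
    let (sender, receiver) = crossbeam::channel::unbounded();
    // Fires every time meta.json is atomically rewritten (e.g. on commit).
    let _watch_handle = index.directory_mut().watch(Box::new(move || {
        let _ = sender.send(());
    }));
    // After a commit from another handle, block until notified:
    // receiver.recv().unwrap();
    drop(receiver);
    Ok(())
}
```

Keeping the handle alive matters: dropping it unregisters the callback.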
@@ -35,7 +35,6 @@ impl SegmentMetaInventory {
             segment_id,
             max_doc,
             deletes: None,
-            bundled: false,
         };
         SegmentMeta::from(self.inventory.track(inner))
     }
@@ -82,19 +81,6 @@ impl SegmentMeta {
         self.tracked.segment_id
     }
 
-    pub fn with_bundled(self) -> SegmentMeta {
-        SegmentMeta::from(self.tracked.map(|inner| InnerSegmentMeta {
-            segment_id: inner.segment_id,
-            max_doc: inner.max_doc,
-            deletes: inner.deletes.clone(),
-            bundled: true,
-        }))
-    }
-
-    pub fn is_bundled(&self) -> bool {
-        self.tracked.bundled
-    }
-
     /// Returns the number of deleted documents.
     pub fn num_deleted_docs(&self) -> u32 {
         self.tracked
@@ -121,12 +107,8 @@ impl SegmentMeta {
     /// It just joins the segment id with the extension
     /// associated to a segment component.
     pub fn relative_path(&self, component: SegmentComponent) -> PathBuf {
-        let suffix = self.suffix(component);
-        self.relative_path_from_suffix(&suffix)
-    }
-
-    fn suffix(&self, component: SegmentComponent) -> String {
-        match component {
+        let mut path = self.id().uuid_string();
+        path.push_str(&*match component {
             SegmentComponent::POSTINGS => ".idx".to_string(),
             SegmentComponent::POSITIONS => ".pos".to_string(),
             SegmentComponent::POSITIONSSKIP => ".posidx".to_string(),
@@ -135,17 +117,7 @@ impl SegmentMeta {
             SegmentComponent::FASTFIELDS => ".fast".to_string(),
             SegmentComponent::FIELDNORMS => ".fieldnorm".to_string(),
             SegmentComponent::DELETE => format!(".{}.del", self.delete_opstamp().unwrap_or(0)),
-        }
-    }
-
-    /// Returns the relative path of a component of our segment.
-    ///
-    /// It just joins the segment id with the extension
-    /// associated to a segment component.
-    pub fn relative_path_from_suffix(&self, suffix: &str) -> PathBuf {
-        let mut path = self.id().uuid_string();
-        path.push_str(".");
-        path.push_str(&suffix);
+        });
         PathBuf::from(path)
     }
 
@@ -189,7 +161,6 @@ impl SegmentMeta {
             segment_id: inner_meta.segment_id,
             max_doc,
             deletes: None,
-            bundled: inner_meta.bundled,
         });
         SegmentMeta { tracked }
     }
@@ -204,7 +175,6 @@ impl SegmentMeta {
             segment_id: inner_meta.segment_id,
             max_doc: inner_meta.max_doc,
             deletes: Some(delete_meta),
-            bundled: inner_meta.bundled,
         });
         SegmentMeta { tracked }
     }
@@ -215,7 +185,6 @@ struct InnerSegmentMeta {
     segment_id: SegmentId,
     max_doc: u32,
     deletes: Option<DeleteMeta>,
-    bundled: bool,
 }
 
 impl InnerSegmentMeta {
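The inlined `relative_path` above simply concatenates the segment uuid with a per-component extension (the delete component also embeds its opstamp, e.g. `.42.del`). A standalone sketch of that naming, with a made-up uuid:

```rust
use std::path::PathBuf;

// Sketch of the new naming logic: uuid string + extension, with no
// intermediate "." separator helper anymore.
fn relative_path(uuid: &str, extension: &str) -> PathBuf {
    let mut path = uuid.to_string();
    path.push_str(extension);
    PathBuf::from(path)
}

fn main() {
    let uuid = "af4e2b7c9d1a4d2c8e31a0b4c5d6e7f8"; // made-up segment uuid
    assert_eq!(
        relative_path(uuid, ".idx").to_string_lossy(),
        "af4e2b7c9d1a4d2c8e31a0b4c5d6e7f8.idx"
    );
}
```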
@@ -60,7 +60,7 @@ impl InvertedIndexReader {
             .get_index_record_option()
             .unwrap_or(IndexRecordOption::Basic);
         InvertedIndexReader {
-            termdict: TermDictionary::empty(&field_type),
+            termdict: TermDictionary::empty(),
             postings_source: ReadOnlySource::empty(),
             positions_source: ReadOnlySource::empty(),
             positions_idx_source: ReadOnlySource::empty(),
@@ -14,7 +14,6 @@ use crate::store::StoreReader;
 use crate::termdict::TermMerger;
 use crate::DocAddress;
 use crate::Index;
-use crate::Result;
 use std::fmt;
 use std::sync::Arc;
 
@@ -23,8 +22,8 @@ fn collect_segment<C: Collector>(
     weight: &dyn Weight,
     segment_ord: u32,
     segment_reader: &SegmentReader,
-) -> Result<C::Fruit> {
-    let mut scorer = weight.scorer(segment_reader)?;
+) -> crate::Result<C::Fruit> {
+    let mut scorer = weight.scorer(segment_reader, 1.0f32)?;
     let mut segment_collector = collector.for_segment(segment_ord as u32, segment_reader)?;
     if let Some(delete_bitset) = segment_reader.delete_bitset() {
         scorer.for_each(&mut |doc, score| {
@@ -78,7 +77,7 @@ impl Searcher {
     ///
     /// The searcher uses the segment ordinal to route the
     /// the request to the right `Segment`.
-    pub fn doc(&self, doc_address: DocAddress) -> Result<Document> {
+    pub fn doc(&self, doc_address: DocAddress) -> crate::Result<Document> {
         let DocAddress(segment_local_id, doc_id) = doc_address;
         let store_reader = &self.store_readers[segment_local_id as usize];
         store_reader.get(doc_id)
@@ -132,7 +131,11 @@ impl Searcher {
     ///
     /// Finally, the Collector merges each of the child collectors into itself for result usability
     /// by the caller.
-    pub fn search<C: Collector>(&self, query: &dyn Query, collector: &C) -> Result<C::Fruit> {
+    pub fn search<C: Collector>(
+        &self,
+        query: &dyn Query,
+        collector: &C,
+    ) -> crate::Result<C::Fruit> {
         let executor = self.index.search_executor();
         self.search_with_executor(query, collector, executor)
     }
@@ -154,7 +157,7 @@ impl Searcher {
         query: &dyn Query,
         collector: &C,
         executor: &Executor,
-    ) -> Result<C::Fruit> {
+    ) -> crate::Result<C::Fruit> {
         let scoring_enabled = collector.requires_scoring();
         let weight = query.weight(self, scoring_enabled)?;
         let segment_readers = self.segment_readers();
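`collect_segment` now asks the `Weight` for a scorer with an explicit boost of `1.0` (this is the plumbing behind field boosting, #547), and `search` is a thin wrapper over `search_with_executor`. End-to-end usage is unchanged; a sketch against the 0.12 API with made-up data:

```rust
use tantivy::collector::Count;
use tantivy::query::TermQuery;
use tantivy::schema::{IndexRecordOption, Schema, TEXT};
use tantivy::{doc, Index, Term};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let title = schema_builder.add_text_field("title", TEXT);
    let index = Index::create_in_ram(schema_builder.build());
    let mut writer = index.writer(50_000_000)?;
    writer.add_document(doc!(title => "diary of a young girl"));
    writer.add_document(doc!(title => "diary of a wimpy kid"));
    writer.commit()?;

    let searcher = index.reader()?.searcher();
    let query = TermQuery::new(
        Term::from_field_text(title, "diary"),
        IndexRecordOption::Basic,
    );
    // Runs on the index's executor; one collector fruit per segment,
    // merged by `merge_fruits` at the end.
    let count = searcher.search(&query, &Count)?;
    assert_eq!(count, 2);
    Ok(())
}
```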
@@ -3,19 +3,59 @@ use crate::core::Index;
 use crate::core::SegmentId;
 use crate::core::SegmentMeta;
 use crate::directory::error::{OpenReadError, OpenWriteError};
-use crate::directory::Directory;
-use crate::directory::{ReadOnlyDirectory, ReadOnlySource, WritePtr};
+use crate::directory::{Directory, ManagedDirectory, RAMDirectory};
+use crate::directory::{ReadOnlySource, WritePtr};
 use crate::indexer::segment_serializer::SegmentSerializer;
 use crate::schema::Schema;
 use crate::Opstamp;
 use std::fmt;
+use std::ops::{Deref, DerefMut};
 use std::path::PathBuf;
 
+#[derive(Clone)]
+pub(crate) enum SegmentDirectory {
+    Persisted(ManagedDirectory),
+    Volatile(RAMDirectory),
+}
+
+impl SegmentDirectory {
+    pub fn new_volatile() -> SegmentDirectory {
+        SegmentDirectory::Volatile(RAMDirectory::default())
+    }
+}
+
+impl From<ManagedDirectory> for SegmentDirectory {
+    fn from(directory: ManagedDirectory) -> Self {
+        SegmentDirectory::Persisted(directory)
+    }
+}
+
+impl Deref for SegmentDirectory {
+    type Target = dyn Directory;
+
+    fn deref(&self) -> &Self::Target {
+        match self {
+            SegmentDirectory::Volatile(dir) => dir,
+            SegmentDirectory::Persisted(dir) => dir,
+        }
+    }
+}
+
+impl DerefMut for SegmentDirectory {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        match self {
+            SegmentDirectory::Volatile(dir) => dir,
+            SegmentDirectory::Persisted(dir) => dir,
+        }
+    }
+}
+
 /// A segment is a piece of the index.
 #[derive(Clone)]
 pub struct Segment {
-    index: Index,
+    schema: Schema,
     meta: SegmentMeta,
+    directory: SegmentDirectory,
 }
 
 impl fmt::Debug for Segment {
@@ -24,23 +64,56 @@ impl fmt::Debug for Segment {
     }
 }
 
-/// Creates a new segment given an `Index` and a `SegmentId`
-///
-/// The function is here to make it private outside `tantivy`.
-/// #[doc(hidden)]
-pub fn create_segment(index: Index, meta: SegmentMeta) -> Segment {
-    Segment { index, meta }
-}
-
 impl Segment {
-    /// Returns the index the segment belongs to.
-    pub fn index(&self) -> &Index {
-        &self.index
+    /// Returns our index's schema.
+    // TODO return a ref.
+    pub fn schema(&self) -> Schema {
+        self.schema.clone()
     }
 
-    /// Returns our index's schema.
-    pub fn schema(&self) -> Schema {
-        self.index.schema()
+    pub(crate) fn new_persisted(
+        meta: SegmentMeta,
+        directory: ManagedDirectory,
+        schema: Schema,
+    ) -> Segment {
+        Segment {
+            meta,
+            schema,
+            directory: SegmentDirectory::from(directory),
+        }
+    }
+
+    /// Creates a new segment that embeds its own `RAMDirectory`.
+    ///
+    /// That segment is entirely dissociated from the index directory.
+    /// It will be persisted by a background thread in charge of IO.
+    pub fn new_volatile(meta: SegmentMeta, schema: Schema) -> Segment {
+        Segment {
+            schema,
+            meta,
+            directory: SegmentDirectory::new_volatile(),
+        }
+    }
+
+    /// Creates a new segment given an `Index` and a `SegmentId`
+    pub(crate) fn for_index(index: Index, meta: SegmentMeta) -> Segment {
+        Segment {
+            directory: SegmentDirectory::Persisted(index.directory().clone()),
+            schema: index.schema(),
+            meta,
+        }
+    }
+
+    pub fn persist(&mut self, mut dest_directory: ManagedDirectory) -> crate::Result<()> {
+        if let SegmentDirectory::Persisted(_) = self.directory {
+            // this segment is already persisted.
+            return Ok(());
+        }
+        if let SegmentDirectory::Volatile(ram_directory) = &self.directory {
+            ram_directory.persist(&mut dest_directory)?;
+        }
+        self.directory = SegmentDirectory::Persisted(dest_directory);
+        Ok(())
     }
 
     /// Returns the segment meta-information
@@ -54,7 +127,8 @@ impl Segment {
     /// as we finalize a fresh new segment.
     pub(crate) fn with_max_doc(self, max_doc: u32) -> Segment {
         Segment {
-            index: self.index,
+            directory: self.directory,
+            schema: self.schema,
             meta: self.meta.with_max_doc(max_doc),
         }
     }
@@ -62,7 +136,8 @@ impl Segment {
     #[doc(hidden)]
     pub fn with_delete_meta(self, num_deleted_docs: u32, opstamp: Opstamp) -> Segment {
         Segment {
-            index: self.index,
+            directory: self.directory,
+            schema: self.schema,
             meta: self.meta.with_delete_meta(num_deleted_docs, opstamp),
         }
     }
@@ -83,28 +158,15 @@ impl Segment {
     /// Open one of the component file for a *regular* read.
     pub fn open_read(&self, component: SegmentComponent) -> Result<ReadOnlySource, OpenReadError> {
         let path = self.relative_path(component);
-        let source = self.index.directory().open_read(&path)?;
+        let source = self.directory.open_read(&path)?;
         Ok(source)
     }
 
     /// Open one of the component file for *regular* write.
     pub fn open_write(&mut self, component: SegmentComponent) -> Result<WritePtr, OpenWriteError> {
         let path = self.relative_path(component);
-        self.index.directory_mut().open_write(&path)
-    }
-
-    pub fn open_bundle_writer(&mut self) -> Result<WritePtr, OpenWriteError> {
-        let path = self.meta.relative_path_from_suffix("bundle");
-        self.index.directory_mut().open_write(&path)
-    }
-
-    pub(crate) fn open_write_in_directory(
-        &mut self,
-        component: SegmentComponent,
-        directory: &mut dyn Directory,
-    ) -> Result<WritePtr, OpenWriteError> {
-        let path = self.relative_path(component);
-        directory.open_write(&path)
+        let write = self.directory.open_write(&path)?;
+        Ok(write)
     }
 }
 
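A `Segment` now carries its own `SegmentDirectory`: either the index's persisted `ManagedDirectory`, or a private `RAMDirectory` for freshly written, not-yet-committed segments; `persist` copies a volatile segment's files into a destination directory and flips the variant. Since `SegmentDirectory` and most of the new constructors are `pub(crate)`, the mechanics can only be sketched from outside. A simplified model of the persist state machine, with hypothetical stand-in types:

```rust
// Standalone sketch of the SegmentDirectory state machine (hypothetical
// simplified types; the real variants wrap ManagedDirectory/RAMDirectory).
enum SegmentDirectory {
    Persisted, // files live in the index directory
    Volatile,  // files live in a private RAMDirectory
}

struct Segment {
    directory: SegmentDirectory,
}

impl Segment {
    fn persist(&mut self) {
        if let SegmentDirectory::Persisted = self.directory {
            return; // already persisted: nothing to copy
        }
        // real code: ram_directory.persist(&mut dest_directory)?;
        self.directory = SegmentDirectory::Persisted;
    }
}

fn main() {
    let mut segment = Segment { directory: SegmentDirectory::Volatile };
    segment.persist(); // copies files, then marks the segment persisted
    assert!(matches!(segment.directory, SegmentDirectory::Persisted));
}
```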
@@ -16,7 +16,6 @@ use crate::space_usage::SegmentSpaceUsage;
 use crate::store::StoreReader;
 use crate::termdict::TermMerger;
 use crate::DocId;
-use crate::Result;
 use fail::fail_point;
 use std::collections::HashMap;
 use std::fmt;
@@ -57,6 +56,68 @@ pub struct SegmentReader {
 }
 
 impl SegmentReader {
+    /// Open a new segment for reading.
+    pub fn open(segment: &Segment) -> crate::Result<SegmentReader> {
+        let termdict_source = segment.open_read(SegmentComponent::TERMS)?;
+        let termdict_composite = CompositeFile::open(&termdict_source)?;
+
+        let store_source = segment.open_read(SegmentComponent::STORE)?;
+
+        fail_point!("SegmentReader::open#middle");
+
+        let postings_source = segment.open_read(SegmentComponent::POSTINGS)?;
+        let postings_composite = CompositeFile::open(&postings_source)?;
+
+        let positions_composite = {
+            if let Ok(source) = segment.open_read(SegmentComponent::POSITIONS) {
+                CompositeFile::open(&source)?
+            } else {
+                CompositeFile::empty()
+            }
+        };
+
+        let positions_idx_composite = {
+            if let Ok(source) = segment.open_read(SegmentComponent::POSITIONSSKIP) {
+                CompositeFile::open(&source)?
+            } else {
+                CompositeFile::empty()
+            }
+        };
+
+        let schema = segment.schema();
+
+        let fast_fields_data = segment.open_read(SegmentComponent::FASTFIELDS)?;
+        let fast_fields_composite = CompositeFile::open(&fast_fields_data)?;
+        let fast_field_readers =
+            Arc::new(FastFieldReaders::load_all(&schema, &fast_fields_composite)?);
+
+        let fieldnorms_data = segment.open_read(SegmentComponent::FIELDNORMS)?;
+        let fieldnorms_composite = CompositeFile::open(&fieldnorms_data)?;
+
+        let delete_bitset_opt = if segment.meta().has_deletes() {
+            let delete_data = segment.open_read(SegmentComponent::DELETE)?;
+            Some(DeleteBitSet::open(delete_data))
+        } else {
+            None
+        };
+
+        Ok(SegmentReader {
+            inv_idx_reader_cache: Arc::new(RwLock::new(HashMap::new())),
+            max_doc: segment.meta().max_doc(),
+            num_docs: segment.meta().num_docs(),
+            termdict_composite,
+            postings_composite,
+            fast_fields_readers: fast_field_readers,
+            fieldnorms_composite,
+            segment_id: segment.id(),
+            store_source,
+            delete_bitset_opt,
+            positions_composite,
+            positions_idx_composite,
+            schema,
+        })
+    }
+
     /// Returns the highest document id ever attributed in
     /// this segment + 1.
     /// Today, `tantivy` does not handle deletes, so it happens
@@ -143,68 +204,68 @@ impl SegmentReader {
     pub fn get_store_reader(&self) -> StoreReader {
         StoreReader::from_source(self.store_source.clone())
     }
-
-    /// Open a new segment for reading.
-    pub fn open(segment: &Segment) -> Result<SegmentReader> {
-        let termdict_source = segment.open_read(SegmentComponent::TERMS)?;
-        let termdict_composite = CompositeFile::open(&termdict_source)?;
-
-        let store_source = segment.open_read(SegmentComponent::STORE)?;
-
-        fail_point!("SegmentReader::open#middle");
-
-        let postings_source = segment.open_read(SegmentComponent::POSTINGS)?;
-        let postings_composite = CompositeFile::open(&postings_source)?;
-
-        let positions_composite = {
-            if let Ok(source) = segment.open_read(SegmentComponent::POSITIONS) {
-                CompositeFile::open(&source)?
-            } else {
-                CompositeFile::empty()
-            }
-        };
-
-        let positions_idx_composite = {
-            if let Ok(source) = segment.open_read(SegmentComponent::POSITIONSSKIP) {
-                CompositeFile::open(&source)?
-            } else {
-                CompositeFile::empty()
-            }
-        };
-
-        let schema = segment.schema();
-
-        let fast_fields_data = segment.open_read(SegmentComponent::FASTFIELDS)?;
-        let fast_fields_composite = CompositeFile::open(&fast_fields_data)?;
-        let fast_field_readers =
-            Arc::new(FastFieldReaders::load_all(&schema, &fast_fields_composite)?);
-
-        let fieldnorms_data = segment.open_read(SegmentComponent::FIELDNORMS)?;
-        let fieldnorms_composite = CompositeFile::open(&fieldnorms_data)?;
-
-        let delete_bitset_opt = if segment.meta().has_deletes() {
-            let delete_data = segment.open_read(SegmentComponent::DELETE)?;
-            Some(DeleteBitSet::open(delete_data))
-        } else {
-            None
-        };
-
-        Ok(SegmentReader {
-            inv_idx_reader_cache: Arc::new(RwLock::new(HashMap::new())),
-            max_doc: segment.meta().max_doc(),
-            num_docs: segment.meta().num_docs(),
-            termdict_composite,
-            postings_composite,
-            fast_fields_readers: fast_field_readers,
-            fieldnorms_composite,
-            segment_id: segment.id(),
-            store_source,
-            delete_bitset_opt,
-            positions_composite,
-            positions_idx_composite,
-            schema,
-        })
-    }
+    //
+    // /// Open a new segment for reading.
+    // pub fn open(segment: &Segment) -> crate::Result<SegmentReader> {
+    //     let termdict_source = segment.open_read(SegmentComponent::TERMS)?;
+    //     let termdict_composite = CompositeFile::open(&termdict_source)?;
+    //
+    //     let store_source = segment.open_read(SegmentComponent::STORE)?;
+    //
+    //     fail_point!("SegmentReader::open#middle");
+    //
+    //     let postings_source = segment.open_read(SegmentComponent::POSTINGS)?;
+    //     let postings_composite = CompositeFile::open(&postings_source)?;
+    //
+    //     let positions_composite = {
+    //         if let Ok(source) = segment.open_read(SegmentComponent::POSITIONS) {
+    //             CompositeFile::open(&source)?
+    //         } else {
+    //             CompositeFile::empty()
+    //         }
+    //     };
+    //
+    //     let positions_idx_composite = {
+    //         if let Ok(source) = segment.open_read(SegmentComponent::POSITIONSSKIP) {
+    //             CompositeFile::open(&source)?
+    //         } else {
+    //             CompositeFile::empty()
+    //         }
+    //     };
+    //
+    //     let schema = segment.schema();
+    //
+    //     let fast_fields_data = segment.open_read(SegmentComponent::FASTFIELDS)?;
+    //     let fast_fields_composite = CompositeFile::open(&fast_fields_data)?;
+    //     let fast_field_readers =
+    //         Arc::new(FastFieldReaders::load_all(&schema, &fast_fields_composite)?);
+    //
+    //     let fieldnorms_data = segment.open_read(SegmentComponent::FIELDNORMS)?;
+    //     let fieldnorms_composite = CompositeFile::open(&fieldnorms_data)?;
+    //
+    //     let delete_bitset_opt = if segment.meta().has_deletes() {
+    //         let delete_data = segment.open_read(SegmentComponent::DELETE)?;
+    //         Some(DeleteBitSet::open(delete_data))
+    //     } else {
+    //         None
+    //     };
+    //
+    //     Ok(SegmentReader {
+    //         inv_idx_reader_cache: Arc::new(RwLock::new(HashMap::new())),
+    //         max_doc: segment.meta().max_doc(),
+    //         num_docs: segment.meta().num_docs(),
+    //         termdict_composite,
+    //         postings_composite,
+    //         fast_fields_readers: fast_field_readers,
+    //         fieldnorms_composite,
+    //         segment_id: segment.id(),
+    //         store_source,
+    //         delete_bitset_opt,
+    //         positions_composite,
+    //         positions_idx_composite,
+    //         schema,
+    //     })
+    // }
 
     /// Returns a field reader associated to the field given in argument.
     /// If the field was not present in the index during indexing time,
@@ -1,97 +0,0 @@
-use crate::directory::directory::ReadOnlyDirectory;
-use crate::directory::error::OpenReadError;
-use crate::directory::ReadOnlySource;
-use crate::error::DataCorruption;
-use std::collections::HashMap;
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
-
-#[derive(Clone)]
-struct BundleDirectory {
-    source_map: Arc<HashMap<PathBuf, ReadOnlySource>>,
-}
-
-impl BundleDirectory {
-    pub fn from_source(source: ReadOnlySource) -> Result<BundleDirectory, DataCorruption> {
-        let mut index_offset_buf = [0u8; 8];
-        let (body_idx, footer_offset) = source.split_from_end(8);
-        index_offset_buf.copy_from_slice(footer_offset.as_slice());
-        let offset = u64::from_le_bytes(index_offset_buf);
-        let (body_source, idx_source) = body_idx.split(offset as usize);
-        let idx: HashMap<PathBuf, (u64, u64)> = serde_json::from_slice(idx_source.as_slice())
-            .map_err(|err| {
-                let msg = format!("Failed to read index from bundle. {:?}", err);
-                DataCorruption::comment_only(msg)
-            })?;
-        let source_map: HashMap<PathBuf, ReadOnlySource> = idx
-            .into_iter()
-            .map(|(path, (start, stop))| {
-                let source = body_source.slice(start as usize, stop as usize);
-                (path, source)
-            })
-            .collect();
-        Ok(BundleDirectory {
-            source_map: Arc::new(source_map),
-        })
-    }
-}
-
-impl ReadOnlyDirectory for BundleDirectory {
-    fn open_read(&self, path: &Path) -> Result<ReadOnlySource, OpenReadError> {
-        self.source_map
-            .get(path)
-            .cloned()
-            .ok_or_else(|| OpenReadError::FileDoesNotExist(path.to_path_buf()))
-    }
-
-    fn exists(&self, path: &Path) -> bool {
-        self.source_map.contains_key(path)
-    }
-
-    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError> {
-        let source = self
-            .source_map
-            .get(path)
-            .ok_or_else(|| OpenReadError::FileDoesNotExist(path.to_path_buf()))?;
-        Ok(source.as_slice().to_vec())
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::BundleDirectory;
-    use crate::directory::{RAMDirectory, ReadOnlyDirectory, TerminatingWrite};
-    use crate::Directory;
-    use std::io::Write;
-    use std::path::Path;
-
-    #[test]
-    fn test_bundle_directory() {
-        let mut ram_directory = RAMDirectory::default();
-        let test_path_atomic = Path::new("testpath_atomic");
-        let test_path_wrt = Path::new("testpath_wrt");
-        assert!(ram_directory
-            .atomic_write(test_path_atomic, b"titi")
-            .is_ok());
-        {
-            let mut test_wrt = ram_directory.open_write(test_path_wrt).unwrap();
-            assert!(test_wrt.write_all(b"toto").is_ok());
-            assert!(test_wrt.terminate().is_ok());
-        }
-        let mut dest_directory = RAMDirectory::default();
-        let bundle_path = Path::new("bundle");
-        let mut wrt = dest_directory.open_write(bundle_path).unwrap();
-        assert!(ram_directory.serialize_bundle(&mut wrt).is_ok());
-        assert!(wrt.terminate().is_ok());
-        let source = dest_directory.open_read(bundle_path).unwrap();
-        let bundle_directory = BundleDirectory::from_source(source).unwrap();
-        assert_eq!(
-            &bundle_directory.atomic_read(test_path_atomic).unwrap()[..],
-            b"titi"
-        );
-        assert_eq!(
-            &bundle_directory.open_read(test_path_wrt).unwrap()[..],
-            b"toto"
-        );
-    }
-}
@@ -100,7 +100,17 @@ fn retry_policy(is_blocking: bool) -> RetryPolicy {
     }
 }
 
-pub trait ReadOnlyDirectory {
+/// Write-once read many (WORM) abstraction for where
+/// tantivy's data should be stored.
+///
+/// There are currently two implementations of `Directory`
+///
+/// - The [`MMapDirectory`](struct.MmapDirectory.html), this
+/// should be your default choice.
+/// - The [`RAMDirectory`](struct.RAMDirectory.html), which
+/// should be used mostly for tests.
+///
+pub trait Directory: DirectoryClone + fmt::Debug + Send + Sync + 'static {
     /// Opens a virtual file for read.
     ///
     /// Once a virtual file is open, its data may not
@@ -112,31 +122,6 @@ pub trait ReadOnlyDirectory {
     /// You should only use this to read files create with [Directory::open_write].
     fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError>;
 
-    /// Returns true iff the file exists
-    fn exists(&self, path: &Path) -> bool;
-
-    /// Reads the full content file that has been written using
-    /// atomic_write.
-    ///
-    /// This should only be used for small files.
-    ///
-    /// You should only use this to read files create with [Directory::atomic_write].
-    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError>;
-}
-
-/// Write-once read many (WORM) abstraction for where
-/// tantivy's data should be stored.
-///
-/// There are currently two implementations of `Directory`
-///
-/// - The [`MMapDirectory`](struct.MmapDirectory.html), this
-/// should be your default choice.
-/// - The [`RAMDirectory`](struct.RAMDirectory.html), which
-/// should be used mostly for tests.
-///
-pub trait Directory:
-    DirectoryClone + ReadOnlyDirectory + fmt::Debug + Send + Sync + 'static
-{
     /// Removes a file
     ///
     /// Removing a file will not affect an eventual
@@ -146,6 +131,9 @@ pub trait Directory:
     /// `DeleteError::DoesNotExist`.
     fn delete(&self, path: &Path) -> result::Result<(), DeleteError>;
 
+    /// Returns true iff the file exists
+    fn exists(&self, path: &Path) -> bool;
+
     /// Opens a writer for the *virtual file* associated with
     /// a Path.
     ///
@@ -167,6 +155,14 @@ pub trait Directory:
     /// The file may not previously exist.
     fn open_write(&mut self, path: &Path) -> Result<WritePtr, OpenWriteError>;
 
+    /// Reads the full content file that has been written using
+    /// atomic_write.
+    ///
+    /// This should only be used for small files.
+    ///
+    /// You should only use this to read files create with [Directory::atomic_write].
+    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError>;
+
     /// Atomically replace the content of a file with data.
     ///
     /// This calls ensure that reads can never *observe*
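With `ReadOnlyDirectory` folded back into `Directory`, the read-side methods (`open_read`, `exists`, `atomic_read`) live on the same trait as the write-side ones again. A usage sketch with `RAMDirectory`, using only the methods shown in the hunks above:

```rust
use tantivy::directory::RAMDirectory;
use tantivy::Directory;
use std::path::Path;

fn main() -> tantivy::Result<()> {
    let mut directory = RAMDirectory::create();
    let path = Path::new("meta.json");
    // Write-side and read-side methods are on one trait object now.
    directory.atomic_write(path, b"{}")?;
    assert!(directory.exists(path));
    assert_eq!(directory.atomic_read(path)?, b"{}".to_vec());
    Ok(())
}
```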
@@ -8,6 +8,8 @@ use crc32fast::Hasher;
|
|||||||
use std::io;
|
use std::io;
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
|
|
||||||
|
const FOOTER_MAX_LEN: usize = 10_000;
|
||||||
|
|
||||||
type CrcHashU32 = u32;
|
type CrcHashU32 = u32;
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
@@ -143,12 +145,23 @@ impl BinarySerializable for VersionedFooter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
BinarySerializable::serialize(&VInt(buf.len() as u64), writer)?;
|
BinarySerializable::serialize(&VInt(buf.len() as u64), writer)?;
|
||||||
|
assert!(buf.len() <= FOOTER_MAX_LEN);
|
||||||
writer.write_all(&buf[..])?;
|
writer.write_all(&buf[..])?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
let len = VInt::deserialize(reader)?.0 as usize;
|
let len = VInt::deserialize(reader)?.0 as usize;
|
||||||
|
if len > FOOTER_MAX_LEN {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::InvalidData,
|
||||||
|
format!(
|
||||||
|
"Footer seems invalid as it suggests a footer len of {}. File is corrupted, \
|
||||||
|
or the index was created with a different & old version of tantivy.",
|
||||||
|
len
|
||||||
|
),
|
||||||
|
));
|
||||||
|
}
|
||||||
let mut buf = vec![0u8; len];
|
let mut buf = vec![0u8; len];
|
||||||
reader.read_exact(&mut buf[..])?;
|
reader.read_exact(&mut buf[..])?;
|
||||||
let mut cursor = &buf[..];
|
let mut cursor = &buf[..];
|
||||||
@@ -221,11 +234,12 @@ mod tests {
 
     use super::CrcHashU32;
     use super::FooterProxy;
-    use crate::common::BinarySerializable;
+    use crate::common::{BinarySerializable, VInt};
     use crate::directory::footer::{Footer, VersionedFooter};
     use crate::directory::TerminatingWrite;
     use byteorder::{ByteOrder, LittleEndian};
     use regex::Regex;
+    use std::io;
 
     #[test]
     fn test_versioned_footer() {

@@ -336,4 +350,20 @@ mod tests {
         let res = footer.is_compatible();
         assert!(res.is_err());
     }
+
+    #[test]
+    fn test_deserialize_too_large_footer() {
+        let mut buf = vec![];
+        assert!(FooterProxy::new(&mut buf).terminate().is_ok());
+        let mut long_len_buf = [0u8; 10];
+        let num_bytes = VInt(super::FOOTER_MAX_LEN as u64 + 1u64).serialize_into(&mut long_len_buf);
+        buf[0..num_bytes].copy_from_slice(&long_len_buf[..num_bytes]);
+        let err = Footer::deserialize(&mut &buf[..]).unwrap_err();
+        assert_eq!(err.kind(), io::ErrorKind::InvalidData);
+        assert_eq!(
+            err.to_string(),
+            "Footer seems invalid as it suggests a footer len of 10001. File is corrupted, \
+             or the index was created with a different & old version of tantivy."
+        );
+    }
 }

@@ -10,7 +10,6 @@ use crate::directory::{WatchCallback, WatchHandle};
 use crate::error::DataCorruption;
 use crate::Directory;
 
-use crate::directory::directory::ReadOnlyDirectory;
 use crc32fast::Hasher;
 use serde_json;
 use std::collections::HashSet;

@@ -151,7 +150,7 @@ impl ManagedDirectory {
         }
         Err(err) => {
             error!("Failed to acquire lock for GC");
-            return Err(crate::Error::from(err));
+            return Err(crate::TantivyError::from(err));
         }
     }
 }

@@ -265,6 +264,14 @@ impl ManagedDirectory {
 }
 
 impl Directory for ManagedDirectory {
+    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
+        let read_only_source = self.directory.open_read(path)?;
+        let (footer, reader) = Footer::extract_footer(read_only_source)
+            .map_err(|err| IOError::with_path(path.to_path_buf(), err))?;
+        footer.is_compatible()?;
+        Ok(reader)
+    }
+
     fn open_write(&mut self, path: &Path) -> result::Result<WritePtr, OpenWriteError> {
         self.register_file_as_managed(path)
             .map_err(|e| IOError::with_path(path.to_owned(), e))?;

@@ -282,10 +289,18 @@ impl Directory for ManagedDirectory {
         self.directory.atomic_write(path, data)
     }
 
+    fn atomic_read(&self, path: &Path) -> result::Result<Vec<u8>, OpenReadError> {
+        self.directory.atomic_read(path)
+    }
+
     fn delete(&self, path: &Path) -> result::Result<(), DeleteError> {
         self.directory.delete(path)
     }
 
+    fn exists(&self, path: &Path) -> bool {
+        self.directory.exists(path)
+    }
+
     fn acquire_lock(&self, lock: &Lock) -> result::Result<DirectoryLock, LockError> {
         self.directory.acquire_lock(lock)
     }

@@ -295,24 +310,6 @@ impl Directory for ManagedDirectory {
     }
 }
 
-impl ReadOnlyDirectory for ManagedDirectory {
-    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
-        let read_only_source = self.directory.open_read(path)?;
-        let (footer, reader) = Footer::extract_footer(read_only_source)
-            .map_err(|err| IOError::with_path(path.to_path_buf(), err))?;
-        footer.is_compatible()?;
-        Ok(reader)
-    }
-
-    fn exists(&self, path: &Path) -> bool {
-        self.directory.exists(path)
-    }
-
-    fn atomic_read(&self, path: &Path) -> result::Result<Vec<u8>, OpenReadError> {
-        self.directory.atomic_read(path)
-    }
-}
-
 impl Clone for ManagedDirectory {
     fn clone(&self) -> ManagedDirectory {
         ManagedDirectory {

@@ -326,9 +323,7 @@ impl Clone for ManagedDirectory {
 #[cfg(test)]
 mod tests_mmap_specific {
 
-    use crate::directory::{
-        Directory, ManagedDirectory, MmapDirectory, ReadOnlyDirectory, TerminatingWrite,
-    };
+    use crate::directory::{Directory, ManagedDirectory, MmapDirectory, TerminatingWrite};
    use std::collections::HashSet;
    use std::fs::OpenOptions;
    use std::io::Write;
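
The recurring shape of this commit is that the `ReadOnlyDirectory` methods fold back into `Directory`, with `ManagedDirectory` forwarding them to its wrapped directory. A reduced sketch of that delegation pattern (hypothetical trait and error types, not the real signatures):

    trait Dir {
        fn exists(&self, path: &str) -> bool;
        fn atomic_read(&self, path: &str) -> Result<Vec<u8>, String>;
    }

    struct Managed<D: Dir> {
        inner: D,
    }

    impl<D: Dir> Dir for Managed<D> {
        // The managed layer only tracks files for garbage collection,
        // so read-side methods forward untouched.
        fn exists(&self, path: &str) -> bool {
            self.inner.exists(path)
        }

        fn atomic_read(&self, path: &str) -> Result<Vec<u8>, String> {
            self.inner.atomic_read(path)
        }
    }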

@@ -6,7 +6,6 @@ use self::notify::RawEvent;
 use self::notify::RecursiveMode;
 use self::notify::Watcher;
 use crate::core::META_FILEPATH;
-use crate::directory::directory::ReadOnlyDirectory;
 use crate::directory::error::LockError;
 use crate::directory::error::{
     DeleteError, IOError, OpenDirectoryError, OpenReadError, OpenWriteError,

@@ -408,6 +407,24 @@ impl TerminatingWrite for SafeFileWriter {
 }
 
 impl Directory for MmapDirectory {
+    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
+        debug!("Open Read {:?}", path);
+        let full_path = self.resolve_path(path);
+
+        let mut mmap_cache = self.inner.mmap_cache.write().map_err(|_| {
+            let msg = format!(
+                "Failed to acquired write lock \
+                 on mmap cache while reading {:?}",
+                path
+            );
+            IOError::with_path(path.to_owned(), make_io_err(msg))
+        })?;
+        Ok(mmap_cache
+            .get_mmap(&full_path)?
+            .map(ReadOnlySource::from)
+            .unwrap_or_else(ReadOnlySource::empty))
+    }
+
     /// Any entry associated to the path in the mmap will be
     /// removed before the file is deleted.
     fn delete(&self, path: &Path) -> result::Result<(), DeleteError> {

@@ -426,6 +443,11 @@ impl Directory for MmapDirectory {
         }
     }
 
+    fn exists(&self, path: &Path) -> bool {
+        let full_path = self.resolve_path(path);
+        full_path.exists()
+    }
+
     fn open_write(&mut self, path: &Path) -> Result<WritePtr, OpenWriteError> {
         debug!("Open Write {:?}", path);
         let full_path = self.resolve_path(path);

@@ -456,6 +478,25 @@ impl Directory for MmapDirectory {
         Ok(BufWriter::new(Box::new(writer)))
     }
 
+    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError> {
+        let full_path = self.resolve_path(path);
+        let mut buffer = Vec::new();
+        match File::open(&full_path) {
+            Ok(mut file) => {
+                file.read_to_end(&mut buffer)
+                    .map_err(|e| IOError::with_path(path.to_owned(), e))?;
+                Ok(buffer)
+            }
+            Err(e) => {
+                if e.kind() == io::ErrorKind::NotFound {
+                    Err(OpenReadError::FileDoesNotExist(path.to_owned()))
+                } else {
+                    Err(IOError::with_path(path.to_owned(), e).into())
+                }
+            }
+        }
+    }
+
     fn atomic_write(&mut self, path: &Path, data: &[u8]) -> io::Result<()> {
         debug!("Atomic Write {:?}", path);
         let full_path = self.resolve_path(path);

@@ -489,50 +530,6 @@ impl Directory for MmapDirectory {
     }
 }
 
-impl ReadOnlyDirectory for MmapDirectory {
-    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
-        debug!("Open Read {:?}", path);
-        let full_path = self.resolve_path(path);
-
-        let mut mmap_cache = self.inner.mmap_cache.write().map_err(|_| {
-            let msg = format!(
-                "Failed to acquired write lock \
-                 on mmap cache while reading {:?}",
-                path
-            );
-            IOError::with_path(path.to_owned(), make_io_err(msg))
-        })?;
-        Ok(mmap_cache
-            .get_mmap(&full_path)?
-            .map(ReadOnlySource::from)
-            .unwrap_or_else(ReadOnlySource::empty))
-    }
-
-    fn exists(&self, path: &Path) -> bool {
-        let full_path = self.resolve_path(path);
-        full_path.exists()
-    }
-
-    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError> {
-        let full_path = self.resolve_path(path);
-        let mut buffer = Vec::new();
-        match File::open(&full_path) {
-            Ok(mut file) => {
-                file.read_to_end(&mut buffer)
-                    .map_err(|e| IOError::with_path(path.to_owned(), e))?;
-                Ok(buffer)
-            }
-            Err(e) => {
-                if e.kind() == io::ErrorKind::NotFound {
-                    Err(OpenReadError::FileDoesNotExist(path.to_owned()))
-                } else {
-                    Err(IOError::with_path(path.to_owned(), e).into())
-                }
-            }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
 
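
The moved atomic_read distinguishes a missing file from any other I/O failure. A standalone sketch of that error-splitting idiom (our own types, not tantivy's):

    use std::fs::File;
    use std::io::{self, Read};
    use std::path::Path;

    enum ReadError {
        DoesNotExist,
        Io(io::Error),
    }

    fn read_all(path: &Path) -> Result<Vec<u8>, ReadError> {
        let mut file = match File::open(path) {
            Ok(f) => f,
            // NotFound becomes a typed "missing file" error; everything else
            // stays an opaque I/O error for the caller to report.
            Err(e) if e.kind() == io::ErrorKind::NotFound => return Err(ReadError::DoesNotExist),
            Err(e) => return Err(ReadError::Io(e)),
        };
        let mut buf = Vec::new();
        file.read_to_end(&mut buf).map_err(ReadError::Io)?;
        Ok(buf)
    }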

@@ -7,7 +7,6 @@ WORM directory abstraction.
 #[cfg(feature = "mmap")]
 mod mmap_directory;
 
-mod bundle_directory;
 mod directory;
 mod directory_lock;
 mod footer;

@@ -20,7 +19,7 @@ mod watch_event_router;
 pub mod error;
 
 pub use self::directory::DirectoryLock;
-pub use self::directory::{Directory, DirectoryClone, ReadOnlyDirectory};
+pub use self::directory::{Directory, DirectoryClone};
 pub use self::directory_lock::{Lock, INDEX_WRITER_LOCK, META_LOCK};
 pub use self::ram_directory::RAMDirectory;
 pub use self::read_only_source::ReadOnlySource;

@@ -1,6 +1,4 @@
-use crate::common::CountingWriter;
 use crate::core::META_FILEPATH;
-use crate::directory::directory::ReadOnlyDirectory;
 use crate::directory::error::{DeleteError, OpenReadError, OpenWriteError};
 use crate::directory::AntiCallToken;
 use crate::directory::WatchCallbackList;

@@ -117,22 +115,6 @@ impl InnerDirectory {
     fn total_mem_usage(&self) -> usize {
         self.fs.values().map(|f| f.len()).sum()
     }
-
-    fn serialize_bundle(&self, wrt: &mut WritePtr) -> io::Result<()> {
-        let mut counting_writer = CountingWriter::wrap(wrt);
-        let mut file_index: HashMap<PathBuf, (u64, u64)> = HashMap::default();
-        for (path, source) in &self.fs {
-            let start = counting_writer.written_bytes();
-            counting_writer.write_all(source.as_slice())?;
-            let stop = counting_writer.written_bytes();
-            file_index.insert(path.to_path_buf(), (start, stop));
-        }
-        let index_offset = counting_writer.written_bytes();
-        serde_json::to_writer(&mut counting_writer, &file_index)?;
-        let index_offset_buffer = index_offset.to_le_bytes();
-        counting_writer.write_all(&index_offset_buffer[..])?;
-        Ok(())
-    }
 }
 
 impl fmt::Debug for RAMDirectory {

@@ -163,17 +145,22 @@ impl RAMDirectory {
         self.fs.read().unwrap().total_mem_usage()
     }
 
-    /// Serialize the RAMDirectory into a bundle.
-    ///
-    /// This method will fail, write nothing, and return an error if a
-    /// clone of this repository exists.
-    pub fn serialize_bundle(self, wrt: &mut WritePtr) -> io::Result<()> {
-        let inner_directory_rlock = self.fs.read().unwrap();
-        inner_directory_rlock.serialize_bundle(wrt)
-    }
+    pub fn persist(&self, dest: &mut dyn Directory) -> crate::Result<()> {
+        let wlock = self.fs.write().unwrap();
+        for (path, source) in wlock.fs.iter() {
+            let mut dest_wrt = dest.open_write(path)?;
+            dest_wrt.write_all(source.as_slice())?;
+            dest_wrt.terminate()?;
+        }
+        Ok(())
+    }
 }
 
 impl Directory for RAMDirectory {
+    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
+        self.fs.read().unwrap().open_read(path)
+    }
+
     fn delete(&self, path: &Path) -> result::Result<(), DeleteError> {
         fail_point!("RAMDirectory::delete", |_| {
             use crate::directory::error::IOError;

@@ -183,6 +170,10 @@ impl Directory for RAMDirectory {
         self.fs.write().unwrap().delete(path)
     }
 
+    fn exists(&self, path: &Path) -> bool {
+        self.fs.read().unwrap().exists(path)
+    }
+
     fn open_write(&mut self, path: &Path) -> Result<WritePtr, OpenWriteError> {
         let mut fs = self.fs.write().unwrap();
         let path_buf = PathBuf::from(path);

@@ -196,6 +187,10 @@ impl Directory for RAMDirectory {
         }
     }
 
+    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError> {
+        Ok(self.open_read(path)?.as_slice().to_owned())
+    }
+
     fn atomic_write(&mut self, path: &Path, data: &[u8]) -> io::Result<()> {
         fail_point!("RAMDirectory::atomic_write", |msg| Err(io::Error::new(
             io::ErrorKind::Other,

@@ -219,17 +214,3 @@ impl Directory for RAMDirectory {
         Ok(self.fs.write().unwrap().watch(watch_callback))
     }
 }
-
-impl ReadOnlyDirectory for RAMDirectory {
-    fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError> {
-        self.fs.read().unwrap().open_read(path)
-    }
-
-    fn exists(&self, path: &Path) -> bool {
-        self.fs.read().unwrap().exists(path)
-    }
-
-    fn atomic_read(&self, path: &Path) -> Result<Vec<u8>, OpenReadError> {
-        Ok(self.open_read(path)?.as_slice().to_owned())
-    }
-}
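
persist replaces the dropped bundle serialization: every in-memory file is replayed through the destination's normal open_write/terminate path. A hedged usage sketch of persist as introduced on this branch:

    use std::path::Path;

    use tantivy::directory::{Directory, MmapDirectory, RAMDirectory};

    fn copy_out() -> tantivy::Result<()> {
        let mut ram = RAMDirectory::create();
        ram.atomic_write(Path::new("hello.txt"), b"hi")?;
        // Copy every file of the RAMDirectory into an on-disk directory.
        // "./index_dir" is an assumption: the target directory must exist.
        let mut on_disk = MmapDirectory::open("./index_dir")?;
        ram.persist(&mut on_disk)?;
        Ok(())
    }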

@@ -25,10 +25,10 @@ impl DataCorruption {
     }
 }
 
-pub fn comment_only<TS: ToString>(comment: TS) -> DataCorruption {
+pub fn comment_only(comment: String) -> DataCorruption {
     DataCorruption {
         filepath: None,
-        comment: comment.to_string(),
+        comment,
     }
 }
 }

@@ -179,7 +179,7 @@ mod tests {
 
 use super::*;
 use crate::common::CompositeFile;
-use crate::directory::{Directory, RAMDirectory, ReadOnlyDirectory, WritePtr};
+use crate::directory::{Directory, RAMDirectory, WritePtr};
 use crate::fastfield::FastFieldReader;
 use crate::merge_policy::NoMergePolicy;
 use crate::schema::Field;

@@ -7,9 +7,6 @@ pub use self::writer::MultiValueIntFastFieldWriter;
 #[cfg(test)]
 mod tests {
 
-    use time;
-
-    use self::time::Duration;
     use crate::collector::TopDocs;
     use crate::query::QueryParser;
     use crate::schema::Cardinality;

@@ -17,6 +14,7 @@ mod tests {
     use crate::schema::IntOptions;
     use crate::schema::Schema;
     use crate::Index;
+    use chrono::Duration;
 
     #[test]
     fn test_multivalued_u64() {

@@ -4,7 +4,7 @@ use crate::common::compute_num_bits;
 use crate::common::BinarySerializable;
 use crate::common::CompositeFile;
 use crate::directory::ReadOnlySource;
-use crate::directory::{Directory, RAMDirectory, ReadOnlyDirectory, WritePtr};
+use crate::directory::{Directory, RAMDirectory, WritePtr};
 use crate::fastfield::{FastFieldSerializer, FastFieldsWriter};
 use crate::schema::Schema;
 use crate::schema::FAST;

@@ -4,7 +4,6 @@ use crate::fastfield::MultiValueIntFastFieldReader;
 use crate::fastfield::{FastFieldNotAvailableError, FastFieldReader};
 use crate::schema::{Cardinality, Field, FieldType, Schema};
 use crate::space_usage::PerFieldSpaceUsage;
-use crate::Result;
 use std::collections::HashMap;
 
 /// Provides access to all of the FastFieldReader.

@@ -54,7 +53,7 @@ impl FastFieldReaders {
     pub(crate) fn load_all(
         schema: &Schema,
         fast_fields_composite: &CompositeFile,
-    ) -> Result<FastFieldReaders> {
+    ) -> crate::Result<FastFieldReaders> {
         let mut fast_field_readers = FastFieldReaders {
             fast_field_i64: Default::default(),
             fast_field_u64: Default::default(),

@@ -8,30 +8,33 @@ use crate::core::SegmentComponent;
 use crate::core::SegmentId;
 use crate::core::SegmentMeta;
 use crate::core::SegmentReader;
-use crate::directory::TerminatingWrite;
 use crate::directory::{DirectoryLock, GarbageCollectionResult};
+use crate::directory::{TerminatingWrite, WatchCallbackList};
 use crate::docset::DocSet;
 use crate::error::TantivyError;
 use crate::fastfield::write_delete_bitset;
 use crate::indexer::delete_queue::{DeleteCursor, DeleteQueue};
 use crate::indexer::doc_opstamp_mapping::DocToOpstampMapping;
 use crate::indexer::operation::DeleteOperation;
+use crate::indexer::segment_manager::SegmentRegisters;
+use crate::indexer::segment_register::SegmentRegister;
 use crate::indexer::stamper::Stamper;
 use crate::indexer::MergePolicy;
 use crate::indexer::SegmentEntry;
 use crate::indexer::SegmentWriter;
+use crate::reader::NRTReader;
 use crate::schema::Document;
 use crate::schema::IndexRecordOption;
 use crate::schema::Term;
-use crate::Opstamp;
+use crate::tokenizer::TokenizerManager;
+use crate::{IndexReader, Opstamp};
 use crossbeam::channel;
 use futures::executor::block_on;
 use futures::future::Future;
-use smallvec::smallvec;
-use smallvec::SmallVec;
+use smallvec::{smallvec, SmallVec};
 use std::mem;
 use std::ops::Range;
-use std::sync::Arc;
+use std::sync::{Arc, RwLock};
 use std::thread;
 use std::thread::JoinHandle;
@@ -68,6 +71,8 @@ pub struct IndexWriter {
     // lifetime of the lock with that of the IndexWriter.
     _directory_lock: Option<DirectoryLock>,
 
+    segment_registers: Arc<RwLock<SegmentRegisters>>,
+
     index: Index,
 
     heap_size_in_bytes_per_thread: usize,

@@ -87,6 +92,8 @@ pub struct IndexWriter {
 
     stamper: Stamper,
     committed_opstamp: Opstamp,
+
+    on_commit: WatchCallbackList,
 }
 
 fn compute_deleted_bitset(

@@ -133,7 +140,6 @@ fn compute_deleted_bitset(
 /// For instance, there was no delete operation between the state of the `segment_entry` and
 /// the `target_opstamp`, `segment_entry` is not updated.
 pub(crate) fn advance_deletes(
-    mut segment: Segment,
     segment_entry: &mut SegmentEntry,
     target_opstamp: Opstamp,
 ) -> crate::Result<()> {

@@ -142,28 +148,38 @@ pub(crate) fn advance_deletes(
         return Ok(());
     }
 
-    if segment_entry.delete_bitset().is_none() && segment_entry.delete_cursor().get().is_none() {
+    let delete_bitset_opt = segment_entry.take_delete_bitset();
+
+    // We avoid directly advancing the `SegmentEntry` delete cursor, because
+    // we do not want to end up in an invalid state if the delete bitset
+    // serialization fails.
+    let mut delete_cursor = segment_entry.delete_cursor();
+
+    if delete_bitset_opt.is_none() && delete_cursor.get().is_none() {
         // There has been no `DeleteOperation` between the segment status and `target_opstamp`.
         return Ok(());
     }
 
+    // We open our current serialized segment to compute the new deleted bitset.
+    let segment = segment_entry.segment().clone();
     let segment_reader = SegmentReader::open(&segment)?;
 
     let max_doc = segment_reader.max_doc();
-    let mut delete_bitset: BitSet = match segment_entry.delete_bitset() {
-        Some(previous_delete_bitset) => (*previous_delete_bitset).clone(),
-        None => BitSet::with_max_value(max_doc),
-    };
+    let mut delete_bitset: BitSet =
+        delete_bitset_opt.unwrap_or_else(|| BitSet::with_max_value(max_doc));
+    let num_deleted_docs_before = segment.meta().num_deleted_docs();
 
     compute_deleted_bitset(
         &mut delete_bitset,
         &segment_reader,
-        segment_entry.delete_cursor(),
+        &mut delete_cursor,
         &DocToOpstampMapping::None,
         target_opstamp,
     )?;
 
-    // TODO optimize
+    // TODO optimize... We are simply manipulating bitsets here.
+    // We should be able to compute the union much faster.
     if let Some(seg_delete_bitset) = segment_reader.delete_bitset() {
         for doc in 0u32..max_doc {
             if seg_delete_bitset.is_deleted(doc) {

@@ -172,15 +188,23 @@ pub(crate) fn advance_deletes(
             }
         }
 
-    let num_deleted_docs = delete_bitset.len();
-    if num_deleted_docs > 0 {
-        segment = segment.with_delete_meta(num_deleted_docs as u32, target_opstamp);
-        let mut delete_file = segment.open_write(SegmentComponent::DELETE)?;
+    let num_deleted_docs = delete_bitset.len() as u32;
+    if num_deleted_docs > num_deleted_docs_before {
+        // We need to write a new delete file.
+        let mut delete_file = segment
+            .with_delete_meta(num_deleted_docs, target_opstamp)
+            .open_write(SegmentComponent::DELETE)?;
         write_delete_bitset(&delete_bitset, max_doc, &mut delete_file)?;
         delete_file.terminate()?;
+        segment_entry.reset_delete_meta(num_deleted_docs as u32, target_opstamp);
     }
 
-    segment_entry.set_meta(segment.meta().clone());
+    // Regardless of whether we ended up writing a new file or not, we advance
+    // the `delete_cursor`. This is an optimisation: it ensures we never check
+    // a given deleted term against our docs more than once.
+    segment_entry.set_delete_cursor(delete_cursor);
+
     Ok(())
 }
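
The rewrite leans on Option::take to move the bitset out of the entry instead of cloning it, so the entry is never left holding a half-applied state. A tiny standalone sketch of that idiom:

    fn main() {
        let mut cached: Option<Vec<u32>> = Some(vec![1, 2, 3]);
        // take() moves the value out and leaves None behind, so a failure in a
        // later step can never leave the owner holding stale data.
        let working_copy = cached.take().unwrap_or_else(Vec::new);
        assert!(cached.is_none());
        assert_eq!(working_copy, vec![1, 2, 3]);
    }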
@@ -189,11 +213,12 @@ fn index_documents(
     segment: Segment,
     grouped_document_iterator: &mut dyn Iterator<Item = OperationGroup>,
     segment_updater: &mut SegmentUpdater,
+    tokenizers: &TokenizerManager,
     mut delete_cursor: DeleteCursor,
 ) -> crate::Result<bool> {
     let schema = segment.schema();
-    let mut segment_writer = SegmentWriter::for_segment(memory_budget, segment.clone(), &schema)?;
+    let mut segment_writer =
+        SegmentWriter::for_segment(memory_budget, segment.clone(), &schema, tokenizers)?;
     for document_group in grouped_document_iterator {
         for doc in document_group {
             segment_writer.add_document(doc, &schema)?;

@@ -231,11 +256,7 @@ fn index_documents(
         last_docstamp,
     )?;
 
-    let segment_entry = SegmentEntry::new(
-        segment_with_max_doc.meta().clone(),
-        delete_cursor,
-        delete_bitset_opt,
-    );
+    let segment_entry = SegmentEntry::new(segment_with_max_doc, delete_cursor, delete_bitset_opt);
     block_on(segment_updater.schedule_add_segment(segment_entry))?;
     Ok(true)
 }
@@ -307,16 +328,24 @@ impl IndexWriter {
 
     let delete_queue = DeleteQueue::new();
 
-    let current_opstamp = index.load_metas()?.opstamp;
+    let meta = index.load_metas()?;
 
-    let stamper = Stamper::new(current_opstamp);
+    let stamper = Stamper::new(meta.opstamp);
 
+    let commited_segments = SegmentRegister::new(
+        index.directory(),
+        &index.schema(),
+        meta.segments,
+        &delete_queue.cursor(),
+    );
+    let segment_registers = Arc::new(RwLock::new(SegmentRegisters::new(commited_segments)));
 
     let segment_updater =
-        SegmentUpdater::create(index.clone(), stamper.clone(), &delete_queue.cursor())?;
+        SegmentUpdater::create(segment_registers.clone(), index.clone(), stamper.clone())?;
 
     let mut index_writer = IndexWriter {
         _directory_lock: Some(directory_lock),
+        segment_registers,
         heap_size_in_bytes_per_thread,
         index: index.clone(),

@@ -330,10 +359,12 @@ impl IndexWriter {
 
         delete_queue,
 
-        committed_opstamp: current_opstamp,
+        committed_opstamp: meta.opstamp,
         stamper,
 
         worker_id: 0,
+
+        on_commit: Default::default(),
     };
     index_writer.start_workers()?;
     Ok(index_writer)
@@ -373,13 +404,6 @@ impl IndexWriter {
         result
     }
 
-    #[doc(hidden)]
-    pub fn add_segment(&self, segment_meta: SegmentMeta) -> crate::Result<()> {
-        let delete_cursor = self.delete_queue.cursor();
-        let segment_entry = SegmentEntry::new(segment_meta, delete_cursor, None);
-        block_on(self.segment_updater.schedule_add_segment(segment_entry))
-    }
-
     /// Creates a new segment.
     ///
     /// This method is useful only for users trying to do complex
@@ -428,12 +452,13 @@ impl IndexWriter {
         // was dropped.
         return Ok(());
     }
-    let segment = index.new_segment();
+    let segment = index.new_segment_unpersisted();
     index_documents(
         mem_budget,
         segment,
         &mut document_iterator,
         &mut segment_updater,
+        index.tokenizers(),
         delete_cursor.clone(),
     )?;
 }
@@ -460,6 +485,21 @@ impl IndexWriter {
         Ok(())
     }
 
+    // TODO move me
+    pub(crate) fn trigger_commit(&self) -> impl Future<Output = ()> {
+        self.on_commit.broadcast()
+    }
+
+    pub fn reader(&self, num_searchers: usize) -> crate::Result<IndexReader> {
+        let nrt_reader = NRTReader::create(
+            num_searchers,
+            self.index.clone(),
+            self.segment_registers.clone(),
+            &self.on_commit,
+        )?;
+        Ok(IndexReader::NRT(nrt_reader))
+    }
+
     /// Detects and removes the files that are not used by the index anymore.
     pub fn garbage_collect_files(
         &self,
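
The new reader() gives the writer a near-real-time view over its own segment registers, bypassing meta.json. A hedged usage sketch, mirroring the test_index_writer_reader test added further down (the demo function is ours):

    use tantivy::schema::{Schema, STRING};
    use tantivy::{doc, Index};

    fn nrt_demo() -> tantivy::Result<()> {
        let mut schema_builder = Schema::builder();
        let id = schema_builder.add_text_field("id", STRING);
        let index = Index::create_in_ram(schema_builder.build());
        let mut writer = index.writer_with_num_threads(1, 3_000_000)?;
        writer.add_document(doc!(id => "a"));
        writer.commit()?;
        // `reader(2)` asks for two pooled searchers; unlike Index::reader(),
        // the returned reader follows the writer's in-memory segment registers.
        let reader = writer.reader(2)?;
        assert_eq!(reader.searcher().num_docs(), 1);
        Ok(())
    }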
@@ -603,7 +643,7 @@ impl IndexWriter {
     /// It is also possible to add a payload to the `commit`
     /// using this API.
     /// See [`PreparedCommit::set_payload()`](PreparedCommit.html)
-    pub fn prepare_commit(&mut self) -> crate::Result<PreparedCommit> {
+    pub fn prepare_commit(&mut self, soft_commit: bool) -> crate::Result<PreparedCommit> {
         // Here, because we join all of the worker threads,
         // all of the segment update for this commit have been
         // sent.

@@ -631,7 +671,7 @@ impl IndexWriter {
     }
 
     let commit_opstamp = self.stamper.stamp();
-    let prepared_commit = PreparedCommit::new(self, commit_opstamp);
+    let prepared_commit = PreparedCommit::new(self, commit_opstamp, soft_commit);
     info!("Prepared commit {}", commit_opstamp);
     Ok(prepared_commit)
 }

@@ -651,7 +691,11 @@ impl IndexWriter {
     /// that made it in the commit.
     ///
     pub fn commit(&mut self) -> crate::Result<Opstamp> {
-        self.prepare_commit()?.commit()
+        self.prepare_commit(false)?.commit()
+    }
+
+    pub fn soft_commit(&mut self) -> crate::Result<Opstamp> {
+        self.prepare_commit(true)?.commit()
     }
 
     pub(crate) fn segment_updater(&self) -> &SegmentUpdater {
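
Per the soft-commit test added below, soft_commit publishes segments to the writer's NRT reader without making them visible to a regular reader until a hard commit lands. A hedged sketch of that intended difference:

    use tantivy::schema::{Schema, STRING};
    use tantivy::{doc, Index};

    fn soft_commit_demo() -> tantivy::Result<()> {
        let mut schema_builder = Schema::builder();
        let id = schema_builder.add_text_field("id", STRING);
        let index = Index::create_in_ram(schema_builder.build());
        let mut writer = index.writer_with_num_threads(1, 3_000_000)?;
        writer.add_document(doc!(id => "a"));
        // Visible to the writer's NRT reader, but not yet to a regular reader.
        writer.soft_commit()?;
        assert_eq!(writer.reader(1)?.searcher().num_docs(), 1);
        assert_eq!(index.reader()?.searcher().num_docs(), 0);
        // A hard commit makes the document durable and, after a reload,
        // visible to regular readers as well.
        writer.commit()?;
        Ok(())
    }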
@@ -776,7 +820,6 @@ impl Drop for IndexWriter {
 
 #[cfg(test)]
 mod tests {
-
     use super::super::operation::UserOperation;
     use crate::collector::TopDocs;
     use crate::directory::error::LockError;

@@ -897,7 +940,7 @@ mod tests {
     let index_writer = index.writer(3_000_000).unwrap();
     assert_eq!(
         format!("{:?}", index_writer.get_merge_policy()),
-        "LogMergePolicy { min_merge_size: 8, min_layer_size: 10000, \
+        "LogMergePolicy { min_merge_size: 8, max_merge_size: 10000000, min_layer_size: 10000, \
          level_log_size: 0.75 }"
     );
     let merge_policy = Box::new(NoMergePolicy::default());

@@ -1009,7 +1052,8 @@ mod tests {
         index_writer.add_document(doc!(text_field => "a"));
     }
     {
-        let mut prepared_commit = index_writer.prepare_commit().expect("commit failed");
+        let mut prepared_commit =
+            index_writer.prepare_commit(false).expect("commit failed");
         prepared_commit.set_payload("first commit");
         prepared_commit.commit().expect("commit failed");
     }

@@ -1042,7 +1086,8 @@ mod tests {
         index_writer.add_document(doc!(text_field => "a"));
     }
     {
-        let mut prepared_commit = index_writer.prepare_commit().expect("commit failed");
+        let mut prepared_commit =
+            index_writer.prepare_commit(false).expect("commit failed");
         prepared_commit.set_payload("first commit");
         prepared_commit.abort().expect("commit failed");
     }

@@ -1217,7 +1262,43 @@ mod tests {
     let index = Index::create_in_ram(schema_builder.build());
     let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
     index_writer.add_document(doc!(idfield=>"myid"));
-    let commit = index_writer.commit();
-    assert!(commit.is_ok());
+    assert!(index_writer.commit().is_ok());
 }
+
+#[test]
+fn test_index_writer_reader() {
+    let mut schema_builder = schema::Schema::builder();
+    let idfield = schema_builder.add_text_field("id", STRING);
+    schema_builder.add_text_field("optfield", STRING);
+    let index = Index::create_in_ram(schema_builder.build());
+    let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
+    index_writer.add_document(doc!(idfield=>"myid"));
+    assert!(index_writer.commit().is_ok());
+    let reader = index_writer.reader(2).unwrap();
+    let searcher = reader.searcher();
+    assert_eq!(searcher.num_docs(), 1u64);
+    index_writer.add_document(doc!(idfield=>"myid"));
+    assert!(index_writer.commit().is_ok());
+    assert_eq!(reader.searcher().num_docs(), 2u64);
+    assert_eq!(searcher.num_docs(), 1u64);
+}
+
+#[test]
+fn test_index_writer_reader_soft_commit() {
+    let mut schema_builder = schema::Schema::builder();
+    let idfield = schema_builder.add_text_field("id", STRING);
+    schema_builder.add_text_field("optfield", STRING);
+    let index = Index::create_in_ram(schema_builder.build());
+    let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
+    index_writer.add_document(doc!(idfield=>"myid"));
+    assert!(index_writer.soft_commit().is_ok());
+    let nrt_reader = index_writer.reader(2).unwrap();
+    let normal_reader = index.reader_builder().try_into().unwrap();
+    assert_eq!(nrt_reader.searcher().num_docs(), 1u64);
+    assert_eq!(normal_reader.searcher().num_docs(), 0u64);
+    assert!(index_writer.commit().is_ok());
+    assert!(normal_reader.reload().is_ok());
+    assert_eq!(nrt_reader.searcher().num_docs(), 1u64);
+    assert_eq!(normal_reader.searcher().num_docs(), 1u64);
+}
 }

@@ -6,12 +6,14 @@ use std::f64;
 const DEFAULT_LEVEL_LOG_SIZE: f64 = 0.75;
 const DEFAULT_MIN_LAYER_SIZE: u32 = 10_000;
 const DEFAULT_MIN_MERGE_SIZE: usize = 8;
+const DEFAULT_MAX_MERGE_SIZE: usize = 10_000_000;
 
 /// `LogMergePolicy` tries to merge segments that have a similar number of
 /// documents.
 #[derive(Debug, Clone)]
 pub struct LogMergePolicy {
     min_merge_size: usize,
+    max_merge_size: usize,
     min_layer_size: u32,
     level_log_size: f64,
 }

@@ -26,6 +28,12 @@ impl LogMergePolicy {
         self.min_merge_size = min_merge_size;
     }
 
+    /// Set the maximum number of docs in a segment for it to be considered for
+    /// merging.
+    pub fn set_max_merge_size(&mut self, max_merge_size: usize) {
+        self.max_merge_size = max_merge_size;
+    }
+
     /// Set the minimum segment size under which all segments belong
     /// to the same level.
     pub fn set_min_layer_size(&mut self, min_layer_size: u32) {
@@ -53,6 +61,7 @@ impl MergePolicy for LogMergePolicy {
     let mut size_sorted_tuples = segments
         .iter()
         .map(SegmentMeta::num_docs)
+        .filter(|s| s <= &(self.max_merge_size as u32))
         .enumerate()
         .collect::<Vec<(usize, u32)>>();
 

@@ -86,6 +95,7 @@ impl Default for LogMergePolicy {
     fn default() -> LogMergePolicy {
         LogMergePolicy {
             min_merge_size: DEFAULT_MIN_MERGE_SIZE,
+            max_merge_size: DEFAULT_MAX_MERGE_SIZE,
             min_layer_size: DEFAULT_MIN_LAYER_SIZE,
             level_log_size: DEFAULT_LEVEL_LOG_SIZE,
         }

@@ -104,6 +114,7 @@ mod tests {
     fn test_merge_policy() -> LogMergePolicy {
         let mut log_merge_policy = LogMergePolicy::default();
         log_merge_policy.set_min_merge_size(3);
+        log_merge_policy.set_max_merge_size(100_000);
         log_merge_policy.set_min_layer_size(2);
         log_merge_policy
     }

@@ -141,11 +152,11 @@ mod tests {
     create_random_segment_meta(10),
     create_random_segment_meta(10),
     create_random_segment_meta(10),
-    create_random_segment_meta(1000),
-    create_random_segment_meta(1000),
-    create_random_segment_meta(1000),
-    create_random_segment_meta(10000),
-    create_random_segment_meta(10000),
+    create_random_segment_meta(1_000),
+    create_random_segment_meta(1_000),
+    create_random_segment_meta(1_000),
+    create_random_segment_meta(10_000),
+    create_random_segment_meta(10_000),
     create_random_segment_meta(10),
     create_random_segment_meta(10),
     create_random_segment_meta(10),

@@ -182,4 +193,19 @@ mod tests {
     let result_list = test_merge_policy().compute_merge_candidates(&test_input);
     assert_eq!(result_list.len(), 1);
 }
+
+#[test]
+fn test_large_merge_segments() {
+    let test_input = vec![
+        create_random_segment_meta(1_000_000),
+        create_random_segment_meta(100_001),
+        create_random_segment_meta(100_000),
+        create_random_segment_meta(100_000),
+        create_random_segment_meta(100_000),
+    ];
+    let result_list = test_merge_policy().compute_merge_candidates(&test_input);
+    // Do not include large segments
+    assert_eq!(result_list.len(), 1);
+    assert_eq!(result_list[0].0.len(), 3)
+}
 }
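
A hedged sketch of wiring the new knob into a writer (set_merge_policy and LogMergePolicy are existing tantivy APIs; the chosen maximum is arbitrary):

    use tantivy::merge_policy::LogMergePolicy;
    use tantivy::schema::Schema;
    use tantivy::Index;

    fn configure_merges() -> tantivy::Result<()> {
        let index = Index::create_in_ram(Schema::builder().build());
        let index_writer = index.writer(3_000_000)?;
        let mut policy = LogMergePolicy::default();
        // Segments above one million docs are never picked as merge inputs.
        policy.set_max_merge_size(1_000_000);
        index_writer.set_merge_policy(Box::new(policy));
        Ok(())
    }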

@@ -21,8 +21,6 @@ use crate::store::StoreWriter;
 use crate::termdict::TermMerger;
 use crate::termdict::TermOrdinal;
 use crate::DocId;
-use crate::Result;
-use crate::TantivyError;
 use itertools::Itertools;
 use std::cmp;
 use std::collections::HashMap;

@@ -143,7 +141,7 @@ impl DeltaComputer {
 }
 
 impl IndexMerger {
-    pub fn open(schema: Schema, segments: &[Segment]) -> Result<IndexMerger> {
+    pub fn open(schema: Schema, segments: &[Segment]) -> crate::Result<IndexMerger> {
         let mut readers = vec![];
         let mut max_doc: u32 = 0u32;
         for segment in segments {

@@ -159,7 +157,7 @@
             which exceeds the limit {}.",
             max_doc, MAX_DOC_LIMIT
         );
-        return Err(TantivyError::InvalidArgument(err_msg));
+        return Err(crate::TantivyError::InvalidArgument(err_msg));
     }
     Ok(IndexMerger {
         schema,

@@ -168,7 +166,10 @@
     })
 }
 
-fn write_fieldnorms(&self, fieldnorms_serializer: &mut FieldNormsSerializer) -> Result<()> {
+fn write_fieldnorms(
+    &self,
+    fieldnorms_serializer: &mut FieldNormsSerializer,
+) -> crate::Result<()> {
     let fields = FieldNormsWriter::fields_with_fieldnorm(&self.schema);
     let mut fieldnorms_data = Vec::with_capacity(self.max_doc as usize);
     for field in fields {

@@ -189,7 +190,7 @@
     &self,
     fast_field_serializer: &mut FastFieldSerializer,
     mut term_ord_mappings: HashMap<Field, TermOrdinalMapping>,
-) -> Result<()> {
+) -> crate::Result<()> {
     for (field, field_entry) in self.schema.fields() {
         let field_type = field_entry.field_type();
         match *field_type {

@@ -234,7 +235,7 @@
     &self,
     field: Field,
     fast_field_serializer: &mut FastFieldSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     let mut u64_readers = vec![];
     let mut min_value = u64::max_value();
     let mut max_value = u64::min_value();

@@ -284,7 +285,7 @@
     &self,
     field: Field,
     fast_field_serializer: &mut FastFieldSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     let mut total_num_vals = 0u64;
     let mut u64s_readers: Vec<MultiValueIntFastFieldReader<u64>> = Vec::new();
 

@@ -331,7 +332,7 @@
     field: Field,
     term_ordinal_mappings: &TermOrdinalMapping,
     fast_field_serializer: &mut FastFieldSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     // Multifastfield consists in 2 fastfields.
     // The first serves as an index into the second one and is strictly increasing.
     // The second contains the actual values.

@@ -371,7 +372,7 @@
     &self,
     field: Field,
     fast_field_serializer: &mut FastFieldSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     // Multifastfield consists in 2 fastfields.
     // The first serves as an index into the second one and is strictly increasing.
     // The second contains the actual values.

@@ -436,7 +437,7 @@
     &self,
     field: Field,
     fast_field_serializer: &mut FastFieldSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     let mut total_num_vals = 0u64;
     let mut bytes_readers: Vec<BytesFastFieldReader> = Vec::new();
 

@@ -492,7 +493,7 @@
     indexed_field: Field,
     field_type: &FieldType,
     serializer: &mut InvertedIndexSerializer,
-) -> Result<Option<TermOrdinalMapping>> {
+) -> crate::Result<Option<TermOrdinalMapping>> {
     let mut positions_buffer: Vec<u32> = Vec::with_capacity(1_000);
     let mut delta_computer = DeltaComputer::new();
     let field_readers = self

@@ -646,7 +647,7 @@
 fn write_postings(
     &self,
     serializer: &mut InvertedIndexSerializer,
-) -> Result<HashMap<Field, TermOrdinalMapping>> {
+) -> crate::Result<HashMap<Field, TermOrdinalMapping>> {
     let mut term_ordinal_mappings = HashMap::new();
     for (field, field_entry) in self.schema.fields() {
         if field_entry.is_indexed() {

@@ -660,7 +661,7 @@
     Ok(term_ordinal_mappings)
 }
 
-fn write_storable_fields(&self, store_writer: &mut StoreWriter) -> Result<()> {
+fn write_storable_fields(&self, store_writer: &mut StoreWriter) -> crate::Result<()> {
     for reader in &self.readers {
         let store_reader = reader.get_store_reader();
         if reader.num_deleted_docs() > 0 {

@@ -677,7 +678,7 @@
 }
 
 impl SerializableSegment for IndexMerger {
-    fn write(&self, mut serializer: SegmentSerializer) -> Result<u32> {
+    fn write(&self, mut serializer: SegmentSerializer) -> crate::Result<u32> {
         let term_ord_mappings = self.write_postings(serializer.get_postings_serializer())?;
         self.write_fieldnorms(serializer.get_fieldnorms_serializer())?;
         self.write_fast_fields(serializer.get_fast_field_serializer(), term_ord_mappings)?;

@@ -23,6 +23,7 @@ pub use self::merge_policy::{MergeCandidate, MergePolicy, NoMergePolicy};
 pub use self::prepared_commit::PreparedCommit;
 pub use self::segment_entry::SegmentEntry;
 pub use self::segment_manager::SegmentManager;
+pub(crate) use self::segment_manager::SegmentRegisters;
 pub use self::segment_serializer::SegmentSerializer;
 pub use self::segment_writer::SegmentWriter;
 
@@ -19,6 +19,8 @@ pub struct AddOperation {
 /// UserOperation is an enum type that encapsulates other operation types.
 #[derive(Eq, PartialEq, Debug)]
 pub enum UserOperation {
+    /// Add operation
     Add(Document),
+    /// Delete operation
     Delete(Term),
 }

@@ -1,6 +1,5 @@
 use super::IndexWriter;
 use crate::Opstamp;
-use crate::Result;
 use futures::executor::block_on;
 
 /// A prepared commit

@@ -8,14 +7,20 @@ pub struct PreparedCommit<'a> {
     index_writer: &'a mut IndexWriter,
     payload: Option<String>,
     opstamp: Opstamp,
+    soft_commit: bool,
 }
 
 impl<'a> PreparedCommit<'a> {
-    pub(crate) fn new(index_writer: &'a mut IndexWriter, opstamp: Opstamp) -> PreparedCommit<'_> {
+    pub(crate) fn new(
+        index_writer: &'a mut IndexWriter,
+        opstamp: Opstamp,
+        soft_commit: bool,
+    ) -> PreparedCommit {
         PreparedCommit {
             index_writer,
             payload: None,
             opstamp,
+            soft_commit,
         }
     }
 

@@ -27,17 +32,18 @@ impl<'a> PreparedCommit<'a> {
         self.payload = Some(payload.to_string())
     }
 
-    pub fn abort(self) -> Result<Opstamp> {
+    pub fn abort(self) -> crate::Result<Opstamp> {
         self.index_writer.rollback()
     }
 
-    pub fn commit(self) -> Result<Opstamp> {
+    pub fn commit(self) -> crate::Result<Opstamp> {
         info!("committing {}", self.opstamp);
-        let _ = block_on(
-            self.index_writer
-                .segment_updater()
-                .schedule_commit(self.opstamp, self.payload),
-        );
+        block_on(self.index_writer.segment_updater().schedule_commit(
+            self.opstamp,
+            self.payload,
+            self.soft_commit,
+        ))?;
+        block_on(self.index_writer.trigger_commit());
         Ok(self.opstamp)
     }
 }
|||||||
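The `soft_commit` flag is threaded through `PreparedCommit` internally; the public two-phase commit flow is unchanged. A minimal usage sketch of that flow (a regular, non-soft commit):

```rust
use tantivy::schema::{Schema, TEXT};
use tantivy::{doc, Index};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let body = schema_builder.add_text_field("body", TEXT);
    let index = Index::create_in_ram(schema_builder.build());
    let mut writer = index.writer(50_000_000)?;
    writer.add_document(doc!(body => "hello"));
    // Stage the commit, attach an opaque payload, then either commit
    // (which blocks on the segment updater, as in the diff above) or
    // abort (which rolls the writer back).
    let mut prepared = writer.prepare_commit()?;
    prepared.set_payload("checkpoint-42");
    let opstamp = prepared.commit()?;
    println!("committed at opstamp {}", opstamp);
    Ok(())
}
```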
@@ -1,7 +1,9 @@
 use crate::common::BitSet;
 use crate::core::SegmentId;
 use crate::core::SegmentMeta;
+use crate::directory::ManagedDirectory;
 use crate::indexer::delete_queue::DeleteCursor;
+use crate::{Opstamp, Segment};
 use std::fmt;
 
 /// A segment entry describes the state of
@@ -19,55 +21,81 @@ use std::fmt;
 /// in the .del file or in the `delete_bitset`.
 #[derive(Clone)]
 pub struct SegmentEntry {
-    meta: SegmentMeta,
+    segment: Segment,
     delete_bitset: Option<BitSet>,
     delete_cursor: DeleteCursor,
 }
 
 impl SegmentEntry {
     /// Create a new `SegmentEntry`
-    pub fn new(
-        segment_meta: SegmentMeta,
+    pub(crate) fn new(
+        segment: Segment,
         delete_cursor: DeleteCursor,
         delete_bitset: Option<BitSet>,
     ) -> SegmentEntry {
         SegmentEntry {
-            meta: segment_meta,
+            segment,
             delete_bitset,
             delete_cursor,
         }
     }
 
-    /// Return a reference to the segment entry deleted bitset.
+    pub fn persist(&mut self, dest_directory: ManagedDirectory) -> crate::Result<()> {
+        // TODO take the delete bitset into account?
+        self.segment.persist(dest_directory)?;
+        Ok(())
+    }
+
+    pub fn segment(&self) -> &Segment {
+        &self.segment
+    }
+
+    /// `Takes` (as in `Option::take`) the delete bitset of
+    /// a segment entry.
     ///
     /// `DocId` in this bitset are flagged as deleted.
-    pub fn delete_bitset(&self) -> Option<&BitSet> {
-        self.delete_bitset.as_ref()
+    pub fn take_delete_bitset(&mut self) -> Option<BitSet> {
+        self.delete_bitset.take()
     }
 
-    /// Set the `SegmentMeta` for this segment.
-    pub fn set_meta(&mut self, segment_meta: SegmentMeta) {
-        self.meta = segment_meta;
+    /// Reset the delete information in this segment.
+    ///
+    /// The `SegmentEntry` segment's `SegmentMeta` gets updated, and
+    /// any delete bitset is dropped and set to `None`.
+    pub fn reset_delete_meta(&mut self, num_deleted_docs: u32, target_opstamp: Opstamp) {
+        self.segment = self
+            .segment
+            .clone()
+            .with_delete_meta(num_deleted_docs, target_opstamp);
+        self.delete_bitset = None;
     }
 
+    pub fn set_delete_cursor(&mut self, delete_cursor: DeleteCursor) {
+        self.delete_cursor = delete_cursor;
+    }
     /// Return a reference to the segment_entry's delete cursor
-    pub fn delete_cursor(&mut self) -> &mut DeleteCursor {
-        &mut self.delete_cursor
+    pub fn delete_cursor(&mut self) -> DeleteCursor {
+        self.delete_cursor.clone()
     }
 
     /// Returns the segment id.
     pub fn segment_id(&self) -> SegmentId {
-        self.meta.id()
+        self.segment.id()
     }
 
     /// Accessor to the `SegmentMeta`
     pub fn meta(&self) -> &SegmentMeta {
-        &self.meta
+        self.segment.meta()
     }
 }
 
 impl fmt::Debug for SegmentEntry {
     fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(formatter, "SegmentEntry({:?})", self.meta)
+        let num_deletes = self.delete_bitset.as_ref().map(|bitset| bitset.len());
+        write!(
+            formatter,
+            "SegmentEntry(seg={:?}, ndel={:?})",
+            self.segment, num_deletes
+        )
     }
 }
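The accessor change from `delete_bitset(&self) -> Option<&BitSet>` to `take_delete_bitset(&mut self) -> Option<BitSet>` hands ownership of the bitset to the caller and leaves `None` behind, exactly like `Option::take`. A standalone sketch of the pattern, with a `Vec<u32>` standing in for tantivy's `BitSet`:

```rust
struct Entry {
    delete_bitset: Option<Vec<u32>>, // stand-in for tantivy's BitSet
}

impl Entry {
    /// Moves the bitset out, leaving `None` behind, so the caller can
    /// serialize it without cloning and the entry forgets stale deletes.
    fn take_delete_bitset(&mut self) -> Option<Vec<u32>> {
        self.delete_bitset.take()
    }
}

fn main() {
    let mut entry = Entry {
        delete_bitset: Some(vec![3, 7]),
    };
    assert_eq!(entry.take_delete_bitset(), Some(vec![3, 7]));
    assert_eq!(entry.take_delete_bitset(), None); // already consumed
}
```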
@@ -1,17 +1,14 @@
 use super::segment_register::SegmentRegister;
 use crate::core::SegmentId;
 use crate::core::SegmentMeta;
-use crate::error::TantivyError;
-use crate::indexer::delete_queue::DeleteCursor;
 use crate::indexer::SegmentEntry;
-use crate::Result as TantivyResult;
+use crate::Segment;
 use std::collections::hash_set::HashSet;
-use std::fmt::{self, Debug, Formatter};
-use std::sync::RwLock;
+use std::sync::{Arc, RwLock};
 use std::sync::{RwLockReadGuard, RwLockWriteGuard};
 
 #[derive(Default)]
-struct SegmentRegisters {
+pub(crate) struct SegmentRegisters {
     uncommitted: SegmentRegister,
     committed: SegmentRegister,
 }
@@ -23,6 +20,17 @@ pub(crate) enum SegmentsStatus {
 }
 
 impl SegmentRegisters {
+    pub fn new(committed: SegmentRegister) -> SegmentRegisters {
+        SegmentRegisters {
+            uncommitted: Default::default(),
+            committed,
+        }
+    }
+
+    pub fn committed_segment(&self) -> Vec<Segment> {
+        self.committed.segments()
+    }
+
     /// Check if all the segments are committed or uncommitted.
     ///
     /// If some segment is missing or segments are in a different state (this should not happen
@@ -45,18 +53,7 @@ impl SegmentRegisters {
 /// changes (merges especially)
 #[derive(Default)]
 pub struct SegmentManager {
-    registers: RwLock<SegmentRegisters>,
-}
-
-impl Debug for SegmentManager {
-    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
-        let lock = self.read();
-        write!(
-            f,
-            "{{ uncommitted: {:?}, committed: {:?} }}",
-            lock.uncommitted, lock.committed
-        )
-    }
+    registers: Arc<RwLock<SegmentRegisters>>,
 }
 
 pub fn get_mergeable_segments(
@@ -75,16 +72,8 @@ pub fn get_mergeable_segments(
 }
 
 impl SegmentManager {
-    pub fn from_segments(
-        segment_metas: Vec<SegmentMeta>,
-        delete_cursor: &DeleteCursor,
-    ) -> SegmentManager {
-        SegmentManager {
-            registers: RwLock::new(SegmentRegisters {
-                uncommitted: SegmentRegister::default(),
-                committed: SegmentRegister::new(segment_metas, delete_cursor),
-            }),
-        }
+    pub(crate) fn new(registers: Arc<RwLock<SegmentRegisters>>) -> SegmentManager {
+        SegmentManager { registers }
     }
 
     /// Returns all of the segment entries (committed or uncommitted)
@@ -145,7 +134,7 @@ impl SegmentManager {
     /// Returns an error if some segments are missing, or if
     /// the `segment_ids` are not either all committed or all
     /// uncommitted.
-    pub fn start_merge(&self, segment_ids: &[SegmentId]) -> TantivyResult<Vec<SegmentEntry>> {
+    pub fn start_merge(&self, segment_ids: &[SegmentId]) -> crate::Result<Vec<SegmentEntry>> {
         let registers_lock = self.read();
         let mut segment_entries = vec![];
         if registers_lock.uncommitted.contains_all(segment_ids) {
@@ -166,7 +155,7 @@ impl SegmentManager {
             let error_msg = "Merge operation sent for segments that are not \
                              all uncommited or commited."
                 .to_string();
-            return Err(TantivyError::InvalidArgument(error_msg));
+            return Err(crate::Error::InvalidArgument(error_msg));
         }
         Ok(segment_entries)
     }
@@ -188,7 +177,7 @@ impl SegmentManager {
             .segments_status(before_merge_segment_ids)
             .ok_or_else(|| {
                 warn!("couldn't find segment in SegmentManager");
-                crate::Error::InvalidArgument(
+                crate::TantivyError::InvalidArgument(
                     "The segments that were merged could not be found in the SegmentManager. \
                      This is not necessarily a bug, and can happen after a rollback for instance."
                         .to_string(),
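Wrapping the registers in `Arc<RwLock<...>>` lets the `SegmentManager` and the `SegmentUpdater` (see its `create` signature below) observe the same register state through independent handles. A standalone sketch of the sharing pattern:

```rust
use std::sync::{Arc, RwLock};

#[derive(Default, Debug)]
struct Registers {
    committed: Vec<String>, // stand-in for SegmentRegister
}

fn main() {
    let registers = Arc::new(RwLock::new(Registers::default()));
    // Both the "manager" and the "updater" hold clones of the same Arc.
    let manager_view = Arc::clone(&registers);
    let updater_view = Arc::clone(&registers);
    updater_view.write().unwrap().committed.push("seg-a".to_string());
    // The manager observes the update through its own handle.
    assert_eq!(manager_view.read().unwrap().committed.len(), 1);
}
```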
@@ -1,7 +1,10 @@
 use crate::core::SegmentId;
 use crate::core::SegmentMeta;
+use crate::directory::ManagedDirectory;
 use crate::indexer::delete_queue::DeleteCursor;
 use crate::indexer::segment_entry::SegmentEntry;
+use crate::schema::Schema;
+use crate::Segment;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fmt::{self, Debug, Formatter};
@@ -46,6 +49,13 @@ impl SegmentRegister {
             .collect()
     }
 
+    pub fn segments(&self) -> Vec<Segment> {
+        self.segment_states
+            .values()
+            .map(|segment_entry| segment_entry.segment().clone())
+            .collect()
+    }
+
     pub fn segment_entries(&self) -> Vec<SegmentEntry> {
         self.segment_states.values().cloned().collect()
     }
@@ -79,11 +89,17 @@ impl SegmentRegister {
         self.segment_states.get(segment_id).cloned()
     }
 
-    pub fn new(segment_metas: Vec<SegmentMeta>, delete_cursor: &DeleteCursor) -> SegmentRegister {
+    pub fn new(
+        directory: &ManagedDirectory,
+        schema: &Schema,
+        segment_metas: Vec<SegmentMeta>,
+        delete_cursor: &DeleteCursor,
+    ) -> SegmentRegister {
         let mut segment_states = HashMap::new();
-        for segment_meta in segment_metas {
-            let segment_id = segment_meta.id();
-            let segment_entry = SegmentEntry::new(segment_meta, delete_cursor.clone(), None);
+        for meta in segment_metas {
+            let segment_id = meta.id();
+            let segment = Segment::new_persisted(meta, directory.clone(), schema.clone());
+            let segment_entry = SegmentEntry::new(segment, delete_cursor.clone(), None);
             segment_states.insert(segment_id, segment_entry);
         }
         SegmentRegister { segment_states }
@@ -95,6 +111,7 @@ mod tests {
     use super::*;
     use crate::core::{SegmentId, SegmentMetaInventory};
     use crate::indexer::delete_queue::*;
+    use crate::schema::Schema;
 
     fn segment_ids(segment_register: &SegmentRegister) -> Vec<SegmentId> {
         segment_register
@@ -108,6 +125,7 @@ mod tests {
     fn test_segment_register() {
         let inventory = SegmentMetaInventory::default();
         let delete_queue = DeleteQueue::new();
+        let schema = Schema::builder().build();
 
         let mut segment_register = SegmentRegister::default();
         let segment_id_a = SegmentId::generate_random();
@@ -115,21 +133,24 @@ mod tests {
         let segment_id_merged = SegmentId::generate_random();
 
         {
-            let segment_meta = inventory.new_segment_meta(segment_id_a, 0u32);
-            let segment_entry = SegmentEntry::new(segment_meta, delete_queue.cursor(), None);
+            let meta = inventory.new_segment_meta(segment_id_a, 0u32);
+            let segment = Segment::new_volatile(meta, schema.clone());
+            let segment_entry = SegmentEntry::new(segment, delete_queue.cursor(), None);
             segment_register.add_segment_entry(segment_entry);
         }
         assert_eq!(segment_ids(&segment_register), vec![segment_id_a]);
         {
             let segment_meta = inventory.new_segment_meta(segment_id_b, 0u32);
-            let segment_entry = SegmentEntry::new(segment_meta, delete_queue.cursor(), None);
+            let segment = Segment::new_volatile(segment_meta, schema.clone());
+            let segment_entry = SegmentEntry::new(segment, delete_queue.cursor(), None);
             segment_register.add_segment_entry(segment_entry);
         }
         segment_register.remove_segment(&segment_id_a);
         segment_register.remove_segment(&segment_id_b);
         {
             let segment_meta_merged = inventory.new_segment_meta(segment_id_merged, 0u32);
-            let segment_entry = SegmentEntry::new(segment_meta_merged, delete_queue.cursor(), None);
+            let segment = Segment::new_volatile(segment_meta_merged, schema);
+            let segment_entry = SegmentEntry::new(segment, delete_queue.cursor(), None);
             segment_register.add_segment_entry(segment_entry);
         }
         assert_eq!(segment_ids(&segment_register), vec![segment_id_merged]);
@@ -1,13 +1,8 @@
-use crate::Directory;
-
 use crate::core::Segment;
 use crate::core::SegmentComponent;
-use crate::directory::error::OpenWriteError;
-use crate::directory::{DirectoryClone, RAMDirectory, TerminatingWrite, WritePtr};
 use crate::fastfield::FastFieldSerializer;
 use crate::fieldnorm::FieldNormsSerializer;
 use crate::postings::InvertedIndexSerializer;
-use crate::schema::Schema;
 use crate::store::StoreWriter;
 
 /// Segment serializer is in charge of laying out on disk
@@ -17,50 +12,25 @@ pub struct SegmentSerializer {
     fast_field_serializer: FastFieldSerializer,
     fieldnorms_serializer: FieldNormsSerializer,
     postings_serializer: InvertedIndexSerializer,
-    bundle_writer: Option<(RAMDirectory, WritePtr)>,
-}
-
-pub(crate) struct SegmentSerializerWriters {
-    postings_wrt: WritePtr,
-    positions_skip_wrt: WritePtr,
-    positions_wrt: WritePtr,
-    terms_wrt: WritePtr,
-    fast_field_wrt: WritePtr,
-    fieldnorms_wrt: WritePtr,
-    store_wrt: WritePtr,
-}
-
-impl SegmentSerializerWriters {
-    pub(crate) fn for_segment(segment: &mut Segment) -> Result<Self, OpenWriteError> {
-        Ok(SegmentSerializerWriters {
-            postings_wrt: segment.open_write(SegmentComponent::POSTINGS)?,
-            positions_skip_wrt: segment.open_write(SegmentComponent::POSITIONS)?,
-            positions_wrt: segment.open_write(SegmentComponent::POSITIONSSKIP)?,
-            terms_wrt: segment.open_write(SegmentComponent::TERMS)?,
-            fast_field_wrt: segment.open_write(SegmentComponent::FASTFIELDS)?,
-            fieldnorms_wrt: segment.open_write(SegmentComponent::FIELDNORMS)?,
-            store_wrt: segment.open_write(SegmentComponent::STORE)?,
-        })
-    }
 }
 
 impl SegmentSerializer {
-    pub(crate) fn new(schema: Schema, writers: SegmentSerializerWriters) -> crate::Result<Self> {
-        let fast_field_serializer = FastFieldSerializer::from_write(writers.fast_field_wrt)?;
-        let fieldnorms_serializer = FieldNormsSerializer::from_write(writers.fieldnorms_wrt)?;
-        let postings_serializer = InvertedIndexSerializer::open(
-            schema,
-            writers.terms_wrt,
-            writers.postings_wrt,
-            writers.positions_wrt,
-            writers.positions_skip_wrt,
-        );
+    /// Creates a new `SegmentSerializer`.
+    pub fn for_segment(segment: &mut Segment) -> crate::Result<SegmentSerializer> {
+        let store_write = segment.open_write(SegmentComponent::STORE)?;
+        let fast_field_write = segment.open_write(SegmentComponent::FASTFIELDS)?;
+        let fast_field_serializer = FastFieldSerializer::from_write(fast_field_write)?;
+        let fieldnorms_write = segment.open_write(SegmentComponent::FIELDNORMS)?;
+        let fieldnorms_serializer = FieldNormsSerializer::from_write(fieldnorms_write)?;
+        let postings_serializer = InvertedIndexSerializer::open(segment)?;
         Ok(SegmentSerializer {
-            store_writer: StoreWriter::new(writers.store_wrt),
+            store_writer: StoreWriter::new(store_write),
             fast_field_serializer,
             fieldnorms_serializer,
             postings_serializer,
-            bundle_writer: None,
         })
     }
 
@@ -85,15 +55,11 @@ impl SegmentSerializer {
     }
 
     /// Finalize the segment serialization.
-    pub fn close(mut self) -> crate::Result<()> {
+    pub fn close(self) -> crate::Result<()> {
         self.fast_field_serializer.close()?;
         self.postings_serializer.close()?;
         self.store_writer.close()?;
         self.fieldnorms_serializer.close()?;
-        if let Some((ram_directory, mut bundle_wrt)) = self.bundle_writer.take() {
-            ram_directory.serialize_bundle(&mut bundle_wrt)?;
-            bundle_wrt.terminate()?;
-        }
        Ok(())
     }
 }
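With the `SegmentSerializerWriters` indirection gone, `SegmentSerializer::for_segment` opens every component writer itself, and `close` flushes fast fields, postings, the store, and fieldnorms in that order. A sketch of the lifecycle as the merge code below drives it (these are crate-internal tantivy types, so this is illustrative rather than compilable on its own):

```rust
// Illustrative only: Segment, SegmentSerializer and IndexMerger are
// crate-internal; this mirrors the merge path in the diff.
fn serialize_segment(
    merger: &IndexMerger,
    merged_segment: &mut Segment,
) -> crate::Result<u32> {
    // Opens store/fastfield/fieldnorm/postings writers on the segment.
    let serializer = SegmentSerializer::for_segment(merged_segment)?;
    // `write` consumes the serializer (closing it) and returns max_doc.
    let max_doc = merger.write(serializer)?;
    Ok(max_doc)
}
```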
@@ -7,12 +7,10 @@ use crate::core::SegmentMeta;
 use crate::core::SerializableSegment;
 use crate::core::META_FILEPATH;
 use crate::directory::{Directory, DirectoryClone, GarbageCollectionResult};
-use crate::indexer::delete_queue::DeleteCursor;
 use crate::indexer::index_writer::advance_deletes;
 use crate::indexer::merge_operation::MergeOperationInventory;
 use crate::indexer::merger::IndexMerger;
-use crate::indexer::segment_manager::SegmentsStatus;
-use crate::indexer::segment_serializer::SegmentSerializerWriters;
+use crate::indexer::segment_manager::{SegmentRegisters, SegmentsStatus};
 use crate::indexer::stamper::Stamper;
 use crate::indexer::SegmentEntry;
 use crate::indexer::SegmentSerializer;
@@ -118,8 +116,7 @@ fn merge(
 
     // First we apply all of the deletes to the merged segment, up to the target opstamp.
     for segment_entry in &mut segment_entries {
-        let segment = index.segment(segment_entry.meta().clone());
-        advance_deletes(segment, segment_entry, target_opstamp)?;
+        advance_deletes(segment_entry, target_opstamp)?;
     }
 
     let delete_cursor = segment_entries[0].delete_cursor().clone();
@@ -133,15 +130,15 @@ fn merge(
     let merger: IndexMerger = IndexMerger::open(index.schema(), &segments[..])?;
 
     // ... we just serialize this index merger in our new segment to merge the two segments.
-    let segment_serializer_wrts = SegmentSerializerWriters::for_segment(&mut merged_segment)?;
-    let segment_serializer =
-        SegmentSerializer::new(merged_segment.schema(), segment_serializer_wrts)?;
+    let segment_serializer = SegmentSerializer::for_segment(&mut merged_segment)?;
 
-    let num_docs = merger.write(segment_serializer)?;
+    let max_doc = merger.write(segment_serializer)?;
 
-    let segment_meta = index.new_segment_meta(merged_segment.id(), num_docs);
-    Ok(SegmentEntry::new(segment_meta, delete_cursor, None))
+    Ok(SegmentEntry::new(
+        merged_segment.with_max_doc(max_doc),
+        delete_cursor,
+        None,
+    ))
 }
 
 pub(crate) struct InnerSegmentUpdater {
@@ -165,25 +162,28 @@ pub(crate) struct InnerSegmentUpdater {
 
 impl SegmentUpdater {
     pub fn create(
+        segment_registers: Arc<RwLock<SegmentRegisters>>,
         index: Index,
         stamper: Stamper,
-        delete_cursor: &DeleteCursor,
     ) -> crate::Result<SegmentUpdater> {
-        let segments = index.searchable_segment_metas()?;
-        let segment_manager = SegmentManager::from_segments(segments, delete_cursor);
+        let segment_manager = SegmentManager::new(segment_registers);
         let pool = ThreadPoolBuilder::new()
             .name_prefix("segment_updater")
             .pool_size(1)
             .create()
             .map_err(|_| {
-                crate::Error::SystemError("Failed to spawn segment updater thread".to_string())
+                crate::TantivyError::SystemError(
+                    "Failed to spawn segment updater thread".to_string(),
+                )
             })?;
         let merge_thread_pool = ThreadPoolBuilder::new()
             .name_prefix("merge_thread")
             .pool_size(NUM_MERGE_THREADS)
             .create()
             .map_err(|_| {
-                crate::Error::SystemError("Failed to spawn segment merging thread".to_string())
+                crate::TantivyError::SystemError(
+                    "Failed to spawn segment merging thread".to_string(),
+                )
             })?;
         let index_meta = index.load_metas()?;
         Ok(SegmentUpdater(Arc::new(InnerSegmentUpdater {
@@ -225,7 +225,7 @@ impl SegmentUpdater {
         receiver.unwrap_or_else(|_| {
             let err_msg =
                 "A segment_updater future did not success. This should never happen.".to_string();
-            Err(crate::Error::SystemError(err_msg))
+            Err(crate::TantivyError::SystemError(err_msg))
         })
     }
 
@@ -233,6 +233,7 @@ impl SegmentUpdater {
         &self,
         segment_entry: SegmentEntry,
     ) -> impl Future<Output = crate::Result<()>> {
+        // TODO temporary: serializing the segment at this point.
         let segment_updater = self.clone();
         self.schedule_future(async move {
             segment_updater.segment_manager.add_segment(segment_entry);
@@ -261,8 +262,7 @@ impl SegmentUpdater {
     fn purge_deletes(&self, target_opstamp: Opstamp) -> crate::Result<Vec<SegmentEntry>> {
         let mut segment_entries = self.segment_manager.segment_entries();
         for segment_entry in &mut segment_entries {
-            let segment = self.index.segment(segment_entry.meta().clone());
-            advance_deletes(segment, segment_entry, target_opstamp)?;
+            advance_deletes(segment_entry, target_opstamp)?;
         }
         Ok(segment_entries)
     }
@@ -330,12 +330,21 @@ impl SegmentUpdater {
         &self,
         opstamp: Opstamp,
         payload: Option<String>,
+        soft_commit: bool,
     ) -> impl Future<Output = crate::Result<()>> {
         let segment_updater: SegmentUpdater = self.clone();
+        let directory = self.index.directory().clone();
         self.schedule_future(async move {
-            let segment_entries = segment_updater.purge_deletes(opstamp)?;
+            let mut segment_entries = segment_updater.purge_deletes(opstamp)?;
+            if !soft_commit {
+                for segment_entry in &mut segment_entries {
+                    segment_entry.persist(directory.clone())?;
+                }
+            }
             segment_updater.segment_manager.commit(segment_entries);
-            segment_updater.save_metas(opstamp, payload)?;
+            if !soft_commit {
+                segment_updater.save_metas(opstamp, payload)?;
+            }
             let _ = garbage_collect_files(segment_updater.clone()).await;
             segment_updater.consider_merge_options().await;
             Ok(())
@@ -422,7 +431,7 @@ impl SegmentUpdater {
         });
 
         Ok(merging_future_recv
-            .unwrap_or_else(|_| Err(crate::Error::SystemError("Merge failed".to_string()))))
+            .unwrap_or_else(|_| Err(crate::TantivyError::SystemError("Merge failed".to_string()))))
     }
 
     async fn consider_merge_options(&self) {
@@ -473,17 +482,14 @@ impl SegmentUpdater {
         let end_merge_future = self.schedule_future(async move {
            info!("End merge {:?}", after_merge_segment_entry.meta());
             {
-                let mut delete_cursor = after_merge_segment_entry.delete_cursor().clone();
+                let mut delete_cursor = after_merge_segment_entry.delete_cursor();
                 if let Some(delete_operation) = delete_cursor.get() {
                     let committed_opstamp = segment_updater.load_metas().opstamp;
                     if delete_operation.opstamp < committed_opstamp {
-                        let index = &segment_updater.index;
-                        let segment = index.segment(after_merge_segment_entry.meta().clone());
-                        if let Err(e) = advance_deletes(
-                            segment,
-                            &mut after_merge_segment_entry,
-                            committed_opstamp,
-                        ) {
+                        let _index = &segment_updater.index;
+                        if let Err(e) =
+                            advance_deletes(&mut after_merge_segment_entry, committed_opstamp)
+                        {
                             error!(
                                 "Merge of {:?} was cancelled (advancing deletes failed): {:?}",
                                 merge_operation.segment_ids(),
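The new `soft_commit` flag splits `schedule_commit` into two behaviors: a hard commit persists every volatile segment into the index directory and saves `meta.json`, while a soft commit publishes the segment entries to the `SegmentManager` only, skipping both durability steps. A simplified restatement of that branch structure (crate-internal API, error paths trimmed):

```rust
// Illustrative restatement of the schedule_commit body introduced above.
async fn commit_inner(
    updater: SegmentUpdater,
    directory: ManagedDirectory,
    opstamp: Opstamp,
    payload: Option<String>,
    soft_commit: bool,
) -> crate::Result<()> {
    let mut segment_entries = updater.purge_deletes(opstamp)?;
    if !soft_commit {
        // Hard commit: move volatile (RAM) segments onto durable storage.
        for segment_entry in &mut segment_entries {
            segment_entry.persist(directory.clone())?;
        }
    }
    // In both cases the entries become the committed view for searchers.
    updater.segment_manager.commit(segment_entries);
    if !soft_commit {
        // Only a hard commit records a new meta.json checkpoint.
        updater.save_metas(opstamp, payload)?;
    }
    Ok(())
}
```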
@@ -3,7 +3,7 @@ use crate::core::Segment;
 use crate::core::SerializableSegment;
 use crate::fastfield::FastFieldsWriter;
 use crate::fieldnorm::FieldNormsWriter;
-use crate::indexer::segment_serializer::{SegmentSerializer, SegmentSerializerWriters};
+use crate::indexer::segment_serializer::SegmentSerializer;
 use crate::postings::compute_table_size;
 use crate::postings::MultiFieldPostingsWriter;
 use crate::schema::FieldType;
@@ -11,21 +11,19 @@ use crate::schema::Schema;
 use crate::schema::Term;
 use crate::schema::Value;
 use crate::schema::{Field, FieldEntry};
-use crate::tokenizer::BoxedTokenizer;
-use crate::tokenizer::FacetTokenizer;
-use crate::tokenizer::PreTokenizedStream;
-use crate::tokenizer::{TokenStream, TokenStreamChain, Tokenizer};
+use crate::tokenizer::TokenizerManager;
+use crate::tokenizer::{BoxTokenStream, FacetTokenizer};
+use crate::tokenizer::{PreTokenizedStream, TextAnalyzer};
+use crate::tokenizer::{TokenStreamChain, Tokenizer};
 use crate::DocId;
 use crate::Opstamp;
-use crate::Result;
-use crate::TantivyError;
 use std::io;
 use std::str;
 
 /// Computes the initial size of the hash table.
 ///
 /// Returns a number of bits `b`, such that the recommended initial table size is 2^b.
-fn initial_table_size(per_thread_memory_budget: usize) -> Result<usize> {
+fn initial_table_size(per_thread_memory_budget: usize) -> crate::Result<usize> {
     let table_memory_upper_bound = per_thread_memory_budget / 3;
     if let Some(limit) = (10..)
         .take_while(|num_bits: &usize| compute_table_size(*num_bits) < table_memory_upper_bound)
@@ -33,7 +31,7 @@ fn initial_table_size(per_thread_memory_budget: usize) -> Result<usize> {
     {
         Ok(limit.min(19)) // we cap it at 2^19 = 512K.
     } else {
-        Err(TantivyError::InvalidArgument(
+        Err(crate::TantivyError::InvalidArgument(
             format!("per thread memory budget (={}) is too small. Raise the memory budget or lower the number of threads.", per_thread_memory_budget)))
     }
 }
@@ -50,7 +48,7 @@ pub struct SegmentWriter {
     fast_field_writers: FastFieldsWriter,
     fieldnorms_writer: FieldNormsWriter,
     doc_opstamps: Vec<Opstamp>,
-    tokenizers: Vec<Option<BoxedTokenizer>>,
+    tokenizers: Vec<Option<TextAnalyzer>>,
 }
 
 impl SegmentWriter {
@@ -67,11 +65,12 @@ impl SegmentWriter {
         memory_budget: usize,
         mut segment: Segment,
         schema: &Schema,
-    ) -> Result<SegmentWriter> {
+        tokenizer_manager: &TokenizerManager,
+    ) -> crate::Result<SegmentWriter> {
         let table_num_bits = initial_table_size(memory_budget)?;
-        let segment_serializer_wrts = SegmentSerializerWriters::for_segment(&mut segment)?;
-        let segment_serializer = SegmentSerializer::new(segment.schema(), segment_serializer_wrts)?;
-        let multifield_postings = MultiFieldPostingsWriter::new(schema, table_num_bits);
+        let segment_serializer = SegmentSerializer::for_segment(&mut segment)?;
+        let multifield_postings = MultiFieldPostingsWriter::new(&schema, table_num_bits);
         let tokenizers = schema
             .fields()
             .map(
@@ -80,7 +79,7 @@ impl SegmentWriter {
                     .get_indexing_options()
                     .and_then(|text_index_option| {
                         let tokenizer_name = &text_index_option.tokenizer();
-                        segment.index().tokenizers().get(tokenizer_name)
+                        tokenizer_manager.get(tokenizer_name)
                     }),
                 _ => None,
             },
@@ -89,9 +88,9 @@ impl SegmentWriter {
         Ok(SegmentWriter {
             max_doc: 0,
             multifield_postings,
-            fieldnorms_writer: FieldNormsWriter::for_schema(schema),
+            fieldnorms_writer: FieldNormsWriter::for_schema(&schema),
             segment_serializer,
-            fast_field_writers: FastFieldsWriter::from_schema(schema),
+            fast_field_writers: FastFieldsWriter::from_schema(&schema),
             doc_opstamps: Vec::with_capacity(1_000),
             tokenizers,
         })
@@ -101,7 +100,7 @@ impl SegmentWriter {
     ///
     /// Finalize consumes the `SegmentWriter`, so that it cannot
     /// be used afterwards.
-    pub fn finalize(mut self) -> Result<Vec<u64>> {
+    pub fn finalize(mut self) -> crate::Result<Vec<u64>> {
         self.fieldnorms_writer.fill_up_to_max_doc(self.max_doc);
         write(
             &self.multifield_postings,
@@ -160,7 +159,7 @@ impl SegmentWriter {
             }
         }
         FieldType::Str(_) => {
-            let mut token_streams: Vec<Box<dyn TokenStream>> = vec![];
+            let mut token_streams: Vec<BoxTokenStream> = vec![];
             let mut offsets = vec![];
             let mut total_offset = 0;
 
@@ -173,7 +172,7 @@ impl SegmentWriter {
            }
 
             token_streams
-                .push(Box::new(PreTokenizedStream::from(tok_str.clone())));
+                .push(PreTokenizedStream::from(tok_str.clone()).into());
         }
         Value::Str(ref text) => {
             if let Some(ref mut tokenizer) =
@@ -192,8 +191,7 @@ impl SegmentWriter {
         let num_tokens = if token_streams.is_empty() {
             0
         } else {
-            let mut token_stream: Box<dyn TokenStream> =
-                Box::new(TokenStreamChain::new(offsets, token_streams));
+            let mut token_stream = TokenStreamChain::new(offsets, token_streams);
             self.multifield_postings
                 .index_text(doc_id, field, &mut token_stream)
         };
@@ -284,7 +282,7 @@ fn write(
     fast_field_writers: &FastFieldsWriter,
     fieldnorms_writer: &FieldNormsWriter,
     mut serializer: SegmentSerializer,
-) -> Result<()> {
+) -> crate::Result<()> {
     let term_ord_map = multifield_postings.serialize(serializer.get_postings_serializer())?;
     fast_field_writers.serialize(serializer.get_fast_field_serializer(), &term_ord_map)?;
     fieldnorms_writer.serialize(serializer.get_fieldnorms_serializer())?;
@@ -293,7 +291,7 @@ fn write(
 }
 
 impl SerializableSegment for SegmentWriter {
-    fn write(&self, serializer: SegmentSerializer) -> Result<u32> {
+    fn write(&self, serializer: SegmentSerializer) -> crate::Result<u32> {
         let max_doc = self.max_doc;
         write(
             &self.multifield_postings,
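For a concrete feel of `initial_table_size`: a third of the per-thread budget is reserved for the term hash table, and the search picks the largest `b >= 10` whose table still fits, capped at 19 bits. A self-contained sketch, assuming (hypothetically) a cost of 16 bytes per slot; tantivy's real `compute_table_size` may count differently:

```rust
/// Hypothetical per-slot cost; tantivy's real `compute_table_size` may
/// account for more than this.
fn compute_table_size(num_bits: usize) -> usize {
    (1 << num_bits) * 16
}

/// Mirrors the search in the diff: pick the largest number of bits `b`
/// (starting at 10) whose table still fits in a third of the budget,
/// capped at 19 (2^19 slots).
fn initial_table_size(per_thread_memory_budget: usize) -> Option<usize> {
    let upper_bound = per_thread_memory_budget / 3;
    (10..)
        .take_while(|&b| compute_table_size(b) < upper_bound)
        .last()
        .map(|b| b.min(19))
}

fn main() {
    // A 3 MiB budget leaves 1 MiB for the table: 2^16 * 16 bytes is
    // exactly 1 MiB (too big), so the search settles on 15 bits.
    assert_eq!(initial_table_size(3 << 20), Some(15));
    // Tiny budgets yield None, which the real code turns into an
    // InvalidArgument error.
    assert_eq!(initial_table_size(1024), None);
}
```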
@@ -1,18 +1,76 @@
 use crate::Opstamp;
 use std::ops::Range;
-use std::sync::atomic::{AtomicU64, Ordering};
+use std::sync::atomic::Ordering;
 use std::sync::Arc;
 
+#[cfg(not(target_arch = "arm"))]
+mod atomic_impl {
+
+    use crate::Opstamp;
+    use std::sync::atomic::{AtomicU64, Ordering};
+
+    #[derive(Default)]
+    pub struct AtomicU64Wrapper(AtomicU64);
+
+    impl AtomicU64Wrapper {
+        pub fn new(first_opstamp: Opstamp) -> AtomicU64Wrapper {
+            AtomicU64Wrapper(AtomicU64::new(first_opstamp as u64))
+        }
+
+        pub fn fetch_add(&self, val: u64, order: Ordering) -> u64 {
+            self.0.fetch_add(val as u64, order) as u64
+        }
+
+        pub fn revert(&self, val: u64, order: Ordering) -> u64 {
+            self.0.store(val, order);
+            val
+        }
+    }
+}
+
+#[cfg(target_arch = "arm")]
+mod atomic_impl {
+
+    use crate::Opstamp;
+    /// On other architectures, we rely on a mutex.
+    use std::sync::atomic::Ordering;
+    use std::sync::RwLock;
+
+    #[derive(Default)]
+    pub struct AtomicU64Wrapper(RwLock<u64>);
+
+    impl AtomicU64Wrapper {
+        pub fn new(first_opstamp: Opstamp) -> AtomicU64Wrapper {
+            AtomicU64Wrapper(RwLock::new(first_opstamp))
+        }
+
+        pub fn fetch_add(&self, incr: u64, _order: Ordering) -> u64 {
+            let mut lock = self.0.write().unwrap();
+            let previous_val = *lock;
+            *lock = previous_val + incr;
+            previous_val
+        }
+
+        pub fn revert(&self, val: u64, _order: Ordering) -> u64 {
+            let mut lock = self.0.write().unwrap();
+            *lock = val;
+            val
+        }
+    }
+}
+
+use self::atomic_impl::AtomicU64Wrapper;
+
 /// Stamper provides Opstamps, which is just an auto-increment id to label
 /// an operation.
 ///
 /// Cloning does not "fork" the stamp generation. The stamper actually wraps an `Arc`.
 #[derive(Clone, Default)]
-pub struct Stamper(Arc<AtomicU64>);
+pub struct Stamper(Arc<AtomicU64Wrapper>);
 
 impl Stamper {
     pub fn new(first_opstamp: Opstamp) -> Stamper {
-        Stamper(Arc::new(AtomicU64::new(first_opstamp)))
+        Stamper(Arc::new(AtomicU64Wrapper::new(first_opstamp)))
     }
 
     pub fn stamp(&self) -> Opstamp {
@@ -31,8 +89,7 @@ impl Stamper {
 
     /// Reverts the stamper to a given `Opstamp` value and returns it
     pub fn revert(&self, to_opstamp: Opstamp) -> Opstamp {
-        self.0.store(to_opstamp, Ordering::SeqCst);
-        to_opstamp
+        self.0.revert(to_opstamp, Ordering::SeqCst)
     }
 }
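The `AtomicU64Wrapper` split keeps `Stamper`'s contract identical on both families of targets (bare `AtomicU64` where 64-bit atomics exist, a lock-based fallback on ARM): `stamp` returns the previous counter value, and `revert` rewinds it after a rollback. A standalone sketch of that contract using the non-ARM representation:

```rust
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;

struct Stamper(Arc<AtomicU64>);

impl Stamper {
    fn stamp(&self) -> u64 {
        // fetch_add returns the previous value: opstamps start at the seed.
        self.0.fetch_add(1, Ordering::SeqCst)
    }

    fn revert(&self, to: u64) -> u64 {
        self.0.store(to, Ordering::SeqCst);
        to
    }
}

fn main() {
    let stamper = Stamper(Arc::new(AtomicU64::new(10)));
    assert_eq!(stamper.stamp(), 10);
    assert_eq!(stamper.stamp(), 11);
    // A rollback rewinds the counter; the next stamp reuses 10.
    assert_eq!(stamper.revert(10), 10);
    assert_eq!(stamper.stamp(), 10);
}
```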
10
src/lib.rs

@@ -121,13 +121,14 @@ mod functional_test;
 mod macros;
 
 pub use crate::error::TantivyError;
 
-#[deprecated(since = "0.7.0", note = "please use `tantivy::TantivyError` instead")]
 pub use crate::error::TantivyError as Error;
 pub use chrono;
 
 /// Tantivy result.
-pub type Result<T> = std::result::Result<T, error::TantivyError>;
+///
+/// Within tantivy, please avoid importing `Result` using `use crate::Result`
+/// and instead, refer to this as `crate::Result<T>`.
+pub type Result<T> = std::result::Result<T, TantivyError>;
 
 /// Tantivy DateTime
 pub type DateTime = chrono::DateTime<chrono::Utc>;
@@ -161,10 +162,11 @@ pub use self::snippet::{Snippet, SnippetGenerator};
 mod docset;
 pub use self::docset::{DocSet, SkipResult};
 pub use crate::common::{f64_to_u64, i64_to_u64, u64_to_f64, u64_to_i64};
-pub use crate::core::SegmentComponent;
+pub use crate::core::{Executor, SegmentComponent};
 pub use crate::core::{Index, IndexMeta, Searcher, Segment, SegmentId, SegmentMeta};
 pub use crate::core::{InvertedIndexReader, SegmentReader};
 pub use crate::directory::Directory;
+pub use crate::indexer::operation::UserOperation;
 pub use crate::indexer::IndexWriter;
 pub use crate::postings::Postings;
 pub use crate::reader::LeasedItem;
@@ -106,7 +106,7 @@ impl BlockSearcher {
     /// the target.
     ///
     /// The results should be equivalent to
-    /// ```ignore
+    /// ```compile_fail
     /// block[..]
     //     .iter()
     //     .take_while(|&&val| val < target)
@@ -75,7 +75,7 @@ pub mod tests {
         let schema = schema_builder.build();
         let index = Index::create_in_ram(schema);
         let mut segment = index.new_segment();
-        let mut posting_serializer = InvertedIndexSerializer::for_segment(&mut segment).unwrap();
+        let mut posting_serializer = InvertedIndexSerializer::open(&mut segment).unwrap();
         {
             let mut field_serializer = posting_serializer.new_field(text_field, 120 * 4).unwrap();
             field_serializer.new_term("abc".as_bytes()).unwrap();
@@ -220,7 +220,8 @@ pub mod tests {
 
         {
             let mut segment_writer =
-                SegmentWriter::for_segment(3_000_000, segment.clone(), &schema).unwrap();
+                SegmentWriter::for_segment(3_000_000, segment.clone(), &schema, index.tokenizers())
+                    .unwrap();
             {
                 let mut doc = Document::default();
                 // checking that position works if the field has two values
@@ -11,7 +11,6 @@ use crate::termdict::TermOrdinal;
 use crate::tokenizer::TokenStream;
 use crate::tokenizer::{Token, MAX_TOKEN_LEN};
 use crate::DocId;
-use crate::Result;
 use fnv::FnvHashMap;
 use std::collections::HashMap;
 use std::io;
@@ -129,7 +128,7 @@ impl MultiFieldPostingsWriter {
     pub fn serialize(
         &self,
         serializer: &mut InvertedIndexSerializer,
-    ) -> Result<HashMap<Field, FnvHashMap<UnorderedTermId, TermOrdinal>>> {
+    ) -> crate::Result<HashMap<Field, FnvHashMap<UnorderedTermId, TermOrdinal>>> {
         let mut term_offsets: Vec<(&[u8], Addr, UnorderedTermId)> =
             self.term_index.iter().collect();
         term_offsets.sort_unstable_by_key(|&(k, _, _)| k);
@@ -10,8 +10,7 @@ use crate::postings::USE_SKIP_INFO_LIMIT;
 use crate::schema::Schema;
 use crate::schema::{Field, FieldEntry, FieldType};
 use crate::termdict::{TermDictionaryBuilder, TermOrdinal};
-use crate::Result;
-use crate::{Directory, DocId};
+use crate::DocId;
 use std::io::{self, Write};
 
 /// `InvertedIndexSerializer` is in charge of serializing
@@ -54,36 +53,33 @@ pub struct InvertedIndexSerializer {
 }
 
 impl InvertedIndexSerializer {
-    pub(crate) fn for_segment(segment: &mut Segment) -> crate::Result<Self> {
-        let schema = segment.schema();
-        use crate::core::SegmentComponent;
-        let terms_wrt = segment.open_write(SegmentComponent::TERMS)?;
-        let postings_wrt = segment.open_write(SegmentComponent::POSTINGS)?;
-        let positions_wrt = segment.open_write(SegmentComponent::POSITIONS)?;
-        let positions_idx_wrt = segment.open_write(SegmentComponent::POSITIONSSKIP)?;
-        Ok(Self::open(
-            schema,
-            terms_wrt,
-            postings_wrt,
-            positions_wrt,
-            positions_idx_wrt,
-        ))
-    }
-    /// Open a new `PostingsSerializer` for the given segment
-    pub(crate) fn open(
+    /// Open a new `InvertedIndexSerializer` for the given segment
+    fn create(
+        terms_write: CompositeWrite<WritePtr>,
+        postings_write: CompositeWrite<WritePtr>,
+        positions_write: CompositeWrite<WritePtr>,
+        positionsidx_write: CompositeWrite<WritePtr>,
         schema: Schema,
-        terms_wrt: WritePtr,
-        postings_wrt: WritePtr,
-        positions_wrt: WritePtr,
-        positions_idx_wrt: WritePtr,
-    ) -> InvertedIndexSerializer {
-        InvertedIndexSerializer {
-            terms_write: CompositeWrite::wrap(terms_wrt),
-            postings_write: CompositeWrite::wrap(postings_wrt),
-            positions_write: CompositeWrite::wrap(positions_wrt),
-            positionsidx_write: CompositeWrite::wrap(positions_idx_wrt),
+    ) -> crate::Result<InvertedIndexSerializer> {
+        Ok(InvertedIndexSerializer {
+            terms_write,
+            postings_write,
+            positions_write,
+            positionsidx_write,
             schema,
-        }
+        })
+    }
+
+    /// Open a new `PostingsSerializer` for the given segment
+    pub fn open(segment: &mut Segment) -> crate::Result<InvertedIndexSerializer> {
+        use crate::SegmentComponent::{POSITIONS, POSITIONSSKIP, POSTINGS, TERMS};
+        InvertedIndexSerializer::create(
+            CompositeWrite::wrap(segment.open_write(TERMS)?),
+            CompositeWrite::wrap(segment.open_write(POSTINGS)?),
+            CompositeWrite::wrap(segment.open_write(POSITIONS)?),
+            CompositeWrite::wrap(segment.open_write(POSITIONSSKIP)?),
+            segment.schema(),
+        )
     }
 
     /// Must be called before starting pushing terms of
@@ -151,8 +147,7 @@ impl<'a> FieldSerializer<'a> {
             }
            _ => (false, false),
         };
-        let term_dictionary_builder =
-            TermDictionaryBuilder::create(term_dictionary_write, &field_type)?;
+        let term_dictionary_builder = TermDictionaryBuilder::create(term_dictionary_write)?;
         let postings_serializer =
             PostingsSerializer::new(postings_write, term_freq_enabled, position_enabled);
         let positions_serializer_opt = if position_enabled {
@@ -1,10 +1,10 @@
|
|||||||
use crate::core::Searcher;
|
use crate::core::Searcher;
|
||||||
use crate::core::SegmentReader;
|
use crate::core::SegmentReader;
|
||||||
use crate::docset::DocSet;
|
use crate::docset::DocSet;
|
||||||
|
use crate::query::boost_query::BoostScorer;
|
||||||
use crate::query::explanation::does_not_match;
|
use crate::query::explanation::does_not_match;
|
||||||
use crate::query::{Explanation, Query, Scorer, Weight};
|
use crate::query::{Explanation, Query, Scorer, Weight};
|
||||||
use crate::DocId;
|
use crate::DocId;
|
||||||
use crate::Result;
|
|
||||||
use crate::Score;
|
use crate::Score;
|
||||||
|
|
||||||
/// Query that matches all of the documents.
|
/// Query that matches all of the documents.
|
||||||
@@ -14,7 +14,7 @@ use crate::Score;
|
|||||||
pub struct AllQuery;
|
pub struct AllQuery;
|
||||||
|
|
||||||
impl Query for AllQuery {
|
impl Query for AllQuery {
|
||||||
fn weight(&self, _: &Searcher, _: bool) -> Result<Box<dyn Weight>> {
|
fn weight(&self, _: &Searcher, _: bool) -> crate::Result<Box<dyn Weight>> {
|
||||||
Ok(Box::new(AllWeight))
|
Ok(Box::new(AllWeight))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -23,15 +23,16 @@ impl Query for AllQuery {
|
|||||||
pub struct AllWeight;
|
pub struct AllWeight;
|
||||||
|
|
||||||
impl Weight for AllWeight {
|
impl Weight for AllWeight {
|
||||||
fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
|
fn scorer(&self, reader: &SegmentReader, boost: f32) -> crate::Result<Box<dyn Scorer>> {
|
||||||
Ok(Box::new(AllScorer {
|
let all_scorer = AllScorer {
|
||||||
state: State::NotStarted,
|
state: State::NotStarted,
|
||||||
doc: 0u32,
|
doc: 0u32,
|
||||||
max_doc: reader.max_doc(),
|
max_doc: reader.max_doc(),
|
||||||
}))
|
};
|
||||||
|
Ok(Box::new(BoostScorer::new(all_scorer, boost)))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
|
fn explain(&self, reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation> {
|
||||||
if doc >= reader.max_doc() {
|
if doc >= reader.max_doc() {
|
||||||
return Err(does_not_match(doc));
|
return Err(does_not_match(doc));
|
||||||
}
|
}
|
||||||
@@ -91,14 +92,12 @@ impl Scorer for AllScorer {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
||||||
use super::AllQuery;
|
use super::AllQuery;
|
||||||
use crate::query::Query;
|
use crate::query::Query;
|
||||||
use crate::schema::{Schema, TEXT};
|
use crate::schema::{Schema, TEXT};
|
||||||
use crate::Index;
|
use crate::Index;
|
||||||
|
|
||||||
#[test]
|
fn create_test_index() -> Index {
|
||||||
fn test_all_query() {
|
|
||||||
let mut schema_builder = Schema::builder();
|
let mut schema_builder = Schema::builder();
|
||||||
let field = schema_builder.add_text_field("text", TEXT);
|
let field = schema_builder.add_text_field("text", TEXT);
|
||||||
let schema = schema_builder.build();
|
let schema = schema_builder.build();
|
||||||
@@ -109,13 +108,18 @@ mod tests {
|
|||||||
index_writer.commit().unwrap();
|
index_writer.commit().unwrap();
|
||||||
index_writer.add_document(doc!(field=>"ccc"));
|
index_writer.add_document(doc!(field=>"ccc"));
|
||||||
index_writer.commit().unwrap();
|
index_writer.commit().unwrap();
|
||||||
|
index
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_all_query() {
|
||||||
|
let index = create_test_index();
|
||||||
let reader = index.reader().unwrap();
|
let reader = index.reader().unwrap();
|
||||||
reader.reload().unwrap();
|
|
||||||
let searcher = reader.searcher();
|
let searcher = reader.searcher();
|
||||||
let weight = AllQuery.weight(&searcher, false).unwrap();
|
let weight = AllQuery.weight(&searcher, false).unwrap();
|
||||||
{
|
{
|
||||||
let reader = searcher.segment_reader(0);
|
let reader = searcher.segment_reader(0);
|
||||||
let mut scorer = weight.scorer(reader).unwrap();
|
let mut scorer = weight.scorer(reader, 1.0f32).unwrap();
|
||||||
assert!(scorer.advance());
|
assert!(scorer.advance());
|
||||||
assert_eq!(scorer.doc(), 0u32);
|
assert_eq!(scorer.doc(), 0u32);
|
||||||
assert!(scorer.advance());
|
assert!(scorer.advance());
|
||||||
@@ -124,10 +128,31 @@ mod tests {
|
|||||||
}
|
}
|
||||||
{
|
{
|
||||||
let reader = searcher.segment_reader(1);
|
let reader = searcher.segment_reader(1);
|
||||||
let mut scorer = weight.scorer(reader).unwrap();
|
let mut scorer = weight.scorer(reader, 1.0f32).unwrap();
|
||||||
assert!(scorer.advance());
|
assert!(scorer.advance());
|
||||||
assert_eq!(scorer.doc(), 0u32);
|
assert_eq!(scorer.doc(), 0u32);
|
||||||
assert!(!scorer.advance());
|
assert!(!scorer.advance());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_all_query_with_boost() {
|
||||||
|
let index = create_test_index();
|
||||||
|
let reader = index.reader().unwrap();
|
||||||
|
let searcher = reader.searcher();
|
||||||
|
let weight = AllQuery.weight(&searcher, false).unwrap();
|
||||||
|
let reader = searcher.segment_reader(0);
|
||||||
|
{
|
||||||
|
let mut scorer = weight.scorer(reader, 2.0f32).unwrap();
|
||||||
|
assert!(scorer.advance());
|
||||||
|
assert_eq!(scorer.doc(), 0u32);
|
||||||
|
assert_eq!(scorer.score(), 2.0f32);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let mut scorer = weight.scorer(reader, 1.5f32).unwrap();
|
||||||
|
assert!(scorer.advance());
|
||||||
|
assert_eq!(scorer.doc(), 0u32);
|
||||||
|
assert_eq!(scorer.score(), 1.5f32);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
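Editor's note: the change repeated across the hunks below is mechanical — `Weight::scorer` gains a `boost: f32` parameter, and every implementation threads it through. A minimal sketch of the updated calling convention (the `query` and `searcher` bindings are illustrative, not taken from this diff):

    // Obtain a Weight, then a Scorer for one segment; 1.0f32 leaves scores unchanged.
    let weight = query.weight(&searcher, true)?;
    let mut scorer = weight.scorer(searcher.segment_reader(0u32), 1.0f32)?;
    while scorer.advance() {
        // A boost of 2.0f32 would have doubled every score returned here.
        println!("doc={} score={}", scorer.doc(), scorer.score());
    }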
@@ -40,7 +40,7 @@ impl<A> Weight for AutomatonWeight<A>
 where
     A: Automaton + Send + Sync + 'static,
 {
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> Result<Box<dyn Scorer>> {
         let max_doc = reader.max_doc();
         let mut doc_bitset = BitSet::with_max_value(max_doc);
 

@@ -58,11 +58,12 @@ where
             }
         }
         let doc_bitset = BitSetDocSet::from(doc_bitset);
-        Ok(Box::new(ConstScorer::new(doc_bitset)))
+        let const_scorer = ConstScorer::new(doc_bitset, boost);
+        Ok(Box::new(const_scorer))
     }
 
     fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
-        let mut scorer = self.scorer(reader)?;
+        let mut scorer = self.scorer(reader, 1.0f32)?;
         if scorer.skip_next(doc) == SkipResult::Reached {
             Ok(Explanation::new("AutomatonScorer", 1.0f32))
         } else {

@@ -72,3 +73,95 @@ where
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::AutomatonWeight;
+    use crate::query::Weight;
+    use crate::schema::{Schema, STRING};
+    use crate::Index;
+    use tantivy_fst::Automaton;
+
+    fn create_index() -> Index {
+        let mut schema = Schema::builder();
+        let title = schema.add_text_field("title", STRING);
+        let index = Index::create_in_ram(schema.build());
+        let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
+        index_writer.add_document(doc!(title=>"abc"));
+        index_writer.add_document(doc!(title=>"bcd"));
+        index_writer.add_document(doc!(title=>"abcd"));
+        assert!(index_writer.commit().is_ok());
+        index
+    }
+
+    enum State {
+        Start,
+        NotMatching,
+        AfterA,
+    }
+
+    struct PrefixedByA;
+
+    impl Automaton for PrefixedByA {
+        type State = State;
+
+        fn start(&self) -> Self::State {
+            State::Start
+        }
+
+        fn is_match(&self, state: &Self::State) -> bool {
+            match *state {
+                State::AfterA => true,
+                _ => false,
+            }
+        }
+
+        fn accept(&self, state: &Self::State, byte: u8) -> Self::State {
+            match *state {
+                State::Start => {
+                    if byte == b'a' {
+                        State::AfterA
+                    } else {
+                        State::NotMatching
+                    }
+                }
+                State::AfterA => State::AfterA,
+                State::NotMatching => State::NotMatching,
+            }
+        }
+    }
+
+    #[test]
+    fn test_automaton_weight() {
+        let index = create_index();
+        let field = index.schema().get_field("title").unwrap();
+        let automaton_weight = AutomatonWeight::new(field, PrefixedByA);
+        let reader = index.reader().unwrap();
+        let searcher = reader.searcher();
+        let mut scorer = automaton_weight
+            .scorer(searcher.segment_reader(0u32), 1.0f32)
+            .unwrap();
+        assert!(scorer.advance());
+        assert_eq!(scorer.doc(), 0u32);
+        assert_eq!(scorer.score(), 1.0f32);
+        assert!(scorer.advance());
+        assert_eq!(scorer.doc(), 2u32);
+        assert_eq!(scorer.score(), 1.0f32);
+        assert!(!scorer.advance());
+    }
+
+    #[test]
+    fn test_automaton_weight_boost() {
+        let index = create_index();
+        let field = index.schema().get_field("title").unwrap();
+        let automaton_weight = AutomatonWeight::new(field, PrefixedByA);
+        let reader = index.reader().unwrap();
+        let searcher = reader.searcher();
+        let mut scorer = automaton_weight
+            .scorer(searcher.segment_reader(0u32), 1.32f32)
+            .unwrap();
+        assert!(scorer.advance());
+        assert_eq!(scorer.doc(), 0u32);
+        assert_eq!(scorer.score(), 1.32f32);
+    }
+}

@@ -25,7 +25,6 @@ fn compute_tf_cache(average_fieldnorm: f32) -> [f32; 256] {
     cache
 }
 
-#[derive(Clone)]
 pub struct BM25Weight {
     idf_explain: Explanation,
     weight: f32,

@@ -34,6 +33,15 @@ pub struct BM25Weight {
 }
 
 impl BM25Weight {
+    pub fn boost_by(&self, boost: f32) -> BM25Weight {
+        BM25Weight {
+            idf_explain: self.idf_explain.clone(),
+            weight: self.weight * boost,
+            cache: self.cache,
+            average_fieldnorm: self.average_fieldnorm,
+        }
+    }
+
     pub fn for_terms(searcher: &Searcher, terms: &[Term]) -> BM25Weight {
         assert!(!terms.is_empty(), "BM25 requires at least one term");
         let field = terms[0].field();

@@ -5,7 +5,6 @@ use crate::query::TermQuery;
 use crate::query::Weight;
 use crate::schema::IndexRecordOption;
 use crate::schema::Term;
-use crate::Result;
 use crate::Searcher;
 use std::collections::BTreeSet;
 

@@ -30,9 +29,9 @@ use std::collections::BTreeSet;
 ///use tantivy::query::{BooleanQuery, Occur, PhraseQuery, Query, TermQuery};
 ///use tantivy::schema::{IndexRecordOption, Schema, TEXT};
 ///use tantivy::Term;
-///use tantivy::{Index, Result};
+///use tantivy::Index;
 ///
-///fn main() -> Result<()> {
+///fn main() -> tantivy::Result<()> {
 /// let mut schema_builder = Schema::builder();
 /// let title = schema_builder.add_text_field("title", TEXT);
 /// let body = schema_builder.add_text_field("body", TEXT);

@@ -149,14 +148,14 @@ impl From<Vec<(Occur, Box<dyn Query>)>> for BooleanQuery {
 }
 
 impl Query for BooleanQuery {
-    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
         let sub_weights = self
             .subqueries
             .iter()
             .map(|&(ref occur, ref subquery)| {
                 Ok((*occur, subquery.weight(searcher, scoring_enabled)?))
             })
-            .collect::<Result<_>>()?;
+            .collect::<crate::Result<_>>()?;
         Ok(Box::new(BooleanWeight::new(sub_weights, scoring_enabled)))
     }
 

@@ -10,7 +10,6 @@ use crate::query::Scorer;
 use crate::query::Union;
 use crate::query::Weight;
 use crate::query::{intersect_scorers, Explanation};
-use crate::Result;
 use crate::{DocId, SkipResult};
 use std::collections::HashMap;
 

@@ -56,10 +55,11 @@ impl BooleanWeight {
     fn per_occur_scorers(
         &self,
         reader: &SegmentReader,
-    ) -> Result<HashMap<Occur, Vec<Box<dyn Scorer>>>> {
+        boost: f32,
+    ) -> crate::Result<HashMap<Occur, Vec<Box<dyn Scorer>>>> {
         let mut per_occur_scorers: HashMap<Occur, Vec<Box<dyn Scorer>>> = HashMap::new();
         for &(ref occur, ref subweight) in &self.weights {
-            let sub_scorer: Box<dyn Scorer> = subweight.scorer(reader)?;
+            let sub_scorer: Box<dyn Scorer> = subweight.scorer(reader, boost)?;
             per_occur_scorers
                 .entry(*occur)
                 .or_insert_with(Vec::new)

@@ -71,8 +71,9 @@ impl BooleanWeight {
     fn complex_scorer<TScoreCombiner: ScoreCombiner>(
         &self,
         reader: &SegmentReader,
-    ) -> Result<Box<dyn Scorer>> {
-        let mut per_occur_scorers = self.per_occur_scorers(reader)?;
+        boost: f32,
+    ) -> crate::Result<Box<dyn Scorer>> {
+        let mut per_occur_scorers = self.per_occur_scorers(reader, boost)?;
 
         let should_scorer_opt: Option<Box<dyn Scorer>> = per_occur_scorers
             .remove(&Occur::Should)

@@ -113,7 +114,7 @@ impl BooleanWeight {
 }
 
 impl Weight for BooleanWeight {
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> crate::Result<Box<dyn Scorer>> {
         if self.weights.is_empty() {
             Ok(Box::new(EmptyScorer))
         } else if self.weights.len() == 1 {

@@ -121,17 +122,17 @@ impl Weight for BooleanWeight {
             if occur == Occur::MustNot {
                 Ok(Box::new(EmptyScorer))
             } else {
-                weight.scorer(reader)
+                weight.scorer(reader, boost)
             }
         } else if self.scoring_enabled {
-            self.complex_scorer::<SumWithCoordsCombiner>(reader)
+            self.complex_scorer::<SumWithCoordsCombiner>(reader, boost)
         } else {
-            self.complex_scorer::<DoNothingCombiner>(reader)
+            self.complex_scorer::<DoNothingCombiner>(reader, boost)
         }
     }
 
-    fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
-        let mut scorer = self.scorer(reader)?;
+    fn explain(&self, reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation> {
+        let mut scorer = self.scorer(reader, 1.0f32)?;
         if scorer.skip_next(doc) != SkipResult::Reached {
             return Err(does_not_match(doc));
         }

@@ -18,6 +18,7 @@ mod tests {
     use crate::query::Scorer;
     use crate::query::TermQuery;
     use crate::schema::*;
+    use crate::tests::assert_nearly_equals;
     use crate::Index;
     use crate::{DocAddress, DocId};
 

@@ -70,7 +71,9 @@ mod tests {
         let query = query_parser.parse_query("+a").unwrap();
         let searcher = index.reader().unwrap().searcher();
         let weight = query.weight(&searcher, true).unwrap();
-        let scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+        let scorer = weight
+            .scorer(searcher.segment_reader(0u32), 1.0f32)
+            .unwrap();
         assert!(scorer.is::<TermScorer>());
     }
 

@@ -82,13 +85,17 @@ mod tests {
         {
             let query = query_parser.parse_query("+a +b +c").unwrap();
             let weight = query.weight(&searcher, true).unwrap();
-            let scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+            let scorer = weight
+                .scorer(searcher.segment_reader(0u32), 1.0f32)
+                .unwrap();
             assert!(scorer.is::<Intersection<TermScorer>>());
         }
         {
             let query = query_parser.parse_query("+a +(b c)").unwrap();
             let weight = query.weight(&searcher, true).unwrap();
-            let scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+            let scorer = weight
+                .scorer(searcher.segment_reader(0u32), 1.0f32)
+                .unwrap();
             assert!(scorer.is::<Intersection<Box<dyn Scorer>>>());
         }
     }

@@ -101,7 +108,9 @@ mod tests {
         {
             let query = query_parser.parse_query("+a b").unwrap();
             let weight = query.weight(&searcher, true).unwrap();
-            let scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+            let scorer = weight
+                .scorer(searcher.segment_reader(0u32), 1.0f32)
+                .unwrap();
             assert!(scorer.is::<RequiredOptionalScorer<
                 Box<dyn Scorer>,
                 Box<dyn Scorer>,

@@ -111,7 +120,9 @@ mod tests {
         {
             let query = query_parser.parse_query("+a b").unwrap();
             let weight = query.weight(&searcher, false).unwrap();
-            let scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+            let scorer = weight
+                .scorer(searcher.segment_reader(0u32), 1.0f32)
+                .unwrap();
             assert!(scorer.is::<TermScorer>());
         }
     }

@@ -179,6 +190,50 @@ mod tests {
         }
     }
 
+    #[test]
+    pub fn test_boolean_query_with_weight() {
+        let mut schema_builder = Schema::builder();
+        let text_field = schema_builder.add_text_field("text", TEXT);
+        let schema = schema_builder.build();
+        let index = Index::create_in_ram(schema);
+        {
+            let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
+            index_writer.add_document(doc!(text_field => "a b c"));
+            index_writer.add_document(doc!(text_field => "a c"));
+            index_writer.add_document(doc!(text_field => "b c"));
+            assert!(index_writer.commit().is_ok());
+        }
+        let term_a: Box<dyn Query> = Box::new(TermQuery::new(
+            Term::from_field_text(text_field, "a"),
+            IndexRecordOption::WithFreqs,
+        ));
+        let term_b: Box<dyn Query> = Box::new(TermQuery::new(
+            Term::from_field_text(text_field, "b"),
+            IndexRecordOption::WithFreqs,
+        ));
+        let reader = index.reader().unwrap();
+        let searcher = reader.searcher();
+        let boolean_query =
+            BooleanQuery::from(vec![(Occur::Should, term_a), (Occur::Should, term_b)]);
+        let boolean_weight = boolean_query.weight(&searcher, true).unwrap();
+        {
+            let mut boolean_scorer = boolean_weight
+                .scorer(searcher.segment_reader(0u32), 1.0f32)
+                .unwrap();
+            assert!(boolean_scorer.advance());
+            assert_eq!(boolean_scorer.doc(), 0u32);
+            assert_nearly_equals(boolean_scorer.score(), 0.84163445f32);
+        }
+        {
+            let mut boolean_scorer = boolean_weight
+                .scorer(searcher.segment_reader(0u32), 2.0f32)
+                .unwrap();
+            assert!(boolean_scorer.advance());
+            assert_eq!(boolean_scorer.doc(), 0u32);
+            assert_nearly_equals(boolean_scorer.score(), 1.6832689f32);
+        }
+    }
+
     #[test]
     pub fn test_intersection_score() {
         let (index, text_field) = aux_test_helper();

@@ -249,7 +304,9 @@ mod tests {
         let query_parser = QueryParser::for_index(&index, vec![title, text]);
         let query = query_parser.parse_query("Оксана Лифенко").unwrap();
         let weight = query.weight(&searcher, true).unwrap();
-        let mut scorer = weight.scorer(searcher.segment_reader(0u32)).unwrap();
+        let mut scorer = weight
+            .scorer(searcher.segment_reader(0u32), 1.0f32)
+            .unwrap();
         scorer.advance();
 
         let explanation = query.explain(&searcher, DocAddress(0u32, 0u32)).unwrap();

src/query/boost_query.rs (new file, 164 lines)
@@ -0,0 +1,164 @@
+use crate::common::BitSet;
+use crate::fastfield::DeleteBitSet;
+use crate::query::explanation::does_not_match;
+use crate::query::{Explanation, Query, Scorer, Weight};
+use crate::{DocId, DocSet, Searcher, SegmentReader, SkipResult, Term};
+use std::collections::BTreeSet;
+use std::fmt;
+
+/// `BoostQuery` is a wrapper over a query used to boost its score.
+///
+/// The document set matched by the `BoostQuery` is strictly the same as the underlying query.
+/// The score of each document, is the score of the underlying query multiplied by the `boost`
+/// factor.
+pub struct BoostQuery {
+    query: Box<dyn Query>,
+    boost: f32,
+}
+
+impl BoostQuery {
+    /// Builds a boost query.
+    pub fn new(query: Box<dyn Query>, boost: f32) -> BoostQuery {
+        BoostQuery { query, boost }
+    }
+}
+
+impl Clone for BoostQuery {
+    fn clone(&self) -> Self {
+        BoostQuery {
+            query: self.query.box_clone(),
+            boost: self.boost,
+        }
+    }
+}
+
+impl fmt::Debug for BoostQuery {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Boost(query={:?}, boost={})", self.query, self.boost)
+    }
+}
+
+impl Query for BoostQuery {
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
+        let weight_without_boost = self.query.weight(searcher, scoring_enabled)?;
+        let boosted_weight = if scoring_enabled {
+            Box::new(BoostWeight::new(weight_without_boost, self.boost))
+        } else {
+            weight_without_boost
+        };
+        Ok(boosted_weight)
+    }
+
+    fn query_terms(&self, term_set: &mut BTreeSet<Term>) {
+        self.query.query_terms(term_set)
+    }
+}
+
+pub(crate) struct BoostWeight {
+    weight: Box<dyn Weight>,
+    boost: f32,
+}
+
+impl BoostWeight {
+    pub fn new(weight: Box<dyn Weight>, boost: f32) -> Self {
+        BoostWeight { weight, boost }
+    }
+}
+
+impl Weight for BoostWeight {
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> crate::Result<Box<dyn Scorer>> {
+        self.weight.scorer(reader, boost * self.boost)
+    }
+
+    fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
+        let mut scorer = self.scorer(reader, 1.0f32)?;
+        if scorer.skip_next(doc) != SkipResult::Reached {
+            return Err(does_not_match(doc));
+        }
+        let mut explanation =
+            Explanation::new(format!("Boost x{} of ...", self.boost), scorer.score());
+        let underlying_explanation = self.weight.explain(reader, doc)?;
+        explanation.add_detail(underlying_explanation);
+        Ok(explanation)
+    }
+
+    fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
+        self.weight.count(reader)
+    }
+}
+
+pub(crate) struct BoostScorer<S: Scorer> {
+    underlying: S,
+    boost: f32,
+}
+
+impl<S: Scorer> BoostScorer<S> {
+    pub fn new(underlying: S, boost: f32) -> BoostScorer<S> {
+        BoostScorer { underlying, boost }
+    }
+}
+
+impl<S: Scorer> DocSet for BoostScorer<S> {
+    fn advance(&mut self) -> bool {
+        self.underlying.advance()
+    }
+
+    fn skip_next(&mut self, target: DocId) -> SkipResult {
+        self.underlying.skip_next(target)
+    }
+
+    fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
+        self.underlying.fill_buffer(buffer)
+    }
+
+    fn doc(&self) -> u32 {
+        self.underlying.doc()
+    }
+
+    fn size_hint(&self) -> u32 {
+        self.underlying.size_hint()
+    }
+
+    fn append_to_bitset(&mut self, bitset: &mut BitSet) {
+        self.underlying.append_to_bitset(bitset)
+    }
+
+    fn count(&mut self, delete_bitset: &DeleteBitSet) -> u32 {
+        self.underlying.count(delete_bitset)
+    }
+
+    fn count_including_deleted(&mut self) -> u32 {
+        self.underlying.count_including_deleted()
+    }
+}
+
+impl<S: Scorer> Scorer for BoostScorer<S> {
+    fn score(&mut self) -> f32 {
+        self.underlying.score() * self.boost
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::BoostQuery;
+    use crate::query::{AllQuery, Query};
+    use crate::schema::Schema;
+    use crate::{DocAddress, Document, Index};
+
+    #[test]
+    fn test_boost_query_explain() {
+        let schema = Schema::builder().build();
+        let index = Index::create_in_ram(schema);
+        let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap();
+        index_writer.add_document(Document::new());
+        assert!(index_writer.commit().is_ok());
+        let reader = index.reader().unwrap();
+        let searcher = reader.searcher();
+        let query = BoostQuery::new(Box::new(AllQuery), 0.2);
+        let explanation = query.explain(&searcher, DocAddress(0, 0u32)).unwrap();
+        assert_eq!(
+            explanation.to_pretty_json(),
+            "{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of ...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\"\n }\n ]\n}"
+        )
+    }
+}
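Editor's note: a hedged usage sketch for the `BoostQuery` introduced above (the `title` field handle and index setup are assumed, not shown in this diff):

    use tantivy::query::{BoostQuery, Query, TermQuery};
    use tantivy::schema::IndexRecordOption;
    use tantivy::Term;

    // Same match set as the inner TermQuery; every score is multiplied by 3.0.
    let term_query: Box<dyn Query> = Box::new(TermQuery::new(
        Term::from_field_text(title, "diesel"),
        IndexRecordOption::WithFreqs,
    ));
    let boosted = BoostQuery::new(term_query, 3.0f32);

Since `BoostWeight::scorer` multiplies its own boost into the one it receives, nested `BoostQuery`s compose multiplicatively.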
@@ -4,7 +4,6 @@ use crate::query::Weight;
 use crate::query::{Explanation, Query};
 use crate::DocId;
 use crate::DocSet;
-use crate::Result;
 use crate::Score;
 use crate::Searcher;
 use crate::SegmentReader;

@@ -16,11 +15,15 @@ use crate::SegmentReader;
 pub struct EmptyQuery;
 
 impl Query for EmptyQuery {
-    fn weight(&self, _searcher: &Searcher, _scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(
+        &self,
+        _searcher: &Searcher,
+        _scoring_enabled: bool,
+    ) -> crate::Result<Box<dyn Weight>> {
         Ok(Box::new(EmptyWeight))
     }
 
-    fn count(&self, _searcher: &Searcher) -> Result<usize> {
+    fn count(&self, _searcher: &Searcher) -> crate::Result<usize> {
         Ok(0)
     }
 }

@@ -30,11 +33,11 @@ impl Query for EmptyQuery {
 /// It is useful for tests and handling edge cases.
 pub struct EmptyWeight;
 impl Weight for EmptyWeight {
-    fn scorer(&self, _reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
+    fn scorer(&self, _reader: &SegmentReader, _boost: f32) -> crate::Result<Box<dyn Scorer>> {
         Ok(Box::new(EmptyScorer))
     }
 
-    fn explain(&self, _reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
+    fn explain(&self, _reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation> {
         Err(does_not_match(doc))
     }
 }

@@ -1,8 +1,7 @@
-use crate::error::TantivyError::InvalidArgument;
 use crate::query::{AutomatonWeight, Query, Weight};
 use crate::schema::Term;
-use crate::Result;
 use crate::Searcher;
+use crate::TantivyError::InvalidArgument;
 use levenshtein_automata::{LevenshteinAutomatonBuilder, DFA};
 use once_cell::sync::Lazy;
 use std::collections::HashMap;

@@ -10,7 +9,7 @@ use std::ops::Range;
 
 /// A range of Levenshtein distances that we will build DFAs for our terms
 /// The computation is exponential, so best keep it to low single digits
-const VALID_LEVENSHTEIN_DISTANCE_RANGE: Range<u8> = (0..3);
+const VALID_LEVENSHTEIN_DISTANCE_RANGE: Range<u8> = 0..3;
 
 static LEV_BUILDER: Lazy<HashMap<(u8, bool), LevenshteinAutomatonBuilder>> = Lazy::new(|| {
     let mut lev_builder_cache = HashMap::new();

@@ -31,9 +30,9 @@ static LEV_BUILDER: Lazy<HashMap<(u8, bool), LevenshteinAutomatonBuilder>> = Laz
 /// use tantivy::collector::{Count, TopDocs};
 /// use tantivy::query::FuzzyTermQuery;
 /// use tantivy::schema::{Schema, TEXT};
-/// use tantivy::{doc, Index, Result, Term};
+/// use tantivy::{doc, Index, Term};
 ///
-/// fn example() -> Result<()> {
+/// fn example() -> tantivy::Result<()> {
 /// let mut schema_builder = Schema::builder();
 /// let title = schema_builder.add_text_field("title", TEXT);
 /// let schema = schema_builder.build();

@@ -102,7 +101,7 @@ impl FuzzyTermQuery {
         }
     }
 
-    fn specialized_weight(&self) -> Result<AutomatonWeight<DFA>> {
+    fn specialized_weight(&self) -> crate::Result<AutomatonWeight<DFA>> {
         // LEV_BUILDER is a HashMap, whose `get` method returns an Option
         match LEV_BUILDER.get(&(self.distance, false)) {
             // Unwrap the option and build the Ok(AutomatonWeight)

@@ -119,7 +118,11 @@ impl FuzzyTermQuery {
 }
 
 impl Query for FuzzyTermQuery {
-    fn weight(&self, _searcher: &Searcher, _scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(
+        &self,
+        _searcher: &Searcher,
+        _scoring_enabled: bool,
+    ) -> crate::Result<Box<dyn Weight>> {
         Ok(Box::new(self.specialized_weight()?))
     }
 }

@@ -7,6 +7,7 @@ mod automaton_weight;
 mod bitset;
 mod bm25;
 mod boolean_query;
+mod boost_query;
 mod empty_query;
 mod exclude;
 mod explanation;

@@ -37,6 +38,7 @@ pub use self::all_query::{AllQuery, AllScorer, AllWeight};
 pub use self::automaton_weight::AutomatonWeight;
 pub use self::bitset::BitSetDocSet;
 pub use self::boolean_query::BooleanQuery;
+pub use self::boost_query::BoostQuery;
 pub use self::empty_query::{EmptyQuery, EmptyScorer, EmptyWeight};
 pub use self::exclude::Exclude;
 pub use self::explanation::Explanation;

@@ -7,7 +7,7 @@ pub use self::phrase_scorer::PhraseScorer;
 pub use self::phrase_weight::PhraseWeight;
 
 #[cfg(test)]
-mod tests {
+pub mod tests {
 
     use super::*;
     use crate::collector::tests::{TEST_COLLECTOR_WITHOUT_SCORE, TEST_COLLECTOR_WITH_SCORE};

@@ -15,10 +15,10 @@ mod tests {
     use crate::error::TantivyError;
     use crate::schema::{Schema, Term, TEXT};
     use crate::tests::assert_nearly_equals;
+    use crate::DocAddress;
     use crate::DocId;
-    use crate::{DocAddress, DocSet};
 
-    fn create_index(texts: &[&'static str]) -> Index {
+    pub fn create_index(texts: &[&'static str]) -> Index {
         let mut schema_builder = Schema::builder();
         let text_field = schema_builder.add_text_field("text", TEXT);
         let schema = schema_builder.build();

@@ -102,30 +102,6 @@ mod tests {
         assert!(test_query(vec!["g", "a"]).is_empty());
     }
 
-    #[test]
-    pub fn test_phrase_count() {
-        let index = create_index(&["a c", "a a b d a b c", " a b"]);
-        let schema = index.schema();
-        let text_field = schema.get_field("text").unwrap();
-        let searcher = index.reader().unwrap().searcher();
-        let phrase_query = PhraseQuery::new(vec![
-            Term::from_field_text(text_field, "a"),
-            Term::from_field_text(text_field, "b"),
-        ]);
-        let phrase_weight = phrase_query.phrase_weight(&searcher, true).unwrap();
-        let mut phrase_scorer = phrase_weight
-            .phrase_scorer(searcher.segment_reader(0u32))
-            .unwrap()
-            .unwrap();
-        assert!(phrase_scorer.advance());
-        assert_eq!(phrase_scorer.doc(), 1);
-        assert_eq!(phrase_scorer.phrase_count(), 2);
-        assert!(phrase_scorer.advance());
-        assert_eq!(phrase_scorer.doc(), 2);
-        assert_eq!(phrase_scorer.phrase_count(), 1);
-        assert!(!phrase_scorer.advance());
-    }
-
     #[test]
     pub fn test_phrase_query_no_positions() {
         let mut schema_builder = Schema::builder();

@@ -1,12 +1,10 @@
 use super::PhraseWeight;
 use crate::core::searcher::Searcher;
-use crate::error::TantivyError;
 use crate::query::bm25::BM25Weight;
 use crate::query::Query;
 use crate::query::Weight;
 use crate::schema::IndexRecordOption;
 use crate::schema::{Field, Term};
-use crate::Result;
 use std::collections::BTreeSet;
 
 /// `PhraseQuery` matches a specific sequence of words.

@@ -81,7 +79,7 @@ impl PhraseQuery {
         &self,
         searcher: &Searcher,
         scoring_enabled: bool,
-    ) -> Result<PhraseWeight> {
+    ) -> crate::Result<PhraseWeight> {
         let schema = searcher.schema();
         let field_entry = schema.get_field_entry(self.field);
         let has_positions = field_entry

@@ -91,7 +89,7 @@ impl PhraseQuery {
             .unwrap_or(false);
         if !has_positions {
             let field_name = field_entry.name();
-            return Err(TantivyError::SchemaError(format!(
+            return Err(crate::TantivyError::SchemaError(format!(
                 "Applied phrase query on field {:?}, which does not have positions indexed",
                 field_name
             )));

@@ -110,7 +108,7 @@ impl Query for PhraseQuery {
     /// Create the weight associated to a query.
     ///
     /// See [`Weight`](./trait.Weight.html).
-    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
         let phrase_weight = self.phrase_weight(searcher, scoring_enabled)?;
         Ok(Box::new(phrase_weight))
     }

@@ -37,11 +37,12 @@ impl PhraseWeight {
         reader.get_fieldnorms_reader(field)
     }
 
-    pub fn phrase_scorer(
+    fn phrase_scorer(
         &self,
         reader: &SegmentReader,
+        boost: f32,
     ) -> Result<Option<PhraseScorer<SegmentPostings>>> {
-        let similarity_weight = self.similarity_weight.clone();
+        let similarity_weight = self.similarity_weight.boost_by(boost);
         let fieldnorm_reader = self.fieldnorm_reader(reader);
         if reader.has_deletes() {
             let mut term_postings_list = Vec::new();

@@ -84,8 +85,8 @@ impl PhraseWeight {
 }
 
 impl Weight for PhraseWeight {
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
-        if let Some(scorer) = self.phrase_scorer(reader)? {
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> Result<Box<dyn Scorer>> {
+        if let Some(scorer) = self.phrase_scorer(reader, boost)? {
             Ok(Box::new(scorer))
         } else {
             Ok(Box::new(EmptyScorer))

@@ -93,7 +94,7 @@ impl Weight for PhraseWeight {
     }
 
     fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
-        let scorer_opt = self.phrase_scorer(reader)?;
+        let scorer_opt = self.phrase_scorer(reader, 1.0f32)?;
         if scorer_opt.is_none() {
             return Err(does_not_match(doc));
         }

@@ -109,3 +110,34 @@ impl Weight for PhraseWeight {
         Ok(explanation)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::super::tests::create_index;
+    use crate::query::PhraseQuery;
+    use crate::{DocSet, Term};
+
+    #[test]
+    pub fn test_phrase_count() {
+        let index = create_index(&["a c", "a a b d a b c", " a b"]);
+        let schema = index.schema();
+        let text_field = schema.get_field("text").unwrap();
+        let searcher = index.reader().unwrap().searcher();
+        let phrase_query = PhraseQuery::new(vec![
+            Term::from_field_text(text_field, "a"),
+            Term::from_field_text(text_field, "b"),
+        ]);
+        let phrase_weight = phrase_query.phrase_weight(&searcher, true).unwrap();
+        let mut phrase_scorer = phrase_weight
+            .phrase_scorer(searcher.segment_reader(0u32), 1.0f32)
+            .unwrap()
+            .unwrap();
+        assert!(phrase_scorer.advance());
+        assert_eq!(phrase_scorer.doc(), 1);
+        assert_eq!(phrase_scorer.phrase_count(), 2);
+        assert!(phrase_scorer.advance());
+        assert_eq!(phrase_scorer.doc(), 2);
+        assert_eq!(phrase_scorer.phrase_count(), 1);
+        assert!(!phrase_scorer.advance());
+    }
+}

@@ -2,7 +2,6 @@ use super::Weight;
 use crate::core::searcher::Searcher;
 use crate::query::Explanation;
 use crate::DocAddress;
-use crate::Result;
 use crate::Term;
 use downcast_rs::impl_downcast;
 use std::collections::BTreeSet;

@@ -48,17 +47,17 @@ pub trait Query: QueryClone + downcast_rs::Downcast + fmt::Debug {
     /// can increase performances.
     ///
     /// See [`Weight`](./trait.Weight.html).
-    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<dyn Weight>>;
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>>;
 
     /// Returns an `Explanation` for the score of the document.
-    fn explain(&self, searcher: &Searcher, doc_address: DocAddress) -> Result<Explanation> {
+    fn explain(&self, searcher: &Searcher, doc_address: DocAddress) -> crate::Result<Explanation> {
         let reader = searcher.segment_reader(doc_address.segment_ord());
         let weight = self.weight(searcher, true)?;
         weight.explain(reader, doc_address.doc())
     }
 
     /// Returns the number of documents matching the query.
-    fn count(&self, searcher: &Searcher) -> Result<usize> {
+    fn count(&self, searcher: &Searcher) -> crate::Result<usize> {
         let weight = self.weight(searcher, false)?;
         let mut result = 0;
         for reader in searcher.segment_readers() {

@@ -86,11 +85,11 @@ where
 }
 
 impl Query for Box<dyn Query> {
-    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
         self.as_ref().weight(searcher, scoring_enabled)
     }
 
-    fn count(&self, searcher: &Searcher) -> Result<usize> {
+    fn count(&self, searcher: &Searcher) -> crate::Result<usize> {
         self.as_ref().count(searcher)
     }
 

@@ -21,6 +21,17 @@ pub enum LogicalLiteral {
 pub enum LogicalAST {
     Clause(Vec<(Occur, LogicalAST)>),
     Leaf(Box<LogicalLiteral>),
+    Boost(Box<LogicalAST>, f32),
+}
+
+impl LogicalAST {
+    pub fn boost(self, boost: f32) -> LogicalAST {
+        if (boost - 1.0f32).abs() < std::f32::EPSILON {
+            self
+        } else {
+            LogicalAST::Boost(Box::new(self), boost)
+        }
+    }
 }
 
 fn occur_letter(occur: Occur) -> &'static str {

@@ -47,6 +58,7 @@ impl fmt::Debug for LogicalAST {
                 }
                 Ok(())
             }
+            LogicalAST::Boost(ref ast, boost) => write!(formatter, "{:?}^{}", ast, boost),
             LogicalAST::Leaf(ref literal) => write!(formatter, "{:?}", literal),
         }
     }

@@ -1,6 +1,5 @@
 use super::logical_ast::*;
 use crate::core::Index;
-use crate::query::AllQuery;
 use crate::query::BooleanQuery;
 use crate::query::EmptyQuery;
 use crate::query::Occur;

@@ -8,11 +7,13 @@ use crate::query::PhraseQuery;
 use crate::query::Query;
 use crate::query::RangeQuery;
 use crate::query::TermQuery;
+use crate::query::{AllQuery, BoostQuery};
 use crate::schema::{Facet, IndexRecordOption};
 use crate::schema::{Field, Schema};
 use crate::schema::{FieldType, Term};
 use crate::tokenizer::TokenizerManager;
 use std::borrow::Cow;
+use std::collections::HashMap;
 use std::num::{ParseFloatError, ParseIntError};
 use std::ops::Bound;
 use std::str::FromStr;

@@ -144,7 +145,6 @@ fn trim_ast(logical_ast: LogicalAST) -> Option<LogicalAST> {
 ///
 /// * must terms: By prepending a term by a `+`, a term can be made required for the search.
 ///
-///
 /// * phrase terms: Quoted terms become phrase searches on fields that have positions indexed.
 /// e.g., `title:"Barack Obama"` will only find documents that have "barack" immediately followed
 /// by "obama".

@@ -158,12 +158,20 @@ fn trim_ast(logical_ast: LogicalAST) -> Option<LogicalAST> {
 ///
 /// * all docs query: A plain `*` will match all documents in the index.
 ///
+/// Parts of the queries can be boosted by appending `^boostfactor`.
+/// For instance, `"SRE"^2.0 OR devops^0.4` will boost documents containing `SRE` instead of
+/// devops. Negative boosts are not allowed.
+///
+/// It is also possible to define a boost for a some specific field, at the query parser level.
+/// (See [`set_boost(...)`](#method.set_field_boost) ). Typically you may want to boost a title
+/// field.
 #[derive(Clone)]
 pub struct QueryParser {
     schema: Schema,
     default_fields: Vec<Field>,
     conjunction_by_default: bool,
     tokenizer_manager: TokenizerManager,
+    boost: HashMap<Field, f32>,
 }
 
 impl QueryParser {
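Editor's note: the doc comment above introduces the `^boostfactor` syntax. A hedged sketch of parsing it (the `index`, `title`, and `body` bindings are assumptions, not shown here):

    let query_parser = QueryParser::for_index(&index, vec![title, body]);
    // The first clause is wrapped in a BoostQuery(2.0); the second keeps a weight of 1.0.
    let query = query_parser.parse_query("title:diesel^2.0 body:truck")?;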
@@ -181,6 +189,7 @@ impl QueryParser {
             default_fields,
             tokenizer_manager,
             conjunction_by_default: false,
+            boost: Default::default(),
         }
     }
 

@@ -201,6 +210,17 @@ impl QueryParser {
         self.conjunction_by_default = true;
     }
 
+    /// Sets a boost for a specific field.
+    ///
+    /// The parse query will automatically boost this field.
+    ///
+    /// If the query defines a query boost through the query language (e.g: `country:France^3.0`),
+    /// the two boosts (the one defined in the query, and the one defined in the `QueryParser`)
+    /// are multiplied together.
+    pub fn set_field_boost(&mut self, field: Field, boost: f32) {
+        self.boost.insert(field, boost);
+    }
+
     /// Parse a query
     ///
     /// Note that `parse_query` returns an error if the input
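Editor's note: a hedged sketch combining `set_field_boost` with an in-query boost, following the doc comment above (the bindings are illustrative):

    let mut query_parser = QueryParser::for_index(&index, vec![title, body]);
    query_parser.set_field_boost(title, 2.0f32);
    // Parser-level boost (2.0) and in-query boost (3.0) multiply: effective boost 6.0.
    let query = query_parser.parse_query("title:diesel^3.0")?;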
@@ -407,6 +427,10 @@ impl QueryParser {
|
|||||||
self.compute_logical_ast_with_occur(*subquery)?;
|
self.compute_logical_ast_with_occur(*subquery)?;
|
||||||
Ok((Occur::compose(left_occur, right_occur), logical_sub_queries))
|
Ok((Occur::compose(left_occur, right_occur), logical_sub_queries))
|
||||||
}
|
}
|
||||||
|
UserInputAST::Boost(ast, boost) => {
|
||||||
|
let (occur, ast_without_occur) = self.compute_logical_ast_with_occur(*ast)?;
|
||||||
|
Ok((occur, ast_without_occur.boost(boost)))
|
||||||
|
}
|
||||||
UserInputAST::Leaf(leaf) => {
|
UserInputAST::Leaf(leaf) => {
|
||||||
let result_ast = self.compute_logical_ast_from_leaf(*leaf)?;
|
let result_ast = self.compute_logical_ast_from_leaf(*leaf)?;
|
||||||
Ok((Occur::Should, result_ast))
|
Ok((Occur::Should, result_ast))
|
||||||
@@ -414,6 +438,10 @@ impl QueryParser {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn field_boost(&self, field: Field) -> f32 {
|
||||||
|
self.boost.get(&field).cloned().unwrap_or(1.0f32)
|
||||||
|
}
|
||||||
|
|
||||||
fn compute_logical_ast_from_leaf(
|
fn compute_logical_ast_from_leaf(
|
||||||
&self,
|
&self,
|
||||||
leaf: UserInputLeaf,
|
leaf: UserInputLeaf,
|
||||||
@@ -439,7 +467,9 @@ impl QueryParser {
|
|||||||
let mut asts: Vec<LogicalAST> = Vec::new();
|
let mut asts: Vec<LogicalAST> = Vec::new();
|
||||||
for (field, phrase) in term_phrases {
|
for (field, phrase) in term_phrases {
|
||||||
if let Some(ast) = self.compute_logical_ast_for_leaf(field, &phrase)? {
|
if let Some(ast) = self.compute_logical_ast_for_leaf(field, &phrase)? {
|
||||||
asts.push(LogicalAST::Leaf(Box::new(ast)));
|
// Apply some field specific boost defined at the query parser level.
|
||||||
|
let boost = self.field_boost(field);
|
||||||
|
asts.push(LogicalAST::Leaf(Box::new(ast)).boost(boost));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let result_ast: LogicalAST = if asts.len() == 1 {
|
let result_ast: LogicalAST = if asts.len() == 1 {
|
||||||
@@ -459,14 +489,16 @@ impl QueryParser {
|
|||||||
let mut clauses = fields
|
let mut clauses = fields
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&field| {
|
.map(|&field| {
|
||||||
|
let boost = self.field_boost(field);
|
||||||
let field_entry = self.schema.get_field_entry(field);
|
let field_entry = self.schema.get_field_entry(field);
|
||||||
let value_type = field_entry.field_type().value_type();
|
let value_type = field_entry.field_type().value_type();
|
||||||
Ok(LogicalAST::Leaf(Box::new(LogicalLiteral::Range {
|
let logical_ast = LogicalAST::Leaf(Box::new(LogicalLiteral::Range {
|
||||||
field,
|
field,
|
||||||
value_type,
|
value_type,
|
||||||
lower: self.resolve_bound(field, &lower)?,
|
lower: self.resolve_bound(field, &lower)?,
|
||||||
upper: self.resolve_bound(field, &upper)?,
|
upper: self.resolve_bound(field, &upper)?,
|
||||||
})))
|
}));
|
||||||
|
Ok(logical_ast.boost(boost))
|
||||||
})
|
})
|
||||||
.collect::<Result<Vec<_>, QueryParserError>>()?;
|
.collect::<Result<Vec<_>, QueryParserError>>()?;
|
||||||
let result_ast = if clauses.len() == 1 {
|
let result_ast = if clauses.len() == 1 {
|
||||||
@@ -519,6 +551,11 @@ fn convert_to_query(logical_ast: LogicalAST) -> Box<dyn Query> {
|
|||||||
Some(LogicalAST::Leaf(trimmed_logical_literal)) => {
|
Some(LogicalAST::Leaf(trimmed_logical_literal)) => {
|
||||||
convert_literal_to_query(*trimmed_logical_literal)
|
convert_literal_to_query(*trimmed_logical_literal)
|
||||||
}
|
}
|
||||||
|
Some(LogicalAST::Boost(ast, boost)) => {
|
||||||
|
let query = convert_to_query(*ast);
|
||||||
|
let boosted_query = BoostQuery::new(query, boost);
|
||||||
|
Box::new(boosted_query)
|
||||||
|
}
|
||||||
None => Box::new(EmptyQuery),
|
None => Box::new(EmptyQuery),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -533,12 +570,12 @@ mod test {
|
|||||||
use crate::schema::{IndexRecordOption, TextFieldIndexing, TextOptions};
|
use crate::schema::{IndexRecordOption, TextFieldIndexing, TextOptions};
|
||||||
use crate::schema::{Schema, Term, INDEXED, STORED, STRING, TEXT};
|
use crate::schema::{Schema, Term, INDEXED, STORED, STRING, TEXT};
|
||||||
use crate::tokenizer::{
|
use crate::tokenizer::{
|
||||||
LowerCaser, SimpleTokenizer, StopWordFilter, Tokenizer, TokenizerManager,
|
LowerCaser, SimpleTokenizer, StopWordFilter, TextAnalyzer, TokenizerManager,
|
||||||
};
|
};
|
||||||
use crate::Index;
|
use crate::Index;
|
||||||
use matches::assert_matches;
|
use matches::assert_matches;
|
||||||
|
|
||||||
fn make_query_parser() -> QueryParser {
|
fn make_schema() -> Schema {
|
||||||
let mut schema_builder = Schema::builder();
|
let mut schema_builder = Schema::builder();
|
||||||
let text_field_indexing = TextFieldIndexing::default()
|
let text_field_indexing = TextFieldIndexing::default()
|
||||||
.set_tokenizer("en_with_stop_words")
|
.set_tokenizer("en_with_stop_words")
|
||||||
@@ -546,8 +583,8 @@ mod test {
|
|||||||
let text_options = TextOptions::default()
|
let text_options = TextOptions::default()
|
||||||
.set_indexing_options(text_field_indexing)
|
.set_indexing_options(text_field_indexing)
|
||||||
.set_stored();
|
.set_stored();
|
||||||
let title = schema_builder.add_text_field("title", TEXT);
|
schema_builder.add_text_field("title", TEXT);
|
||||||
let text = schema_builder.add_text_field("text", TEXT);
|
schema_builder.add_text_field("text", TEXT);
|
||||||
schema_builder.add_i64_field("signed", INDEXED);
|
schema_builder.add_i64_field("signed", INDEXED);
|
||||||
schema_builder.add_u64_field("unsigned", INDEXED);
|
schema_builder.add_u64_field("unsigned", INDEXED);
|
||||||
schema_builder.add_text_field("notindexed_text", STORED);
|
schema_builder.add_text_field("notindexed_text", STORED);
|
||||||
@@ -558,12 +595,19 @@ mod test {
         schema_builder.add_date_field("date", INDEXED);
         schema_builder.add_f64_field("float", INDEXED);
         schema_builder.add_facet_field("facet");
-        let schema = schema_builder.build();
-        let default_fields = vec![title, text];
+        schema_builder.build()
+    }
+
+    fn make_query_parser() -> QueryParser {
+        let schema = make_schema();
+        let default_fields: Vec<Field> = vec!["title", "text"]
+            .into_iter()
+            .flat_map(|field_name| schema.get_field(field_name))
+            .collect();
         let tokenizer_manager = TokenizerManager::default();
         tokenizer_manager.register(
             "en_with_stop_words",
-            SimpleTokenizer
+            TextAnalyzer::from(SimpleTokenizer)
                 .filter(LowerCaser)
                 .filter(StopWordFilter::remove(vec!["the".to_string()])),
         );
@@ -601,6 +645,45 @@ mod test {
         );
     }
 
+    #[test]
+    pub fn test_parse_query_with_boost() {
+        let mut query_parser = make_query_parser();
+        let schema = make_schema();
+        let text_field = schema.get_field("text").unwrap();
+        query_parser.set_field_boost(text_field, 2.0f32);
+        let query = query_parser.parse_query("text:hello").unwrap();
+        assert_eq!(
+            format!("{:?}", query),
+            "Boost(query=TermQuery(Term(field=1,bytes=[104, 101, 108, 108, 111])), boost=2)"
+        );
+    }
+
+    #[test]
+    pub fn test_parse_query_range_with_boost() {
+        let mut query_parser = make_query_parser();
+        let schema = make_schema();
+        let title_field = schema.get_field("title").unwrap();
+        query_parser.set_field_boost(title_field, 2.0f32);
+        let query = query_parser.parse_query("title:[A TO B]").unwrap();
+        assert_eq!(
+            format!("{:?}", query),
+            "Boost(query=RangeQuery { field: Field(0), value_type: Str, left_bound: Included([97]), right_bound: Included([98]) }, boost=2)"
+        );
+    }
+
+    #[test]
+    pub fn test_parse_query_with_default_boost_and_custom_boost() {
+        let mut query_parser = make_query_parser();
+        let schema = make_schema();
+        let text_field = schema.get_field("text").unwrap();
+        query_parser.set_field_boost(text_field, 2.0f32);
+        let query = query_parser.parse_query("text:hello^2").unwrap();
+        assert_eq!(
+            format!("{:?}", query),
+            "Boost(query=Boost(query=TermQuery(Term(field=1,bytes=[104, 101, 108, 108, 111])), boost=2), boost=2)"
+        );
+    }
+
     #[test]
     pub fn test_parse_nonindexed_field_yields_error() {
         let query_parser = make_query_parser();
@@ -289,7 +289,7 @@ impl RangeWeight {
 }
 
 impl Weight for RangeWeight {
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> Result<Box<dyn Scorer>> {
         let max_doc = reader.max_doc();
         let mut doc_bitset = BitSet::with_max_value(max_doc);
 
@@ -307,11 +307,11 @@ impl Weight for RangeWeight {
             }
         }
         let doc_bitset = BitSetDocSet::from(doc_bitset);
-        Ok(Box::new(ConstScorer::new(doc_bitset)))
+        Ok(Box::new(ConstScorer::new(doc_bitset, boost)))
     }
 
     fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
-        let mut scorer = self.scorer(reader)?;
+        let mut scorer = self.scorer(reader, 1.0f32)?;
         if scorer.skip_next(doc) != SkipResult::Reached {
             return Err(does_not_match(doc));
         }
@@ -1,7 +1,6 @@
 use crate::error::TantivyError;
 use crate::query::{AutomatonWeight, Query, Weight};
 use crate::schema::Field;
-use crate::Result;
 use crate::Searcher;
 use std::clone::Clone;
 use std::sync::Arc;
@@ -58,7 +57,7 @@ pub struct RegexQuery {
 
 impl RegexQuery {
     /// Creates a new RegexQuery from a given pattern
-    pub fn from_pattern(regex_pattern: &str, field: Field) -> Result<Self> {
+    pub fn from_pattern(regex_pattern: &str, field: Field) -> crate::Result<Self> {
         let regex = Regex::new(&regex_pattern)
             .map_err(|_| TantivyError::InvalidArgument(regex_pattern.to_string()))?;
         Ok(RegexQuery::from_regex(regex, field))
@@ -78,7 +77,11 @@ impl RegexQuery {
 }
 
 impl Query for RegexQuery {
-    fn weight(&self, _searcher: &Searcher, _scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(
+        &self,
+        _searcher: &Searcher,
+        _scoring_enabled: bool,
+    ) -> crate::Result<Box<dyn Weight>> {
         Ok(Box::new(self.specialized_weight()))
     }
 }
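For reference, `from_pattern` stays the usual entry point after this signature cleanup; a short sketch of building and running the query (the collector choice is an illustrative assumption):

```rust
use tantivy::collector::TopDocs;
use tantivy::query::RegexQuery;

// `searcher` and `text_field` are assumed to already exist.
let query = RegexQuery::from_pattern("ab.*", text_field)?;
let top_docs = searcher.search(&query, &TopDocs::with_limit(10))?;
```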
@@ -115,8 +115,8 @@ mod tests {
         let req = vec![1, 3, 7];
         let mut reqoptscorer: RequiredOptionalScorer<_, _, SumCombiner> =
             RequiredOptionalScorer::new(
-                ConstScorer::new(VecDocSet::from(req.clone())),
-                ConstScorer::new(VecDocSet::from(vec![])),
+                ConstScorer::from(VecDocSet::from(req.clone())),
+                ConstScorer::from(VecDocSet::from(vec![])),
             );
         let mut docs = vec![];
         while reqoptscorer.advance() {
@@ -129,8 +129,8 @@ mod tests {
     fn test_reqopt_scorer() {
         let mut reqoptscorer: RequiredOptionalScorer<_, _, SumCombiner> =
             RequiredOptionalScorer::new(
-                ConstScorer::new(VecDocSet::from(vec![1, 3, 7, 8, 9, 10, 13, 15])),
-                ConstScorer::new(VecDocSet::from(vec![1, 2, 7, 11, 12, 15])),
+                ConstScorer::new(VecDocSet::from(vec![1, 3, 7, 8, 9, 10, 13, 15]), 1.0f32),
+                ConstScorer::new(VecDocSet::from(vec![1, 2, 7, 11, 12, 15]), 1.0f32),
             );
         {
             assert!(reqoptscorer.advance());
@@ -183,8 +183,8 @@ mod tests {
         test_skip_against_unoptimized(
             || {
                 Box::new(RequiredOptionalScorer::<_, _, DoNothingCombiner>::new(
-                    ConstScorer::new(VecDocSet::from(req_docs.clone())),
-                    ConstScorer::new(VecDocSet::from(opt_docs.clone())),
+                    ConstScorer::from(VecDocSet::from(req_docs.clone())),
+                    ConstScorer::from(VecDocSet::from(opt_docs.clone())),
                 ))
             },
             skip_docs,
@@ -49,16 +49,14 @@ pub struct ConstScorer<TDocSet: DocSet> {
 
 impl<TDocSet: DocSet> ConstScorer<TDocSet> {
     /// Creates a new `ConstScorer`.
-    pub fn new(docset: TDocSet) -> ConstScorer<TDocSet> {
-        ConstScorer {
-            docset,
-            score: 1f32,
-        }
+    pub fn new(docset: TDocSet, score: f32) -> ConstScorer<TDocSet> {
+        ConstScorer { docset, score }
     }
+}
 
-    /// Sets the constant score to a different value.
-    pub fn set_score(&mut self, score: Score) {
-        self.score = score;
+impl<TDocSet: DocSet> From<TDocSet> for ConstScorer<TDocSet> {
+    fn from(docset: TDocSet) -> Self {
+        ConstScorer::new(docset, 1.0f32)
     }
 }
 
@@ -90,6 +88,6 @@ impl<TDocSet: DocSet> DocSet for ConstScorer<TDocSet> {
 
 impl<TDocSet: DocSet + 'static> Scorer for ConstScorer<TDocSet> {
     fn score(&mut self) -> Score {
-        1f32
+        self.score
     }
 }
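The change above fixes the constant score at construction time instead of mutating it afterwards; `From<TDocSet>` keeps the old single-argument ergonomics for call sites that only need the neutral score. A small sketch with the crate's own `VecDocSet`:

```rust
// Equivalent to the old `ConstScorer::new(docset)`: every doc scores 1.0.
let neutral = ConstScorer::from(VecDocSet::from(vec![1u32, 3u32, 7u32]));

// The boost is now simply carried through as the constant score.
let boosted = ConstScorer::new(VecDocSet::from(vec![1u32, 3u32, 7u32]), 2.0f32);
```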
@@ -39,7 +39,7 @@ mod tests {
         );
         let term_weight = term_query.weight(&searcher, true).unwrap();
         let segment_reader = searcher.segment_reader(0);
-        let mut term_scorer = term_weight.scorer(segment_reader).unwrap();
+        let mut term_scorer = term_weight.scorer(segment_reader, 1.0f32).unwrap();
         assert!(term_scorer.advance());
         assert_eq!(term_scorer.doc(), 0);
         assert_eq!(term_scorer.score(), 0.28768212);
@@ -112,6 +112,7 @@ mod tests {
         let term_a = Term::from_field_text(text_field, "a");
         let term_query = TermQuery::new(term_a, IndexRecordOption::Basic);
         let reader = index.reader().unwrap();
+        assert_eq!(reader.searcher().segment_readers().len(), 1);
         assert_eq!(term_query.count(&*reader.searcher()).unwrap(), 1);
     }
 
@@ -3,7 +3,6 @@ use crate::query::bm25::BM25Weight;
 use crate::query::Query;
 use crate::query::Weight;
 use crate::schema::IndexRecordOption;
-use crate::Result;
 use crate::Searcher;
 use crate::Term;
 use std::collections::BTreeSet;
@@ -101,7 +100,7 @@ impl TermQuery {
 }
 
 impl Query for TermQuery {
-    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<dyn Weight>> {
+    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
         Ok(Box::new(self.specialized_weight(searcher, scoring_enabled)))
     }
     fn query_terms(&self, term_set: &mut BTreeSet<Term>) {
@@ -18,13 +18,13 @@ pub struct TermWeight {
 }
 
 impl Weight for TermWeight {
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>> {
-        let term_scorer = self.scorer_specialized(reader)?;
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> Result<Box<dyn Scorer>> {
+        let term_scorer = self.scorer_specialized(reader, boost)?;
         Ok(Box::new(term_scorer))
     }
 
     fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation> {
-        let mut scorer = self.scorer_specialized(reader)?;
+        let mut scorer = self.scorer_specialized(reader, 1.0f32)?;
         if scorer.skip_next(doc) != SkipResult::Reached {
             return Err(does_not_match(doc));
         }
@@ -33,7 +33,7 @@ impl Weight for TermWeight {
 
     fn count(&self, reader: &SegmentReader) -> Result<u32> {
         if let Some(delete_bitset) = reader.delete_bitset() {
-            Ok(self.scorer(reader)?.count(delete_bitset))
+            Ok(self.scorer(reader, 1.0f32)?.count(delete_bitset))
         } else {
             let field = self.term.field();
             Ok(reader
@@ -58,11 +58,11 @@ impl TermWeight {
         }
     }
 
-    fn scorer_specialized(&self, reader: &SegmentReader) -> Result<TermScorer> {
+    fn scorer_specialized(&self, reader: &SegmentReader, boost: f32) -> Result<TermScorer> {
         let field = self.term.field();
         let inverted_index = reader.inverted_index(field);
         let fieldnorm_reader = reader.get_fieldnorms_reader(field);
-        let similarity_weight = self.similarity_weight.clone();
+        let similarity_weight = self.similarity_weight.boost_by(boost);
         let postings_opt: Option<SegmentPostings> =
             inverted_index.read_postings(&self.term, self.index_record_option);
         if let Some(segment_postings) = postings_opt {
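`boost_by` folds the boost into the BM25 weight once per segment instead of multiplying every document score individually. On the public API side the same effect is obtained by wrapping a query in `BoostQuery`; a sketch assuming an existing `text_field`:

```rust
use tantivy::query::{BoostQuery, TermQuery};
use tantivy::schema::IndexRecordOption;
use tantivy::Term;

let term = Term::from_field_text(text_field, "hello");
let term_query = TermQuery::new(term, IndexRecordOption::Basic);
// Every score produced by the inner query is multiplied by 2.0.
let boosted = BoostQuery::new(Box::new(term_query), 2.0);
```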
@@ -145,26 +145,6 @@ where
         }
     }
 
-    fn count_including_deleted(&mut self) -> u32 {
-        let mut count = self.bitsets[self.cursor..HORIZON_NUM_TINYBITSETS]
-            .iter()
-            .map(|bitset| bitset.len())
-            .sum::<u32>();
-        for bitset in self.bitsets.iter_mut() {
-            bitset.clear();
-        }
-        while self.refill() {
-            count += self.bitsets.iter().map(|bitset| bitset.len()).sum::<u32>();
-            for bitset in self.bitsets.iter_mut() {
-                bitset.clear();
-            }
-        }
-        self.cursor = HORIZON_NUM_TINYBITSETS;
-        count
-    }
-
-    // TODO implement `count` efficiently.
-
     fn skip_next(&mut self, target: DocId) -> SkipResult {
         if !self.advance() {
             return SkipResult::End;
@@ -243,6 +223,8 @@ where
         }
     }
 
+    // TODO implement `count` efficiently.
+
     fn doc(&self) -> DocId {
         self.doc
     }
@@ -250,6 +232,24 @@ where
     fn size_hint(&self) -> u32 {
         0u32
     }
+
+    fn count_including_deleted(&mut self) -> u32 {
+        let mut count = self.bitsets[self.cursor..HORIZON_NUM_TINYBITSETS]
+            .iter()
+            .map(|bitset| bitset.len())
+            .sum::<u32>();
+        for bitset in self.bitsets.iter_mut() {
+            bitset.clear();
+        }
+        while self.refill() {
+            count += self.bitsets.iter().map(|bitset| bitset.len()).sum::<u32>();
+            for bitset in self.bitsets.iter_mut() {
+                bitset.clear();
+            }
+        }
+        self.cursor = HORIZON_NUM_TINYBITSETS;
+        count
+    }
 }
 
 impl<TScorer, TScoreCombiner> Scorer for Union<TScorer, TScoreCombiner>
@@ -290,7 +290,7 @@ mod tests {
             vals.iter()
                 .cloned()
                 .map(VecDocSet::from)
-                .map(ConstScorer::new)
+                .map(|docset| ConstScorer::new(docset, 1.0f32))
                 .collect::<Vec<ConstScorer<VecDocSet>>>(),
         )
     };
@@ -339,7 +339,7 @@ mod tests {
             .iter()
            .map(|docs| docs.clone())
            .map(VecDocSet::from)
-            .map(ConstScorer::new)
+            .map(|docset| ConstScorer::new(docset, 1.0f32))
            .collect::<Vec<_>>(),
        ));
        res
@@ -369,8 +369,8 @@ mod tests {
     #[test]
     fn test_union_skip_corner_case3() {
         let mut docset = Union::<_, DoNothingCombiner>::from(vec![
-            ConstScorer::new(VecDocSet::from(vec![0u32, 5u32])),
-            ConstScorer::new(VecDocSet::from(vec![1u32, 4u32])),
+            ConstScorer::from(VecDocSet::from(vec![0u32, 5u32])),
+            ConstScorer::from(VecDocSet::from(vec![1u32, 4u32])),
         ]);
         assert!(docset.advance());
         assert_eq!(docset.doc(), 0u32);
@@ -1,7 +1,7 @@
 use super::Scorer;
 use crate::core::SegmentReader;
 use crate::query::Explanation;
-use crate::{DocId, Result};
+use crate::DocId;
 
 /// A Weight is the specialization of a Query
 /// for a given set of segments.
@@ -9,15 +9,18 @@ use crate::DocId;
 /// See [`Query`](./trait.Query.html).
 pub trait Weight: Send + Sync + 'static {
     /// Returns the scorer for the given segment.
+    ///
+    /// `boost` is a multiplier to apply to the score.
+    ///
     /// See [`Query`](./trait.Query.html).
-    fn scorer(&self, reader: &SegmentReader) -> Result<Box<dyn Scorer>>;
+    fn scorer(&self, reader: &SegmentReader, boost: f32) -> crate::Result<Box<dyn Scorer>>;
 
     /// Returns an `Explanation` for the given document.
-    fn explain(&self, reader: &SegmentReader, doc: DocId) -> Result<Explanation>;
+    fn explain(&self, reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation>;
 
     /// Returns the number documents within the given `SegmentReader`.
-    fn count(&self, reader: &SegmentReader) -> Result<u32> {
-        let mut scorer = self.scorer(reader)?;
+    fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
+        let mut scorer = self.scorer(reader, 1.0f32)?;
         if let Some(delete_bitset) = reader.delete_bitset() {
             Ok(scorer.count(delete_bitset))
         } else {
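The contract spelled out in the new doc comment is that every `Weight` implementation must fold `boost` into the scores its scorer emits. A hypothetical constant-score implementation, following the pattern `RangeWeight` uses above (the type and its match-all semantics are made up for illustration):

```rust
struct MatchAllWeight; // hypothetical example type

impl Weight for MatchAllWeight {
    fn scorer(&self, reader: &SegmentReader, boost: f32) -> crate::Result<Box<dyn Scorer>> {
        // Unscored weights simply emit the boost as their constant score.
        let mut bitset = BitSet::with_max_value(reader.max_doc());
        for doc in 0u32..reader.max_doc() {
            bitset.insert(doc);
        }
        Ok(Box::new(ConstScorer::new(BitSetDocSet::from(bitset), boost)))
    }

    fn explain(&self, reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation> {
        // Scoring weights would instead pre-multiply their similarity,
        // as `TermWeight` does with `similarity_weight.boost_by(boost)`.
        let mut scorer = self.scorer(reader, 1.0f32)?;
        if scorer.skip_next(doc) != SkipResult::Reached {
            return Err(does_not_match(doc));
        }
        Ok(Explanation::new("MatchAll", scorer.score()))
    }
}
```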
83
src/reader/index_writer_reader.rs
Normal file
@@ -0,0 +1,83 @@
+use crate::directory::{WatchCallbackList, WatchHandle};
+use crate::indexer::SegmentRegisters;
+use crate::reader::pool::Pool;
+use crate::{Index, LeasedItem, Searcher, Segment, SegmentReader};
+use std::iter::repeat_with;
+use std::sync::{Arc, RwLock, Weak};
+
+struct InnerNRTReader {
+    num_searchers: usize,
+    index: Index,
+    searcher_pool: Pool<Searcher>,
+    segment_registers: Arc<RwLock<SegmentRegisters>>,
+}
+
+impl InnerNRTReader {
+    fn load_segment_readers(&self) -> crate::Result<Vec<SegmentReader>> {
+        let segments: Vec<Segment> = {
+            let rlock = self.segment_registers.read().unwrap();
+            rlock.committed_segment()
+        };
+        segments
+            .iter()
+            .map(SegmentReader::open)
+            .collect::<crate::Result<Vec<SegmentReader>>>()
+    }
+
+    pub fn reload(&self) -> crate::Result<()> {
+        let segment_readers: Vec<SegmentReader> = self.load_segment_readers()?;
+        let schema = self.index.schema();
+        let searchers = repeat_with(|| {
+            Searcher::new(schema.clone(), self.index.clone(), segment_readers.clone())
+        })
+        .take(self.num_searchers)
+        .collect();
+        self.searcher_pool.publish_new_generation(searchers);
+        Ok(())
+    }
+
+    pub fn searcher(&self) -> LeasedItem<Searcher> {
+        self.searcher_pool.acquire()
+    }
+}
+
+#[derive(Clone)]
+pub struct NRTReader {
+    inner: Arc<InnerNRTReader>,
+    watch_handle: WatchHandle,
+}
+
+impl NRTReader {
+    pub fn reload(&self) -> crate::Result<()> {
+        self.inner.reload()
+    }
+
+    pub fn searcher(&self) -> LeasedItem<Searcher> {
+        self.inner.searcher()
+    }
+
+    pub(crate) fn create(
+        num_searchers: usize,
+        index: Index,
+        segment_registers: Arc<RwLock<SegmentRegisters>>,
+        watch_callback_list: &WatchCallbackList,
+    ) -> crate::Result<Self> {
+        let inner_reader: Arc<InnerNRTReader> = Arc::new(InnerNRTReader {
+            num_searchers,
+            index,
+            searcher_pool: Pool::new(),
+            segment_registers,
+        });
+        let inner_reader_weak: Weak<InnerNRTReader> = Arc::downgrade(&inner_reader);
+        let watch_handle = watch_callback_list.subscribe(Box::new(move || {
+            if let Some(nrt_reader_arc) = inner_reader_weak.upgrade() {
+                let _ = nrt_reader_arc.reload();
+            }
+        }));
+        inner_reader.reload()?;
+        Ok(NRTReader {
+            inner: inner_reader,
+            watch_handle,
+        })
+    }
+}
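The `create` function above subscribes through a `Weak` handle so that the watch callback does not keep the reader alive and create a reference cycle. The same pattern in isolation, with simplified stand-in types:

```rust
use std::sync::{Arc, Weak};

struct Inner; // stand-in for `InnerNRTReader`

impl Inner {
    fn reload(&self) -> Result<(), ()> {
        Ok(())
    }
}

fn subscribe_reload(callbacks: &mut Vec<Box<dyn Fn()>>, inner: &Arc<Inner>) {
    let weak: Weak<Inner> = Arc::downgrade(inner);
    callbacks.push(Box::new(move || {
        // Once the reader is dropped, the callback silently becomes a
        // no-op instead of extending its lifetime.
        if let Some(inner) = weak.upgrade() {
            let _ = inner.reload();
        }
    }));
}
```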
180
src/reader/meta_file_reader.rs
Normal file
@@ -0,0 +1,180 @@
+use super::pool::Pool;
+use crate::core::Segment;
+use crate::directory::Directory;
+use crate::directory::WatchHandle;
+use crate::directory::META_LOCK;
+use crate::Searcher;
+use crate::SegmentReader;
+use crate::{Index, LeasedItem};
+use crate::{IndexReader, Result};
+use std::iter::repeat_with;
+use std::sync::Arc;
+
+/// Defines when a new version of the index should be reloaded.
+///
+/// Regardless of whether you search and index in the same process, tantivy does not necessarily
+/// reflects the change that are commited to your index. `ReloadPolicy` precisely helps you define
+/// when you want your index to be reloaded.
+#[derive(Clone, Copy)]
+pub enum ReloadPolicy {
+    /// The index is entirely reloaded manually.
+    /// All updates of the index should be manual.
+    ///
+    /// No change is reflected automatically. You are required to call `.load_seacher()` manually.
+    Manual,
+    /// The index is reloaded within milliseconds after a new commit is available.
+    /// This is made possible by watching changes in the `meta.json` file.
+    OnCommit, // TODO add NEAR_REAL_TIME(target_ms)
+}
+
+/// `IndexReader` builder
+///
+/// It makes it possible to set the following values.
+///
+/// - `num_searchers` (by default, the number of detected CPU threads):
+///
+///   When `num_searchers` queries are requested at the same time, the `num_searchers` will block
+///   until the one of the searcher in-use gets released.
+/// - `reload_policy` (by default `ReloadPolicy::OnCommit`):
+///
+///   See [`ReloadPolicy`](./enum.ReloadPolicy.html) for more details.
+#[derive(Clone)]
+pub struct IndexReaderBuilder {
+    num_searchers: usize,
+    reload_policy: ReloadPolicy,
+    index: Index,
+}
+
+impl IndexReaderBuilder {
+    pub(crate) fn new(index: Index) -> IndexReaderBuilder {
+        IndexReaderBuilder {
+            num_searchers: num_cpus::get(),
+            reload_policy: ReloadPolicy::Manual,
+            index,
+        }
+    }
+
+    /// Builds the reader.
+    ///
+    /// Building the reader is a non-trivial operation that requires
+    /// to open different segment readers. It may take hundreds of milliseconds
+    /// of time and it may return an error.
+    /// TODO(pmasurel) Use the `TryInto` trait once it is available in stable.
+    pub fn try_into(self) -> Result<IndexReader> {
+        let inner_reader = MetaFileIndexReaderInner {
+            index: self.index,
+            num_searchers: self.num_searchers,
+            searcher_pool: Pool::new(),
+        };
+        inner_reader.reload()?;
+        let inner_reader_arc = Arc::new(inner_reader);
+        let watch_handle_opt: Option<WatchHandle>;
+        match self.reload_policy {
+            ReloadPolicy::Manual => {
+                // No need to set anything...
+                watch_handle_opt = None;
+            }
+            ReloadPolicy::OnCommit => {
+                let inner_reader_arc_clone = inner_reader_arc.clone();
+                let callback = move || {
+                    if let Err(err) = inner_reader_arc_clone.reload() {
+                        error!(
+                            "Error while loading searcher after commit was detected. {:?}",
+                            err
+                        );
+                    }
+                };
+                let watch_handle = inner_reader_arc
+                    .index
+                    .directory()
+                    .watch(Box::new(callback))?;
+                watch_handle_opt = Some(watch_handle);
+            }
+        }
+        Ok(IndexReader::from(MetaFileIndexReader {
+            inner: inner_reader_arc,
+            watch_handle_opt,
+        }))
+    }
+
+    /// Sets the reload_policy.
+    ///
+    /// See [`ReloadPolicy`](./enum.ReloadPolicy.html) for more details.
+    pub fn reload_policy(mut self, reload_policy: ReloadPolicy) -> IndexReaderBuilder {
+        self.reload_policy = reload_policy;
+        self
+    }
+
+    /// Sets the number of `Searcher` in the searcher pool.
+    pub fn num_searchers(mut self, num_searchers: usize) -> IndexReaderBuilder {
+        self.num_searchers = num_searchers;
+        self
+    }
+}
+
+struct MetaFileIndexReaderInner {
+    num_searchers: usize,
+    searcher_pool: Pool<Searcher>,
+    index: Index,
+}
+
+impl MetaFileIndexReaderInner {
+    fn load_segment_readers(&self) -> crate::Result<Vec<SegmentReader>> {
+        // We keep the lock until we have effectively finished opening the
+        // the `SegmentReader` because it prevents a diffferent process
+        // to garbage collect these file while we open them.
+        //
+        // Once opened, on linux & mac, the mmap will remain valid after
+        // the file has been deleted
+        // On windows, the file deletion will fail.
+        let _meta_lock = self.index.directory().acquire_lock(&META_LOCK)?;
+        let searchable_segments = self.searchable_segments()?;
+        searchable_segments
+            .iter()
+            .map(SegmentReader::open)
+            .collect::<Result<_>>()
+    }
+
+    fn reload(&self) -> crate::Result<()> {
+        let segment_readers: Vec<SegmentReader> = self.load_segment_readers()?;
+        let schema = self.index.schema();
+        let searchers = repeat_with(|| {
+            Searcher::new(schema.clone(), self.index.clone(), segment_readers.clone())
+        })
+        .take(self.num_searchers)
+        .collect();
+        self.searcher_pool.publish_new_generation(searchers);
+        Ok(())
+    }
+
+    /// Returns the list of segments that are searchable
+    fn searchable_segments(&self) -> crate::Result<Vec<Segment>> {
+        self.index.searchable_segments()
+    }
+
+    fn searcher(&self) -> LeasedItem<Searcher> {
+        self.searcher_pool.acquire()
+    }
+}
+
+/// `IndexReader` is your entry point to read and search the index.
+///
+/// It controls when a new version of the index should be loaded and lends
+/// you instances of `Searcher` for the last loaded version.
+///
+/// `Clone` does not clone the different pool of searcher. `IndexReader`
+/// just wraps and `Arc`.
+#[derive(Clone)]
+pub struct MetaFileIndexReader {
+    inner: Arc<MetaFileIndexReaderInner>,
+    watch_handle_opt: Option<WatchHandle>,
+}
+
+impl MetaFileIndexReader {
+    pub fn reload(&self) -> crate::Result<()> {
+        self.inner.reload()
+    }
+    pub fn searcher(&self) -> LeasedItem<Searcher> {
+        self.inner.searcher()
+    }
+}
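In practice this builder is reached through `Index::reader_builder`; a typical call chain, assuming an existing `index`:

```rust
use tantivy::ReloadPolicy;

let reader = index
    .reader_builder()
    .reload_policy(ReloadPolicy::OnCommit)
    .num_searchers(4)
    .try_into()?;
// Each query should lease one searcher and keep it for its whole lifetime,
// so that it sees a consistent set of segments.
let searcher = reader.searcher();
```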
@@ -1,16 +1,32 @@
+mod index_writer_reader;
+mod meta_file_reader;
 mod pool;
 
+use self::meta_file_reader::MetaFileIndexReader;
+pub use self::meta_file_reader::{IndexReaderBuilder, ReloadPolicy};
 pub use self::pool::LeasedItem;
-use self::pool::Pool;
-use crate::core::Segment;
-use crate::directory::Directory;
-use crate::directory::WatchHandle;
-use crate::directory::META_LOCK;
-use crate::Index;
-use crate::Result;
+pub(crate) use crate::reader::index_writer_reader::NRTReader;
 use crate::Searcher;
-use crate::SegmentReader;
-use std::sync::Arc;
+/*
+//
+//enum SegmentSource {
+//    FromMetaFile,
+//    FromWriter(Arc<RwLock<SegmentRegisters>>),
+//}
+//
+//impl SegmentSource {
+//    fn from_meta_file() -> SegmentSource {
+//
+//    }
+//
+//}
 
 /// Defines when a new version of the index should be reloaded.
 ///
@@ -62,7 +78,7 @@ impl IndexReaderBuilder {
     /// to open different segment readers. It may take hundreds of milliseconds
     /// of time and it may return an error.
     /// TODO(pmasurel) Use the `TryInto` trait once it is available in stable.
-    pub fn try_into(self) -> Result<IndexReader> {
+    pub fn try_into(self) -> crate::Result<IndexReader> {
         let inner_reader = InnerIndexReader {
             index: self.index,
             num_searchers: self.num_searchers,
@@ -121,15 +137,24 @@ struct InnerIndexReader {
 }
 
 impl InnerIndexReader {
-    fn reload(&self) -> Result<()> {
-        let segment_readers: Vec<SegmentReader> = {
-            let _meta_lock = self.index.directory().acquire_lock(&META_LOCK)?;
-            let searchable_segments = self.searchable_segments()?;
-            searchable_segments
-                .iter()
-                .map(SegmentReader::open)
-                .collect::<Result<_>>()?
-        };
+    fn load_segment_readers(&self) -> crate::Result<Vec<SegmentReader>> {
+        // We keep the lock until we have effectively finished opening the
+        // the `SegmentReader` because it prevents a diffferent process
+        // to garbage collect these file while we open them.
+        //
+        // Once opened, on linux & mac, the mmap will remain valid after
+        // the file has been deleted
+        // On windows, the file deletion will fail.
+        let _meta_lock = self.index.directory().acquire_lock(&META_LOCK)?;
+        let searchable_segments = self.searchable_segments()?;
+        searchable_segments
+            .iter()
+            .map(SegmentReader::open)
+            .collect::<crate::Result<_>>()
+    }
+
+    fn reload(&self) -> crate::Result<()> {
+        let segment_readers: Vec<SegmentReader> = self.load_segment_readers()?;
         let schema = self.index.schema();
         let searchers = (0..self.num_searchers)
             .map(|_| Searcher::new(schema.clone(), self.index.clone(), segment_readers.clone()))
@@ -139,7 +164,7 @@ impl InnerIndexReader {
     }
 
     /// Returns the list of segments that are searchable
-    fn searchable_segments(&self) -> Result<Vec<Segment>> {
+    fn searchable_segments(&self) -> crate::Result<Vec<Segment>> {
         self.index.searchable_segments()
     }
 
@@ -155,18 +180,16 @@ impl InnerIndexReader {
 ///
 /// `Clone` does not clone the different pool of searcher. `IndexReader`
 /// just wraps and `Arc`.
+=======
+>>>>>>> Added NRTReader
+*/
 #[derive(Clone)]
-pub struct IndexReader {
-    inner: Arc<InnerIndexReader>,
-    watch_handle_opt: Option<WatchHandle>,
+pub enum IndexReader {
+    FromMetaFile(MetaFileIndexReader),
+    NRT(NRTReader),
 }
 
 impl IndexReader {
-    #[cfg(test)]
-    pub(crate) fn index(&self) -> Index {
-        self.inner.index.clone()
-    }
-
     /// Update searchers so that they reflect the state of the last
     /// `.commit()`.
     ///
@@ -176,8 +199,11 @@ impl IndexReader {
     ///
     /// This automatic reload can take 10s of milliseconds to kick in however, and in unit tests
     /// it can be nice to deterministically force the reload of searchers.
-    pub fn reload(&self) -> Result<()> {
-        self.inner.reload()
+    pub fn reload(&self) -> crate::Result<()> {
+        match self {
+            IndexReader::FromMetaFile(meta_file_reader) => meta_file_reader.reload(),
+            IndexReader::NRT(nrt_reader) => nrt_reader.reload(),
+        }
     }
 
     /// Returns a searcher
@@ -191,6 +217,21 @@ impl IndexReader {
     /// The same searcher must be used for a given query, as it ensures
     /// the use of a consistent segment set.
     pub fn searcher(&self) -> LeasedItem<Searcher> {
-        self.inner.searcher()
+        match self {
+            IndexReader::FromMetaFile(meta_file_reader) => meta_file_reader.searcher(),
+            IndexReader::NRT(nrt_reader) => nrt_reader.searcher(),
+        }
+    }
+}
+
+impl From<MetaFileIndexReader> for IndexReader {
+    fn from(meta_file_reader: MetaFileIndexReader) -> Self {
+        IndexReader::FromMetaFile(meta_file_reader)
+    }
+}
+
+impl From<NRTReader> for IndexReader {
+    fn from(nrt_reader: NRTReader) -> Self {
+        IndexReader::NRT(nrt_reader)
     }
 }
@@ -68,7 +68,9 @@ impl<T> Pool<T> {
     /// After publish, all new `Searcher` acquired will be
     /// of the new generation.
     pub fn publish_new_generation(&self, items: Vec<T>) {
+        assert!(!items.is_empty());
         let next_generation = self.next_generation.fetch_add(1, Ordering::SeqCst) + 1;
+        let num_items = items.len();
         for item in items {
             let gen_item = GenerationItem {
                 item,
@@ -77,6 +79,23 @@ impl<T> Pool<T> {
             self.queue.push(gen_item);
         }
         self.advertise_generation(next_generation);
+        // Purge possible previous searchers.
+        //
+        // Assuming at this point no searcher is held more than duration T by the user,
+        // this guarantees that an obsolete searcher will not be uselessly held (and its associated
+        // mmap) for more than duration T.
+        //
+        // Proof: At this point, obsolete searcher that are held by the user will be held for less
+        // than T. When released, they will be dropped as their generation is detected obsolete.
+        //
+        // We still need to ensure that the searcher that are obsolete and in the pool get removed.
+        // The queue currently contains up to 2n searchers, in any random order.
+        //
+        // Half of them are obsoletes. By requesting `(n+1)` fresh searchers, we ensure that all
+        // searcher will be inspected.
+        for _ in 0..=num_items {
+            let _ = self.acquire();
+        }
     }
 
     /// At the exit of this method,
@@ -122,6 +122,11 @@ impl Facet {
     pub fn to_path(&self) -> Vec<&str> {
         self.encoded_str().split(|c| c == FACET_SEP_CHAR).collect()
     }
+
+    /// This function is the inverse of Facet::from(&str).
+    pub fn to_path_string(&self) -> String {
+        format!("{}", self)
+    }
 }
 
 impl Borrow<str> for Facet {
@@ -265,4 +270,21 @@ mod tests {
         let facet = Facet::from_path(v.iter());
         assert_eq!(facet.to_path(), v);
     }
+
+    #[test]
+    fn test_to_path_string() {
+        let v = ["first", "second", "third/not_fourth"];
+        let facet = Facet::from_path(v.iter());
+        assert_eq!(
+            facet.to_path_string(),
+            String::from("/first/second/third\\/not_fourth")
+        );
+    }
+
+    #[test]
+    fn test_to_path_string_empty() {
+        let v: Vec<&str> = vec![];
+        let facet = Facet::from_path(v.iter());
+        assert_eq!(facet.to_path_string(), "/");
+    }
 }
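`to_path_string` re-escapes the `/` separator, so it is the exact inverse of `Facet::from`, as the new tests check. A quick round-trip sketch:

```rust
use tantivy::schema::Facet;

let facet = Facet::from("/category/electronics");
assert_eq!(facet.to_path(), vec!["category", "electronics"]);
// The inverse direction restores the `/`-joined, escaped form.
assert_eq!(facet.to_path_string(), "/category/electronics");
```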
@@ -1,10 +1,8 @@
 use crate::query::Query;
 use crate::schema::Field;
 use crate::schema::Value;
-use crate::tokenizer::BoxedTokenizer;
-use crate::tokenizer::{Token, TokenStream};
+use crate::tokenizer::{TextAnalyzer, Token};
 use crate::Document;
-use crate::Result;
 use crate::Searcher;
 use htmlescape::encode_minimal;
 use std::cmp::Ordering;
@@ -142,7 +140,7 @@ impl Snippet {
 /// Fragments must be valid in the sense that `&text[fragment.start..fragment.stop]`\
 /// has to be a valid string.
 fn search_fragments<'a>(
-    tokenizer: &BoxedTokenizer,
+    tokenizer: &TextAnalyzer,
     text: &'a str,
     terms: &BTreeMap<String, f32>,
     max_num_chars: usize,
@@ -251,7 +249,7 @@ fn select_best_fragment_combination(fragments: &[FragmentCandidate], text: &str)
 /// ```
 pub struct SnippetGenerator {
     terms_text: BTreeMap<String, f32>,
-    tokenizer: BoxedTokenizer,
+    tokenizer: TextAnalyzer,
     field: Field,
     max_num_chars: usize,
 }
@@ -262,7 +260,7 @@ impl SnippetGenerator {
         searcher: &Searcher,
         query: &dyn Query,
         field: Field,
-    ) -> Result<SnippetGenerator> {
+    ) -> crate::Result<SnippetGenerator> {
         let mut terms = BTreeSet::new();
         query.query_terms(&mut terms);
         let terms_text: BTreeMap<String, f32> = terms
@@ -347,12 +345,11 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet() {
-        let boxed_tokenizer = SimpleTokenizer.into();
         let terms = btreemap! {
             String::from("rust") => 1.0,
             String::from("language") => 0.9
         };
-        let fragments = search_fragments(&boxed_tokenizer, TEST_TEXT, &terms, 100);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 100);
         assert_eq!(fragments.len(), 7);
         {
             let first = &fragments[0];
@@ -374,13 +371,12 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_scored_fragment() {
-        let boxed_tokenizer = SimpleTokenizer.into();
         {
             let terms = btreemap! {
                 String::from("rust") =>1.0f32,
                 String::from("language") => 0.9f32
             };
-            let fragments = search_fragments(&boxed_tokenizer, TEST_TEXT, &terms, 20);
+            let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 20);
             {
                 let first = &fragments[0];
                 assert_eq!(first.score, 1.0);
@@ -389,13 +385,12 @@ Survey in 2016, 2017, and 2018."#;
             let snippet = select_best_fragment_combination(&fragments[..], &TEST_TEXT);
             assert_eq!(snippet.to_html(), "<b>Rust</b> is a systems")
         }
-        let boxed_tokenizer = SimpleTokenizer.into();
         {
             let terms = btreemap! {
                 String::from("rust") =>0.9f32,
                 String::from("language") => 1.0f32
             };
-            let fragments = search_fragments(&boxed_tokenizer, TEST_TEXT, &terms, 20);
+            let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 20);
             //assert_eq!(fragments.len(), 7);
             {
                 let first = &fragments[0];
@@ -409,14 +404,12 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_in_second_fragment() {
-        let boxed_tokenizer = SimpleTokenizer.into();
-
         let text = "a b c d e f g";
 
         let mut terms = BTreeMap::new();
         terms.insert(String::from("c"), 1.0);
 
-        let fragments = search_fragments(&boxed_tokenizer, &text, &terms, 3);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
 
         assert_eq!(fragments.len(), 1);
         {
@@ -433,14 +426,12 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_with_term_at_the_end_of_fragment() {
-        let boxed_tokenizer = SimpleTokenizer.into();
-
         let text = "a b c d e f f g";
 
         let mut terms = BTreeMap::new();
         terms.insert(String::from("f"), 1.0);
 
-        let fragments = search_fragments(&boxed_tokenizer, &text, &terms, 3);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
 
         assert_eq!(fragments.len(), 2);
         {
@@ -457,15 +448,13 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_with_second_fragment_has_the_highest_score() {
-        let boxed_tokenizer = SimpleTokenizer.into();
-
         let text = "a b c d e f g";
 
         let mut terms = BTreeMap::new();
         terms.insert(String::from("f"), 1.0);
         terms.insert(String::from("a"), 0.9);
 
-        let fragments = search_fragments(&boxed_tokenizer, &text, &terms, 7);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 7);
 
         assert_eq!(fragments.len(), 2);
         {
@@ -482,14 +471,12 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_with_term_not_in_text() {
-        let boxed_tokenizer = SimpleTokenizer.into();
-
         let text = "a b c d";
 
         let mut terms = BTreeMap::new();
         terms.insert(String::from("z"), 1.0);
 
-        let fragments = search_fragments(&boxed_tokenizer, &text, &terms, 3);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
 
         assert_eq!(fragments.len(), 0);
 
@@ -500,12 +487,10 @@ Survey in 2016, 2017, and 2018."#;
 
     #[test]
     fn test_snippet_with_no_terms() {
-        let boxed_tokenizer = SimpleTokenizer.into();
-
         let text = "a b c d";
 
         let terms = BTreeMap::new();
-        let fragments = search_fragments(&boxed_tokenizer, &text, &terms, 3);
+        let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
         assert_eq!(fragments.len(), 0);
 
         let snippet = select_best_fragment_combination(&fragments[..], &text);
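With `BoxedTokenizer` replaced by `TextAnalyzer`, the public snippet API is otherwise unchanged; typical usage, assuming an existing searcher, query, and stored text field:

```rust
use tantivy::SnippetGenerator;

let snippet_generator = SnippetGenerator::create(&searcher, &*query, text_field)?;
let doc = searcher.doc(doc_address)?;
let snippet = snippet_generator.snippet_from_doc(&doc);
println!("{}", snippet.to_html());
```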
@@ -9,7 +9,7 @@ pub const COMPRESSION: &str = "snappy";
 
 pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()> {
     compressed.clear();
-    let mut encoder = snap::Writer::new(compressed);
+    let mut encoder = snap::write::FrameEncoder::new(compressed);
     encoder.write_all(&uncompressed)?;
     encoder.flush()?;
     Ok(())
@@ -17,6 +17,6 @@ pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()>
 
 pub fn decompress(compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()> {
     decompressed.clear();
-    snap::Reader::new(compressed).read_to_end(decompressed)?;
+    snap::read::FrameDecoder::new(compressed).read_to_end(decompressed)?;
     Ok(())
 }
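The switch to `snap`'s `read::FrameDecoder`/`write::FrameEncoder` names keeps `compress` and `decompress` as exact inverses at this API boundary. A round-trip sketch of the module-internal helpers:

```rust
let payload = b"tantivy stores documents in compressed blocks";
let mut compressed = Vec::new();
compress(&payload[..], &mut compressed)?;

let mut decompressed = Vec::new();
decompress(&compressed, &mut decompressed)?;
assert_eq!(&decompressed[..], &payload[..]);
```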
@@ -57,7 +57,7 @@ use self::compression_snap::{compress, decompress};
 pub mod tests {
 
     use super::*;
-    use crate::directory::{Directory, RAMDirectory, ReadOnlyDirectory, WritePtr};
+    use crate::directory::{Directory, RAMDirectory, WritePtr};
     use crate::schema::Document;
     use crate::schema::FieldValue;
     use crate::schema::Schema;
@@ -1,5 +1,3 @@
-use crate::Result;
-
 use super::decompress;
 use super::skiplist::SkipList;
 use crate::common::BinarySerializable;
@@ -75,7 +73,7 @@ impl StoreReader {
     ///
     /// It should not be called to score documents
     /// for instance.
-    pub fn get(&self, doc_id: DocId) -> Result<Document> {
+    pub fn get(&self, doc_id: DocId) -> crate::Result<Document> {
         let (first_doc_id, block_offset) = self.block_offset(doc_id);
         self.read_block(block_offset as usize)?;
         let current_block_mut = self.current_block.borrow_mut();
@@ -36,9 +36,9 @@ pub use self::termdict::{TermDictionary, TermDictionaryBuilder};
 mod tests {
     use super::{TermDictionary, TermDictionaryBuilder, TermStreamer};
     use crate::core::Index;
-    use crate::directory::{Directory, RAMDirectory, ReadOnlyDirectory, ReadOnlySource};
+    use crate::directory::{Directory, RAMDirectory, ReadOnlySource};
     use crate::postings::TermInfo;
-    use crate::schema::{Document, FieldType, Schema, TEXT};
+    use crate::schema::{Document, Schema, TEXT};
     use std::path::PathBuf;
     use std::str;
 
@@ -52,6 +52,12 @@ mod tests {
         }
     }
 
+    #[test]
+    fn test_empty_term_dictionary() {
+        let empty = TermDictionary::empty();
+        assert!(empty.stream().next().is_none());
+    }
+
     #[test]
     fn test_term_ordinals() {
         const COUNTRIES: [&'static str; 7] = [
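The hunks below also exercise the new backward iteration over the term dictionary; a minimal usage sketch with the names used in this file:

```rust
// Stream the whole dictionary in descending byte order of the terms.
let mut stream = term_dictionary.range().backward().into_stream();
while let Some((term_bytes, term_info)) = stream.next() {
    let _ = (term_bytes, term_info.doc_freq);
}
```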
@@ -67,9 +73,7 @@ mod tests {
         let path = PathBuf::from("TermDictionary");
         {
             let write = directory.open_write(&path).unwrap();
-            let field_type = FieldType::Str(TEXT);
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(write, &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(write).unwrap();
             for term in COUNTRIES.iter() {
                 term_dictionary_builder
                     .insert(term.as_bytes(), &make_term_info(0u64))
@@ -93,9 +97,7 @@ mod tests {
         let path = PathBuf::from("TermDictionary");
         {
             let write = directory.open_write(&path).unwrap();
-            let field_type = FieldType::Str(TEXT);
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(write, &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(write).unwrap();
             term_dictionary_builder
                 .insert("abc".as_bytes(), &make_term_info(34u64))
                 .unwrap();
@@ -179,10 +181,8 @@ mod tests {
         let ids: Vec<_> = (0u32..10_000u32)
             .map(|i| (format!("doc{:0>6}", i), i))
             .collect();
-        let field_type = FieldType::Str(TEXT);
         let buffer: Vec<u8> = {
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(vec![], &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
             for &(ref id, ref i) in &ids {
                 term_dictionary_builder
                     .insert(id.as_bytes(), &make_term_info(*i as u64))
@@ -209,10 +209,8 @@ mod tests {
 
     #[test]
     fn test_stream_high_range_prefix_suffix() {
-        let field_type = FieldType::Str(TEXT);
         let buffer: Vec<u8> = {
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(vec![], &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
             // term requires more than 16bits
             term_dictionary_builder
                 .insert("abcdefghijklmnopqrstuvwxy", &make_term_info(1))
@@ -244,10 +242,8 @@ mod tests {
         let ids: Vec<_> = (0u32..10_000u32)
             .map(|i| (format!("doc{:0>6}", i), i))
             .collect();
-        let field_type = FieldType::Str(TEXT);
         let buffer: Vec<u8> = {
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(vec![], &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
             for &(ref id, ref i) in &ids {
                 term_dictionary_builder
                     .insert(id.as_bytes(), &make_term_info(*i as u64))
@@ -313,10 +309,8 @@ mod tests {
 
     #[test]
     fn test_empty_string() {
-        let field_type = FieldType::Str(TEXT);
         let buffer: Vec<u8> = {
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(vec![], &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
             term_dictionary_builder
                 .insert(&[], &make_term_info(1 as u64))
                 .unwrap();
@@ -337,10 +331,8 @@ mod tests {
 
     #[test]
     fn test_stream_range_boundaries() {
-        let field_type = FieldType::Str(TEXT);
         let buffer: Vec<u8> = {
-            let mut term_dictionary_builder =
-                TermDictionaryBuilder::create(vec![], &field_type).unwrap();
+            let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
             for i in 0u8..10u8 {
                 let number_arr = [i; 1];
                 term_dictionary_builder
@@ -352,41 +344,91 @@ mod tests {
         let source = ReadOnlySource::from(buffer);
         let term_dictionary: TermDictionary = TermDictionary::from_source(&source);
 
-        let value_list = |mut streamer: TermStreamer<'_>| {
+        let value_list = |mut streamer: TermStreamer<'_>, backwards: bool| {
             let mut res: Vec<u32> = vec![];
             while let Some((_, ref v)) = streamer.next() {
                 res.push(v.doc_freq);
             }
+            if backwards {
+                res.reverse();
+            }
             res
         };
+        {
+            let range = term_dictionary.range().backward().into_stream();
+            assert_eq!(
+                value_list(range, true),
+                vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
+            );
+        }
         {
             let range = term_dictionary.range().ge([2u8]).into_stream();
             assert_eq!(
-                value_list(range),
+                value_list(range, false),
+                vec![2u32, 3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
+            );
+        }
+        {
+            let range = term_dictionary.range().ge([2u8]).backward().into_stream();
+            assert_eq!(
+                value_list(range, true),
|
||||||
vec![2u32, 3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
|
vec![2u32, 3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let range = term_dictionary.range().gt([2u8]).into_stream();
|
let range = term_dictionary.range().gt([2u8]).into_stream();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
value_list(range),
|
value_list(range, false),
|
||||||
|
vec![3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let range = term_dictionary.range().gt([2u8]).backward().into_stream();
|
||||||
|
assert_eq!(
|
||||||
|
value_list(range, true),
|
||||||
vec![3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
|
vec![3u32, 4u32, 5u32, 6u32, 7u32, 8u32, 9u32]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let range = term_dictionary.range().lt([6u8]).into_stream();
|
let range = term_dictionary.range().lt([6u8]).into_stream();
|
||||||
assert_eq!(value_list(range), vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32]);
|
assert_eq!(
|
||||||
|
value_list(range, false),
|
||||||
|
vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let range = term_dictionary.range().lt([6u8]).backward().into_stream();
|
||||||
|
assert_eq!(
|
||||||
|
value_list(range, true),
|
||||||
|
vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let range = term_dictionary.range().le([6u8]).into_stream();
|
let range = term_dictionary.range().le([6u8]).into_stream();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
value_list(range),
|
value_list(range, false),
|
||||||
|
vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32, 6u32]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let range = term_dictionary.range().le([6u8]).backward().into_stream();
|
||||||
|
assert_eq!(
|
||||||
|
value_list(range, true),
|
||||||
vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32, 6u32]
|
vec![0u32, 1u32, 2u32, 3u32, 4u32, 5u32, 6u32]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
let range = term_dictionary.range().ge([0u8]).lt([5u8]).into_stream();
|
let range = term_dictionary.range().ge([0u8]).lt([5u8]).into_stream();
|
||||||
assert_eq!(value_list(range), vec![0u32, 1u32, 2u32, 3u32, 4u32]);
|
assert_eq!(value_list(range, false), vec![0u32, 1u32, 2u32, 3u32, 4u32]);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
let range = term_dictionary
|
||||||
|
.range()
|
||||||
|
.ge([0u8])
|
||||||
|
.lt([5u8])
|
||||||
|
.backward()
|
||||||
|
.into_stream();
|
||||||
|
assert_eq!(value_list(range, true), vec![0u32, 1u32, 2u32, 3u32, 4u32]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -408,9 +450,7 @@ mod tests {
|
|||||||
let path = PathBuf::from("TermDictionary");
|
let path = PathBuf::from("TermDictionary");
|
||||||
{
|
{
|
||||||
let write = directory.open_write(&path).unwrap();
|
let write = directory.open_write(&path).unwrap();
|
||||||
let field_type = FieldType::Str(TEXT);
|
let mut term_dictionary_builder = TermDictionaryBuilder::create(write).unwrap();
|
||||||
let mut term_dictionary_builder =
|
|
||||||
TermDictionaryBuilder::create(write, &field_type).unwrap();
|
|
||||||
for term in COUNTRIES.iter() {
|
for term in COUNTRIES.iter() {
|
||||||
term_dictionary_builder
|
term_dictionary_builder
|
||||||
.insert(term.as_bytes(), &make_term_info(0u64))
|
.insert(term.as_bytes(), &make_term_info(0u64))
|
||||||
|
@@ -51,6 +51,12 @@ where
         self
     }
 
+    /// Iterate over the range backwards.
+    pub fn backward(mut self) -> Self {
+        self.stream_builder = self.stream_builder.backward();
+        self
+    }
+
     /// Creates the stream corresponding to the range
     /// of terms defined using the `TermStreamerBuilder`.
    pub fn into_stream(self) -> TermStreamer<'a, A> {
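Read together with the test hunk above, this is the whole backward-iteration feature: `backward()` on the range builder just flags the underlying fst stream builder, and `into_stream()` is unchanged. A minimal usage sketch, assuming the same helpers and types this diff touches (`make_term_info`, `TermDictionaryBuilder`, `ReadOnlySource`); it mirrors `test_stream_range_boundaries`:

```rust
// Sketch only, modeled on the tests above.
let buffer: Vec<u8> = {
    let mut builder = TermDictionaryBuilder::create(vec![]).unwrap();
    for i in 0u8..10u8 {
        builder.insert(&[i], &make_term_info(u64::from(i))).unwrap();
    }
    builder.finish().unwrap()
};
let source = ReadOnlySource::from(buffer);
let dict = TermDictionary::from_source(&source);

// Keys >= 2, streamed in descending byte order.
let mut stream = dict.range().ge([2u8]).backward().into_stream();
while let Some((_key, info)) = stream.next() {
    // doc_freq values arrive as 9, 8, ..., 2.
    let _ = info.doc_freq;
}
```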
@@ -4,8 +4,8 @@ use crate::common::BinarySerializable;
 use crate::common::CountingWriter;
 use crate::directory::ReadOnlySource;
 use crate::postings::TermInfo;
-use crate::schema::FieldType;
 use crate::termdict::TermOrdinal;
+use once_cell::sync::Lazy;
 use std::io::{self, Write};
 use tantivy_fst;
 use tantivy_fst::raw::Fst;
@@ -29,7 +29,7 @@ where
     W: Write,
 {
     /// Creates a new `TermDictionaryBuilder`
-    pub fn create(w: W, _field_type: &FieldType) -> io::Result<Self> {
+    pub fn create(w: W) -> io::Result<Self> {
         let fst_builder = tantivy_fst::MapBuilder::new(w).map_err(convert_fst_error)?;
         Ok(TermDictionaryBuilder {
             fst_builder,
@@ -92,6 +92,14 @@ fn open_fst_index(source: ReadOnlySource) -> tantivy_fst::Map<ReadOnlySource> {
     tantivy_fst::Map::from(fst)
 }
 
+static EMPTY_DATA_SOURCE: Lazy<ReadOnlySource> = Lazy::new(|| {
+    let term_dictionary_data: Vec<u8> = TermDictionaryBuilder::create(Vec::<u8>::new())
+        .expect("Creating a TermDictionaryBuilder in a Vec<u8> should never fail")
+        .finish()
+        .expect("Writing in a Vec<u8> should never fail");
+    ReadOnlySource::from(term_dictionary_data)
+});
+
 /// The term dictionary contains all of the terms in
 /// `tantivy index` in a sorted manner.
 ///
@@ -122,14 +130,8 @@ impl TermDictionary {
     }
 
     /// Creates an empty term dictionary which contains no terms.
-    pub fn empty(field_type: &FieldType) -> Self {
-        let term_dictionary_data: Vec<u8> =
-            TermDictionaryBuilder::create(Vec::<u8>::new(), &field_type)
-                .expect("Creating a TermDictionaryBuilder in a Vec<u8> should never fail")
-                .finish()
-                .expect("Writing in a Vec<u8> should never fail");
-        let source = ReadOnlySource::from(term_dictionary_data);
-        Self::from_source(&source)
+    pub fn empty() -> Self {
+        TermDictionary::from_source(&*EMPTY_DATA_SOURCE)
     }
 
     /// Returns the number of terms in the dictionary.
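The `empty()` rewrite above drops the now-unused `FieldType` argument and replaces a per-call build of the serialized empty dictionary with a process-wide `once_cell` static. A standalone sketch of that memoization pattern, with illustrative names only (`EMPTY_BYTES` stands in for the `EMPTY_DATA_SOURCE` static from the diff):

```rust
use once_cell::sync::Lazy;

// Built once, on first access, then shared by every caller.
static EMPTY_BYTES: Lazy<Vec<u8>> = Lazy::new(|| {
    // In the real code this is where the empty term dictionary
    // is serialized via TermDictionaryBuilder::create(..).finish().
    Vec::new()
});

fn empty_payload() -> &'static [u8] {
    &EMPTY_BYTES
}

fn main() {
    // Both calls observe the same lazily initialized allocation.
    assert_eq!(empty_payload().as_ptr(), empty_payload().as_ptr());
}
```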
@@ -2,7 +2,7 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let tokenizer = RawTokenizer
+//! let tokenizer = TextAnalyzer::from(RawTokenizer)
 //!   .filter(AlphaNumOnlyFilter);
 //!
 //! let mut stream = tokenizer.token_stream("hello there");
@@ -10,7 +10,7 @@
 //! // contains a space
 //! assert!(stream.next().is_none());
 //!
-//! let tokenizer = SimpleTokenizer
+//! let tokenizer = TextAnalyzer::from(SimpleTokenizer)
 //!   .filter(AlphaNumOnlyFilter);
 //!
 //! let mut stream = tokenizer.token_stream("hello there 💣");
@@ -19,56 +19,30 @@
 //! // the "emoji" is dropped because its not an alphanum
 //! assert!(stream.next().is_none());
 //! ```
-use super::{Token, TokenFilter, TokenStream};
+use super::{BoxTokenStream, Token, TokenFilter, TokenStream};
 
 /// `TokenFilter` that removes all tokens that contain non
 /// ascii alphanumeric characters.
 #[derive(Clone)]
 pub struct AlphaNumOnlyFilter;
 
-pub struct AlphaNumOnlyFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    tail: TailTokenStream,
+pub struct AlphaNumOnlyFilterStream<'a> {
+    tail: BoxTokenStream<'a>,
 }
 
-impl<TailTokenStream> AlphaNumOnlyFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
+impl<'a> AlphaNumOnlyFilterStream<'a> {
     fn predicate(&self, token: &Token) -> bool {
         token.text.chars().all(|c| c.is_ascii_alphanumeric())
     }
+}
 
-    fn wrap(tail: TailTokenStream) -> AlphaNumOnlyFilterStream<TailTokenStream> {
-        AlphaNumOnlyFilterStream { tail }
-    }
-}
-
-impl<TailTokenStream> TokenFilter<TailTokenStream> for AlphaNumOnlyFilter
-where
-    TailTokenStream: TokenStream,
-{
-    type ResultTokenStream = AlphaNumOnlyFilterStream<TailTokenStream>;
-
-    fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream {
-        AlphaNumOnlyFilterStream::wrap(token_stream)
+impl TokenFilter for AlphaNumOnlyFilter {
+    fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
+        BoxTokenStream::from(AlphaNumOnlyFilterStream { tail: token_stream })
     }
 }
 
-impl<TailTokenStream> TokenStream for AlphaNumOnlyFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    fn token(&self) -> &Token {
-        self.tail.token()
-    }
-
-    fn token_mut(&mut self) -> &mut Token {
-        self.tail.token_mut()
-    }
-
+impl<'a> TokenStream for AlphaNumOnlyFilterStream<'a> {
     fn advance(&mut self) -> bool {
         while self.tail.advance() {
             if self.predicate(self.tail.token()) {
@@ -78,4 +52,12 @@ where
 
         false
     }
+
+    fn token(&self) -> &Token {
+        self.tail.token()
+    }
+
+    fn token_mut(&mut self) -> &mut Token {
+        self.tail.token_mut()
+    }
 }
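The `AlphaNumOnlyFilter` rewrite above is representative of the new object-safe `TokenFilter` trait: instead of being generic over the tail stream, a filter now receives and returns a `BoxTokenStream`. A hedged sketch of a third-party filter written against that signature (`NonEmptyFilter` is hypothetical, not part of this diff):

```rust
use tantivy::tokenizer::{BoxTokenStream, Token, TokenFilter, TokenStream};

/// Hypothetical filter dropping empty tokens, following the pattern above.
#[derive(Clone)]
pub struct NonEmptyFilter;

pub struct NonEmptyFilterStream<'a> {
    tail: BoxTokenStream<'a>,
}

impl TokenFilter for NonEmptyFilter {
    fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
        BoxTokenStream::from(NonEmptyFilterStream { tail: token_stream })
    }
}

impl<'a> TokenStream for NonEmptyFilterStream<'a> {
    fn advance(&mut self) -> bool {
        // Skip over tokens that fail the predicate, as the filters above do.
        while self.tail.advance() {
            if !self.tail.token().text.is_empty() {
                return true;
            }
        }
        false
    }

    fn token(&self) -> &Token {
        self.tail.token()
    }

    fn token_mut(&mut self) -> &mut Token {
        self.tail.token_mut()
    }
}
```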
@@ -1,4 +1,4 @@
-use super::{Token, TokenFilter, TokenStream};
+use super::{BoxTokenStream, Token, TokenFilter, TokenStream};
 use std::mem;
 
 /// This class converts alphabetic, numeric, and symbolic Unicode characters
@@ -7,26 +7,21 @@ use std::mem;
 #[derive(Clone)]
 pub struct AsciiFoldingFilter;
 
-impl<TailTokenStream> TokenFilter<TailTokenStream> for AsciiFoldingFilter
-where
-    TailTokenStream: TokenStream,
-{
-    type ResultTokenStream = AsciiFoldingFilterTokenStream<TailTokenStream>;
-
-    fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream {
-        AsciiFoldingFilterTokenStream::wrap(token_stream)
+impl TokenFilter for AsciiFoldingFilter {
+    fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
+        From::from(AsciiFoldingFilterTokenStream {
+            tail: token_stream,
+            buffer: String::with_capacity(100),
+        })
     }
 }
 
-pub struct AsciiFoldingFilterTokenStream<TailTokenStream> {
+pub struct AsciiFoldingFilterTokenStream<'a> {
     buffer: String,
-    tail: TailTokenStream,
+    tail: BoxTokenStream<'a>,
 }
 
-impl<TailTokenStream> TokenStream for AsciiFoldingFilterTokenStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
+impl<'a> TokenStream for AsciiFoldingFilterTokenStream<'a> {
     fn advance(&mut self) -> bool {
         if !self.tail.advance() {
             return false;
@@ -48,18 +43,6 @@ where
     }
 }
 
-impl<TailTokenStream> AsciiFoldingFilterTokenStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    fn wrap(tail: TailTokenStream) -> AsciiFoldingFilterTokenStream<TailTokenStream> {
-        AsciiFoldingFilterTokenStream {
-            tail,
-            buffer: String::with_capacity(100),
-        }
-    }
-}
-
 // Returns a string that represents the ascii folded version of
 // the character. If the `char` does not require ascii folding
 // (e.g. simple ASCII chars like `A`) or if the `char`
@@ -1561,8 +1544,7 @@ mod tests {
     use crate::tokenizer::AsciiFoldingFilter;
     use crate::tokenizer::RawTokenizer;
     use crate::tokenizer::SimpleTokenizer;
-    use crate::tokenizer::TokenStream;
-    use crate::tokenizer::Tokenizer;
+    use crate::tokenizer::TextAnalyzer;
     use std::iter;
 
     #[test]
@@ -1579,7 +1561,7 @@ mod tests {
 
     fn folding_helper(text: &str) -> Vec<String> {
         let mut tokens = Vec::new();
-        SimpleTokenizer
+        TextAnalyzer::from(SimpleTokenizer)
             .filter(AsciiFoldingFilter)
             .token_stream(text)
             .process(&mut |token| {
@@ -1589,7 +1571,9 @@ mod tests {
     }
 
     fn folding_using_raw_tokenizer_helper(text: &str) -> String {
-        let mut token_stream = RawTokenizer.filter(AsciiFoldingFilter).token_stream(text);
+        let mut token_stream = TextAnalyzer::from(RawTokenizer)
+            .filter(AsciiFoldingFilter)
+            .token_stream(text);
        token_stream.advance();
        token_stream.token().text.clone()
     }
@@ -1,4 +1,4 @@
-use super::{Token, TokenStream, Tokenizer};
+use super::{BoxTokenStream, Token, TokenStream, Tokenizer};
 use crate::schema::FACET_SEP_BYTE;
 
 /// The `FacetTokenizer` process a `Facet` binary representation
@@ -25,15 +25,14 @@ pub struct FacetTokenStream<'a> {
     token: Token,
 }
 
-impl<'a> Tokenizer<'a> for FacetTokenizer {
-    type TokenStreamImpl = FacetTokenStream<'a>;
-
-    fn token_stream(&self, text: &'a str) -> Self::TokenStreamImpl {
+impl Tokenizer for FacetTokenizer {
+    fn token_stream<'a>(&self, text: &'a str) -> BoxTokenStream<'a> {
         FacetTokenStream {
             text,
             state: State::RootFacetNotEmitted, //< pos is the first char that has not been processed yet.
             token: Token::default(),
         }
+        .into()
     }
 }
 
@@ -84,7 +83,7 @@ mod tests {
 
     use super::FacetTokenizer;
     use crate::schema::Facet;
-    use crate::tokenizer::{Token, TokenStream, Tokenizer};
+    use crate::tokenizer::{Token, Tokenizer};
 
     #[test]
     fn test_facet_tokenizer() {
@@ -1,24 +1,23 @@
 use super::{Token, TokenFilter, TokenStream};
+use crate::tokenizer::BoxTokenStream;
 use std::mem;
 
+impl TokenFilter for LowerCaser {
+    fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
+        BoxTokenStream::from(LowerCaserTokenStream {
+            tail: token_stream,
+            buffer: String::with_capacity(100),
+        })
+    }
+}
+
 /// Token filter that lowercase terms.
 #[derive(Clone)]
 pub struct LowerCaser;
 
-impl<TailTokenStream> TokenFilter<TailTokenStream> for LowerCaser
-where
-    TailTokenStream: TokenStream,
-{
-    type ResultTokenStream = LowerCaserTokenStream<TailTokenStream>;
-
-    fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream {
-        LowerCaserTokenStream::wrap(token_stream)
-    }
-}
-
-pub struct LowerCaserTokenStream<TailTokenStream> {
+pub struct LowerCaserTokenStream<'a> {
     buffer: String,
-    tail: TailTokenStream,
+    tail: BoxTokenStream<'a>,
 }
 
 // writes a lowercased version of text into output.
@@ -31,18 +30,7 @@ fn to_lowercase_unicode(text: &mut String, output: &mut String) {
     }
 }
 
-impl<TailTokenStream> TokenStream for LowerCaserTokenStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    fn token(&self) -> &Token {
-        self.tail.token()
-    }
-
-    fn token_mut(&mut self) -> &mut Token {
-        self.tail.token_mut()
-    }
-
+impl<'a> TokenStream for LowerCaserTokenStream<'a> {
     fn advance(&mut self) -> bool {
         if !self.tail.advance() {
             return false;
@@ -56,26 +44,19 @@ where
         }
         true
     }
-}
 
-impl<TailTokenStream> LowerCaserTokenStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    fn wrap(tail: TailTokenStream) -> LowerCaserTokenStream<TailTokenStream> {
-        LowerCaserTokenStream {
-            tail,
-            buffer: String::with_capacity(100),
-        }
+    fn token(&self) -> &Token {
+        self.tail.token()
+    }
+
+    fn token_mut(&mut self) -> &mut Token {
+        self.tail.token_mut()
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::tokenizer::LowerCaser;
-    use crate::tokenizer::SimpleTokenizer;
-    use crate::tokenizer::TokenStream;
-    use crate::tokenizer::Tokenizer;
+    use crate::tokenizer::{LowerCaser, SimpleTokenizer, TextAnalyzer};
 
     #[test]
     fn test_to_lower_case() {
@@ -87,7 +68,9 @@ mod tests {
 
     fn lowercase_helper(text: &str) -> Vec<String> {
         let mut tokens = vec![];
-        let mut token_stream = SimpleTokenizer.filter(LowerCaser).token_stream(text);
+        let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
+            .filter(LowerCaser)
+            .token_stream(text);
         while token_stream.advance() {
             let token_text = token_stream.token().text.clone();
             tokens.push(token_text);
@@ -64,7 +64,7 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let en_stem = SimpleTokenizer
+//! let en_stem = TextAnalyzer::from(SimpleTokenizer)
 //!     .filter(RemoveLongFilter::limit(40))
 //!     .filter(LowerCaser)
 //!     .filter(Stemmer::new(Language::English));
@@ -109,7 +109,7 @@
 //! let index = Index::create_in_ram(schema);
 //!
 //! // We need to register our tokenizer :
-//! let custom_en_tokenizer = SimpleTokenizer
+//! let custom_en_tokenizer = TextAnalyzer::from(SimpleTokenizer)
 //!     .filter(RemoveLongFilter::limit(40))
 //!     .filter(LowerCaser);
 //! index
@@ -143,10 +143,11 @@ pub use self::simple_tokenizer::SimpleTokenizer;
 pub use self::stemmer::{Language, Stemmer};
 pub use self::stop_word_filter::StopWordFilter;
 pub(crate) use self::token_stream_chain::TokenStreamChain;
-pub use self::tokenizer::BoxedTokenizer;
 pub use self::tokenized_string::{PreTokenizedStream, PreTokenizedString};
-pub use self::tokenizer::{Token, TokenFilter, TokenStream, Tokenizer};
+pub use self::tokenizer::{
+    BoxTokenFilter, BoxTokenStream, TextAnalyzer, Token, TokenFilter, TokenStream, Tokenizer,
+};
 pub use self::tokenizer_manager::TokenizerManager;
 
@@ -160,9 +161,9 @@ pub const MAX_TOKEN_LEN: usize = u16::max_value() as usize - 4;
 #[cfg(test)]
 pub mod tests {
     use super::{
-        Language, LowerCaser, RemoveLongFilter, SimpleTokenizer, Stemmer, Token, Tokenizer,
-        TokenizerManager,
+        Language, LowerCaser, RemoveLongFilter, SimpleTokenizer, Stemmer, Token, TokenizerManager,
     };
+    use crate::tokenizer::TextAnalyzer;
 
     /// This is a function that can be used in tests and doc tests
     /// to assert a token's correctness.
@@ -229,7 +230,7 @@ pub mod tests {
         let tokenizer_manager = TokenizerManager::default();
         tokenizer_manager.register(
             "el_stem",
-            SimpleTokenizer
+            TextAnalyzer::from(SimpleTokenizer)
                 .filter(RemoveLongFilter::limit(40))
                 .filter(LowerCaser)
                 .filter(Stemmer::new(Language::Greek)),
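These module-level hunks are the user-facing side of the refactor: analyzer chains now start from `TextAnalyzer::from(...)`, and the old `BoxedTokenizer` export is gone. Registration follows the updated test above; a small self-contained sketch (the `"custom_en"` name is illustrative):

```rust
use tantivy::tokenizer::{
    LowerCaser, RemoveLongFilter, SimpleTokenizer, TextAnalyzer, TokenizerManager,
};

fn main() {
    let manager = TokenizerManager::default();
    // The chain now begins with `TextAnalyzer::from(...)`; the filters
    // themselves are registered exactly as before.
    manager.register(
        "custom_en",
        TextAnalyzer::from(SimpleTokenizer)
            .filter(RemoveLongFilter::limit(40))
            .filter(LowerCaser),
    );
}
```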
@@ -1,4 +1,5 @@
 use super::{Token, TokenStream, Tokenizer};
+use crate::tokenizer::BoxTokenStream;
 
 /// Tokenize the text by splitting words into n-grams of the given size(s)
 ///
@@ -129,11 +130,9 @@ pub struct NgramTokenStream<'a> {
     token: Token,
 }
 
-impl<'a> Tokenizer<'a> for NgramTokenizer {
-    type TokenStreamImpl = NgramTokenStream<'a>;
-
-    fn token_stream(&self, text: &'a str) -> Self::TokenStreamImpl {
-        NgramTokenStream {
+impl Tokenizer for NgramTokenizer {
+    fn token_stream<'a>(&self, text: &'a str) -> BoxTokenStream<'a> {
+        From::from(NgramTokenStream {
             ngram_charidx_iterator: StutteringIterator::new(
                 CodepointFrontiers::for_str(text),
                 self.min_gram,
@@ -142,7 +141,7 @@ impl Tokenizer for NgramTokenizer {
             prefix_only: self.prefix_only,
             text,
             token: Token::default(),
-        }
+        })
     }
 }
 
@@ -308,10 +307,10 @@ mod tests {
     use super::NgramTokenizer;
     use super::StutteringIterator;
     use crate::tokenizer::tests::assert_token;
-    use crate::tokenizer::tokenizer::{TokenStream, Tokenizer};
-    use crate::tokenizer::Token;
+    use crate::tokenizer::tokenizer::Tokenizer;
+    use crate::tokenizer::{BoxTokenStream, Token};
 
-    fn test_helper<T: TokenStream>(mut tokenizer: T) -> Vec<Token> {
+    fn test_helper(mut tokenizer: BoxTokenStream) -> Vec<Token> {
         let mut tokens: Vec<Token> = vec![];
         tokenizer.process(&mut |token: &Token| tokens.push(token.clone()));
         tokens
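The updated `test_helper` shows the practical consequence for callers: a generic `T: TokenStream` parameter becomes a concrete `BoxTokenStream`, and `process` drains it with a callback as before. A hedged standalone version of the same helper:

```rust
use tantivy::tokenizer::{BoxTokenStream, SimpleTokenizer, Token, Tokenizer};

// Mirrors test_helper above: every tokenizer now hands back a
// BoxTokenStream, so no type parameter is needed.
fn collect_tokens(mut stream: BoxTokenStream) -> Vec<Token> {
    let mut tokens: Vec<Token> = vec![];
    stream.process(&mut |token: &Token| tokens.push(token.clone()));
    tokens
}

fn main() {
    let tokens = collect_tokens(SimpleTokenizer.token_stream("hello ngram world"));
    assert_eq!(tokens.len(), 3);
}
```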
@@ -1,4 +1,5 @@
 use super::{Token, TokenStream, Tokenizer};
+use crate::tokenizer::BoxTokenStream;
 
 /// For each value of the field, emit a single unprocessed token.
 #[derive(Clone)]
@@ -9,10 +10,8 @@ pub struct RawTokenStream {
     has_token: bool,
 }
 
-impl<'a> Tokenizer<'a> for RawTokenizer {
-    type TokenStreamImpl = RawTokenStream;
-
-    fn token_stream(&self, text: &'a str) -> Self::TokenStreamImpl {
+impl Tokenizer for RawTokenizer {
+    fn token_stream<'a>(&self, text: &'a str) -> BoxTokenStream<'a> {
         let token = Token {
             offset_from: 0,
             offset_to: text.len(),
@@ -24,6 +23,7 @@ impl Tokenizer for RawTokenizer {
             token,
             has_token: true,
         }
+        .into()
     }
 }
 
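`RawTokenizer` illustrates the matching `Tokenizer` change: the `TokenStreamImpl` associated type is gone, `token_stream` returns a `BoxTokenStream`, and `.into()` boxes any concrete `TokenStream`. A hedged sketch of a custom tokenizer under the new signature (`WholeTextTokenizer` is hypothetical, modeled directly on the hunk above):

```rust
use tantivy::tokenizer::{BoxTokenStream, Token, TokenStream, Tokenizer};

/// Hypothetical single-token tokenizer, following the RawTokenizer pattern.
#[derive(Clone)]
pub struct WholeTextTokenizer;

pub struct WholeTextStream {
    token: Token,
    has_token: bool,
}

impl Tokenizer for WholeTextTokenizer {
    fn token_stream<'a>(&self, text: &'a str) -> BoxTokenStream<'a> {
        // Emit the whole input as one token covering [0, text.len()).
        let mut token = Token::default();
        token.offset_to = text.len();
        token.text = text.to_string();
        WholeTextStream {
            token,
            has_token: true,
        }
        .into()
    }
}

impl TokenStream for WholeTextStream {
    fn advance(&mut self) -> bool {
        // Yield the single token exactly once.
        let had_token = self.has_token;
        self.has_token = false;
        had_token
    }

    fn token(&self) -> &Token {
        &self.token
    }

    fn token_mut(&mut self) -> &mut Token {
        &mut self.token
    }
}
```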
@@ -2,7 +2,7 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let tokenizer = SimpleTokenizer
+//! let tokenizer = TextAnalyzer::from(SimpleTokenizer)
 //!   .filter(RemoveLongFilter::limit(5));
 //!
 //! let mut stream = tokenizer.token_stream("toolong nice");
@@ -13,6 +13,7 @@
 //! ```
 //!
 use super::{Token, TokenFilter, TokenStream};
+use crate::tokenizer::BoxTokenStream;
 
 /// `RemoveLongFilter` removes tokens that are longer
 /// than a given number of bytes (in UTF-8 representation).
@@ -31,56 +32,27 @@ impl RemoveLongFilter {
     }
 }
 
-impl<TailTokenStream> RemoveLongFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
+impl<'a> RemoveLongFilterStream<'a> {
     fn predicate(&self, token: &Token) -> bool {
         token.text.len() < self.token_length_limit
     }
+}
 
-    fn wrap(
-        token_length_limit: usize,
-        tail: TailTokenStream,
-    ) -> RemoveLongFilterStream<TailTokenStream> {
-        RemoveLongFilterStream {
-            token_length_limit,
-            tail,
-        }
-    }
-}
-
-impl<TailTokenStream> TokenFilter<TailTokenStream> for RemoveLongFilter
-where
-    TailTokenStream: TokenStream,
-{
-    type ResultTokenStream = RemoveLongFilterStream<TailTokenStream>;
-
-    fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream {
-        RemoveLongFilterStream::wrap(self.length_limit, token_stream)
+impl TokenFilter for RemoveLongFilter {
+    fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
+        BoxTokenStream::from(RemoveLongFilterStream {
+            token_length_limit: self.length_limit,
+            tail: token_stream,
+        })
     }
 }
 
-pub struct RemoveLongFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
+pub struct RemoveLongFilterStream<'a> {
     token_length_limit: usize,
-    tail: TailTokenStream,
+    tail: BoxTokenStream<'a>,
 }
 
-impl<TailTokenStream> TokenStream for RemoveLongFilterStream<TailTokenStream>
-where
-    TailTokenStream: TokenStream,
-{
-    fn token(&self) -> &Token {
-        self.tail.token()
-    }
-
-    fn token_mut(&mut self) -> &mut Token {
-        self.tail.token_mut()
-    }
-
+impl<'a> TokenStream for RemoveLongFilterStream<'a> {
     fn advance(&mut self) -> bool {
         while self.tail.advance() {
             if self.predicate(self.tail.token()) {
@@ -89,4 +61,12 @@ where
         }
         false
     }
+
+    fn token(&self) -> &Token {
+        self.tail.token()
+    }
+
+    fn token_mut(&mut self) -> &mut Token {
+        self.tail.token_mut()
+    }
 }
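All the filter hunks in this diff share the same calling convention, so consuming a chain is unchanged apart from the `TextAnalyzer::from` entry point. An end-to-end sketch using the exact example from the updated `RemoveLongFilter` doc comment:

```rust
use tantivy::tokenizer::{RemoveLongFilter, SimpleTokenizer, TextAnalyzer};

fn main() {
    let analyzer = TextAnalyzer::from(SimpleTokenizer).filter(RemoveLongFilter::limit(5));
    let mut stream = analyzer.token_stream("toolong nice");
    while stream.advance() {
        // Prints "nice"; "toolong" exceeds the 5-byte limit.
        println!("{}", stream.token().text);
    }
}
```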
Some files were not shown because too many files have changed in this diff.