Compare commits

...

7 Commits

Author SHA1 Message Date
Paul Masurel
2a7bde6644 Experimental do not merge 2024-04-19 18:25:23 +09:00
Paul Masurel
659567b4f3 Owned value relying on Vec instead of BTreeMap 2024-04-19 16:29:32 +09:00
PSeitz
0e9fced336 remove JsonTermWriter (#2238)
* remove JsonTermWriter

remove JsonTermWriter
remove path truncation logic, add assertion

* fix json_path_writer add sep logic
2024-04-18 16:28:05 +02:00
PSeitz
b257b960b3 validate sort by field type (#2336)
* validate sort by field type

* Update src/index/index.rs

Co-authored-by: Adam Reichold <adamreichold@users.noreply.github.com>

---------

Co-authored-by: Adam Reichold <adamreichold@users.noreply.github.com>
2024-04-16 04:42:24 +02:00
Adam Reichold
4708171a32 Fix some of the things current Clippy complains about (#2363) 2024-04-16 04:27:06 +02:00
Adam Reichold
b493743f8d Fix trait bound of StoreReader::iter (#2360)
* Fix trait bound of StoreReader::iter

Similar to `StoreReader::get`, `StoreReader::iter` should only require
`DocumentDeserialize` and not `Document`.

* Mark the iterator returned by SegmentReader::doc_ids_alive as Send so it can be used in impls of Stream/AsyncIterator.
2024-04-15 15:50:02 +02:00
trinity-1686a
d2955a3fd2 extend field grouping (#2333)
* extend field grouping
2024-04-15 10:36:32 +02:00
27 changed files with 408 additions and 524 deletions

View File

@@ -1,5 +1,5 @@
use std::io::Write;
use std::{fmt, io, u64};
use std::{fmt, io};
use ownedbytes::OwnedBytes;

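A side note on the hunk above: std::u64 is one of the legacy numeric modules, deprecated in favor of the primitives' associated constants, so the import can simply be dropped. A minimal illustration:

fn main() {
    // Once reached through the now-deprecated `std::u64` module:
    // let max = std::u64::MAX;
    // The associated constant on the primitive needs no import at all:
    assert_eq!(u64::MAX, 18_446_744_073_709_551_615);
}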
View File

@@ -5,6 +5,12 @@ pub const JSON_PATH_SEGMENT_SEP: u8 = 1u8;
pub const JSON_PATH_SEGMENT_SEP_STR: &str =
unsafe { std::str::from_utf8_unchecked(&[JSON_PATH_SEGMENT_SEP]) };
/// Separates the json path and the value in
/// a JSON term binary representation.
pub const JSON_END_OF_PATH: u8 = 0u8;
pub const JSON_END_OF_PATH_STR: &str =
unsafe { std::str::from_utf8_unchecked(&[JSON_END_OF_PATH]) };
/// `JsonPathWriter` builds flattened JSON paths for tantivy.
#[derive(Clone, Debug, Default)]
pub struct JsonPathWriter {
@@ -14,6 +20,14 @@ pub struct JsonPathWriter {
}
impl JsonPathWriter {
pub fn with_expand_dots(expand_dots: bool) -> Self {
JsonPathWriter {
path: String::new(),
indices: Vec::new(),
expand_dots,
}
}
pub fn new() -> Self {
JsonPathWriter {
path: String::new(),
@@ -39,8 +53,8 @@ impl JsonPathWriter {
pub fn push(&mut self, segment: &str) {
let len_path = self.path.len();
self.indices.push(len_path);
if !self.path.is_empty() {
self.path.push_str(JSON_PATH_SEGMENT_SEP_STR);
if self.indices.len() > 1 {
self.path.push(JSON_PATH_SEGMENT_SEP as char);
}
self.path.push_str(segment);
if self.expand_dots {
@@ -55,6 +69,12 @@ impl JsonPathWriter {
}
}
/// Set the end of JSON path marker.
#[inline]
pub fn set_end(&mut self) {
self.path.push_str(JSON_END_OF_PATH_STR);
}
/// Remove the last segment. Does nothing if the path is empty.
#[inline]
pub fn pop(&mut self) {
@@ -91,6 +111,7 @@ mod tests {
#[test]
fn json_path_writer_test() {
let mut writer = JsonPathWriter::new();
writer.set_expand_dots(false);
writer.push("root");
assert_eq!(writer.as_str(), "root");
@@ -109,4 +130,15 @@ mod tests {
writer.push("k8s.node.id");
assert_eq!(writer.as_str(), "root\u{1}k8s\u{1}node\u{1}id");
}
#[test]
fn test_json_path_expand_dots_enabled_pop_segment() {
let mut json_writer = JsonPathWriter::with_expand_dots(true);
json_writer.push("hello");
assert_eq!(json_writer.as_str(), "hello");
json_writer.push("color.hue");
assert_eq!(json_writer.as_str(), "hello\x01color\x01hue");
json_writer.pop();
assert_eq!(json_writer.as_str(), "hello");
}
}

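Taken together, the new pieces above (with_expand_dots, set_end, and the reworked push) can be exercised as in the following minimal sketch, which assumes tantivy's workspace-internal common crate; the expected strings follow directly from the tests in this hunk:

use common::JsonPathWriter; // tantivy's workspace-internal `common` crate

fn main() {
    let mut writer = JsonPathWriter::with_expand_dots(true);
    writer.push("attributes");
    writer.push("color.hue"); // expand_dots rewrites '.' into the \u{1} segment separator
    assert_eq!(writer.as_str(), "attributes\u{1}color\u{1}hue");
    writer.set_end(); // appends the \u{0} end-of-path marker
    assert_eq!(writer.as_str(), "attributes\u{1}color\u{1}hue\u{0}");
}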
View File

@@ -9,7 +9,7 @@ mod byte_count;
mod datetime;
pub mod file_slice;
mod group_by;
mod json_path_writer;
pub mod json_path_writer;
mod serialize;
mod vint;
mod writer;

View File

@@ -218,27 +218,14 @@ fn term_or_phrase_infallible(inp: &str) -> JResult<&str, Option<UserInputLeaf>>
}
fn term_group(inp: &str) -> IResult<&str, UserInputAst> {
let occur_symbol = alt((
value(Occur::MustNot, char('-')),
value(Occur::Must, char('+')),
));
map(
tuple((
terminated(field_name, multispace0),
delimited(
tuple((char('('), multispace0)),
separated_list0(multispace1, tuple((opt(occur_symbol), term_or_phrase))),
char(')'),
),
delimited(tuple((char('('), multispace0)), ast, char(')')),
)),
|(field_name, terms)| {
UserInputAst::Clause(
terms
.into_iter()
.map(|(occur, leaf)| (occur, leaf.set_field(Some(field_name.clone())).into()))
.collect(),
)
|(field_name, mut ast)| {
ast.set_default_field(field_name);
ast
},
)(inp)
}
@@ -258,46 +245,18 @@ fn term_group_precond(inp: &str) -> IResult<&str, (), ()> {
}
fn term_group_infallible(inp: &str) -> JResult<&str, UserInputAst> {
let (mut inp, (field_name, _, _, _)) =
let (inp, (field_name, _, _, _)) =
tuple((field_name, multispace0, char('('), multispace0))(inp).expect("precondition failed");
let mut terms = Vec::new();
let mut errs = Vec::new();
let mut first_round = true;
loop {
let mut space_error = if first_round {
first_round = false;
Vec::new()
} else {
let (rest, (_, err)) = space1_infallible(inp)?;
inp = rest;
err
};
if inp.is_empty() {
errs.push(LenientErrorInternal {
pos: inp.len(),
message: "missing )".to_string(),
});
break Ok((inp, (UserInputAst::Clause(terms), errs)));
}
if let Some(inp) = inp.strip_prefix(')') {
break Ok((inp, (UserInputAst::Clause(terms), errs)));
}
// only append missing space error if we did not reach the end of group
errs.append(&mut space_error);
// here we do the assumption term_or_phrase_infallible always consume something if the
// first byte is not `)` or ' '. If it did not, we would end up looping.
let (rest, ((occur, leaf), mut err)) =
tuple_infallible((occur_symbol, term_or_phrase_infallible))(inp)?;
errs.append(&mut err);
if let Some(leaf) = leaf {
terms.push((occur, leaf.set_field(Some(field_name.clone())).into()));
}
inp = rest;
}
let res = delimited_infallible(
nothing,
map(ast_infallible, |(mut ast, errors)| {
ast.set_default_field(field_name.to_string());
(ast, errors)
}),
opt_i_err(char(')'), "expected ')'"),
)(inp);
res
}
fn exists(inp: &str) -> IResult<&str, UserInputLeaf> {
@@ -1468,8 +1427,18 @@ mod test {
#[test]
fn test_parse_query_term_group() {
test_parse_query_to_ast_helper(r#"field:(abc)"#, r#"(*"field":abc)"#);
test_parse_query_to_ast_helper(r#"field:(abc)"#, r#""field":abc"#);
test_parse_query_to_ast_helper(r#"field:(+a -"b c")"#, r#"(+"field":a -"field":"b c")"#);
test_parse_query_to_ast_helper(r#"field:(a AND "b c")"#, r#"(+"field":a +"field":"b c")"#);
test_parse_query_to_ast_helper(r#"field:(a OR "b c")"#, r#"(?"field":a ?"field":"b c")"#);
test_parse_query_to_ast_helper(
r#"field:(a OR (b AND c))"#,
r#"(?"field":a ?(+"field":b +"field":c))"#,
);
test_parse_query_to_ast_helper(
r#"field:(a [b TO c])"#,
r#"(*"field":a *"field":["b" TO "c"])"#,
);
test_is_parse_err(r#"field:(+a -"b c""#, r#"(+"field":a -"field":"b c")"#);
}

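As the updated tests show, field:(...) now accepts a full boolean sub-expression instead of a flat term list. A minimal end-to-end sketch against the public QueryParser (the schema and field name are illustrative):

use tantivy::query::QueryParser;
use tantivy::schema::{Schema, TEXT};
use tantivy::Index;

fn main() {
    let mut schema_builder = Schema::builder();
    schema_builder.add_text_field("field", TEXT);
    let index = Index::create_in_ram(schema_builder.build());
    let parser = QueryParser::for_index(&index, Vec::new());
    // Nested boolean expressions now parse inside a field group, with
    // "field" distributed over every leaf that lacks an explicit field.
    let _query = parser.parse_query(r#"field:(a OR (b AND c))"#).unwrap();
}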
View File

@@ -44,6 +44,26 @@ impl UserInputLeaf {
},
}
}
pub(crate) fn set_default_field(&mut self, default_field: String) {
match self {
UserInputLeaf::Literal(ref mut literal) if literal.field_name.is_none() => {
literal.field_name = Some(default_field)
}
UserInputLeaf::All => {
*self = UserInputLeaf::Exists {
field: default_field,
}
}
UserInputLeaf::Range { ref mut field, .. } if field.is_none() => {
*field = Some(default_field)
}
UserInputLeaf::Set { ref mut field, .. } if field.is_none() => {
*field = Some(default_field)
}
_ => (), // field was already set, do nothing
}
}
}
impl Debug for UserInputLeaf {
@@ -205,6 +225,16 @@ impl UserInputAst {
pub fn or(asts: Vec<UserInputAst>) -> UserInputAst {
UserInputAst::compose(Occur::Should, asts)
}
pub(crate) fn set_default_field(&mut self, field: String) {
match self {
UserInputAst::Clause(clauses) => clauses
.iter_mut()
.for_each(|(_, ast)| ast.set_default_field(field.clone())),
UserInputAst::Leaf(leaf) => leaf.set_default_field(field),
UserInputAst::Boost(ref mut ast, _) => ast.set_default_field(field),
}
}
}
impl From<UserInputLiteral> for UserInputLeaf {

View File

@@ -28,6 +28,7 @@ mod term_agg;
mod term_missing_agg;
use std::collections::HashMap;
use std::fmt;
pub use histogram::*;
pub use range::*;
@@ -72,12 +73,12 @@ impl From<&str> for OrderTarget {
}
}
impl ToString for OrderTarget {
fn to_string(&self) -> String {
impl fmt::Display for OrderTarget {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
OrderTarget::Key => "_key".to_string(),
OrderTarget::Count => "_count".to_string(),
OrderTarget::SubAggregation(agg) => agg.to_string(),
OrderTarget::Key => f.write_str("_key"),
OrderTarget::Count => f.write_str("_count"),
OrderTarget::SubAggregation(agg) => agg.fmt(f),
}
}
}

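Replacing the manual ToString impl with fmt::Display (done again for Version further down) follows standard-library guidance: the blanket impl<T: fmt::Display> ToString for T keeps .to_string() callers working while writing into a formatter instead of allocating intermediate strings. A standalone sketch:

use std::fmt;

struct OrderTargetLike;

impl fmt::Display for OrderTargetLike {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("_key")
    }
}

fn main() {
    // `.to_string()` comes for free via the blanket ToString impl.
    assert_eq!(OrderTargetLike.to_string(), "_key");
}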
View File

@@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::net::Ipv6Addr;
use columnar::{ColumnarReader, DynamicColumn};
use common::json_path_writer::JSON_PATH_SEGMENT_SEP_STR;
use common::DateTime;
use regex::Regex;
use serde::ser::SerializeMap;
@@ -15,7 +16,6 @@ use crate::aggregation::intermediate_agg_result::{
use crate::aggregation::segment_agg_result::SegmentAggregationCollector;
use crate::aggregation::AggregationError;
use crate::collector::TopNComputer;
use crate::schema::term::JSON_PATH_SEGMENT_SEP_STR;
use crate::schema::OwnedValue;
use crate::{DocAddress, DocId, SegmentOrdinal};

View File

@@ -159,10 +159,6 @@ use itertools::Itertools;
use serde::de::{self, Visitor};
use serde::{Deserialize, Deserializer, Serialize};
pub(crate) fn invalid_agg_request(message: String) -> crate::TantivyError {
crate::TantivyError::AggregationError(AggregationError::InvalidRequest(message))
}
fn parse_str_into_f64<E: de::Error>(value: &str) -> Result<f64, E> {
let parsed = value.parse::<f64>().map_err(|_err| {
de::Error::custom(format!("Failed to parse f64 from string: {:?}", value))

View File

@@ -1,7 +1,7 @@
use std::cmp::Ordering;
use std::collections::{btree_map, BTreeMap, BTreeSet, BinaryHeap};
use std::io;
use std::ops::Bound;
use std::{io, u64, usize};
use crate::collector::{Collector, SegmentCollector};
use crate::fastfield::FacetReader;

View File

@@ -1,12 +1,10 @@
use columnar::MonotonicallyMappableToU64;
use common::json_path_writer::JSON_PATH_SEGMENT_SEP;
use common::{replace_in_place, JsonPathWriter};
use rustc_hash::FxHashMap;
use crate::fastfield::FastValue;
use crate::postings::{IndexingContext, IndexingPosition, PostingsWriter};
use crate::schema::document::{ReferenceValue, ReferenceValueLeaf, Value};
use crate::schema::term::JSON_PATH_SEGMENT_SEP;
use crate::schema::{Field, Type, DATE_TIME_PRECISION_INDEXED};
use crate::schema::{Field, Type};
use crate::time::format_description::well_known::Rfc3339;
use crate::time::{OffsetDateTime, UtcOffset};
use crate::tokenizer::TextAnalyzer;
@@ -256,71 +254,45 @@ fn index_json_value<'a, V: Value<'a>>(
}
}
// Tries to infer a JSON type from a string.
pub fn convert_to_fast_value_and_get_term(
json_term_writer: &mut JsonTermWriter,
/// Tries to infer a JSON type from a string and append it to the term.
///
/// The term must be json + JSON path.
pub(crate) fn convert_to_fast_value_and_append_to_json_term(
mut term: Term,
phrase: &str,
) -> Option<Term> {
assert_eq!(
term.value()
.as_json_value_bytes()
.expect("expecting a Term with a json type and json path")
.as_serialized()
.len(),
0,
"JSON value bytes should be empty"
);
if let Ok(dt) = OffsetDateTime::parse(phrase, &Rfc3339) {
let dt_utc = dt.to_offset(UtcOffset::UTC);
return Some(set_fastvalue_and_get_term(
json_term_writer,
DateTime::from_utc(dt_utc),
));
term.append_type_and_fast_value(DateTime::from_utc(dt_utc));
return Some(term);
}
if let Ok(i64_val) = str::parse::<i64>(phrase) {
return Some(set_fastvalue_and_get_term(json_term_writer, i64_val));
term.append_type_and_fast_value(i64_val);
return Some(term);
}
if let Ok(u64_val) = str::parse::<u64>(phrase) {
return Some(set_fastvalue_and_get_term(json_term_writer, u64_val));
term.append_type_and_fast_value(u64_val);
return Some(term);
}
if let Ok(f64_val) = str::parse::<f64>(phrase) {
return Some(set_fastvalue_and_get_term(json_term_writer, f64_val));
term.append_type_and_fast_value(f64_val);
return Some(term);
}
if let Ok(bool_val) = str::parse::<bool>(phrase) {
return Some(set_fastvalue_and_get_term(json_term_writer, bool_val));
term.append_type_and_fast_value(bool_val);
return Some(term);
}
None
}
// helper function to generate a Term from a json fastvalue
pub(crate) fn set_fastvalue_and_get_term<T: FastValue>(
json_term_writer: &mut JsonTermWriter,
value: T,
) -> Term {
json_term_writer.set_fast_value(value);
json_term_writer.term().clone()
}
// helper function to generate a list of terms with their positions from a textual json value
pub(crate) fn set_string_and_get_terms(
json_term_writer: &mut JsonTermWriter,
value: &str,
text_analyzer: &mut TextAnalyzer,
) -> Vec<(usize, Term)> {
let mut positions_and_terms = Vec::<(usize, Term)>::new();
json_term_writer.close_path_and_set_type(Type::Str);
let term_num_bytes = json_term_writer.term_buffer.len_bytes();
let mut token_stream = text_analyzer.token_stream(value);
token_stream.process(&mut |token| {
json_term_writer
.term_buffer
.truncate_value_bytes(term_num_bytes);
json_term_writer
.term_buffer
.append_bytes(token.text.as_bytes());
positions_and_terms.push((token.position, json_term_writer.term().clone()));
});
positions_and_terms
}
/// Writes a value of a JSON field to a `Term`.
/// The Term format is as follows:
/// `[JSON_TYPE][JSON_PATH][JSON_END_OF_PATH][VALUE_BYTES]`
pub struct JsonTermWriter<'a> {
term_buffer: &'a mut Term,
path_stack: Vec<usize>,
expand_dots_enabled: bool,
}
/// Splits a json path supplied to the query parser in such a way that
/// `.` can be escaped.
@@ -377,158 +349,68 @@ pub(crate) fn encode_column_name(
path.into()
}
impl<'a> JsonTermWriter<'a> {
pub fn from_field_and_json_path(
field: Field,
json_path: &str,
expand_dots_enabled: bool,
term_buffer: &'a mut Term,
) -> Self {
term_buffer.set_field_and_type(field, Type::Json);
let mut json_term_writer = Self::wrap(term_buffer, expand_dots_enabled);
for segment in split_json_path(json_path) {
json_term_writer.push_path_segment(&segment);
}
json_term_writer
pub fn term_from_json_paths<'a>(
json_field: Field,
paths: impl Iterator<Item = &'a str>,
expand_dots_enabled: bool,
) -> Term {
let mut json_path = JsonPathWriter::with_expand_dots(expand_dots_enabled);
for path in paths {
json_path.push(path);
}
json_path.set_end();
let mut term = Term::with_type_and_field(Type::Json, json_field);
pub fn wrap(term_buffer: &'a mut Term, expand_dots_enabled: bool) -> Self {
term_buffer.clear_with_type(Type::Json);
let mut path_stack = Vec::with_capacity(10);
path_stack.push(0);
Self {
term_buffer,
path_stack,
expand_dots_enabled,
}
}
fn trim_to_end_of_path(&mut self) {
let end_of_path = *self.path_stack.last().unwrap();
self.term_buffer.truncate_value_bytes(end_of_path);
}
pub fn close_path_and_set_type(&mut self, typ: Type) {
self.trim_to_end_of_path();
self.term_buffer.set_json_path_end();
self.term_buffer.append_bytes(&[typ.to_code()]);
}
// TODO: Remove this function and use JsonPathWriter instead.
pub fn push_path_segment(&mut self, segment: &str) {
// the path stack should never be empty.
self.trim_to_end_of_path();
if self.path_stack.len() > 1 {
self.term_buffer.set_json_path_separator();
}
let appended_segment = self.term_buffer.append_bytes(segment.as_bytes());
if self.expand_dots_enabled {
// We need to replace `.` by JSON_PATH_SEGMENT_SEP.
replace_in_place(b'.', JSON_PATH_SEGMENT_SEP, appended_segment);
}
self.term_buffer.add_json_path_separator();
self.path_stack.push(self.term_buffer.len_bytes());
}
pub fn pop_path_segment(&mut self) {
self.path_stack.pop();
assert!(!self.path_stack.is_empty());
self.trim_to_end_of_path();
}
/// Returns the json path of the term being currently built.
#[cfg(test)]
pub(crate) fn path(&self) -> &[u8] {
let end_of_path = self.path_stack.last().cloned().unwrap_or(1);
&self.term().serialized_value_bytes()[..end_of_path - 1]
}
pub(crate) fn set_fast_value<T: FastValue>(&mut self, val: T) {
self.close_path_and_set_type(T::to_type());
let value = if T::to_type() == Type::Date {
DateTime::from_u64(val.to_u64())
.truncate(DATE_TIME_PRECISION_INDEXED)
.to_u64()
} else {
val.to_u64()
};
self.term_buffer
.append_bytes(value.to_be_bytes().as_slice());
}
pub fn set_str(&mut self, text: &str) {
self.close_path_and_set_type(Type::Str);
self.term_buffer.append_bytes(text.as_bytes());
}
pub fn term(&self) -> &Term {
self.term_buffer
}
term.append_bytes(json_path.as_str().as_bytes());
term
}
#[cfg(test)]
mod tests {
use super::{split_json_path, JsonTermWriter};
use crate::schema::{Field, Type};
use crate::Term;
use super::split_json_path;
use crate::json_utils::term_from_json_paths;
use crate::schema::Field;
#[test]
fn test_json_writer() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("attributes");
json_writer.push_path_segment("color");
json_writer.set_str("red");
let mut term = term_from_json_paths(field, ["attributes", "color"].into_iter(), false);
term.append_type_and_str("red");
assert_eq!(
format!("{:?}", json_writer.term()),
format!("{:?}", term),
"Term(field=1, type=Json, path=attributes.color, type=Str, \"red\")"
);
json_writer.set_str("blue");
assert_eq!(
format!("{:?}", json_writer.term()),
"Term(field=1, type=Json, path=attributes.color, type=Str, \"blue\")"
let mut term = term_from_json_paths(
field,
["attributes", "dimensions", "width"].into_iter(),
false,
);
json_writer.pop_path_segment();
json_writer.push_path_segment("dimensions");
json_writer.push_path_segment("width");
json_writer.set_fast_value(400i64);
term.append_type_and_fast_value(400i64);
assert_eq!(
format!("{:?}", json_writer.term()),
format!("{:?}", term),
"Term(field=1, type=Json, path=attributes.dimensions.width, type=I64, 400)"
);
json_writer.pop_path_segment();
json_writer.push_path_segment("height");
json_writer.set_fast_value(300i64);
assert_eq!(
format!("{:?}", json_writer.term()),
"Term(field=1, type=Json, path=attributes.dimensions.height, type=I64, 300)"
);
}
#[test]
fn test_string_term() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.set_str("red");
assert_eq!(
json_writer.term().serialized_term(),
b"\x00\x00\x00\x01jcolor\x00sred"
)
let mut term = term_from_json_paths(field, ["color"].into_iter(), false);
term.append_type_and_str("red");
assert_eq!(term.serialized_term(), b"\x00\x00\x00\x01jcolor\x00sred")
}
#[test]
fn test_i64_term() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.set_fast_value(-4i64);
let mut term = term_from_json_paths(field, ["color"].into_iter(), false);
term.append_type_and_fast_value(-4i64);
assert_eq!(
json_writer.term().serialized_term(),
term.serialized_term(),
b"\x00\x00\x00\x01jcolor\x00i\x7f\xff\xff\xff\xff\xff\xff\xfc"
)
}
@@ -536,12 +418,11 @@ mod tests {
#[test]
fn test_u64_term() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.set_fast_value(4u64);
let mut term = term_from_json_paths(field, ["color"].into_iter(), false);
term.append_type_and_fast_value(4u64);
assert_eq!(
json_writer.term().serialized_term(),
term.serialized_term(),
b"\x00\x00\x00\x01jcolor\x00u\x00\x00\x00\x00\x00\x00\x00\x04"
)
}
@@ -549,12 +430,10 @@ mod tests {
#[test]
fn test_f64_term() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.set_fast_value(4.0f64);
let mut term = term_from_json_paths(field, ["color"].into_iter(), false);
term.append_type_and_fast_value(4.0f64);
assert_eq!(
json_writer.term().serialized_term(),
term.serialized_term(),
b"\x00\x00\x00\x01jcolor\x00f\xc0\x10\x00\x00\x00\x00\x00\x00"
)
}
@@ -562,90 +441,14 @@ mod tests {
#[test]
fn test_bool_term() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.set_fast_value(true);
let mut term = term_from_json_paths(field, ["color"].into_iter(), false);
term.append_type_and_fast_value(true);
assert_eq!(
json_writer.term().serialized_term(),
term.serialized_term(),
b"\x00\x00\x00\x01jcolor\x00o\x00\x00\x00\x00\x00\x00\x00\x01"
)
}
#[test]
fn test_push_after_set_path_segment() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("attribute");
json_writer.set_str("something");
json_writer.push_path_segment("color");
json_writer.set_str("red");
assert_eq!(
json_writer.term().serialized_term(),
b"\x00\x00\x00\x01jattribute\x01color\x00sred"
)
}
#[test]
fn test_pop_segment() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
json_writer.push_path_segment("hue");
json_writer.pop_path_segment();
json_writer.set_str("red");
assert_eq!(
json_writer.term().serialized_term(),
b"\x00\x00\x00\x01jcolor\x00sred"
)
}
#[test]
fn test_json_writer_path() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color");
assert_eq!(json_writer.path(), b"color");
json_writer.push_path_segment("hue");
assert_eq!(json_writer.path(), b"color\x01hue");
json_writer.set_str("pink");
assert_eq!(json_writer.path(), b"color\x01hue");
}
#[test]
fn test_json_path_expand_dots_disabled() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, false);
json_writer.push_path_segment("color.hue");
assert_eq!(json_writer.path(), b"color.hue");
}
#[test]
fn test_json_path_expand_dots_enabled() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, true);
json_writer.push_path_segment("color.hue");
assert_eq!(json_writer.path(), b"color\x01hue");
}
#[test]
fn test_json_path_expand_dots_enabled_pop_segment() {
let field = Field::from_field_id(1);
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_writer = JsonTermWriter::wrap(&mut term, true);
json_writer.push_path_segment("hello");
assert_eq!(json_writer.path(), b"hello");
json_writer.push_path_segment("color.hue");
assert_eq!(json_writer.path(), b"hello\x01color\x01hue");
json_writer.pop_path_segment();
assert_eq!(json_writer.path(), b"hello");
}
#[test]
fn test_split_json_path_simple() {
let json_path = split_json_path("titi.toto");

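The inference order in convert_to_fast_value_and_append_to_json_term above matters: RFC 3339 datetimes are tried first, then i64 before u64 (so a value that fits both lands in i64), then f64, then bool. A dependency-free restatement with hypothetical types, omitting the datetime branch:

// Hypothetical stand-in for the typed fast values appended to the term.
enum Inferred { I64(i64), U64(u64), F64(f64), Bool(bool) }

fn infer(phrase: &str) -> Option<Inferred> {
    // The real function first tries OffsetDateTime::parse(phrase, &Rfc3339).
    if let Ok(v) = phrase.parse::<i64>() { return Some(Inferred::I64(v)); }
    if let Ok(v) = phrase.parse::<u64>() { return Some(Inferred::U64(v)); }
    if let Ok(v) = phrase.parse::<f64>() { return Some(Inferred::F64(v)); }
    if let Ok(v) = phrase.parse::<bool>() { return Some(Inferred::Bool(v)); }
    None
}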
View File

@@ -1,9 +1,9 @@
use crate::collector::Count;
use crate::directory::{RamDirectory, WatchCallback};
use crate::indexer::{LogMergePolicy, NoMergePolicy};
use crate::json_utils::JsonTermWriter;
use crate::json_utils::term_from_json_paths;
use crate::query::TermQuery;
use crate::schema::{Field, IndexRecordOption, Schema, Type, INDEXED, STRING, TEXT};
use crate::schema::{Field, IndexRecordOption, Schema, INDEXED, STRING, TEXT};
use crate::tokenizer::TokenizerManager;
use crate::{
Directory, DocSet, Index, IndexBuilder, IndexReader, IndexSettings, IndexWriter, Postings,
@@ -416,16 +416,12 @@ fn test_non_text_json_term_freq() {
let searcher = reader.searcher();
let segment_reader = searcher.segment_reader(0u32);
let inv_idx = segment_reader.inverted_index(field).unwrap();
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
json_term_writer.push_path_segment("tenant_id");
json_term_writer.close_path_and_set_type(Type::U64);
json_term_writer.set_fast_value(75u64);
let mut term = term_from_json_paths(field, ["tenant_id"].iter().cloned(), false);
term.append_type_and_fast_value(75u64);
let postings = inv_idx
.read_postings(
json_term_writer.term(),
IndexRecordOption::WithFreqsAndPositions,
)
.read_postings(&term, IndexRecordOption::WithFreqsAndPositions)
.unwrap()
.unwrap();
assert_eq!(postings.doc(), 0);
@@ -454,16 +450,12 @@ fn test_non_text_json_term_freq_bitpacked() {
let searcher = reader.searcher();
let segment_reader = searcher.segment_reader(0u32);
let inv_idx = segment_reader.inverted_index(field).unwrap();
let mut term = Term::with_type_and_field(Type::Json, field);
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
json_term_writer.push_path_segment("tenant_id");
json_term_writer.close_path_and_set_type(Type::U64);
json_term_writer.set_fast_value(75u64);
let mut term = term_from_json_paths(field, ["tenant_id"].iter().cloned(), false);
term.append_type_and_fast_value(75u64);
let mut postings = inv_idx
.read_postings(
json_term_writer.term(),
IndexRecordOption::WithFreqsAndPositions,
)
.read_postings(&term, IndexRecordOption::WithFreqsAndPositions)
.unwrap()
.unwrap();
assert_eq!(postings.doc(), 0);

View File

@@ -1,3 +1,5 @@
#![allow(deprecated)] // Remove with index sorting
use std::collections::HashSet;
use rand::{thread_rng, Rng};

View File

@@ -20,7 +20,7 @@ use crate::indexer::segment_updater::save_metas;
use crate::indexer::{IndexWriter, SingleSegmentIndexWriter};
use crate::reader::{IndexReader, IndexReaderBuilder};
use crate::schema::document::Document;
use crate::schema::{Field, FieldType, Schema};
use crate::schema::{Field, FieldType, Schema, Type};
use crate::tokenizer::{TextAnalyzer, TokenizerManager};
use crate::SegmentReader;
@@ -248,6 +248,15 @@ impl IndexBuilder {
sort_by_field.field
)));
}
let supported_field_types = [Type::I64, Type::U64, Type::F64, Type::Date];
let field_type = entry.field_type().value_type();
if !supported_field_types.contains(&field_type) {
return Err(TantivyError::InvalidArgument(format!(
"Unsupported field type in sort_by_field: {:?}. Supported field types: \
{:?} ",
field_type, supported_field_types,
)));
}
}
Ok(())
} else {

View File

@@ -1,12 +1,13 @@
use std::io;
use common::json_path_writer::JSON_END_OF_PATH;
use common::BinarySerializable;
use fnv::FnvHashSet;
use crate::directory::FileSlice;
use crate::positions::PositionReader;
use crate::postings::{BlockSegmentPostings, SegmentPostings, TermInfo};
use crate::schema::{IndexRecordOption, Term, Type, JSON_END_OF_PATH};
use crate::schema::{IndexRecordOption, Term, Type};
use crate::termdict::TermDictionary;
/// The inverted index reader is in charge of accessing

View File

@@ -406,7 +406,7 @@ impl SegmentReader {
}
/// Returns an iterator that will iterate over the alive document ids
pub fn doc_ids_alive(&self) -> Box<dyn Iterator<Item = DocId> + '_> {
pub fn doc_ids_alive(&self) -> Box<dyn Iterator<Item = DocId> + Send + '_> {
if let Some(alive_bitset) = &self.alive_bitset_opt {
Box::new(alive_bitset.iter_alive())
} else {

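The added Send bound is what lets the boxed iterator cross thread or task boundaries, per the commit message ("so it can be used in impls of Stream/AsyncIterator"). A generic illustration of the constraint, not tantivy's exact signature (which borrows from the reader rather than being 'static):

// Without `+ Send` on the trait object, the spawn below would not compile.
fn consume_elsewhere(iter: Box<dyn Iterator<Item = u32> + Send + 'static>) {
    std::thread::spawn(move || {
        for doc_id in iter {
            let _ = doc_id; // process alive doc ids here
        }
    });
}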
View File

@@ -159,7 +159,7 @@ mod tests_indexsorting {
use crate::indexer::NoMergePolicy;
use crate::query::QueryParser;
use crate::schema::*;
use crate::{DocAddress, Index, IndexSettings, IndexSortByField, Order};
use crate::{DocAddress, Index, IndexBuilder, IndexSettings, IndexSortByField, Order};
fn create_test_index(
index_settings: Option<IndexSettings>,
@@ -557,4 +557,28 @@ mod tests_indexsorting {
&[2000, 8000, 3000]
);
}
#[test]
fn test_text_sort() -> crate::Result<()> {
let mut schema_builder = SchemaBuilder::new();
schema_builder.add_text_field("id", STRING | FAST | STORED);
schema_builder.add_text_field("name", TEXT | STORED);
let resp = IndexBuilder::new()
.schema(schema_builder.build())
.settings(IndexSettings {
sort_by_field: Some(IndexSortByField {
field: "id".to_string(),
order: Order::Asc,
}),
..Default::default()
})
.create_in_ram();
assert!(resp
.unwrap_err()
.to_string()
.contains("Unsupported field type"));
Ok(())
}
}

View File

@@ -156,16 +156,24 @@ mod tests_mmap {
}
#[test]
fn test_json_field_1byte() {
// Test when field name contains a 1 byte, which has special meaning in tantivy.
// Test when field name contains a '1' byte, which has special meaning in tantivy.
// The 1 byte can be addressed as '1' byte or '.'.
let field_name_in = "\u{0001}";
let field_name_out = "\u{0001}";
test_json_field_name(field_name_in, field_name_out);
// Test when field name contains a 1 byte, which has special meaning in tantivy.
// Test when field name contains a '1' byte, which has special meaning in tantivy.
let field_name_in = "\u{0001}";
let field_name_out = ".";
test_json_field_name(field_name_in, field_name_out);
}
#[test]
fn test_json_field_dot() {
// Test when field name contains a '.'
let field_name_in = ".";
let field_name_out = ".";
test_json_field_name(field_name_in, field_name_out);
}
fn test_json_field_name(field_name_in: &str, field_name_out: &str) {
let mut schema_builder = Schema::builder();
@@ -205,10 +213,10 @@ mod tests_mmap {
let reader = index.reader().unwrap();
let searcher = reader.searcher();
let parse_query = QueryParser::for_index(&index, Vec::new());
let test_query = |field_name: &str| {
let query = parse_query.parse_query(field_name).unwrap();
let test_query = |query_str: &str| {
let query = parse_query.parse_query(query_str).unwrap();
let num_docs = searcher.search(&query, &Count).unwrap();
assert_eq!(num_docs, 1);
assert_eq!(num_docs, 1, "{}", query_str);
};
test_query(format!("json.{field_name_out}:test1").as_str());
test_query(format!("json.a{field_name_out}:test2").as_str());

View File

@@ -496,14 +496,14 @@ mod tests {
use tempfile::TempDir;
use crate::collector::{Count, TopDocs};
use crate::core::json_utils::JsonTermWriter;
use crate::directory::RamDirectory;
use crate::fastfield::FastValue;
use crate::json_utils::term_from_json_paths;
use crate::postings::TermInfo;
use crate::query::{PhraseQuery, QueryParser};
use crate::schema::document::Value;
use crate::schema::{
Document, IndexRecordOption, Schema, TextFieldIndexing, TextOptions, Type, STORED, STRING,
TEXT,
Document, IndexRecordOption, Schema, TextFieldIndexing, TextOptions, STORED, STRING, TEXT,
};
use crate::store::{Compressor, StoreReader, StoreWriter};
use crate::time::format_description::well_known::Rfc3339;
@@ -645,115 +645,117 @@ mod tests {
let inv_idx = segment_reader.inverted_index(json_field).unwrap();
let term_dict = inv_idx.terms();
let mut term = Term::with_type_and_field(Type::Json, json_field);
let mut term_stream = term_dict.stream().unwrap();
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
let term_from_path = |paths: &[&str]| -> Term {
term_from_json_paths(json_field, paths.iter().cloned(), false)
};
json_term_writer.push_path_segment("bool");
json_term_writer.set_fast_value(true);
fn set_fast_val<T: FastValue>(val: T, mut term: Term) -> Term {
term.append_type_and_fast_value(val);
term
}
fn set_str(val: &str, mut term: Term) -> Term {
term.append_type_and_str(val);
term
}
let term = term_from_path(&["bool"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(true, term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("complexobject");
json_term_writer.push_path_segment("field.with.dot");
json_term_writer.set_fast_value(1i64);
let term = term_from_path(&["complexobject", "field.with.dot"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(1i64, term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("date");
json_term_writer.set_fast_value(DateTime::from_utc(
OffsetDateTime::parse("1985-04-12T23:20:50.52Z", &Rfc3339).unwrap(),
));
// Date
let term = term_from_path(&["date"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(
DateTime::from_utc(
OffsetDateTime::parse("1985-04-12T23:20:50.52Z", &Rfc3339).unwrap(),
),
term
)
.serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("float");
json_term_writer.set_fast_value(-0.2f64);
// Float
let term = term_from_path(&["float"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(-0.2f64, term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("my_arr");
json_term_writer.set_fast_value(2i64);
// Number In Array
let term = term_from_path(&["my_arr"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(2i64, term).serialized_value_bytes()
);
json_term_writer.set_fast_value(3i64);
let term = term_from_path(&["my_arr"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(3i64, term).serialized_value_bytes()
);
json_term_writer.set_fast_value(4i64);
let term = term_from_path(&["my_arr"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(4i64, term).serialized_value_bytes()
);
json_term_writer.push_path_segment("my_key");
json_term_writer.set_str("tokens");
// El in Array
let term = term_from_path(&["my_arr", "my_key"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_str("tokens", term).serialized_value_bytes()
);
json_term_writer.set_str("two");
let term = term_from_path(&["my_arr", "my_key"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_str("two", term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("signed");
json_term_writer.set_fast_value(-2i64);
// Signed
let term = term_from_path(&["signed"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(-2i64, term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("toto");
json_term_writer.set_str("titi");
let term = term_from_path(&["toto"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_str("titi", term).serialized_value_bytes()
);
json_term_writer.pop_path_segment();
json_term_writer.push_path_segment("unsigned");
json_term_writer.set_fast_value(1i64);
// Unsigned
let term = term_from_path(&["unsigned"]);
assert!(term_stream.advance());
assert_eq!(
term_stream.key(),
json_term_writer.term().serialized_value_bytes()
set_fast_val(1i64, term).serialized_value_bytes()
);
assert!(!term_stream.advance());
}
@@ -774,14 +776,9 @@ mod tests {
let searcher = reader.searcher();
let segment_reader = searcher.segment_reader(0u32);
let inv_index = segment_reader.inverted_index(json_field).unwrap();
let mut term = Term::with_type_and_field(Type::Json, json_field);
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
json_term_writer.push_path_segment("mykey");
json_term_writer.set_str("token");
let term_info = inv_index
.get_term_info(json_term_writer.term())
.unwrap()
.unwrap();
let mut term = term_from_json_paths(json_field, ["mykey"].into_iter(), false);
term.append_type_and_str("token");
let term_info = inv_index.get_term_info(&term).unwrap().unwrap();
assert_eq!(
term_info,
TermInfo {
@@ -818,14 +815,9 @@ mod tests {
let searcher = reader.searcher();
let segment_reader = searcher.segment_reader(0u32);
let inv_index = segment_reader.inverted_index(json_field).unwrap();
let mut term = Term::with_type_and_field(Type::Json, json_field);
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
json_term_writer.push_path_segment("mykey");
json_term_writer.set_str("two tokens");
let term_info = inv_index
.get_term_info(json_term_writer.term())
.unwrap()
.unwrap();
let mut term = term_from_json_paths(json_field, ["mykey"].into_iter(), false);
term.append_type_and_str("two tokens");
let term_info = inv_index.get_term_info(&term).unwrap().unwrap();
assert_eq!(
term_info,
TermInfo {
@@ -863,16 +855,18 @@ mod tests {
writer.commit().unwrap();
let reader = index.reader().unwrap();
let searcher = reader.searcher();
let mut term = Term::with_type_and_field(Type::Json, json_field);
let mut json_term_writer = JsonTermWriter::wrap(&mut term, false);
json_term_writer.push_path_segment("mykey");
json_term_writer.push_path_segment("field");
json_term_writer.set_str("hello");
let hello_term = json_term_writer.term().clone();
json_term_writer.set_str("nothello");
let nothello_term = json_term_writer.term().clone();
json_term_writer.set_str("happy");
let happy_term = json_term_writer.term().clone();
let term = term_from_json_paths(json_field, ["mykey", "field"].into_iter(), false);
let mut hello_term = term.clone();
hello_term.append_type_and_str("hello");
let mut nothello_term = term.clone();
nothello_term.append_type_and_str("nothello");
let mut happy_term = term.clone();
happy_term.append_type_and_str("happy");
let phrase_query = PhraseQuery::new(vec![hello_term, happy_term.clone()]);
assert_eq!(searcher.search(&phrase_query, &Count).unwrap(), 1);
let phrase_query = PhraseQuery::new(vec![nothello_term, happy_term]);

View File

@@ -255,7 +255,7 @@ pub struct Version {
impl fmt::Debug for Version {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_string())
fmt::Display::fmt(self, f)
}
}
@@ -266,9 +266,10 @@ static VERSION: Lazy<Version> = Lazy::new(|| Version {
index_format_version: INDEX_FORMAT_VERSION,
});
impl ToString for Version {
fn to_string(&self) -> String {
format!(
impl fmt::Display for Version {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"tantivy v{}.{}.{}, index_format v{}",
self.major, self.minor, self.patch, self.index_format_version
)

View File

@@ -1,5 +1,6 @@
use std::io;
use common::json_path_writer::JSON_END_OF_PATH;
use stacker::Addr;
use crate::indexer::doc_id_mapping::DocIdMapping;
@@ -7,7 +8,7 @@ use crate::indexer::path_to_unordered_id::OrderedPathId;
use crate::postings::postings_writer::SpecializedPostingsWriter;
use crate::postings::recorder::{BufferLender, DocIdRecorder, Recorder};
use crate::postings::{FieldSerializer, IndexingContext, IndexingPosition, PostingsWriter};
use crate::schema::{Field, Type, JSON_END_OF_PATH};
use crate::schema::{Field, Type};
use crate::tokenizer::TokenStream;
use crate::{DocId, Term};

View File

@@ -10,10 +10,10 @@ use query_grammar::{UserInputAst, UserInputBound, UserInputLeaf, UserInputLitera
use rustc_hash::FxHashMap;
use super::logical_ast::*;
use crate::core::json_utils::{
convert_to_fast_value_and_get_term, set_string_and_get_terms, JsonTermWriter,
};
use crate::index::Index;
use crate::json_utils::{
convert_to_fast_value_and_append_to_json_term, split_json_path, term_from_json_paths,
};
use crate::query::range_query::{is_type_valid_for_fastfield_range_query, RangeQuery};
use crate::query::{
AllQuery, BooleanQuery, BoostQuery, EmptyQuery, FuzzyTermQuery, Occur, PhrasePrefixQuery,
@@ -965,20 +965,33 @@ fn generate_literals_for_json_object(
})?;
let index_record_option = text_options.index_option();
let mut logical_literals = Vec::new();
let mut term = Term::with_capacity(100);
let mut json_term_writer = JsonTermWriter::from_field_and_json_path(
field,
json_path,
json_options.is_expand_dots_enabled(),
&mut term,
);
if let Some(term) = convert_to_fast_value_and_get_term(&mut json_term_writer, phrase) {
let paths = split_json_path(json_path);
let get_term_with_path = || {
term_from_json_paths(
field,
paths.iter().map(|el| el.as_str()),
json_options.is_expand_dots_enabled(),
)
};
// Try to convert the phrase to a fast value
if let Some(term) = convert_to_fast_value_and_append_to_json_term(get_term_with_path(), phrase)
{
logical_literals.push(LogicalLiteral::Term(term));
}
let terms = set_string_and_get_terms(&mut json_term_writer, phrase, &mut text_analyzer);
drop(json_term_writer);
if terms.len() <= 1 {
for (_, term) in terms {
// Try to tokenize the phrase and create Terms.
let mut positions_and_terms = Vec::<(usize, Term)>::new();
let mut token_stream = text_analyzer.token_stream(phrase);
token_stream.process(&mut |token| {
let mut term = get_term_with_path();
term.append_type_and_str(&token.text);
positions_and_terms.push((token.position, term.clone()));
});
if positions_and_terms.len() <= 1 {
for (_, term) in positions_and_terms {
logical_literals.push(LogicalLiteral::Term(term));
}
return Ok(logical_literals);
@@ -989,7 +1002,7 @@ fn generate_literals_for_json_object(
));
}
logical_literals.push(LogicalLiteral::Phrase {
terms,
terms: positions_and_terms,
slop: 0,
prefix: false,
});

View File

@@ -1,15 +1,16 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::net::Ipv6Addr;
use common::DateTime;
use common::{BinarySerializable, DateTime, VInt};
use serde_json::Map;
use crate::schema::document::se::BinaryValueSerializer;
use crate::schema::document::{
DeserializeError, Document, DocumentDeserialize, DocumentDeserializer,
BinaryDocumentDeserializer, BinaryDocumentSerializer, DeserializeError, Document,
DocumentDeserialize, DocumentDeserializer, ReferenceValue, ReferenceValueLeaf,
};
use crate::schema::field_type::ValueParsingError;
use crate::schema::field_value::FieldValueIter;
use crate::schema::{Facet, Field, FieldValue, NamedFieldDocument, OwnedValue, Schema};
use crate::schema::{Facet, Field, FieldValue, NamedFieldDocument, OwnedValue, Schema, Value};
use crate::tokenizer::PreTokenizedString;
/// TantivyDocument provides a default implementation of the `Document` trait.
@@ -85,6 +86,36 @@ impl IntoIterator for TantivyDocument {
}
impl TantivyDocument {
pub fn to_bytes(&self, buffer: &mut Vec<u8>) -> std::io::Result<()> {
buffer.clear();
let num_field_values = self.field_values.len();
VInt(num_field_values as u64).serialize(buffer)?;
for (field, value_access) in self.iter_fields_and_values() {
field.serialize(buffer)?;
let mut serializer = BinaryValueSerializer::new(buffer);
match value_access.as_value() {
ReferenceValue::Leaf(ReferenceValueLeaf::PreTokStr(pre_tokenized_text)) => {
serializer.serialize_value(ReferenceValue::Leaf::<&'_ OwnedValue>(
ReferenceValueLeaf::Str(&pre_tokenized_text.text),
))?;
}
_ => {
serializer.serialize_value(value_access.as_value())?;
}
}
}
Ok(())
}
pub fn from_bytes(mut payload: &[u8]) -> Self {
let deserializer = BinaryDocumentDeserializer::from_reader(&mut payload).unwrap();
Self::deserialize(deserializer).unwrap()
}
/// Creates a new, empty document object
pub fn new() -> TantivyDocument {
TantivyDocument::default()

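A round trip through this experimental serialization API (from the "do not merge" commit at the top of the comparison) might look like the following sketch, assuming doc is an existing TantivyDocument:

let mut buffer = Vec::new();
doc.to_bytes(&mut buffer).unwrap(); // clears and fills `buffer`
let decoded = TantivyDocument::from_bytes(&buffer);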
View File

@@ -1,4 +1,4 @@
use std::collections::{btree_map, BTreeMap};
use std::collections::BTreeMap;
use std::fmt;
use std::net::Ipv6Addr;
@@ -45,7 +45,7 @@ pub enum OwnedValue {
/// A set of values.
Array(Vec<Self>),
/// Dynamic object value.
Object(BTreeMap<String, Self>),
Object(Vec<(String, Self)>),
/// IpV6 Address. Internally there is no IpV4, it needs to be converted to `Ipv6Addr`.
IpAddr(Ipv6Addr),
}
@@ -148,10 +148,10 @@ impl ValueDeserialize for OwnedValue {
fn visit_object<'de, A>(&self, mut access: A) -> Result<Self::Value, DeserializeError>
where A: ObjectAccess<'de> {
let mut elements = BTreeMap::new();
let mut elements = Vec::with_capacity(access.size_hint());
while let Some((key, value)) = access.next_entry()? {
elements.insert(key, value);
elements.push((key, value));
}
Ok(OwnedValue::Object(elements))
@@ -248,12 +248,13 @@ impl<'de> serde::Deserialize<'de> for OwnedValue {
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where A: MapAccess<'de> {
let mut object = BTreeMap::new();
let mut object =
map.size_hint()
.map(Vec::with_capacity)
.unwrap_or_default();
while let Some((key, value)) = map.next_entry()? {
object.insert(key, value);
object.push((key, value));
}
Ok(OwnedValue::Object(object))
}
}
@@ -363,7 +364,8 @@ impl From<PreTokenizedString> for OwnedValue {
impl From<BTreeMap<String, OwnedValue>> for OwnedValue {
fn from(object: BTreeMap<String, OwnedValue>) -> OwnedValue {
OwnedValue::Object(object)
let key_values = object.into_iter().collect();
OwnedValue::Object(key_values)
}
}
@@ -417,18 +419,15 @@ impl From<serde_json::Value> for OwnedValue {
impl From<serde_json::Map<String, serde_json::Value>> for OwnedValue {
fn from(map: serde_json::Map<String, serde_json::Value>) -> Self {
let mut object = BTreeMap::new();
for (key, value) in map {
object.insert(key, OwnedValue::from(value));
}
let object: Vec<(String, OwnedValue)> = map.into_iter()
.map(|(key, value)| (key, OwnedValue::from(value)))
.collect();
OwnedValue::Object(object)
}
}
/// A wrapper type for iterating over a serde_json object producing reference values.
pub struct ObjectMapIter<'a>(btree_map::Iter<'a, String, OwnedValue>);
pub struct ObjectMapIter<'a>(std::slice::Iter<'a, (String, OwnedValue)>);
impl<'a> Iterator for ObjectMapIter<'a> {
type Item = (&'a str, &'a OwnedValue);

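Switching Object from BTreeMap<String, Self> to Vec<(String, Self)> trades ordered, logarithmic lookup for a compact representation that preserves insertion order and avoids per-entry node allocations; for the small objects typical of JSON documents a linear scan is usually the faster option. A generic sketch of what key lookup becomes (a hypothetical helper, not part of the diff):

fn get<'a, V>(object: &'a [(String, V)], key: &str) -> Option<&'a V> {
    // Linear scan over the Vec-backed object representation.
    object.iter().find(|(k, _)| k == key).map(|(_, v)| v)
}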
View File

@@ -146,7 +146,7 @@ pub use self::json_object_options::JsonObjectOptions;
pub use self::named_field_document::NamedFieldDocument;
pub use self::numeric_options::NumericOptions;
pub use self::schema::{Schema, SchemaBuilder};
pub use self::term::{Term, ValueBytes, JSON_END_OF_PATH};
pub use self::term::{Term, ValueBytes};
pub use self::text_options::{TextFieldIndexing, TextOptions, STRING, TEXT};
/// Validator for a potential `field_name`.

View File

@@ -3,6 +3,7 @@ use std::net::Ipv6Addr;
use std::{fmt, str};
use columnar::{MonotonicallyMappableToU128, MonotonicallyMappableToU64};
use common::json_path_writer::{JSON_END_OF_PATH, JSON_PATH_SEGMENT_SEP_STR};
use super::date_time_options::DATE_TIME_PRECISION_INDEXED;
use super::Field;
@@ -10,15 +11,6 @@ use crate::fastfield::FastValue;
use crate::schema::{Facet, Type};
use crate::DateTime;
/// Separates the different segments of a json path.
pub const JSON_PATH_SEGMENT_SEP: u8 = 1u8;
pub const JSON_PATH_SEGMENT_SEP_STR: &str =
unsafe { std::str::from_utf8_unchecked(&[JSON_PATH_SEGMENT_SEP]) };
/// Separates the json path and the value in
/// a JSON term binary representation.
pub const JSON_END_OF_PATH: u8 = 0u8;
/// Term represents the value that the token can take.
/// It's a serialized representation over different types.
///
@@ -169,6 +161,10 @@ impl Term {
self.set_bytes(val.to_u64().to_be_bytes().as_ref());
}
/// Append a type marker + fast value to a term.
/// This is used in JSON type to append a fast value after the path.
///
/// It will not clear existing bytes.
pub(crate) fn append_type_and_fast_value<T: FastValue>(&mut self, val: T) {
self.0.push(T::to_type().to_code());
let value = if T::to_type() == Type::Date {
@@ -181,6 +177,15 @@ impl Term {
self.0.extend(value.to_be_bytes().as_ref());
}
/// Append a string type marker + string to a term.
/// This is used in JSON type to append a str after the path.
///
/// It will not clear existing bytes.
pub(crate) fn append_type_and_str(&mut self, val: &str) {
self.0.push(Type::Str.to_code());
self.0.extend(val.as_bytes().as_ref());
}
/// Sets a `Ipv6Addr` value in the term.
pub fn set_ip_addr(&mut self, val: Ipv6Addr) {
self.set_bytes(val.to_u128().to_be_bytes().as_ref());
@@ -192,11 +197,6 @@ impl Term {
self.0.extend(bytes);
}
/// Set the texts only, keeping the field untouched.
pub fn set_text(&mut self, text: &str) {
self.set_bytes(text.as_bytes());
}
/// Truncates the value bytes of the term. Value and field type stays the same.
pub fn truncate_value_bytes(&mut self, len: usize) {
self.0.truncate(len + TERM_METADATA_LENGTH);
@@ -233,27 +233,6 @@ impl Term {
}
&mut self.0[len_before..]
}
/// Appends a JSON_PATH_SEGMENT_SEP to the term.
/// Only used for JSON type.
#[inline]
pub fn add_json_path_separator(&mut self) {
self.0.push(JSON_PATH_SEGMENT_SEP);
}
/// Sets the current end to JSON_END_OF_PATH.
/// Only used for JSON type.
#[inline]
pub fn set_json_path_end(&mut self) {
let buffer_len = self.0.len();
self.0[buffer_len - 1] = JSON_END_OF_PATH;
}
/// Sets the current end to JSON_PATH_SEGMENT_SEP.
/// Only used for JSON type.
#[inline]
pub fn set_json_path_separator(&mut self) {
let buffer_len = self.0.len();
self.0[buffer_len - 1] = JSON_PATH_SEGMENT_SEP;
}
}
impl<B> Term<B>

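The append_type_and_* methods rely on the serialized JSON term layout that the test constants earlier in this comparison spell out: a 4-byte big-endian field id, a one-byte field type code (j for JSON), the \x01-separated path, the \x00 end-of-path marker, a one-byte value type code, then the value bytes. Reassembling one of the test constants by hand (a layout sketch, not tantivy's API):

fn main() {
    let mut expected = Vec::new();
    expected.extend_from_slice(&1u32.to_be_bytes()); // field id 1
    expected.push(b'j');                             // field type: Json
    expected.extend_from_slice(b"color");            // json path
    expected.push(0u8);                              // JSON_END_OF_PATH
    expected.push(b's');                             // value type: Str
    expected.extend_from_slice(b"red");              // value bytes
    assert_eq!(expected, b"\x00\x00\x00\x01jcolor\x00sred");
}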
View File

@@ -14,7 +14,7 @@ use super::Decompressor;
use crate::directory::FileSlice;
use crate::error::DataCorruption;
use crate::fastfield::AliveBitSet;
use crate::schema::document::{BinaryDocumentDeserializer, Document, DocumentDeserialize};
use crate::schema::document::{BinaryDocumentDeserializer, DocumentDeserialize};
use crate::space_usage::StoreSpaceUsage;
use crate::store::index::Checkpoint;
use crate::DocId;
@@ -235,7 +235,7 @@ impl StoreReader {
/// Iterator over all Documents in their order as they are stored in the doc store.
/// Use this, if you want to extract all Documents from the doc store.
/// The `alive_bitset` has to be forwarded from the `SegmentReader` or the results may be wrong.
pub fn iter<'a: 'b, 'b, D: Document + DocumentDeserialize>(
pub fn iter<'a: 'b, 'b, D: DocumentDeserialize>(
&'b self,
alive_bitset: Option<&'a AliveBitSet>,
) -> impl Iterator<Item = crate::Result<D>> + 'b {

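With the relaxed bound, any type implementing DocumentDeserialize can be streamed out of the doc store. A usage sketch, assuming a store_reader: &StoreReader is in scope:

for doc in store_reader.iter::<TantivyDocument>(None) {
    // None: no alive bitset, i.e. a segment without deletes in this sketch.
    let doc = doc.unwrap(); // each item is a crate::Result<TantivyDocument>
    // ... consume the deserialized document
}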
View File

@@ -1,6 +1,5 @@
use std::io::{self, Write};
use std::ops::Range;
use std::usize;
use merge::ValueMerger;