Mirror of https://github.com/quickwit-oss/tantivy.git (synced 2025-12-28 04:52:55 +00:00)
Compare commits
4 Commits
document-d ... update_exa
| Author | SHA1 | Date |
|---|---|---|
| | f5a716e827 | |
| | 4143d31865 | |
| | 0c634adbe1 | |
| | 2e3641c2ae | |
@@ -16,7 +16,7 @@ exclude = ["benches/*.json", "benches/*.txt"]

 [dependencies]
 # Switch back to the non-forked oneshot crate once https://github.com/faern/oneshot/pull/35 is merged
-oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "c10a3ba" }
+oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "b208f49" }
 base64 = "0.22.0"
 byteorder = "1.4.3"
 crc32fast = "1.3.2"

@@ -349,7 +349,7 @@ fn get_test_index_bench(cardinality: Cardinality) -> tantivy::Result<Index> {
     let lg_norm = rand_distr::LogNormal::new(2.996f64, 0.979f64).unwrap();

     let many_terms_data = (0..150_000)
-        .map(|num| format!("author{}", num))
+        .map(|num| format!("author{num}"))
         .collect::<Vec<_>>();
     {
         let mut rng = StdRng::from_seed([1u8; 32]);

@@ -141,12 +141,12 @@ pub fn hdfs_index_benchmark(c: &mut Criterion) {
         let parse_json = false;
         // for parse_json in [false, true] {
         let suffix = if parse_json {
-            format!("{}-with-json-parsing", suffix)
+            format!("{suffix}-with-json-parsing")
         } else {
             suffix.to_string()
         };

-        let bench_name = format!("{}{}", prefix, suffix);
+        let bench_name = format!("{prefix}{suffix}");
         group.bench_function(bench_name, |b| {
             benchmark(b, HDFS_LOGS, schema.clone(), commit, parse_json, is_dynamic)
         });

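Most of the hunks in this compare apply the same mechanical migration: positional `format!`/`println!`/`assert!` arguments become Rust 2021 captured identifiers (the change `clippy::uninlined_format_args` suggests). A minimal standalone sketch of the pattern, with illustrative variable names:

```rust
fn main() {
    let prefix = "hdfs";
    let suffix = "commit";
    // Old style: positional arguments trail the format string.
    let old = format!("{}{}", prefix, suffix);
    // Rust 2021 captured identifiers: the variable is named inline.
    let new = format!("{prefix}{suffix}");
    assert_eq!(old, new);
    // Width/fill specs compose with captured identifiers too,
    // as in the `doc{i:0>6}` hunks further down.
    let i = 42;
    assert_eq!(format!("doc{i:0>6}"), "doc000042");
}
```

The two spellings are equivalent at runtime; only plain identifiers can be captured inline, which is why the rewrites keep the variables rather than inlining expressions.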
@@ -19,13 +19,14 @@ use tantivy::{doc, Index, IndexWriter, ReloadPolicy};
 use tempfile::TempDir;

 fn main() -> tantivy::Result<()> {
-    // Let's create a temporary directory for the
-    // sake of this example
+    // Normally you would use `MMapDirectory` instead to persist data on disk.
+    // https://docs.rs/tantivy/latest/tantivy/directory/struct.MmapDirectory.html
+    // But for this example, we will use a temporary directory `TempDir`.
     let index_path = TempDir::new()?;

     // # Defining the schema
     //
-    // The Tantivy index requires a very strict schema.
+    // The Tantivy index requires a schema.
     // The schema declares which fields are in the index,
     // and for each field, its type and "the way it should
     // be indexed".

@@ -4,7 +4,7 @@

 use tantivy::collector::TopDocs;
 use tantivy::query::QueryParser;
-use tantivy::schema::{DateOptions, Document, Schema, INDEXED, STORED, STRING};
+use tantivy::schema::{DateOptions, Document, Schema, Value, INDEXED, STORED, STRING};
 use tantivy::{Index, IndexWriter, TantivyDocument};

 fn main() -> tantivy::Result<()> {

@@ -64,6 +64,7 @@ fn main() -> tantivy::Result<()> {
     assert!(retrieved_doc
         .get_first(occurred_at)
         .unwrap()
+        .as_value()
         .as_datetime()
         .is_some(),);
     assert_eq!(

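The inserted `.as_value()` calls here and in many hunks below track an API change in this compare: document accessors such as `get_first`/`get_all` now return a lazy `CompactDocValue` handle instead of a decoded `ReferenceValue`, so the typed accessors (`as_str`, `as_u64`, `as_datetime`, `as_facet`, ...) are reached through the `Value` trait's `as_value()`; hence the `Value` imports added throughout. A minimal end-to-end sketch against the tantivy revision in this compare (the schema and document are illustrative):

```rust
use tantivy::schema::{Schema, Value, STORED, TEXT};
use tantivy::{doc, DocAddress, Index, TantivyDocument};

fn main() -> tantivy::Result<()> {
    let mut schema_builder = Schema::builder();
    let title = schema_builder.add_text_field("title", TEXT | STORED);
    let index = Index::create_in_ram(schema_builder.build());

    let mut writer = index.writer(50_000_000)?;
    writer.add_document(doc!(title => "hello"))?;
    writer.commit()?;

    let searcher = index.reader()?.searcher();
    let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 0))?;
    // New pattern: go through `as_value()` before the typed accessor.
    assert_eq!(doc.get_first(title).unwrap().as_value().as_str(), Some("hello"));
    Ok(())
}
```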
@@ -61,7 +61,7 @@ fn main() -> tantivy::Result<()> {
                 debris of the winter’s flooding; and sycamores with mottled, white, recumbent \
                 limbs and branches that arch over the pool"
             ))?;
-            println!("add doc {} from thread 1 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 1 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(20));
         }
         Result::<(), TantivyError>::Ok(())

@@ -82,7 +82,7 @@ fn main() -> tantivy::Result<()> {
                     body => "Some great book description..."
                 ))?
             };
-            println!("add doc {} from thread 2 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 2 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(10));
         }
         Result::<(), TantivyError>::Ok(())

@@ -335,8 +335,8 @@ fn get_missing_val(
         }
         _ => {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "Missing value {:?} for field {} is not supported for column type {:?}",
-                missing, field_name, column_type
+                "Missing value {missing:?} for field {field_name} is not supported for column \
+                 type {column_type:?}"
             )));
         }
     };

@@ -403,7 +403,7 @@ fn get_dynamic_columns(
         .iter()
         .map(|h| h.open())
         .collect::<io::Result<_>>()?;
-    assert!(!ff_fields.is_empty(), "field {} not found", field_name);
+    assert!(!ff_fields.is_empty(), "field {field_name} not found");
     Ok(cols)
 }

@@ -357,8 +357,7 @@ impl SegmentTermCollector {
     ) -> crate::Result<Self> {
         if field_type == ColumnType::Bytes {
             return Err(TantivyError::InvalidArgument(format!(
-                "terms aggregation is not supported for column type {:?}",
-                field_type
+                "terms aggregation is not supported for column type {field_type:?}"
             )));
         }
         let term_buckets = TermBuckets::default();

@@ -131,8 +131,8 @@ impl<'de> Deserialize<'de> for KeyOrder {
         ))?;
         if key_order.next().is_some() {
             return Err(serde::de::Error::custom(format!(
-                "Expected exactly one key-value pair in sort parameter of top_hits, found {:?}",
-                key_order
+                "Expected exactly one key-value pair in sort parameter of top_hits, found \
+                 {key_order:?}"
             )));
         }
         Ok(Self { field, order })

@@ -144,27 +144,22 @@ fn globbed_string_to_regex(glob: &str) -> Result<Regex, crate::TantivyError> {
     // Replace `*` glob with `.*` regex
     let sanitized = format!("^{}$", regex::escape(glob).replace(r"\*", ".*"));
     Regex::new(&sanitized.replace('*', ".*")).map_err(|e| {
-        crate::TantivyError::SchemaError(format!(
-            "Invalid regex '{}' in docvalue_fields: {}",
-            glob, e
-        ))
+        crate::TantivyError::SchemaError(format!("Invalid regex '{glob}' in docvalue_fields: {e}"))
     })
 }

 fn use_doc_value_fields_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported, only `docvalue_fields` is supported in \
-             `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported, only `docvalue_fields` is supported in \
+             `top_hits` aggregation"
         )),
     ))
 }
 fn unsupported_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported in the `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported in the `top_hits` aggregation"
         )),
     ))
 }

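For orientation, `globbed_string_to_regex` at the top of this hunk converts a user-supplied glob such as `attributes.*` into an anchored regex. A simplified standalone sketch of the core idea (the real code above additionally applies a second `replace('*', ".*")` pass; `regex` is a crate tantivy already depends on here):

```rust
use regex::Regex;

fn glob_to_regex(glob: &str) -> Result<Regex, regex::Error> {
    // Escape regex metacharacters, then turn the escaped `\*`
    // (originally a `*` glob) back into the regex wildcard `.*`.
    let sanitized = format!("^{}$", regex::escape(glob).replace(r"\*", ".*"));
    Regex::new(&sanitized)
}

fn main() {
    let re = glob_to_regex("attributes.*").unwrap();
    assert!(re.is_match("attributes.color"));
    assert!(!re.is_match("other.color"));
}
```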
@@ -217,8 +212,7 @@ impl TopHitsAggregation {
             .collect::<Vec<_>>();
         assert!(
             !fields.is_empty(),
-            "No fields matched the glob '{}' in docvalue_fields",
-            field
+            "No fields matched the glob '{field}' in docvalue_fields"
         );
         Ok(fields)
     })

@@ -254,7 +248,7 @@ impl TopHitsAggregation {
             .map(|field| {
                 let accessors = accessors
                     .get(field)
-                    .unwrap_or_else(|| panic!("field '{}' not found in accessors", field));
+                    .unwrap_or_else(|| panic!("field '{field}' not found in accessors"));

                 let values: Vec<FastFieldValue> = accessors
                     .iter()

@@ -158,15 +158,14 @@ use serde::de::{self, Visitor};
 use serde::{Deserialize, Deserializer, Serialize};

 fn parse_str_into_f64<E: de::Error>(value: &str) -> Result<f64, E> {
-    let parsed = value.parse::<f64>().map_err(|_err| {
-        de::Error::custom(format!("Failed to parse f64 from string: {:?}", value))
-    })?;
+    let parsed = value
+        .parse::<f64>()
+        .map_err(|_err| de::Error::custom(format!("Failed to parse f64 from string: {value:?}")))?;

     // Check if the parsed value is NaN or infinity
     if parsed.is_nan() || parsed.is_infinite() {
         Err(de::Error::custom(format!(
-            "Value is not a valid f64 (NaN or Infinity): {:?}",
-            value
+            "Value is not a valid f64 (NaN or Infinity): {value:?}"
         )))
     } else {
         Ok(parsed)

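The reshuffled `parse_str_into_f64` is behavior-preserving; its point is that `str::parse::<f64>` happily accepts `"NaN"` and `"inf"`, so those must be rejected explicitly. A dependency-free sketch of the same logic:

```rust
fn parse_strict_f64(value: &str) -> Result<f64, String> {
    let parsed = value
        .parse::<f64>()
        .map_err(|_| format!("Failed to parse f64 from string: {value:?}"))?;
    // `parse` succeeds on "NaN"/"inf"; reject them by hand.
    if parsed.is_nan() || parsed.is_infinite() {
        Err(format!("Value is not a valid f64 (NaN or Infinity): {value:?}"))
    } else {
        Ok(parsed)
    }
}

fn main() {
    assert_eq!(parse_strict_f64("1.5"), Ok(1.5));
    assert!(parse_strict_f64("NaN").is_err());
    assert!(parse_strict_f64("inf").is_err());
}
```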
@@ -598,7 +598,7 @@ mod tests {
                 let mid = n % 4;
                 n /= 4;
                 let leaf = n % 5;
-                Facet::from(&format!("/top{}/mid{}/leaf{}", top, mid, leaf))
+                Facet::from(&format!("/top{top}/mid{mid}/leaf{leaf}"))
             })
             .collect();
         for i in 0..num_facets * 10 {

@@ -737,7 +737,7 @@ mod tests {
         vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
             .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })

@@ -785,7 +785,7 @@ mod tests {
         let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
             .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })

@@ -338,14 +338,14 @@ mod tests {
         let mut term = Term::from_field_json_path(field, "attributes.color", false);
         term.append_type_and_str("red");
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.color, type=Str, \"red\")"
         );

         let mut term = Term::from_field_json_path(field, "attributes.dimensions.width", false);
         term.append_type_and_fast_value(400i64);
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.dimensions.width, type=I64, 400)"
         );
     }

@@ -566,7 +566,7 @@ mod tests {
         let mmap_directory = MmapDirectory::create_from_tempdir().unwrap();
         let num_paths = 10;
         let paths: Vec<PathBuf> = (0..num_paths)
-            .map(|i| PathBuf::from(&*format!("file_{}", i)))
+            .map(|i| PathBuf::from(&*format!("file_{i}")))
             .collect();
         {
             for path in &paths {

@@ -62,7 +62,7 @@ impl FacetReader {

 #[cfg(test)]
 mod tests {
-    use crate::schema::{Facet, FacetOptions, SchemaBuilder, STORED};
+    use crate::schema::{Facet, FacetOptions, SchemaBuilder, Value, STORED};
     use crate::{DocAddress, Index, IndexWriter, TantivyDocument};

     #[test]

@@ -88,7 +88,9 @@ mod tests {
         let doc = searcher
             .doc::<TantivyDocument>(DocAddress::new(0u32, 0u32))
             .unwrap();
-        let value = doc.get_first(facet_field).and_then(|v| v.as_facet());
+        let value = doc
+            .get_first(facet_field)
+            .and_then(|v| v.as_value().as_facet());
         assert_eq!(value, None);
     }

@@ -252,9 +252,8 @@ impl IndexBuilder {
                 let field_type = entry.field_type().value_type();
                 if !supported_field_types.contains(&field_type) {
                     return Err(TantivyError::InvalidArgument(format!(
-                        "Unsupported field type in sort_by_field: {:?}. Supported field types: \
-                         {:?} ",
-                        field_type, supported_field_types,
+                        "Unsupported field type in sort_by_field: {field_type:?}. Supported field \
+                         types: {supported_field_types:?} ",
                     )));
                 }
             }

@@ -318,14 +318,14 @@ impl SegmentReader {
             if create_canonical {
                 // Without expand dots enabled dots need to be escaped.
                 let escaped_json_path = json_path.replace('.', "\\.");
-                let full_path = format!("{}.{}", field_name, escaped_json_path);
+                let full_path = format!("{field_name}.{escaped_json_path}");
                 let full_path_unescaped = format!("{}.{}", field_name, &json_path);
                 map_to_canonical.insert(full_path_unescaped, full_path.to_string());
                 full_path
             } else {
                 // With expand dots enabled, we can use '.' instead of '\u{1}'.
                 json_path_sep_to_dot(&mut json_path);
-                format!("{}.{}", field_name, json_path)
+                format!("{field_name}.{json_path}")
             }
         };
         indexed_fields.extend(

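The two rewritten `format!` calls above assemble a full JSON field path; the interesting part is the escaping rule in the `create_canonical` branch, sketched standalone below (names mirror the hunk):

```rust
fn main() {
    let field_name = "json";
    let json_path = "dotted.key";
    // Without expand_dots, a literal dot inside a JSON key must be
    // escaped so it is not later parsed as a path separator.
    let escaped_json_path = json_path.replace('.', "\\.");
    let full_path = format!("{field_name}.{escaped_json_path}");
    assert_eq!(full_path, "json.dotted\\.key");
}
```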
@@ -816,7 +816,7 @@ mod tests {
     use crate::query::{BooleanQuery, Occur, Query, QueryParser, TermQuery};
     use crate::schema::{
         self, Facet, FacetOptions, IndexRecordOption, IpAddrOptions, NumericOptions, Schema,
-        TextFieldIndexing, TextOptions, FAST, INDEXED, STORED, STRING, TEXT,
+        TextFieldIndexing, TextOptions, Value, FAST, INDEXED, STORED, STRING, TEXT,
     };
     use crate::store::DOCSTORE_CACHE_CAPACITY;
     use crate::{

@@ -1979,7 +1979,13 @@ mod tests {
             .unwrap();
         // test store iterator
         for doc in store_reader.iter::<TantivyDocument>(segment_reader.alive_bitset()) {
-            let id = doc.unwrap().get_first(id_field).unwrap().as_u64().unwrap();
+            let id = doc
+                .unwrap()
+                .get_first(id_field)
+                .unwrap()
+                .as_value()
+                .as_u64()
+                .unwrap();
             assert!(expected_ids_and_num_occurrences.contains_key(&id));
         }
         // test store random access

@@ -797,7 +797,7 @@ mod tests {
     use crate::query::{AllQuery, BooleanQuery, EnableScoring, Scorer, TermQuery};
     use crate::schema::{
         Facet, FacetOptions, IndexRecordOption, NumericOptions, TantivyDocument, Term,
-        TextFieldIndexing, INDEXED, TEXT,
+        TextFieldIndexing, Value, INDEXED, TEXT,
     };
     use crate::time::OffsetDateTime;
     use crate::{

@@ -909,15 +909,24 @@ mod tests {
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 0))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("af b"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("af b")
+            );
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 1))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("a b c")
+            );
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 2))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c d"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("a b c d")
+            );
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 3))?;

@@ -7,7 +7,7 @@ mod tests {
     use crate::query::QueryParser;
     use crate::schema::{
         self, BytesOptions, Facet, FacetOptions, IndexRecordOption, NumericOptions,
-        TextFieldIndexing, TextOptions,
+        TextFieldIndexing, TextOptions, Value,
     };
     use crate::{
         DocAddress, DocSet, IndexSettings, IndexSortByField, IndexWriter, Order, TantivyDocument,

@@ -280,13 +280,16 @@ mod tests {
             .doc::<TantivyDocument>(DocAddress::new(0, blubber_pos))
             .unwrap();
         assert_eq!(
-            doc.get_first(my_text_field).unwrap().as_str(),
+            doc.get_first(my_text_field).unwrap().as_value().as_str(),
             Some("blubber")
         );
         let doc = searcher
             .doc::<TantivyDocument>(DocAddress::new(0, 0))
             .unwrap();
-        assert_eq!(doc.get_first(int_field).unwrap().as_u64(), Some(1000));
+        assert_eq!(
+            doc.get_first(int_field).unwrap().as_value().as_u64(),
+            Some(1000)
+        );
     }
 }

@@ -216,7 +216,7 @@ mod tests_mmap {
         let test_query = |query_str: &str| {
             let query = parse_query.parse_query(query_str).unwrap();
             let num_docs = searcher.search(&query, &Count).unwrap();
-            assert_eq!(num_docs, 1, "{}", query_str);
+            assert_eq!(num_docs, 1, "{query_str}");
         };
         test_query(format!("json.{field_name_out}:test1").as_str());
         test_query(format!("json.a{field_name_out}:test2").as_str());

@@ -590,10 +590,10 @@ mod tests_mmap {
         let query_parser = QueryParser::for_index(&index, vec![]);
         // Test if field name can be queried
         for (indexed_field, val) in fields_and_vals.iter() {
-            let query_str = &format!("{}:{}", indexed_field, val);
+            let query_str = &format!("{indexed_field}:{val}");
             let query = query_parser.parse_query(query_str).unwrap();
             let count_docs = searcher.search(&*query, &TopDocs::with_limit(2)).unwrap();
-            assert!(!count_docs.is_empty(), "{}:{}", indexed_field, val);
+            assert!(!count_docs.is_empty(), "{indexed_field}:{val}");
         }
         // Test if field name can be used for aggregation
         for (field_name, val) in fields_and_vals.iter() {

@@ -500,8 +500,8 @@ mod tests {
     use crate::postings::{Postings, TermInfo};
     use crate::query::{PhraseQuery, QueryParser};
     use crate::schema::{
-        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, STORED,
-        STRING, TEXT,
+        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, Value,
+        STORED, STRING, TEXT,
     };
     use crate::store::{Compressor, StoreReader, StoreWriter};
     use crate::time::format_description::well_known::Rfc3339;

@@ -555,9 +555,12 @@ mod tests {
         let doc = reader.get::<TantivyDocument>(0).unwrap();

         assert_eq!(doc.field_values().count(), 2);
-        assert_eq!(doc.get_all(text_field).next().unwrap().as_str(), Some("A"));
         assert_eq!(
-            doc.get_all(text_field).nth(1).unwrap().as_str(),
+            doc.get_all(text_field).next().unwrap().as_value().as_str(),
+            Some("A")
+        );
+        assert_eq!(
+            doc.get_all(text_field).nth(1).unwrap().as_value().as_str(),
             Some("title")
         );
     }

@@ -138,8 +138,7 @@ impl FuzzyTermQuery {
             if json_path_type != Type::Str {
                 return Err(InvalidArgument(format!(
                     "The fuzzy term query requires a string path type for a json term. Found \
-                     {:?}",
-                    json_path_type
+                     {json_path_type:?}"
                 )));
             }
         }

@@ -185,7 +185,7 @@ mod test {
             Err(crate::TantivyError::InvalidArgument(msg)) => {
                 assert!(msg.contains("error: unclosed group"))
             }
-            res => panic!("unexpected result: {:?}", res),
+            res => panic!("unexpected result: {res:?}"),
         }
     }
 }

@@ -157,29 +157,24 @@ impl CompactDoc {
     }

     /// field_values accessor
-    pub fn field_values(
-        &self,
-    ) -> impl Iterator<Item = (Field, ReferenceValue<'_, CompactDocValue<'_>>)> {
+    pub fn field_values(&self) -> impl Iterator<Item = (Field, CompactDocValue<'_>)> {
         self.field_values.iter().map(|field_val| {
             let field = Field::from_field_id(field_val.field as u32);
-            let val = self.extract_value(field_val.value_addr).unwrap();
+            let val = self.get_compact_doc_value(field_val.value_addr);
             (field, val)
         })
     }

     /// Returns all of the `ReferenceValue`s associated the given field
-    pub fn get_all(
-        &self,
-        field: Field,
-    ) -> impl Iterator<Item = ReferenceValue<'_, CompactDocValue<'_>>> + '_ {
+    pub fn get_all(&self, field: Field) -> impl Iterator<Item = CompactDocValue<'_>> + '_ {
         self.field_values
             .iter()
             .filter(move |field_value| Field::from_field_id(field_value.field as u32) == field)
-            .map(|val| self.extract_value(val.value_addr).unwrap())
+            .map(|val| self.get_compact_doc_value(val.value_addr))
     }

     /// Returns the first `ReferenceValue` associated the given field
-    pub fn get_first(&self, field: Field) -> Option<ReferenceValue<'_, CompactDocValue<'_>>> {
+    pub fn get_first(&self, field: Field) -> Option<CompactDocValue<'_>> {
         self.get_all(field).next()
     }

@@ -299,58 +294,11 @@ impl CompactDoc {
         }
     }

-    fn extract_value(
-        &self,
-        ref_value: ValueAddr,
-    ) -> io::Result<ReferenceValue<'_, CompactDocValue<'_>>> {
-        match ref_value.type_id {
-            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
-            ValueType::Str => {
-                let str_ref = self.extract_str(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Str(str_ref).into())
-            }
-            ValueType::Facet => {
-                let str_ref = self.extract_str(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Facet(str_ref).into())
-            }
-            ValueType::Bytes => {
-                let data = self.extract_bytes(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Bytes(data).into())
-            }
-            ValueType::U64 => self
-                .read_from::<u64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::U64)
-                .map(Into::into),
-            ValueType::I64 => self
-                .read_from::<i64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::I64)
-                .map(Into::into),
-            ValueType::F64 => self
-                .read_from::<f64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::F64)
-                .map(Into::into),
-            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(ref_value.val_addr != 0).into()),
-            ValueType::Date => self
-                .read_from::<i64>(ref_value.val_addr)
-                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
-                .map(Into::into),
-            ValueType::IpAddr => self
-                .read_from::<u128>(ref_value.val_addr)
-                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
-                .map(Into::into),
-            ValueType::PreTokStr => self
-                .read_from::<PreTokenizedString>(ref_value.val_addr)
-                .map(Into::into)
-                .map(ReferenceValueLeaf::PreTokStr)
-                .map(Into::into),
-            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
-                self,
-                ref_value.val_addr,
-            )?)),
-            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
-                self,
-                ref_value.val_addr,
-            )?)),
-        }
-    }
+    /// Get CompactDocValue for address
+    fn get_compact_doc_value(&self, value_addr: ValueAddr) -> CompactDocValue<'_> {
+        CompactDocValue {
+            container: self,
+            value_addr,
+        }
+    }

@@ -410,7 +358,7 @@ impl PartialEq for CompactDoc {
         let convert_to_comparable_map = |doc: &CompactDoc| {
             let mut field_value_set: HashMap<Field, HashSet<String>> = Default::default();
             for field_value in doc.field_values.iter() {
-                let value: OwnedValue = doc.extract_value(field_value.value_addr).unwrap().into();
+                let value: OwnedValue = doc.get_compact_doc_value(field_value.value_addr).into();
                 let value = serde_json::to_string(&value).unwrap();
                 field_value_set
                     .entry(Field::from_field_id(field_value.field as u32))

@@ -444,7 +392,19 @@ impl DocumentDeserialize for CompactDoc {
 #[derive(Debug, Clone, Copy)]
 pub struct CompactDocValue<'a> {
     container: &'a CompactDoc,
-    value: ValueAddr,
+    value_addr: ValueAddr,
 }
+impl PartialEq for CompactDocValue<'_> {
+    fn eq(&self, other: &Self) -> bool {
+        let value1: OwnedValue = (*self).into();
+        let value2: OwnedValue = (*other).into();
+        value1 == value2
+    }
+}
+impl<'a> From<CompactDocValue<'a>> for OwnedValue {
+    fn from(value: CompactDocValue) -> Self {
+        value.as_value().into()
+    }
+}
 impl<'a> Value<'a> for CompactDocValue<'a> {
     type ArrayIter = CompactDocArrayIter<'a>;

@@ -452,7 +412,67 @@ impl<'a> Value<'a> for CompactDocValue<'a> {
     type ObjectIter = CompactDocObjectIter<'a>;

     fn as_value(&self) -> ReferenceValue<'a, Self> {
-        self.container.extract_value(self.value).unwrap()
+        self.get_ref_value().unwrap()
     }
 }
+impl<'a> CompactDocValue<'a> {
+    fn get_ref_value(&self) -> io::Result<ReferenceValue<'a, CompactDocValue<'a>>> {
+        let addr = self.value_addr.val_addr;
+        match self.value_addr.type_id {
+            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
+            ValueType::Str => {
+                let str_ref = self.container.extract_str(addr);
+                Ok(ReferenceValueLeaf::Str(str_ref).into())
+            }
+            ValueType::Facet => {
+                let str_ref = self.container.extract_str(addr);
+                Ok(ReferenceValueLeaf::Facet(str_ref).into())
+            }
+            ValueType::Bytes => {
+                let data = self.container.extract_bytes(addr);
+                Ok(ReferenceValueLeaf::Bytes(data).into())
+            }
+            ValueType::U64 => self
+                .container
+                .read_from::<u64>(addr)
+                .map(ReferenceValueLeaf::U64)
+                .map(Into::into),
+            ValueType::I64 => self
+                .container
+                .read_from::<i64>(addr)
+                .map(ReferenceValueLeaf::I64)
+                .map(Into::into),
+            ValueType::F64 => self
+                .container
+                .read_from::<f64>(addr)
+                .map(ReferenceValueLeaf::F64)
+                .map(Into::into),
+            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(addr != 0).into()),
+            ValueType::Date => self
+                .container
+                .read_from::<i64>(addr)
+                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
+                .map(Into::into),
+            ValueType::IpAddr => self
+                .container
+                .read_from::<u128>(addr)
+                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
+                .map(Into::into),
+            ValueType::PreTokStr => self
+                .container
+                .read_from::<PreTokenizedString>(addr)
+                .map(Into::into)
+                .map(ReferenceValueLeaf::PreTokStr)
+                .map(Into::into),
+            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
+                self.container,
+                addr,
+            )?)),
+            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
+                self.container,
+                addr,
+            )?)),
+        }
+    }
+}

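The CompactDoc hunks above amount to one refactor: the eager `extract_value`, which decoded a `ReferenceValue` up front (and forced `unwrap`s at every call site), is replaced by a `Copy` handle holding only the container and a `ValueAddr`; decoding now happens lazily inside `as_value()`. A minimal sketch of that shape with simplified stand-in types (nothing here is tantivy's actual API):

```rust
#[derive(Clone, Copy)]
struct ValueAddr(usize);

struct Doc {
    payloads: Vec<String>,
}

// Cheap, copyable handle: container reference plus an address.
#[derive(Clone, Copy)]
struct DocValue<'a> {
    container: &'a Doc,
    value_addr: ValueAddr,
}

impl<'a> DocValue<'a> {
    // Decoding is deferred until the caller actually asks for the value.
    fn as_value(&self) -> &'a str {
        &self.container.payloads[self.value_addr.0]
    }
}

impl Doc {
    fn get_first(&self) -> Option<DocValue<'_>> {
        (!self.payloads.is_empty()).then(|| DocValue {
            container: self,
            value_addr: ValueAddr(0),
        })
    }
}

fn main() {
    let doc = Doc { payloads: vec!["hello".to_string()] };
    let handle = doc.get_first().unwrap(); // no decoding yet
    assert_eq!(handle.as_value(), "hello");
}
```

Because the handle is `Copy`, accessors like `get_all` can hand out values without decoding or allocating until one is actually read.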
@@ -537,7 +557,7 @@ impl BinarySerializable for ValueType {
         } else {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Invalid value type id: {}", num),
+                format!("Invalid value type id: {num}"),
             ));
         };
         Ok(type_id)

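For context, `num` in this hunk is a numeric tag being decoded back into a `ValueType`, with out-of-range tags rejected as `InvalidData`. A stand-in illustration of that guard (not the actual `BinarySerializable` impl):

```rust
use std::io;

#[derive(Debug, Clone, Copy)]
enum ValueType {
    Null,
    Str,
}

fn value_type_from_u8(num: u8) -> io::Result<ValueType> {
    match num {
        0 => Ok(ValueType::Null),
        1 => Ok(ValueType::Str),
        _ => Err(io::Error::new(
            io::ErrorKind::InvalidData,
            format!("Invalid value type id: {num}"),
        )),
    }
}

fn main() {
    assert!(value_type_from_u8(1).is_ok());
    assert!(value_type_from_u8(42).is_err());
}
```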
@@ -601,9 +621,9 @@ impl<'a> Iterator for CompactDocObjectIter<'a> {
             let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
             let value = CompactDocValue {
                 container: self.container,
-                value,
+                value_addr: value,
             };
-            return Some((key, value));
+            Some((key, value))
         }
     }

@@ -635,9 +655,9 @@ impl<'a> Iterator for CompactDocArrayIter<'a> {
             let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
             let value = CompactDocValue {
                 container: self.container,
-                value,
+                value_addr: value,
             };
-            return Some(value);
+            Some(value)
         }
     }

@@ -668,7 +688,7 @@ impl<'a> Iterator for FieldValueIterRef<'a> {
                 Field::from_field_id(field_value.field as u32),
                 CompactDocValue::<'a> {
                     container: self.container,
-                    value: field_value.value_addr,
+                    value_addr: field_value.value_addr,
                 },
             )
         })

@@ -58,9 +58,8 @@ where W: Write
         return Err(io::Error::new(
             io::ErrorKind::Other,
             format!(
-                "Unexpected number of entries written to serializer, expected {} entries, got \
-                 {} entries",
-                num_field_values, actual_length,
+                "Unexpected number of entries written to serializer, expected \
+                 {num_field_values} entries, got {actual_length} entries",
             ),
         ));
     }

@@ -659,9 +659,9 @@ mod tests {
         let schema = schema_builder.build();
         let doc_json = r#"{"date": "2019-10-12T07:20:50.52+02:00"}"#;
         let doc = TantivyDocument::parse_json(&schema, doc_json).unwrap();
-        let date = doc.get_first(date_field).unwrap();
+        let date = OwnedValue::from(doc.get_first(date_field).unwrap());
         // Time zone is converted to UTC
-        assert_eq!("Leaf(Date(2019-10-12T05:20:50.52Z))", format!("{date:?}"));
+        assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{date:?}"));
     }

     #[test]

@@ -60,7 +60,7 @@ pub mod tests {
     use crate::directory::{Directory, RamDirectory, WritePtr};
     use crate::fastfield::AliveBitSet;
     use crate::schema::{
-        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, STORED, TEXT,
+        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, Value, STORED, TEXT,
     };
     use crate::{Index, IndexWriter, Term};

@@ -122,6 +122,7 @@ pub mod tests {
                     .get::<TantivyDocument>(i)?
                     .get_first(field_title)
                     .unwrap()
+                    .as_value()
                     .as_str()
                     .unwrap(),
                 format!("Doc {i}")

@@ -133,6 +134,7 @@ pub mod tests {
             let title_content = doc
                 .get_first(field_title)
                 .unwrap()
+                .as_value()
                 .as_str()
                 .unwrap()
                 .to_string();

@@ -403,7 +403,7 @@ mod tests {

     use super::*;
     use crate::directory::RamDirectory;
-    use crate::schema::{Field, TantivyDocument};
+    use crate::schema::{Field, TantivyDocument, Value};
     use crate::store::tests::write_lorem_ipsum_store;
     use crate::store::Compressor;
     use crate::Directory;

@@ -411,7 +411,7 @@ mod tests {
     const BLOCK_SIZE: usize = 16_384;

     fn get_text_field<'a>(doc: &'a TantivyDocument, field: &'a Field) -> Option<&'a str> {
-        doc.get_first(*field).and_then(|f| f.as_str())
+        doc.get_first(*field).and_then(|f| f.as_value().as_str())
     }

     #[test]

@@ -93,7 +93,7 @@ fn open_fst_index(fst_file: FileSlice) -> io::Result<tantivy_fst::Map<OwnedBytes
     let fst = Fst::new(bytes).map_err(|err| {
         io::Error::new(
             io::ErrorKind::InvalidData,
-            format!("Fst data is corrupted: {:?}", err),
+            format!("Fst data is corrupted: {err:?}"),
         )
     })?;
     Ok(tantivy_fst::Map::from(fst))

@@ -95,7 +95,7 @@ fn test_term_dictionary_simple() -> crate::Result<()> {
 #[test]
 fn test_term_dictionary_stream() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();

@@ -156,7 +156,7 @@ fn test_stream_high_range_prefix_suffix() -> crate::Result<()> {
 #[test]
 fn test_stream_range() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();

@@ -96,7 +96,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap();
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
             };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str())

@@ -116,7 +116,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap(); // ok test
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
             };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str()) // ok test