Compare commits


1 Commit

Author: Adam Reichold
SHA1: 083aec5125
Message: Provide StoreReader::enumerate to simplify creation of secondary indexes
For secondary indexes, it is often necessary to read all documents, compute
some function on them, and associate the result with a document ID.

Currently, this requires something like

```rust
let reader = segment.get_store_reader(1)?;

for doc_id in segment.doc_ids_alive() {
    let doc = reader.get(doc_id)?;

    // Use doc and doc_id here ...
}
```

which can be simplified to

```rust
let reader = segment.get_store_reader(1)?;

for res in reader.enumerate(segment.alive_bitset()) {
    let (doc_id, doc) = res?;

    // Use doc and doc_id here ...
}
```

using the method proposed here.

(I added a new method instead of modifying `StoreReader::iter` to make the
change backwards compatible, i.e. possible to include in a point release.)
Date: 2024-05-23 08:30:30 +02:00
31 changed files with 169 additions and 194 deletions
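
To make the motivation above concrete, here is a minimal sketch of a secondary index built on top of the new method. This is an illustration, not code from the change set: the stored `u64` field and the `(segment ordinal, doc ID)` payload are hypothetical, and it assumes the post-change API in which `enumerate` takes the segment's alive bitset and the value returned by `get_first` exposes `as_u64()` directly, as the updated tests below do.

```rust
use std::collections::HashMap;

use tantivy::schema::Field;
use tantivy::{DocId, Searcher, TantivyDocument};

/// Map every value of a stored u64 field to the documents holding it --
/// one flavor of the "secondary index" described in the commit message.
/// (Hypothetical helper for illustration; not part of this change set.)
fn build_secondary_index(
    searcher: &Searcher,
    field: Field,
) -> tantivy::Result<HashMap<u64, Vec<(u32, DocId)>>> {
    let mut secondary: HashMap<u64, Vec<(u32, DocId)>> = HashMap::new();
    for (segment_ord, segment) in searcher.segment_readers().iter().enumerate() {
        let reader = segment.get_store_reader(1)?;
        // `enumerate` yields `(DocId, doc)` pairs and skips deleted documents
        // when handed the segment's alive bitset.
        for res in reader.enumerate::<TantivyDocument>(segment.alive_bitset()) {
            let (doc_id, doc) = res?;
            if let Some(val) = doc.get_first(field).and_then(|v| v.as_u64()) {
                secondary
                    .entry(val)
                    .or_default()
                    .push((segment_ord as u32, doc_id));
            }
        }
    }
    Ok(secondary)
}
```

Passing `segment.alive_bitset()` is what lets `enumerate` skip deleted documents without the caller juggling `doc_ids_alive()` and `get()` separately.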

```diff
@@ -16,7 +16,7 @@ exclude = ["benches/*.json", "benches/*.txt"]
 [dependencies]
 # Switch back to the non-forked oneshot crate once https://github.com/faern/oneshot/pull/35 is merged
-oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "b208f49" }
+oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "c10a3ba" }
 base64 = "0.22.0"
 byteorder = "1.4.3"
 crc32fast = "1.3.2"
```

```diff
@@ -349,7 +349,7 @@ fn get_test_index_bench(cardinality: Cardinality) -> tantivy::Result<Index> {
     let lg_norm = rand_distr::LogNormal::new(2.996f64, 0.979f64).unwrap();
     let many_terms_data = (0..150_000)
-        .map(|num| format!("author{num}"))
+        .map(|num| format!("author{}", num))
         .collect::<Vec<_>>();
     {
         let mut rng = StdRng::from_seed([1u8; 32]);
```

```diff
@@ -141,12 +141,12 @@ pub fn hdfs_index_benchmark(c: &mut Criterion) {
     let parse_json = false;
     // for parse_json in [false, true] {
     let suffix = if parse_json {
-        format!("{suffix}-with-json-parsing")
+        format!("{}-with-json-parsing", suffix)
     } else {
         suffix.to_string()
     };
-    let bench_name = format!("{prefix}{suffix}");
+    let bench_name = format!("{}{}", prefix, suffix);
     group.bench_function(bench_name, |b| {
         benchmark(b, HDFS_LOGS, schema.clone(), commit, parse_json, is_dynamic)
     });
```

```diff
@@ -19,14 +19,13 @@ use tantivy::{doc, Index, IndexWriter, ReloadPolicy};
 use tempfile::TempDir;
 fn main() -> tantivy::Result<()> {
-    // Normally you would use `MMapDirectory` instead to persist data on disk.
-    // https://docs.rs/tantivy/latest/tantivy/directory/struct.MmapDirectory.html
-    // But for this example, we will use a temporary directory `TempDir`.
+    // Let's create a temporary directory for the
+    // sake of this example
     let index_path = TempDir::new()?;
     // # Defining the schema
     //
-    // The Tantivy index requires a schema.
+    // The Tantivy index requires a very strict schema.
     // The schema declares which fields are in the index,
     // and for each field, its type and "the way it should
     // be indexed".
```

```diff
@@ -4,7 +4,7 @@
 use tantivy::collector::TopDocs;
 use tantivy::query::QueryParser;
-use tantivy::schema::{DateOptions, Document, Schema, Value, INDEXED, STORED, STRING};
+use tantivy::schema::{DateOptions, Document, Schema, INDEXED, STORED, STRING};
 use tantivy::{Index, IndexWriter, TantivyDocument};
 fn main() -> tantivy::Result<()> {
@@ -64,7 +64,6 @@ fn main() -> tantivy::Result<()> {
     assert!(retrieved_doc
         .get_first(occurred_at)
         .unwrap()
-        .as_value()
         .as_datetime()
         .is_some(),);
     assert_eq!(
```

```diff
@@ -61,7 +61,7 @@ fn main() -> tantivy::Result<()> {
                 debris of the winters flooding; and sycamores with mottled, white, recumbent \
                 limbs and branches that arch over the pool"
             ))?;
-            println!("add doc {i} from thread 1 - opstamp {opstamp}");
+            println!("add doc {} from thread 1 - opstamp {}", i, opstamp);
             thread::sleep(Duration::from_millis(20));
         }
         Result::<(), TantivyError>::Ok(())
@@ -82,7 +82,7 @@ fn main() -> tantivy::Result<()> {
                     body => "Some great book description..."
                 ))?
             };
-            println!("add doc {i} from thread 2 - opstamp {opstamp}");
+            println!("add doc {} from thread 2 - opstamp {}", i, opstamp);
             thread::sleep(Duration::from_millis(10));
         }
         Result::<(), TantivyError>::Ok(())
```

```diff
@@ -335,8 +335,8 @@ fn get_missing_val(
         }
         _ => {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "Missing value {missing:?} for field {field_name} is not supported for column \
-                 type {column_type:?}"
+                "Missing value {:?} for field {} is not supported for column type {:?}",
+                missing, field_name, column_type
             )));
         }
     };
@@ -403,7 +403,7 @@ fn get_dynamic_columns(
         .iter()
         .map(|h| h.open())
         .collect::<io::Result<_>>()?;
-    assert!(!ff_fields.is_empty(), "field {field_name} not found");
+    assert!(!ff_fields.is_empty(), "field {} not found", field_name);
     Ok(cols)
 }
```

```diff
@@ -357,7 +357,8 @@ impl SegmentTermCollector {
     ) -> crate::Result<Self> {
         if field_type == ColumnType::Bytes {
             return Err(TantivyError::InvalidArgument(format!(
-                "terms aggregation is not supported for column type {field_type:?}"
+                "terms aggregation is not supported for column type {:?}",
+                field_type
             )));
         }
         let term_buckets = TermBuckets::default();
```

```diff
@@ -131,8 +131,8 @@ impl<'de> Deserialize<'de> for KeyOrder {
         ))?;
         if key_order.next().is_some() {
             return Err(serde::de::Error::custom(format!(
-                "Expected exactly one key-value pair in sort parameter of top_hits, found \
-                 {key_order:?}"
+                "Expected exactly one key-value pair in sort parameter of top_hits, found {:?}",
+                key_order
             )));
         }
         Ok(Self { field, order })
@@ -144,22 +144,27 @@ fn globbed_string_to_regex(glob: &str) -> Result<Regex, crate::TantivyError> {
     // Replace `*` glob with `.*` regex
     let sanitized = format!("^{}$", regex::escape(glob).replace(r"\*", ".*"));
     Regex::new(&sanitized.replace('*', ".*")).map_err(|e| {
-        crate::TantivyError::SchemaError(format!("Invalid regex '{glob}' in docvalue_fields: {e}"))
+        crate::TantivyError::SchemaError(format!(
+            "Invalid regex '{}' in docvalue_fields: {}",
+            glob, e
+        ))
     })
 }
 fn use_doc_value_fields_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{parameter}` parameter is not supported, only `docvalue_fields` is supported in \
-             `top_hits` aggregation"
+            "The `{}` parameter is not supported, only `docvalue_fields` is supported in \
+             `top_hits` aggregation",
+            parameter
         )),
     ))
 }
 fn unsupported_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{parameter}` parameter is not supported in the `top_hits` aggregation"
+            "The `{}` parameter is not supported in the `top_hits` aggregation",
+            parameter
         )),
     ))
 }
@@ -212,7 +217,8 @@ impl TopHitsAggregation {
             .collect::<Vec<_>>();
         assert!(
             !fields.is_empty(),
-            "No fields matched the glob '{field}' in docvalue_fields"
+            "No fields matched the glob '{}' in docvalue_fields",
+            field
         );
         Ok(fields)
     })
@@ -248,7 +254,7 @@ impl TopHitsAggregation {
         .map(|field| {
             let accessors = accessors
                 .get(field)
-                .unwrap_or_else(|| panic!("field '{field}' not found in accessors"));
+                .unwrap_or_else(|| panic!("field '{}' not found in accessors", field));
             let values: Vec<FastFieldValue> = accessors
                 .iter()
```

```diff
@@ -158,14 +158,15 @@ use serde::de::{self, Visitor};
 use serde::{Deserialize, Deserializer, Serialize};
 fn parse_str_into_f64<E: de::Error>(value: &str) -> Result<f64, E> {
-    let parsed = value
-        .parse::<f64>()
-        .map_err(|_err| de::Error::custom(format!("Failed to parse f64 from string: {value:?}")))?;
+    let parsed = value.parse::<f64>().map_err(|_err| {
+        de::Error::custom(format!("Failed to parse f64 from string: {:?}", value))
+    })?;
     // Check if the parsed value is NaN or infinity
     if parsed.is_nan() || parsed.is_infinite() {
         Err(de::Error::custom(format!(
-            "Value is not a valid f64 (NaN or Infinity): {value:?}"
+            "Value is not a valid f64 (NaN or Infinity): {:?}",
+            value
         )))
     } else {
         Ok(parsed)
```

```diff
@@ -598,7 +598,7 @@ mod tests {
             let mid = n % 4;
             n /= 4;
             let leaf = n % 5;
-            Facet::from(&format!("/top{top}/mid{mid}/leaf{leaf}"))
+            Facet::from(&format!("/top{}/mid{}/leaf{}", top, mid, leaf))
         })
         .collect();
     for i in 0..num_facets * 10 {
@@ -737,7 +737,7 @@ mod tests {
     vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
         .into_iter()
         .flat_map(|(c, count)| {
-            let facet = Facet::from(&format!("/facet/{c}"));
+            let facet = Facet::from(&format!("/facet/{}", c));
             let doc = doc!(facet_field => facet);
             iter::repeat(doc).take(count)
         })
@@ -785,7 +785,7 @@ mod tests {
     let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
         .into_iter()
         .flat_map(|(c, count)| {
-            let facet = Facet::from(&format!("/facet/{c}"));
+            let facet = Facet::from(&format!("/facet/{}", c));
             let doc = doc!(facet_field => facet);
             iter::repeat(doc).take(count)
         })
```

```diff
@@ -338,14 +338,14 @@ mod tests {
     let mut term = Term::from_field_json_path(field, "attributes.color", false);
     term.append_type_and_str("red");
     assert_eq!(
-        format!("{term:?}"),
+        format!("{:?}", term),
         "Term(field=1, type=Json, path=attributes.color, type=Str, \"red\")"
     );
     let mut term = Term::from_field_json_path(field, "attributes.dimensions.width", false);
     term.append_type_and_fast_value(400i64);
     assert_eq!(
-        format!("{term:?}"),
+        format!("{:?}", term),
         "Term(field=1, type=Json, path=attributes.dimensions.width, type=I64, 400)"
     );
 }
```

```diff
@@ -566,7 +566,7 @@ mod tests {
     let mmap_directory = MmapDirectory::create_from_tempdir().unwrap();
     let num_paths = 10;
     let paths: Vec<PathBuf> = (0..num_paths)
-        .map(|i| PathBuf::from(&*format!("file_{i}")))
+        .map(|i| PathBuf::from(&*format!("file_{}", i)))
         .collect();
     {
         for path in &paths {
```

```diff
@@ -62,7 +62,7 @@ impl FacetReader {
 #[cfg(test)]
 mod tests {
-    use crate::schema::{Facet, FacetOptions, SchemaBuilder, Value, STORED};
+    use crate::schema::{Facet, FacetOptions, SchemaBuilder, STORED};
     use crate::{DocAddress, Index, IndexWriter, TantivyDocument};
     #[test]
@@ -88,9 +88,7 @@ mod tests {
     let doc = searcher
         .doc::<TantivyDocument>(DocAddress::new(0u32, 0u32))
         .unwrap();
-    let value = doc
-        .get_first(facet_field)
-        .and_then(|v| v.as_value().as_facet());
+    let value = doc.get_first(facet_field).and_then(|v| v.as_facet());
     assert_eq!(value, None);
 }
```

```diff
@@ -252,8 +252,9 @@ impl IndexBuilder {
         let field_type = entry.field_type().value_type();
         if !supported_field_types.contains(&field_type) {
             return Err(TantivyError::InvalidArgument(format!(
-                "Unsupported field type in sort_by_field: {field_type:?}. Supported field \
-                 types: {supported_field_types:?} ",
+                "Unsupported field type in sort_by_field: {:?}. Supported field types: \
+                 {:?} ",
+                field_type, supported_field_types,
             )));
         }
     }
```

```diff
@@ -318,14 +318,14 @@ impl SegmentReader {
             if create_canonical {
                 // Without expand dots enabled dots need to be escaped.
                 let escaped_json_path = json_path.replace('.', "\\.");
-                let full_path = format!("{field_name}.{escaped_json_path}");
+                let full_path = format!("{}.{}", field_name, escaped_json_path);
                 let full_path_unescaped = format!("{}.{}", field_name, &json_path);
                 map_to_canonical.insert(full_path_unescaped, full_path.to_string());
                 full_path
             } else {
                 // With expand dots enabled, we can use '.' instead of '\u{1}'.
                 json_path_sep_to_dot(&mut json_path);
-                format!("{field_name}.{json_path}")
+                format!("{}.{}", field_name, json_path)
             }
         };
         indexed_fields.extend(
```

```diff
@@ -816,7 +816,7 @@ mod tests {
     use crate::query::{BooleanQuery, Occur, Query, QueryParser, TermQuery};
     use crate::schema::{
         self, Facet, FacetOptions, IndexRecordOption, IpAddrOptions, NumericOptions, Schema,
-        TextFieldIndexing, TextOptions, Value, FAST, INDEXED, STORED, STRING, TEXT,
+        TextFieldIndexing, TextOptions, FAST, INDEXED, STORED, STRING, TEXT,
     };
     use crate::store::DOCSTORE_CACHE_CAPACITY;
     use crate::{
@@ -1979,13 +1979,7 @@ mod tests {
             .unwrap();
         // test store iterator
         for doc in store_reader.iter::<TantivyDocument>(segment_reader.alive_bitset()) {
-            let id = doc
-                .unwrap()
-                .get_first(id_field)
-                .unwrap()
-                .as_value()
-                .as_u64()
-                .unwrap();
+            let id = doc.unwrap().get_first(id_field).unwrap().as_u64().unwrap();
            assert!(expected_ids_and_num_occurrences.contains_key(&id));
        }
        // test store random access
```

```diff
@@ -696,7 +696,7 @@ impl IndexMerger {
         for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
             let doc_bytes_it = &mut document_iterators[old_doc_addr.segment_ord as usize];
             if let Some(doc_bytes_res) = doc_bytes_it.next() {
-                let doc_bytes = doc_bytes_res?;
+                let (_, doc_bytes) = doc_bytes_res?;
                 store_writer.store_bytes(&doc_bytes)?;
             } else {
                 return Err(DataCorruption::comment_only(format!(
@@ -728,7 +728,7 @@ impl IndexMerger {
             || store_reader.decompressor() != store_writer.compressor().into()
         {
             for doc_bytes_res in store_reader.iter_raw(reader.alive_bitset()) {
-                let doc_bytes = doc_bytes_res?;
+                let (_, doc_bytes) = doc_bytes_res?;
                 store_writer.store_bytes(&doc_bytes)?;
             }
         } else {
@@ -797,7 +797,7 @@ mod tests {
     use crate::query::{AllQuery, BooleanQuery, EnableScoring, Scorer, TermQuery};
     use crate::schema::{
         Facet, FacetOptions, IndexRecordOption, NumericOptions, TantivyDocument, Term,
-        TextFieldIndexing, Value, INDEXED, TEXT,
+        TextFieldIndexing, INDEXED, TEXT,
     };
     use crate::time::OffsetDateTime;
     use crate::{
@@ -909,24 +909,15 @@ mod tests {
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 0))?;
-            assert_eq!(
-                doc.get_first(text_field).unwrap().as_value().as_str(),
-                Some("af b")
-            );
+            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("af b"));
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 1))?;
-            assert_eq!(
-                doc.get_first(text_field).unwrap().as_value().as_str(),
-                Some("a b c")
-            );
+            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c"));
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 2))?;
-            assert_eq!(
-                doc.get_first(text_field).unwrap().as_value().as_str(),
-                Some("a b c d")
-            );
+            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c d"));
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 3))?;
```

```diff
@@ -7,7 +7,7 @@ mod tests {
     use crate::query::QueryParser;
     use crate::schema::{
         self, BytesOptions, Facet, FacetOptions, IndexRecordOption, NumericOptions,
-        TextFieldIndexing, TextOptions, Value,
+        TextFieldIndexing, TextOptions,
     };
     use crate::{
         DocAddress, DocSet, IndexSettings, IndexSortByField, IndexWriter, Order, TantivyDocument,
@@ -280,16 +280,13 @@ mod tests {
         .doc::<TantivyDocument>(DocAddress::new(0, blubber_pos))
         .unwrap();
     assert_eq!(
-        doc.get_first(my_text_field).unwrap().as_value().as_str(),
+        doc.get_first(my_text_field).unwrap().as_str(),
         Some("blubber")
     );
     let doc = searcher
         .doc::<TantivyDocument>(DocAddress::new(0, 0))
         .unwrap();
-    assert_eq!(
-        doc.get_first(int_field).unwrap().as_value().as_u64(),
-        Some(1000)
-    );
+    assert_eq!(doc.get_first(int_field).unwrap().as_u64(), Some(1000));
 }
 }
```

```diff
@@ -216,7 +216,7 @@ mod tests_mmap {
     let test_query = |query_str: &str| {
         let query = parse_query.parse_query(query_str).unwrap();
         let num_docs = searcher.search(&query, &Count).unwrap();
-        assert_eq!(num_docs, 1, "{query_str}");
+        assert_eq!(num_docs, 1, "{}", query_str);
     };
     test_query(format!("json.{field_name_out}:test1").as_str());
     test_query(format!("json.a{field_name_out}:test2").as_str());
@@ -590,10 +590,10 @@ mod tests_mmap {
     let query_parser = QueryParser::for_index(&index, vec![]);
     // Test if field name can be queried
     for (indexed_field, val) in fields_and_vals.iter() {
-        let query_str = &format!("{indexed_field}:{val}");
+        let query_str = &format!("{}:{}", indexed_field, val);
         let query = query_parser.parse_query(query_str).unwrap();
         let count_docs = searcher.search(&*query, &TopDocs::with_limit(2)).unwrap();
-        assert!(!count_docs.is_empty(), "{indexed_field}:{val}");
+        assert!(!count_docs.is_empty(), "{}:{}", indexed_field, val);
     }
     // Test if field name can be used for aggregation
     for (field_name, val) in fields_and_vals.iter() {
```

```diff
@@ -500,8 +500,8 @@ mod tests {
     use crate::postings::{Postings, TermInfo};
     use crate::query::{PhraseQuery, QueryParser};
     use crate::schema::{
-        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, Value,
-        STORED, STRING, TEXT,
+        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, STORED,
+        STRING, TEXT,
     };
     use crate::store::{Compressor, StoreReader, StoreWriter};
     use crate::time::format_description::well_known::Rfc3339;
@@ -555,12 +555,9 @@ mod tests {
     let doc = reader.get::<TantivyDocument>(0).unwrap();
     assert_eq!(doc.field_values().count(), 2);
+    assert_eq!(doc.get_all(text_field).next().unwrap().as_str(), Some("A"));
     assert_eq!(
-        doc.get_all(text_field).next().unwrap().as_value().as_str(),
-        Some("A")
-    );
-    assert_eq!(
-        doc.get_all(text_field).nth(1).unwrap().as_value().as_str(),
+        doc.get_all(text_field).nth(1).unwrap().as_str(),
         Some("title")
     );
 }
```

```diff
@@ -138,7 +138,8 @@ impl FuzzyTermQuery {
         if json_path_type != Type::Str {
             return Err(InvalidArgument(format!(
                 "The fuzzy term query requires a string path type for a json term. Found \
-                 {json_path_type:?}"
+                 {:?}",
+                json_path_type
             )));
         }
     }
```

```diff
@@ -185,7 +185,7 @@ mod test {
         Err(crate::TantivyError::InvalidArgument(msg)) => {
             assert!(msg.contains("error: unclosed group"))
         }
-        res => panic!("unexpected result: {res:?}"),
+        res => panic!("unexpected result: {:?}", res),
     }
 }
 }
```

```diff
@@ -157,24 +157,29 @@ impl CompactDoc {
     }
     /// field_values accessor
-    pub fn field_values(&self) -> impl Iterator<Item = (Field, CompactDocValue<'_>)> {
+    pub fn field_values(
+        &self,
+    ) -> impl Iterator<Item = (Field, ReferenceValue<'_, CompactDocValue<'_>>)> {
         self.field_values.iter().map(|field_val| {
             let field = Field::from_field_id(field_val.field as u32);
-            let val = self.get_compact_doc_value(field_val.value_addr);
+            let val = self.extract_value(field_val.value_addr).unwrap();
             (field, val)
         })
     }
     /// Returns all of the `ReferenceValue`s associated the given field
-    pub fn get_all(&self, field: Field) -> impl Iterator<Item = CompactDocValue<'_>> + '_ {
+    pub fn get_all(
+        &self,
+        field: Field,
+    ) -> impl Iterator<Item = ReferenceValue<'_, CompactDocValue<'_>>> + '_ {
         self.field_values
             .iter()
             .filter(move |field_value| Field::from_field_id(field_value.field as u32) == field)
-            .map(|val| self.get_compact_doc_value(val.value_addr))
+            .map(|val| self.extract_value(val.value_addr).unwrap())
     }
     /// Returns the first `ReferenceValue` associated the given field
-    pub fn get_first(&self, field: Field) -> Option<CompactDocValue<'_>> {
+    pub fn get_first(&self, field: Field) -> Option<ReferenceValue<'_, CompactDocValue<'_>>> {
         self.get_all(field).next()
     }
@@ -294,11 +299,58 @@ impl CompactDoc {
         }
     }
-    /// Get CompactDocValue for address
-    fn get_compact_doc_value(&self, value_addr: ValueAddr) -> CompactDocValue<'_> {
-        CompactDocValue {
-            container: self,
-            value_addr,
+    fn extract_value(
+        &self,
+        ref_value: ValueAddr,
+    ) -> io::Result<ReferenceValue<'_, CompactDocValue<'_>>> {
+        match ref_value.type_id {
+            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
+            ValueType::Str => {
+                let str_ref = self.extract_str(ref_value.val_addr);
+                Ok(ReferenceValueLeaf::Str(str_ref).into())
+            }
+            ValueType::Facet => {
+                let str_ref = self.extract_str(ref_value.val_addr);
+                Ok(ReferenceValueLeaf::Facet(str_ref).into())
+            }
+            ValueType::Bytes => {
+                let data = self.extract_bytes(ref_value.val_addr);
+                Ok(ReferenceValueLeaf::Bytes(data).into())
+            }
+            ValueType::U64 => self
+                .read_from::<u64>(ref_value.val_addr)
+                .map(ReferenceValueLeaf::U64)
+                .map(Into::into),
+            ValueType::I64 => self
+                .read_from::<i64>(ref_value.val_addr)
+                .map(ReferenceValueLeaf::I64)
+                .map(Into::into),
+            ValueType::F64 => self
+                .read_from::<f64>(ref_value.val_addr)
+                .map(ReferenceValueLeaf::F64)
+                .map(Into::into),
+            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(ref_value.val_addr != 0).into()),
+            ValueType::Date => self
+                .read_from::<i64>(ref_value.val_addr)
+                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
+                .map(Into::into),
+            ValueType::IpAddr => self
+                .read_from::<u128>(ref_value.val_addr)
+                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
+                .map(Into::into),
+            ValueType::PreTokStr => self
+                .read_from::<PreTokenizedString>(ref_value.val_addr)
+                .map(Into::into)
+                .map(ReferenceValueLeaf::PreTokStr)
+                .map(Into::into),
+            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
+                self,
+                ref_value.val_addr,
+            )?)),
+            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
+                self,
+                ref_value.val_addr,
+            )?)),
         }
     }
@@ -358,7 +410,7 @@ impl PartialEq for CompactDoc {
         let convert_to_comparable_map = |doc: &CompactDoc| {
             let mut field_value_set: HashMap<Field, HashSet<String>> = Default::default();
             for field_value in doc.field_values.iter() {
-                let value: OwnedValue = doc.get_compact_doc_value(field_value.value_addr).into();
+                let value: OwnedValue = doc.extract_value(field_value.value_addr).unwrap().into();
                 let value = serde_json::to_string(&value).unwrap();
                 field_value_set
                     .entry(Field::from_field_id(field_value.field as u32))
@@ -392,19 +444,7 @@ impl DocumentDeserialize for CompactDoc {
 #[derive(Debug, Clone, Copy)]
 pub struct CompactDocValue<'a> {
     container: &'a CompactDoc,
-    value_addr: ValueAddr,
-}
-impl PartialEq for CompactDocValue<'_> {
-    fn eq(&self, other: &Self) -> bool {
-        let value1: OwnedValue = (*self).into();
-        let value2: OwnedValue = (*other).into();
-        value1 == value2
-    }
-}
-impl<'a> From<CompactDocValue<'a>> for OwnedValue {
-    fn from(value: CompactDocValue) -> Self {
-        value.as_value().into()
-    }
+    value: ValueAddr,
 }
 impl<'a> Value<'a> for CompactDocValue<'a> {
     type ArrayIter = CompactDocArrayIter<'a>;
@@ -412,67 +452,7 @@ impl<'a> Value<'a> for CompactDocValue<'a> {
     type ObjectIter = CompactDocObjectIter<'a>;
     fn as_value(&self) -> ReferenceValue<'a, Self> {
-        self.get_ref_value().unwrap()
-    }
-}
-impl<'a> CompactDocValue<'a> {
-    fn get_ref_value(&self) -> io::Result<ReferenceValue<'a, CompactDocValue<'a>>> {
-        let addr = self.value_addr.val_addr;
-        match self.value_addr.type_id {
-            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
-            ValueType::Str => {
-                let str_ref = self.container.extract_str(addr);
-                Ok(ReferenceValueLeaf::Str(str_ref).into())
-            }
-            ValueType::Facet => {
-                let str_ref = self.container.extract_str(addr);
-                Ok(ReferenceValueLeaf::Facet(str_ref).into())
-            }
-            ValueType::Bytes => {
-                let data = self.container.extract_bytes(addr);
-                Ok(ReferenceValueLeaf::Bytes(data).into())
-            }
-            ValueType::U64 => self
-                .container
-                .read_from::<u64>(addr)
-                .map(ReferenceValueLeaf::U64)
-                .map(Into::into),
-            ValueType::I64 => self
-                .container
-                .read_from::<i64>(addr)
-                .map(ReferenceValueLeaf::I64)
-                .map(Into::into),
-            ValueType::F64 => self
-                .container
-                .read_from::<f64>(addr)
-                .map(ReferenceValueLeaf::F64)
-                .map(Into::into),
-            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(addr != 0).into()),
-            ValueType::Date => self
-                .container
-                .read_from::<i64>(addr)
-                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
-                .map(Into::into),
-            ValueType::IpAddr => self
-                .container
-                .read_from::<u128>(addr)
-                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
-                .map(Into::into),
-            ValueType::PreTokStr => self
-                .container
-                .read_from::<PreTokenizedString>(addr)
-                .map(Into::into)
-                .map(ReferenceValueLeaf::PreTokStr)
-                .map(Into::into),
-            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
-                self.container,
-                addr,
-            )?)),
-            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
-                self.container,
-                addr,
-            )?)),
-        }
+        self.container.extract_value(self.value).unwrap()
     }
 }
@@ -557,7 +537,7 @@ impl BinarySerializable for ValueType {
         } else {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Invalid value type id: {num}"),
+                format!("Invalid value type id: {}", num),
             ));
         };
         Ok(type_id)
@@ -621,9 +601,9 @@ impl<'a> Iterator for CompactDocObjectIter<'a> {
             let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
             let value = CompactDocValue {
                 container: self.container,
-                value_addr: value,
+                value,
             };
-            Some((key, value))
+            return Some((key, value));
         }
     }
@@ -655,9 +635,9 @@ impl<'a> Iterator for CompactDocArrayIter<'a> {
             let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
             let value = CompactDocValue {
                 container: self.container,
-                value_addr: value,
+                value,
             };
-            Some(value)
+            return Some(value);
         }
     }
@@ -688,7 +668,7 @@ impl<'a> Iterator for FieldValueIterRef<'a> {
                 Field::from_field_id(field_value.field as u32),
                 CompactDocValue::<'a> {
                     container: self.container,
-                    value_addr: field_value.value_addr,
+                    value: field_value.value_addr,
                 },
             )
         })
```

```diff
@@ -58,8 +58,9 @@ where W: Write
         return Err(io::Error::new(
             io::ErrorKind::Other,
             format!(
-                "Unexpected number of entries written to serializer, expected \
-                 {num_field_values} entries, got {actual_length} entries",
+                "Unexpected number of entries written to serializer, expected {} entries, got \
+                 {} entries",
+                num_field_values, actual_length,
             ),
         ));
     }
```

```diff
@@ -659,9 +659,9 @@ mod tests {
     let schema = schema_builder.build();
     let doc_json = r#"{"date": "2019-10-12T07:20:50.52+02:00"}"#;
     let doc = TantivyDocument::parse_json(&schema, doc_json).unwrap();
-    let date = OwnedValue::from(doc.get_first(date_field).unwrap());
+    let date = doc.get_first(date_field).unwrap();
     // Time zone is converted to UTC
-    assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{date:?}"));
+    assert_eq!("Leaf(Date(2019-10-12T05:20:50.52Z))", format!("{date:?}"));
 }
 #[test]
```

```diff
@@ -60,7 +60,7 @@ pub mod tests {
     use crate::directory::{Directory, RamDirectory, WritePtr};
     use crate::fastfield::AliveBitSet;
     use crate::schema::{
-        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, Value, STORED, TEXT,
+        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, STORED, TEXT,
     };
     use crate::{Index, IndexWriter, Term};
@@ -122,7 +122,6 @@ pub mod tests {
             .get::<TantivyDocument>(i)?
             .get_first(field_title)
             .unwrap()
-            .as_value()
             .as_str()
             .unwrap(),
         format!("Doc {i}")
@@ -134,7 +133,6 @@ pub mod tests {
     let title_content = doc
         .get_first(field_title)
         .unwrap()
-        .as_value()
        .as_str()
        .unwrap()
        .to_string();
```

```diff
@@ -241,12 +241,23 @@ impl StoreReader {
         &'b self,
         alive_bitset: Option<&'a AliveBitSet>,
     ) -> impl Iterator<Item = crate::Result<D>> + 'b {
+        self.enumerate(alive_bitset)
+            .map(|res| res.map(|(_, doc)| doc))
+    }
+
+    /// A variant of [`iter`][Self::iter] which also yields document ID.
+    pub fn enumerate<'a: 'b, 'b, D: DocumentDeserialize>(
+        &'b self,
+        alive_bitset: Option<&'a AliveBitSet>,
+    ) -> impl Iterator<Item = crate::Result<(DocId, D)>> + 'b {
         self.iter_raw(alive_bitset).map(|doc_bytes_res| {
-            let mut doc_bytes = doc_bytes_res?;
+            let (doc_id, mut doc_bytes) = doc_bytes_res?;
             let deserializer = BinaryDocumentDeserializer::from_reader(&mut doc_bytes)
                 .map_err(crate::TantivyError::from)?;
-            D::deserialize(deserializer).map_err(crate::TantivyError::from)
+            let doc = D::deserialize(deserializer).map_err(crate::TantivyError::from)?;
+            Ok((doc_id, doc))
         })
     }
@@ -256,7 +267,7 @@ impl StoreReader {
     pub(crate) fn iter_raw<'a: 'b, 'b>(
         &'b self,
         alive_bitset: Option<&'a AliveBitSet>,
-    ) -> impl Iterator<Item = crate::Result<OwnedBytes>> + 'b {
+    ) -> impl Iterator<Item = crate::Result<(DocId, OwnedBytes)>> + 'b {
         let last_doc_id = self
             .block_checkpoints()
             .last()
@@ -284,14 +295,14 @@ impl StoreReader {
                 let alive = alive_bitset.map_or(true, |bitset| bitset.is_alive(doc_id));
                 let res = if alive {
-                    Some((curr_block.clone(), doc_pos))
+                    Some((doc_id, curr_block.clone(), doc_pos))
                 } else {
                     None
                 };
                 doc_pos += 1;
                 res
             })
-            .map(move |(block, doc_pos)| {
+            .map(move |(doc_id, block, doc_pos)| {
                 let block = block
                     .ok_or_else(|| {
                         DataCorruption::comment_only(
@@ -304,7 +315,7 @@ impl StoreReader {
                 })?;
                 let range = block_read_index(&block, doc_pos)?;
-                Ok(block.slice(range))
+                Ok((doc_id, block.slice(range)))
             })
     }
@@ -403,7 +414,7 @@ mod tests {
     use super::*;
     use crate::directory::RamDirectory;
-    use crate::schema::{Field, TantivyDocument, Value};
+    use crate::schema::{Field, TantivyDocument};
     use crate::store::tests::write_lorem_ipsum_store;
     use crate::store::Compressor;
     use crate::Directory;
@@ -411,7 +422,7 @@ mod tests {
     const BLOCK_SIZE: usize = 16_384;
     fn get_text_field<'a>(doc: &'a TantivyDocument, field: &'a Field) -> Option<&'a str> {
-        doc.get_first(*field).and_then(|f| f.as_value().as_str())
+        doc.get_first(*field).and_then(|f| f.as_str())
    }
    #[test]
```
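
As a reading aid for the hunk above: after this change `iter` is a thin wrapper that projects the doc IDs out of `enumerate`, and `iter_raw` threads each `DocId` through the block lookup. Below is a hedged, test-style sketch of the invariant this buys, assuming only the signatures shown in this diff; the helper itself is hypothetical.

```rust
use tantivy::fastfield::AliveBitSet;
use tantivy::store::StoreReader;
use tantivy::{DocId, TantivyDocument};

/// Sketch: `enumerate` should yield strictly increasing doc IDs and skip
/// any document marked deleted in `alive_bitset`; `iter` is the same
/// stream with the IDs dropped.
fn check_enumerate_order(
    store_reader: &StoreReader,
    alive_bitset: Option<&AliveBitSet>,
) -> tantivy::Result<()> {
    let ids: Vec<DocId> = store_reader
        .enumerate::<TantivyDocument>(alive_bitset)
        .map(|res| res.map(|(doc_id, _doc)| doc_id))
        .collect::<tantivy::Result<_>>()?;
    // IDs arrive in increasing order...
    assert!(ids.windows(2).all(|w| w[0] < w[1]));
    // ...and every yielded ID is alive when a bitset was supplied.
    assert!(alive_bitset.map_or(true, |bs| ids.iter().all(|&id| bs.is_alive(id))));
    Ok(())
}
```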

```diff
@@ -93,7 +93,7 @@ fn open_fst_index(fst_file: FileSlice) -> io::Result<tantivy_fst::Map<OwnedBytes
     let fst = Fst::new(bytes).map_err(|err| {
         io::Error::new(
             io::ErrorKind::InvalidData,
-            format!("Fst data is corrupted: {err:?}"),
+            format!("Fst data is corrupted: {:?}", err),
         )
     })?;
     Ok(tantivy_fst::Map::from(fst))
```

```diff
@@ -95,7 +95,7 @@ fn test_term_dictionary_simple() -> crate::Result<()> {
 #[test]
 fn test_term_dictionary_stream() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{i:0>6}"), i))
+        .map(|i| (format!("doc{:0>6}", i), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
@@ -156,7 +156,7 @@ fn test_stream_high_range_prefix_suffix() -> crate::Result<()> {
 #[test]
 fn test_stream_range() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{i:0>6}"), i))
+        .map(|i| (format!("doc{:0>6}", i), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
```

```diff
@@ -96,7 +96,7 @@ mod tests {
     {
         let mut add_token = |token: &Token| {
             let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap();
-            tokens.push(format!("{facet}"));
+            tokens.push(format!("{}", facet));
         };
         FacetTokenizer::default()
             .token_stream(facet.encoded_str())
@@ -116,7 +116,7 @@ mod tests {
     {
         let mut add_token = |token: &Token| {
             let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap(); // ok test
-            tokens.push(format!("{facet}"));
+            tokens.push(format!("{}", facet));
         };
         FacetTokenizer::default()
             .token_stream(facet.encoded_str()) // ok test
```