Compare commits


4 Commits

Author SHA1 Message Date
Pascal Seitz
f5a716e827 update basic_search example 2024-05-30 21:56:22 +08:00
Meng Zhang
4143d31865 chore: fix build as the rev is gone (#2417) 2024-05-29 09:49:16 +08:00
Hamir Mahal
0c634adbe1 style: simplify strings with string interpolation (#2412)
* style: simplify strings with string interpolation

* fix: formatting
2024-05-27 09:16:47 +02:00
PSeitz
2e3641c2ae return CompactDocValue instead of trait (#2410)
The CompactDocValue is easier to handle than the trait in some cases like comparison
and conversion
2024-05-27 07:33:50 +02:00
34 changed files with 194 additions and 238 deletions
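As a reading aid for #2410 above: after this change, `get_first`/`get_all` on a document return a `CompactDocValue` rather than a `ReferenceValue`, and callers go through `as_value()` (see the test updates in the diffs below). A minimal sketch, assuming `doc` is a retrieved `TantivyDocument` and `title` is a hypothetical `Field`:

use tantivy::schema::Value; // brings as_value()/as_str() into scope

// Mirrors the pattern used in the updated tests below.
let title_text = doc
    .get_first(title)
    .and_then(|v| v.as_value().as_str());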

View File

@@ -16,7 +16,7 @@ exclude = ["benches/*.json", "benches/*.txt"]
 [dependencies]
 # Switch back to the non-forked oneshot crate once https://github.com/faern/oneshot/pull/35 is merged
-oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "c10a3ba" }
+oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "b208f49" }
 base64 = "0.22.0"
 byteorder = "1.4.3"
 crc32fast = "1.3.2"

View File

@@ -349,7 +349,7 @@ fn get_test_index_bench(cardinality: Cardinality) -> tantivy::Result<Index> {
     let lg_norm = rand_distr::LogNormal::new(2.996f64, 0.979f64).unwrap();
     let many_terms_data = (0..150_000)
-        .map(|num| format!("author{}", num))
+        .map(|num| format!("author{num}"))
         .collect::<Vec<_>>();
     {
         let mut rng = StdRng::from_seed([1u8; 32]);
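Most of #2412 follows the pattern in this hunk: positional `format!` arguments become inline captured identifiers (available since Rust 2021). A standalone sketch with a hypothetical variable:

let num = 42;
// Positional argument (old style) and inline capture (new style) produce the same string.
assert_eq!(format!("author{}", num), format!("author{num}"));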

View File

@@ -141,12 +141,12 @@ pub fn hdfs_index_benchmark(c: &mut Criterion) {
     let parse_json = false;
     // for parse_json in [false, true] {
     let suffix = if parse_json {
-        format!("{}-with-json-parsing", suffix)
+        format!("{suffix}-with-json-parsing")
     } else {
         suffix.to_string()
     };
-    let bench_name = format!("{}{}", prefix, suffix);
+    let bench_name = format!("{prefix}{suffix}");
     group.bench_function(bench_name, |b| {
         benchmark(b, HDFS_LOGS, schema.clone(), commit, parse_json, is_dynamic)
     });

View File

@@ -19,13 +19,14 @@ use tantivy::{doc, Index, IndexWriter, ReloadPolicy};
 use tempfile::TempDir;
 fn main() -> tantivy::Result<()> {
-    // Let's create a temporary directory for the
-    // sake of this example
+    // Normally you would use `MMapDirectory` instead to persist data on disk.
+    // https://docs.rs/tantivy/latest/tantivy/directory/struct.MmapDirectory.html
+    // But for this example, we will use a temporary directory `TempDir`.
     let index_path = TempDir::new()?;
     // # Defining the schema
     //
-    // The Tantivy index requires a very strict schema.
+    // The Tantivy index requires a schema.
     // The schema declares which fields are in the index,
     // and for each field, its type and "the way it should
     // be indexed".
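A hedged sketch of the persistent variant the new comment points to, assuming a schema built as in the rest of the example and a hypothetical path (the directory must already exist):

use tantivy::directory::MmapDirectory;
use tantivy::Index;

let dir = MmapDirectory::open("/path/to/index")?;         // persists segments on disk
let index = Index::open_or_create(dir, schema.clone())?;  // reuses the index if already present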

View File

@@ -4,7 +4,7 @@
 use tantivy::collector::TopDocs;
 use tantivy::query::QueryParser;
-use tantivy::schema::{DateOptions, Document, Schema, INDEXED, STORED, STRING};
+use tantivy::schema::{DateOptions, Document, Schema, Value, INDEXED, STORED, STRING};
 use tantivy::{Index, IndexWriter, TantivyDocument};
 fn main() -> tantivy::Result<()> {
@@ -64,6 +64,7 @@ fn main() -> tantivy::Result<()> {
     assert!(retrieved_doc
         .get_first(occurred_at)
         .unwrap()
+        .as_value()
         .as_datetime()
         .is_some(),);
     assert_eq!(

View File

@@ -61,7 +61,7 @@ fn main() -> tantivy::Result<()> {
                 debris of the winters flooding; and sycamores with mottled, white, recumbent \
                 limbs and branches that arch over the pool"
             ))?;
-            println!("add doc {} from thread 1 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 1 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(20));
         }
         Result::<(), TantivyError>::Ok(())
@@ -82,7 +82,7 @@ fn main() -> tantivy::Result<()> {
                     body => "Some great book description..."
                 ))?
             };
-            println!("add doc {} from thread 2 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 2 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(10));
         }
         Result::<(), TantivyError>::Ok(())

View File

@@ -335,8 +335,8 @@ fn get_missing_val(
         }
         _ => {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "Missing value {:?} for field {} is not supported for column type {:?}",
-                missing, field_name, column_type
+                "Missing value {missing:?} for field {field_name} is not supported for column \
+                 type {column_type:?}"
             )));
         }
     };
@@ -403,7 +403,7 @@ fn get_dynamic_columns(
         .iter()
         .map(|h| h.open())
         .collect::<io::Result<_>>()?;
-    assert!(!ff_fields.is_empty(), "field {} not found", field_name);
+    assert!(!ff_fields.is_empty(), "field {field_name} not found");
     Ok(cols)
 }

View File

@@ -357,8 +357,7 @@ impl SegmentTermCollector {
     ) -> crate::Result<Self> {
         if field_type == ColumnType::Bytes {
             return Err(TantivyError::InvalidArgument(format!(
-                "terms aggregation is not supported for column type {:?}",
-                field_type
+                "terms aggregation is not supported for column type {field_type:?}"
             )));
         }
         let term_buckets = TermBuckets::default();

View File

@@ -131,8 +131,8 @@ impl<'de> Deserialize<'de> for KeyOrder {
             ))?;
         if key_order.next().is_some() {
             return Err(serde::de::Error::custom(format!(
-                "Expected exactly one key-value pair in sort parameter of top_hits, found {:?}",
-                key_order
+                "Expected exactly one key-value pair in sort parameter of top_hits, found \
+                 {key_order:?}"
             )));
         }
         Ok(Self { field, order })
@@ -144,27 +144,22 @@ fn globbed_string_to_regex(glob: &str) -> Result<Regex, crate::TantivyError> {
     // Replace `*` glob with `.*` regex
     let sanitized = format!("^{}$", regex::escape(glob).replace(r"\*", ".*"));
     Regex::new(&sanitized.replace('*', ".*")).map_err(|e| {
-        crate::TantivyError::SchemaError(format!(
-            "Invalid regex '{}' in docvalue_fields: {}",
-            glob, e
-        ))
+        crate::TantivyError::SchemaError(format!("Invalid regex '{glob}' in docvalue_fields: {e}"))
     })
 }
 fn use_doc_value_fields_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported, only `docvalue_fields` is supported in \
-             `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported, only `docvalue_fields` is supported in \
+             `top_hits` aggregation"
         )),
     ))
 }
 fn unsupported_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported in the `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported in the `top_hits` aggregation"
         )),
     ))
 }
@@ -217,8 +212,7 @@ impl TopHitsAggregation {
             .collect::<Vec<_>>();
         assert!(
             !fields.is_empty(),
-            "No fields matched the glob '{}' in docvalue_fields",
-            field
+            "No fields matched the glob '{field}' in docvalue_fields"
         );
         Ok(fields)
     })
@@ -254,7 +248,7 @@ impl TopHitsAggregation {
             .map(|field| {
                 let accessors = accessors
                     .get(field)
-                    .unwrap_or_else(|| panic!("field '{}' not found in accessors", field));
+                    .unwrap_or_else(|| panic!("field '{field}' not found in accessors"));
                 let values: Vec<FastFieldValue> = accessors
                     .iter()

View File

@@ -158,15 +158,14 @@ use serde::de::{self, Visitor};
 use serde::de::{self, Visitor};
 use serde::{Deserialize, Deserializer, Serialize};
 fn parse_str_into_f64<E: de::Error>(value: &str) -> Result<f64, E> {
-    let parsed = value.parse::<f64>().map_err(|_err| {
-        de::Error::custom(format!("Failed to parse f64 from string: {:?}", value))
-    })?;
+    let parsed = value
+        .parse::<f64>()
+        .map_err(|_err| de::Error::custom(format!("Failed to parse f64 from string: {value:?}")))?;
     // Check if the parsed value is NaN or infinity
     if parsed.is_nan() || parsed.is_infinite() {
         Err(de::Error::custom(format!(
-            "Value is not a valid f64 (NaN or Infinity): {:?}",
-            value
+            "Value is not a valid f64 (NaN or Infinity): {value:?}"
         )))
     } else {
         Ok(parsed)

View File

@@ -598,7 +598,7 @@ mod tests {
                 let mid = n % 4;
                 n /= 4;
                 let leaf = n % 5;
-                Facet::from(&format!("/top{}/mid{}/leaf{}", top, mid, leaf))
+                Facet::from(&format!("/top{top}/mid{mid}/leaf{leaf}"))
             })
             .collect();
         for i in 0..num_facets * 10 {
@@ -737,7 +737,7 @@ mod tests {
         vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
             .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })
@@ -785,7 +785,7 @@ mod tests {
         let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
             .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })

View File

@@ -338,14 +338,14 @@ mod tests {
         let mut term = Term::from_field_json_path(field, "attributes.color", false);
         term.append_type_and_str("red");
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.color, type=Str, \"red\")"
         );
         let mut term = Term::from_field_json_path(field, "attributes.dimensions.width", false);
         term.append_type_and_fast_value(400i64);
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.dimensions.width, type=I64, 400)"
         );
     }

View File

@@ -1,5 +1,4 @@
 use std::collections::BTreeMap;
-use std::marker::PhantomData;
 use std::sync::Arc;
 use std::{fmt, io};
@@ -7,7 +6,7 @@ use crate::collector::Collector;
 use crate::core::Executor;
 use crate::index::{SegmentId, SegmentReader};
 use crate::query::{Bm25StatisticsProvider, EnableScoring, Query};
-use crate::schema::document::{DocumentDeserialize, DocumentDeserializeSeed};
+use crate::schema::document::DocumentDeserialize;
 use crate::schema::{Schema, Term};
 use crate::space_usage::SearcherSpaceUsage;
 use crate::store::{CacheStats, StoreReader};
@@ -87,17 +86,8 @@ impl Searcher {
     /// The searcher uses the segment ordinal to route the
     /// request to the right `Segment`.
     pub fn doc<D: DocumentDeserialize>(&self, doc_address: DocAddress) -> crate::Result<D> {
-        self.doc_seed(doc_address, PhantomData)
-    }
-    /// A stateful variant of [`doc`][Self::doc].`
-    pub fn doc_seed<T: DocumentDeserializeSeed>(
-        &self,
-        doc_address: DocAddress,
-        seed: T,
-    ) -> crate::Result<T::Value> {
         let store_reader = &self.inner.store_readers[doc_address.segment_ord as usize];
-        store_reader.get_seed(doc_address.doc_id, seed)
+        store_reader.get(doc_address.doc_id)
     }
     /// The cache stats for the underlying store reader.
@@ -119,21 +109,9 @@ impl Searcher {
         &self,
         doc_address: DocAddress,
     ) -> crate::Result<D> {
-        self.doc_async_seed(doc_address, PhantomData).await
-    }
-    #[cfg(feature = "quickwit")]
-    /// A stateful variant of [`doc_async`][Self::doc_async].
-    pub async fn doc_async_seed<T: DocumentDeserializeSeed>(
-        &self,
-        doc_address: DocAddress,
-        seed: T,
-    ) -> crate::Result<T::Value> {
         let executor = self.inner.index.search_executor();
         let store_reader = &self.inner.store_readers[doc_address.segment_ord as usize];
-        store_reader
-            .get_async_seed(doc_address.doc_id, executor, seed)
-            .await
+        store_reader.get_async(doc_address.doc_id, executor).await
     }
     /// Access the schema associated with the index of this searcher.
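The public call sites are unchanged by the removal of the seed variants; fetching a stored document still looks like this (a sketch, with `searcher` and `doc_address` assumed from context):

// Deserializes the stored fields behind a DocAddress into a TantivyDocument.
let doc: tantivy::TantivyDocument = searcher.doc(doc_address)?;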

View File

@@ -566,7 +566,7 @@ mod tests {
         let mmap_directory = MmapDirectory::create_from_tempdir().unwrap();
         let num_paths = 10;
         let paths: Vec<PathBuf> = (0..num_paths)
-            .map(|i| PathBuf::from(&*format!("file_{}", i)))
+            .map(|i| PathBuf::from(&*format!("file_{i}")))
             .collect();
         {
             for path in &paths {

View File

@@ -62,7 +62,7 @@ impl FacetReader {
 #[cfg(test)]
 mod tests {
-    use crate::schema::{Facet, FacetOptions, SchemaBuilder, STORED};
+    use crate::schema::{Facet, FacetOptions, SchemaBuilder, Value, STORED};
     use crate::{DocAddress, Index, IndexWriter, TantivyDocument};
     #[test]
@@ -88,7 +88,9 @@ mod tests {
         let doc = searcher
             .doc::<TantivyDocument>(DocAddress::new(0u32, 0u32))
             .unwrap();
-        let value = doc.get_first(facet_field).and_then(|v| v.as_facet());
+        let value = doc
+            .get_first(facet_field)
+            .and_then(|v| v.as_value().as_facet());
         assert_eq!(value, None);
     }

View File

@@ -252,9 +252,8 @@ impl IndexBuilder {
             let field_type = entry.field_type().value_type();
             if !supported_field_types.contains(&field_type) {
                 return Err(TantivyError::InvalidArgument(format!(
-                    "Unsupported field type in sort_by_field: {:?}. Supported field types: \
-                     {:?} ",
-                    field_type, supported_field_types,
+                    "Unsupported field type in sort_by_field: {field_type:?}. Supported field \
+                     types: {supported_field_types:?} ",
                 )));
             }
         }

View File

@@ -318,14 +318,14 @@ impl SegmentReader {
         if create_canonical {
             // Without expand dots enabled dots need to be escaped.
             let escaped_json_path = json_path.replace('.', "\\.");
-            let full_path = format!("{}.{}", field_name, escaped_json_path);
+            let full_path = format!("{field_name}.{escaped_json_path}");
             let full_path_unescaped = format!("{}.{}", field_name, &json_path);
             map_to_canonical.insert(full_path_unescaped, full_path.to_string());
             full_path
         } else {
             // With expand dots enabled, we can use '.' instead of '\u{1}'.
             json_path_sep_to_dot(&mut json_path);
-            format!("{}.{}", field_name, json_path)
+            format!("{field_name}.{json_path}")
         }
     };
     indexed_fields.extend(

View File

@@ -816,7 +816,7 @@ mod tests {
     use crate::query::{BooleanQuery, Occur, Query, QueryParser, TermQuery};
     use crate::schema::{
         self, Facet, FacetOptions, IndexRecordOption, IpAddrOptions, NumericOptions, Schema,
-        TextFieldIndexing, TextOptions, FAST, INDEXED, STORED, STRING, TEXT,
+        TextFieldIndexing, TextOptions, Value, FAST, INDEXED, STORED, STRING, TEXT,
     };
     use crate::store::DOCSTORE_CACHE_CAPACITY;
     use crate::{
@@ -1979,7 +1979,13 @@ mod tests {
             .unwrap();
         // test store iterator
         for doc in store_reader.iter::<TantivyDocument>(segment_reader.alive_bitset()) {
-            let id = doc.unwrap().get_first(id_field).unwrap().as_u64().unwrap();
+            let id = doc
+                .unwrap()
+                .get_first(id_field)
+                .unwrap()
+                .as_value()
+                .as_u64()
+                .unwrap();
             assert!(expected_ids_and_num_occurrences.contains_key(&id));
         }
         // test store random access

View File

@@ -797,7 +797,7 @@ mod tests {
     use crate::query::{AllQuery, BooleanQuery, EnableScoring, Scorer, TermQuery};
     use crate::schema::{
         Facet, FacetOptions, IndexRecordOption, NumericOptions, TantivyDocument, Term,
-        TextFieldIndexing, INDEXED, TEXT,
+        TextFieldIndexing, Value, INDEXED, TEXT,
     };
     use crate::time::OffsetDateTime;
     use crate::{
@@ -909,15 +909,24 @@ mod tests {
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 0))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("af b"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("af b")
+            );
         }
         {
            let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 1))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("a b c")
+            );
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 2))?;
-            assert_eq!(doc.get_first(text_field).unwrap().as_str(), Some("a b c d"));
+            assert_eq!(
+                doc.get_first(text_field).unwrap().as_value().as_str(),
+                Some("a b c d")
+            );
         }
         {
             let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0, 3))?;

View File

@@ -7,7 +7,7 @@ mod tests {
     use crate::query::QueryParser;
     use crate::schema::{
         self, BytesOptions, Facet, FacetOptions, IndexRecordOption, NumericOptions,
-        TextFieldIndexing, TextOptions,
+        TextFieldIndexing, TextOptions, Value,
     };
     use crate::{
         DocAddress, DocSet, IndexSettings, IndexSortByField, IndexWriter, Order, TantivyDocument,
@@ -280,13 +280,16 @@ mod tests {
             .doc::<TantivyDocument>(DocAddress::new(0, blubber_pos))
             .unwrap();
         assert_eq!(
-            doc.get_first(my_text_field).unwrap().as_str(),
+            doc.get_first(my_text_field).unwrap().as_value().as_str(),
             Some("blubber")
         );
         let doc = searcher
             .doc::<TantivyDocument>(DocAddress::new(0, 0))
             .unwrap();
-        assert_eq!(doc.get_first(int_field).unwrap().as_u64(), Some(1000));
+        assert_eq!(
+            doc.get_first(int_field).unwrap().as_value().as_u64(),
+            Some(1000)
+        );
     }
 }

View File

@@ -216,7 +216,7 @@ mod tests_mmap {
         let test_query = |query_str: &str| {
             let query = parse_query.parse_query(query_str).unwrap();
             let num_docs = searcher.search(&query, &Count).unwrap();
-            assert_eq!(num_docs, 1, "{}", query_str);
+            assert_eq!(num_docs, 1, "{query_str}");
         };
         test_query(format!("json.{field_name_out}:test1").as_str());
         test_query(format!("json.a{field_name_out}:test2").as_str());
@@ -590,10 +590,10 @@ mod tests_mmap {
         let query_parser = QueryParser::for_index(&index, vec![]);
         // Test if field name can be queried
         for (indexed_field, val) in fields_and_vals.iter() {
-            let query_str = &format!("{}:{}", indexed_field, val);
+            let query_str = &format!("{indexed_field}:{val}");
             let query = query_parser.parse_query(query_str).unwrap();
             let count_docs = searcher.search(&*query, &TopDocs::with_limit(2)).unwrap();
-            assert!(!count_docs.is_empty(), "{}:{}", indexed_field, val);
+            assert!(!count_docs.is_empty(), "{indexed_field}:{val}");
         }
         // Test if field name can be used for aggregation
         for (field_name, val) in fields_and_vals.iter() {

View File

@@ -500,8 +500,8 @@ mod tests {
     use crate::postings::{Postings, TermInfo};
     use crate::query::{PhraseQuery, QueryParser};
     use crate::schema::{
-        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, STORED,
-        STRING, TEXT,
+        Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, Value,
+        STORED, STRING, TEXT,
     };
     use crate::store::{Compressor, StoreReader, StoreWriter};
     use crate::time::format_description::well_known::Rfc3339;
@@ -555,9 +555,12 @@ mod tests {
         let doc = reader.get::<TantivyDocument>(0).unwrap();
         assert_eq!(doc.field_values().count(), 2);
-        assert_eq!(doc.get_all(text_field).next().unwrap().as_str(), Some("A"));
         assert_eq!(
-            doc.get_all(text_field).nth(1).unwrap().as_str(),
+            doc.get_all(text_field).next().unwrap().as_value().as_str(),
+            Some("A")
+        );
+        assert_eq!(
+            doc.get_all(text_field).nth(1).unwrap().as_value().as_str(),
             Some("title")
         );
     }

View File

@@ -138,8 +138,7 @@ impl FuzzyTermQuery {
             if json_path_type != Type::Str {
                 return Err(InvalidArgument(format!(
                     "The fuzzy term query requires a string path type for a json term. Found \
-                     {:?}",
-                    json_path_type
+                     {json_path_type:?}"
                 )));
             }
         }

View File

@@ -185,7 +185,7 @@ mod test {
             Err(crate::TantivyError::InvalidArgument(msg)) => {
                 assert!(msg.contains("error: unclosed group"))
             }
-            res => panic!("unexpected result: {:?}", res),
+            res => panic!("unexpected result: {res:?}"),
         }
     }
} }

View File

@@ -69,28 +69,6 @@ pub trait DocumentDeserialize: Sized {
     where D: DocumentDeserializer<'de>;
 }
-/// A stateful extension of [`DocumentDeserialize`].
-pub trait DocumentDeserializeSeed: Sized {
-    /// The type produced by using this seed.
-    type Value;
-    /// Attempts to deserialize `Self::Value` from the given `seed` and `deserializer`.
-    fn deserialize<'de, D>(self, deserializer: D) -> Result<Self::Value, DeserializeError>
-    where D: DocumentDeserializer<'de>;
-}
-impl<T> DocumentDeserializeSeed for PhantomData<T>
-where T: DocumentDeserialize
-{
-    /// The type produced by using this seed.
-    type Value = T;
-    fn deserialize<'de, D>(self, deserializer: D) -> Result<Self::Value, DeserializeError>
-    where D: DocumentDeserializer<'de> {
-        <T as DocumentDeserialize>::deserialize(deserializer)
-    }
-}
 /// A deserializer that can walk through each entry in the document.
 pub trait DocumentDeserializer<'de> {
     /// A indicator as to how many values are in the document.

View File

@@ -157,29 +157,24 @@ impl CompactDoc {
     }
     /// field_values accessor
-    pub fn field_values(
-        &self,
-    ) -> impl Iterator<Item = (Field, ReferenceValue<'_, CompactDocValue<'_>>)> {
+    pub fn field_values(&self) -> impl Iterator<Item = (Field, CompactDocValue<'_>)> {
         self.field_values.iter().map(|field_val| {
             let field = Field::from_field_id(field_val.field as u32);
-            let val = self.extract_value(field_val.value_addr).unwrap();
+            let val = self.get_compact_doc_value(field_val.value_addr);
             (field, val)
         })
     }
     /// Returns all of the `ReferenceValue`s associated the given field
-    pub fn get_all(
-        &self,
-        field: Field,
-    ) -> impl Iterator<Item = ReferenceValue<'_, CompactDocValue<'_>>> + '_ {
+    pub fn get_all(&self, field: Field) -> impl Iterator<Item = CompactDocValue<'_>> + '_ {
         self.field_values
             .iter()
             .filter(move |field_value| Field::from_field_id(field_value.field as u32) == field)
-            .map(|val| self.extract_value(val.value_addr).unwrap())
+            .map(|val| self.get_compact_doc_value(val.value_addr))
     }
     /// Returns the first `ReferenceValue` associated the given field
-    pub fn get_first(&self, field: Field) -> Option<ReferenceValue<'_, CompactDocValue<'_>>> {
+    pub fn get_first(&self, field: Field) -> Option<CompactDocValue<'_>> {
         self.get_all(field).next()
     }
@@ -299,58 +294,11 @@ impl CompactDoc {
         }
     }
-    fn extract_value(
-        &self,
-        ref_value: ValueAddr,
-    ) -> io::Result<ReferenceValue<'_, CompactDocValue<'_>>> {
-        match ref_value.type_id {
-            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
-            ValueType::Str => {
-                let str_ref = self.extract_str(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Str(str_ref).into())
-            }
-            ValueType::Facet => {
-                let str_ref = self.extract_str(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Facet(str_ref).into())
-            }
-            ValueType::Bytes => {
-                let data = self.extract_bytes(ref_value.val_addr);
-                Ok(ReferenceValueLeaf::Bytes(data).into())
-            }
-            ValueType::U64 => self
-                .read_from::<u64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::U64)
-                .map(Into::into),
-            ValueType::I64 => self
-                .read_from::<i64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::I64)
-                .map(Into::into),
-            ValueType::F64 => self
-                .read_from::<f64>(ref_value.val_addr)
-                .map(ReferenceValueLeaf::F64)
-                .map(Into::into),
-            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(ref_value.val_addr != 0).into()),
-            ValueType::Date => self
-                .read_from::<i64>(ref_value.val_addr)
-                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
-                .map(Into::into),
-            ValueType::IpAddr => self
-                .read_from::<u128>(ref_value.val_addr)
-                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
-                .map(Into::into),
-            ValueType::PreTokStr => self
-                .read_from::<PreTokenizedString>(ref_value.val_addr)
-                .map(Into::into)
-                .map(ReferenceValueLeaf::PreTokStr)
-                .map(Into::into),
-            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
-                self,
-                ref_value.val_addr,
-            )?)),
-            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
-                self,
-                ref_value.val_addr,
-            )?)),
-        }
-    }
+    /// Get CompactDocValue for address
+    fn get_compact_doc_value(&self, value_addr: ValueAddr) -> CompactDocValue<'_> {
+        CompactDocValue {
+            container: self,
+            value_addr,
+        }
+    }
@@ -410,7 +358,7 @@ impl PartialEq for CompactDoc {
         let convert_to_comparable_map = |doc: &CompactDoc| {
             let mut field_value_set: HashMap<Field, HashSet<String>> = Default::default();
             for field_value in doc.field_values.iter() {
-                let value: OwnedValue = doc.extract_value(field_value.value_addr).unwrap().into();
+                let value: OwnedValue = doc.get_compact_doc_value(field_value.value_addr).into();
                 let value = serde_json::to_string(&value).unwrap();
                 field_value_set
                     .entry(Field::from_field_id(field_value.field as u32))
@@ -444,7 +392,19 @@ impl DocumentDeserialize for CompactDoc {
 #[derive(Debug, Clone, Copy)]
 pub struct CompactDocValue<'a> {
     container: &'a CompactDoc,
-    value: ValueAddr,
+    value_addr: ValueAddr,
+}
+impl PartialEq for CompactDocValue<'_> {
+    fn eq(&self, other: &Self) -> bool {
+        let value1: OwnedValue = (*self).into();
+        let value2: OwnedValue = (*other).into();
+        value1 == value2
+    }
+}
+impl<'a> From<CompactDocValue<'a>> for OwnedValue {
+    fn from(value: CompactDocValue) -> Self {
+        value.as_value().into()
+    }
 }
 impl<'a> Value<'a> for CompactDocValue<'a> {
     type ArrayIter = CompactDocArrayIter<'a>;
@@ -452,7 +412,67 @@ impl<'a> Value<'a> for CompactDocValue<'a> {
     type ObjectIter = CompactDocObjectIter<'a>;
     fn as_value(&self) -> ReferenceValue<'a, Self> {
-        self.container.extract_value(self.value).unwrap()
+        self.get_ref_value().unwrap()
     }
 }
+impl<'a> CompactDocValue<'a> {
+    fn get_ref_value(&self) -> io::Result<ReferenceValue<'a, CompactDocValue<'a>>> {
+        let addr = self.value_addr.val_addr;
+        match self.value_addr.type_id {
+            ValueType::Null => Ok(ReferenceValueLeaf::Null.into()),
+            ValueType::Str => {
+                let str_ref = self.container.extract_str(addr);
+                Ok(ReferenceValueLeaf::Str(str_ref).into())
+            }
+            ValueType::Facet => {
+                let str_ref = self.container.extract_str(addr);
+                Ok(ReferenceValueLeaf::Facet(str_ref).into())
+            }
+            ValueType::Bytes => {
+                let data = self.container.extract_bytes(addr);
+                Ok(ReferenceValueLeaf::Bytes(data).into())
+            }
+            ValueType::U64 => self
+                .container
+                .read_from::<u64>(addr)
+                .map(ReferenceValueLeaf::U64)
+                .map(Into::into),
+            ValueType::I64 => self
+                .container
+                .read_from::<i64>(addr)
+                .map(ReferenceValueLeaf::I64)
+                .map(Into::into),
+            ValueType::F64 => self
+                .container
+                .read_from::<f64>(addr)
+                .map(ReferenceValueLeaf::F64)
+                .map(Into::into),
+            ValueType::Bool => Ok(ReferenceValueLeaf::Bool(addr != 0).into()),
+            ValueType::Date => self
+                .container
+                .read_from::<i64>(addr)
+                .map(|ts| ReferenceValueLeaf::Date(DateTime::from_timestamp_nanos(ts)))
+                .map(Into::into),
+            ValueType::IpAddr => self
+                .container
+                .read_from::<u128>(addr)
+                .map(|num| ReferenceValueLeaf::IpAddr(Ipv6Addr::from_u128(num)))
+                .map(Into::into),
+            ValueType::PreTokStr => self
+                .container
+                .read_from::<PreTokenizedString>(addr)
+                .map(Into::into)
+                .map(ReferenceValueLeaf::PreTokStr)
+                .map(Into::into),
+            ValueType::Object => Ok(ReferenceValue::Object(CompactDocObjectIter::new(
+                self.container,
+                addr,
+            )?)),
+            ValueType::Array => Ok(ReferenceValue::Array(CompactDocArrayIter::new(
+                self.container,
+                addr,
+            )?)),
+        }
+    }
+}
@@ -537,7 +557,7 @@ impl BinarySerializable for ValueType {
         } else {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Invalid value type id: {}", num),
+                format!("Invalid value type id: {num}"),
             ));
         };
         Ok(type_id)
@@ -601,7 +621,7 @@ impl<'a> Iterator for CompactDocObjectIter<'a> {
         let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
         let value = CompactDocValue {
             container: self.container,
-            value,
+            value_addr: value,
         };
         Some((key, value))
     }
@@ -635,7 +655,7 @@ impl<'a> Iterator for CompactDocArrayIter<'a> {
         let value = ValueAddr::deserialize(&mut self.node_addresses_slice).ok()?;
         let value = CompactDocValue {
             container: self.container,
-            value,
+            value_addr: value,
         };
         Some(value)
     }
@@ -668,7 +688,7 @@ impl<'a> Iterator for FieldValueIterRef<'a> {
                 Field::from_field_id(field_value.field as u32),
                 CompactDocValue::<'a> {
                     container: self.container,
-                    value: field_value.value_addr,
+                    value_addr: field_value.value_addr,
                 },
             )
         })
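To illustrate the simplified signatures above, a sketch of iterating a document's values through the new return type (`doc` is any `TantivyDocument`/`CompactDoc` assumed from context):

use tantivy::schema::Value;

// field_values() now yields (Field, CompactDocValue) pairs directly;
// the ReferenceValue view is obtained on demand via as_value().
for (field, value) in doc.field_values() {
    if let Some(text) = value.as_value().as_str() {
        println!("{field:?} => {text}");
    }
}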

View File

@@ -169,9 +169,8 @@ use std::mem;
 pub(crate) use self::de::BinaryDocumentDeserializer;
 pub use self::de::{
-    ArrayAccess, DeserializeError, DocumentDeserialize, DocumentDeserializeSeed,
-    DocumentDeserializer, ObjectAccess, ValueDeserialize, ValueDeserializer, ValueType,
-    ValueVisitor,
+    ArrayAccess, DeserializeError, DocumentDeserialize, DocumentDeserializer, ObjectAccess,
+    ValueDeserialize, ValueDeserializer, ValueType, ValueVisitor,
 };
 pub use self::default_document::{
     CompactDocArrayIter, CompactDocObjectIter, CompactDocValue, DocParsingError, TantivyDocument,

View File

@@ -58,9 +58,8 @@ where W: Write
             return Err(io::Error::new(
                 io::ErrorKind::Other,
                 format!(
-                    "Unexpected number of entries written to serializer, expected {} entries, got \
-                     {} entries",
-                    num_field_values, actual_length,
+                    "Unexpected number of entries written to serializer, expected \
+                     {num_field_values} entries, got {actual_length} entries",
                 ),
             ));
         }

View File

@@ -659,9 +659,9 @@ mod tests {
         let schema = schema_builder.build();
         let doc_json = r#"{"date": "2019-10-12T07:20:50.52+02:00"}"#;
         let doc = TantivyDocument::parse_json(&schema, doc_json).unwrap();
-        let date = doc.get_first(date_field).unwrap();
+        let date = OwnedValue::from(doc.get_first(date_field).unwrap());
         // Time zone is converted to UTC
-        assert_eq!("Leaf(Date(2019-10-12T05:20:50.52Z))", format!("{date:?}"));
+        assert_eq!("Date(2019-10-12T05:20:50.52Z)", format!("{date:?}"));
     }
#[test] #[test]

View File

@@ -60,7 +60,7 @@ pub mod tests {
     use crate::directory::{Directory, RamDirectory, WritePtr};
     use crate::fastfield::AliveBitSet;
     use crate::schema::{
-        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, STORED, TEXT,
+        self, Schema, TantivyDocument, TextFieldIndexing, TextOptions, Value, STORED, TEXT,
     };
     use crate::{Index, IndexWriter, Term};
@@ -122,6 +122,7 @@ pub mod tests {
                 .get::<TantivyDocument>(i)?
                 .get_first(field_title)
                 .unwrap()
+                .as_value()
                 .as_str()
                 .unwrap(),
             format!("Doc {i}")
@@ -133,6 +134,7 @@ pub mod tests {
             let title_content = doc
                 .get_first(field_title)
                 .unwrap()
+                .as_value()
                 .as_str()
                 .unwrap()
                 .to_string();

View File

@@ -1,6 +1,5 @@
 use std::io;
 use std::iter::Sum;
-use std::marker::PhantomData;
 use std::num::NonZeroUsize;
 use std::ops::{AddAssign, Range};
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -15,9 +14,7 @@ use super::Decompressor;
 use crate::directory::FileSlice;
 use crate::error::DataCorruption;
 use crate::fastfield::AliveBitSet;
-use crate::schema::document::{
-    BinaryDocumentDeserializer, DocumentDeserialize, DocumentDeserializeSeed,
-};
+use crate::schema::document::{BinaryDocumentDeserializer, DocumentDeserialize};
 use crate::space_usage::StoreSpaceUsage;
 use crate::store::index::Checkpoint;
 use crate::DocId;
@@ -204,21 +201,11 @@ impl StoreReader {
     /// It should not be called to score documents
     /// for instance.
     pub fn get<D: DocumentDeserialize>(&self, doc_id: DocId) -> crate::Result<D> {
-        self.get_seed(doc_id, PhantomData)
-    }
-    /// A stateful version of [`get`][Self::get].
-    pub fn get_seed<T: DocumentDeserializeSeed>(
-        &self,
-        doc_id: DocId,
-        seed: T,
-    ) -> crate::Result<T::Value> {
         let mut doc_bytes = self.get_document_bytes(doc_id)?;
         let deserializer = BinaryDocumentDeserializer::from_reader(&mut doc_bytes)
             .map_err(crate::TantivyError::from)?;
-        seed.deserialize(deserializer)
-            .map_err(crate::TantivyError::from)
+        D::deserialize(deserializer).map_err(crate::TantivyError::from)
     }
     /// Returns raw bytes of a given document.
@@ -250,27 +237,16 @@ impl StoreReader {
     /// Iterator over all Documents in their order as they are stored in the doc store.
     /// Use this, if you want to extract all Documents from the doc store.
     /// The `alive_bitset` has to be forwarded from the `SegmentReader` or the results may be wrong.
-    pub fn iter<'a: 'b, 'b, D: DocumentDeserialize + 'b>(
+    pub fn iter<'a: 'b, 'b, D: DocumentDeserialize>(
         &'b self,
         alive_bitset: Option<&'a AliveBitSet>,
     ) -> impl Iterator<Item = crate::Result<D>> + 'b {
-        self.iter_seed(alive_bitset, &PhantomData)
-    }
-    /// A stateful variant of [`iter`][Self::iter].
-    pub fn iter_seed<'a: 'b, 'b, T: DocumentDeserializeSeed + Clone + 'b>(
-        &'b self,
-        alive_bitset: Option<&'a AliveBitSet>,
-        seed: &'b T,
-    ) -> impl Iterator<Item = crate::Result<T::Value>> + 'b {
         self.iter_raw(alive_bitset).map(|doc_bytes_res| {
             let mut doc_bytes = doc_bytes_res?;
             let deserializer = BinaryDocumentDeserializer::from_reader(&mut doc_bytes)
                 .map_err(crate::TantivyError::from)?;
-            seed.clone()
-                .deserialize(deserializer)
-                .map_err(crate::TantivyError::from)
+            D::deserialize(deserializer).map_err(crate::TantivyError::from)
         })
     }
@@ -413,22 +389,11 @@ impl StoreReader {
         doc_id: DocId,
         executor: &Executor,
     ) -> crate::Result<D> {
-        self.get_async_seed(doc_id, executor, PhantomData).await
-    }
-    /// A stateful variant of [`get_async`][Self::get_async].
-    pub async fn get_async_seed<T: DocumentDeserializeSeed>(
-        &self,
-        doc_id: DocId,
-        executor: &Executor,
-        seed: T,
-    ) -> crate::Result<T::Value> {
         let mut doc_bytes = self.get_document_bytes_async(doc_id, executor).await?;
         let deserializer = BinaryDocumentDeserializer::from_reader(&mut doc_bytes)
             .map_err(crate::TantivyError::from)?;
-        seed.deserialize(deserializer)
-            .map_err(crate::TantivyError::from)
+        D::deserialize(deserializer).map_err(crate::TantivyError::from)
     }
 }
@@ -438,7 +403,7 @@ mod tests {
     use super::*;
     use crate::directory::RamDirectory;
-    use crate::schema::{Field, TantivyDocument};
+    use crate::schema::{Field, TantivyDocument, Value};
     use crate::store::tests::write_lorem_ipsum_store;
     use crate::store::Compressor;
     use crate::Directory;
@@ -446,7 +411,7 @@ mod tests {
     const BLOCK_SIZE: usize = 16_384;
     fn get_text_field<'a>(doc: &'a TantivyDocument, field: &'a Field) -> Option<&'a str> {
-        doc.get_first(*field).and_then(|f| f.as_str())
+        doc.get_first(*field).and_then(|f| f.as_value().as_str())
     }
#[test] #[test]

View File

@@ -93,7 +93,7 @@ fn open_fst_index(fst_file: FileSlice) -> io::Result<tantivy_fst::Map<OwnedBytes
     let fst = Fst::new(bytes).map_err(|err| {
         io::Error::new(
             io::ErrorKind::InvalidData,
-            format!("Fst data is corrupted: {:?}", err),
+            format!("Fst data is corrupted: {err:?}"),
         )
     })?;
     Ok(tantivy_fst::Map::from(fst))

View File

@@ -95,7 +95,7 @@ fn test_term_dictionary_simple() -> crate::Result<()> {
 #[test]
 fn test_term_dictionary_stream() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
@@ -156,7 +156,7 @@ fn test_stream_high_range_prefix_suffix() -> crate::Result<()> {
 #[test]
 fn test_stream_range() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();

View File

@@ -96,7 +96,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap();
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
             };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str())
@@ -116,7 +116,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap(); // ok test
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
             };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str()) // ok test