Expand the DocAddress struct with named fields

Stéphane Campinas
2021-03-28 19:00:23 +02:00
parent 114fbe2512
commit a0ec6e1e9d
18 changed files with 146 additions and 111 deletions
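
In short, this replaces the positional tuple struct `DocAddress(SegmentLocalId, DocId)` and its `segment_ord()`/`doc()` accessor methods with named public fields plus an explicit constructor. A minimal before/after sketch of the call-site change, assuming `DocAddress` stays re-exported from the crate root:

    use tantivy::DocAddress;

    fn main() {
        // Before this commit: positional tuple struct plus accessor methods.
        //   let addr = DocAddress(0, 1);
        //   let segment = addr.segment_ord();
        //   let doc = addr.doc();

        // After: an explicit constructor and named public fields.
        let addr = DocAddress::new(0, 1);
        assert_eq!(addr.segment_ord, 0);
        assert_eq!(addr.doc_id, 1);
    }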


@@ -36,7 +36,7 @@ impl Collector for DocSetCollector {
let mut result = HashSet::with_capacity(len);
for (segment_local_id, docs) in segment_fruits {
for doc in docs {
result.insert(DocAddress(segment_local_id, doc));
result.insert(DocAddress::new(segment_local_id, doc));
}
}
Ok(result)


@@ -47,7 +47,7 @@ use crate::{Score, SegmentReader, TantivyError};
/// let top_docs = searcher.search(&query, &no_filter_collector).unwrap();
///
/// assert_eq!(top_docs.len(), 1);
/// assert_eq!(top_docs[0].1, DocAddress(0, 1));
/// assert_eq!(top_docs[0].1, DocAddress::new(0, 1));
///
/// let filter_all_collector: FilterCollector<_, _, u64> = FilterCollector::new(price, &|value| value < 5u64, TopDocs::with_limit(2));
/// let filtered_top_docs = searcher.search(&query, &filter_all_collector).unwrap();


@@ -53,7 +53,7 @@ pub fn test_filter_collector() {
let top_docs = searcher.search(&query, &filter_some_collector).unwrap();
assert_eq!(top_docs.len(), 1);
assert_eq!(top_docs[0].1, DocAddress(0, 1));
assert_eq!(top_docs[0].1, DocAddress::new(0, 1));
let filter_all_collector: FilterCollector<_, _, u64> =
FilterCollector::new(price, &|value| value < 5u64, TopDocs::with_limit(2));
@@ -126,7 +126,7 @@ impl Collector for TestCollector {
if fruit.docs().is_empty() {
0
} else {
fruit.docs()[0].segment_ord()
fruit.docs()[0].segment_ord
}
});
let mut docs = vec![];
@@ -143,7 +143,7 @@ impl SegmentCollector for TestSegmentCollector {
type Fruit = TestFruit;
fn collect(&mut self, doc: DocId, score: Score) {
self.fruit.docs.push(DocAddress(self.segment_id, doc));
self.fruit.docs.push(DocAddress::new(self.segment_id, doc));
self.fruit.scores.push(score);
}


@@ -169,7 +169,7 @@ impl<T: PartialOrd + Clone> TopSegmentCollector<T> {
.map(|comparable_doc| {
(
comparable_doc.feature,
DocAddress(segment_id, comparable_doc.doc),
DocAddress::new(segment_id, comparable_doc.doc),
)
})
.collect()
@@ -220,9 +220,9 @@ mod tests {
assert_eq!(
top_collector.harvest(),
vec![
(0.8, DocAddress(0, 1)),
(0.3, DocAddress(0, 5)),
(0.2, DocAddress(0, 3))
(0.8, DocAddress::new(0, 1)),
(0.3, DocAddress::new(0, 5)),
(0.2, DocAddress::new(0, 3))
]
);
}
@@ -238,10 +238,10 @@ mod tests {
assert_eq!(
top_collector.harvest(),
vec![
(0.9, DocAddress(0, 7)),
(0.8, DocAddress(0, 1)),
(0.3, DocAddress(0, 5)),
(0.2, DocAddress(0, 3))
(0.9, DocAddress::new(0, 7)),
(0.8, DocAddress::new(0, 1)),
(0.3, DocAddress::new(0, 5)),
(0.2, DocAddress::new(0, 3))
]
);
}
@@ -276,17 +276,17 @@ mod tests {
let results = collector
.merge_fruits(vec![vec![
(0.9, DocAddress(0, 1)),
(0.8, DocAddress(0, 2)),
(0.7, DocAddress(0, 3)),
(0.6, DocAddress(0, 4)),
(0.5, DocAddress(0, 5)),
(0.9, DocAddress::new(0, 1)),
(0.8, DocAddress::new(0, 2)),
(0.7, DocAddress::new(0, 3)),
(0.6, DocAddress::new(0, 4)),
(0.5, DocAddress::new(0, 5)),
]])
.unwrap();
assert_eq!(
results,
vec![(0.8, DocAddress(0, 2)), (0.7, DocAddress(0, 3)),]
vec![(0.8, DocAddress::new(0, 2)), (0.7, DocAddress::new(0, 3)),]
);
}
@@ -295,10 +295,13 @@ mod tests {
let collector = TopCollector::with_limit(2).and_offset(1);
let results = collector
.merge_fruits(vec![vec![(0.9, DocAddress(0, 1)), (0.8, DocAddress(0, 2))]])
.merge_fruits(vec![vec![
(0.9, DocAddress::new(0, 1)),
(0.8, DocAddress::new(0, 2)),
]])
.unwrap();
assert_eq!(results, vec![(0.8, DocAddress(0, 2)),]);
assert_eq!(results, vec![(0.8, DocAddress::new(0, 2)),]);
}
#[test]
@@ -306,7 +309,10 @@ mod tests {
let collector = TopCollector::with_limit(2).and_offset(20);
let results = collector
.merge_fruits(vec![vec![(0.9, DocAddress(0, 1)), (0.8, DocAddress(0, 2))]])
.merge_fruits(vec![vec![
(0.9, DocAddress::new(0, 1)),
(0.8, DocAddress::new(0, 2)),
]])
.unwrap();
assert_eq!(results, vec![]);


@@ -113,8 +113,8 @@ where
/// let query = query_parser.parse_query("diary").unwrap();
/// let top_docs = searcher.search(&query, &TopDocs::with_limit(2)).unwrap();
///
/// assert_eq!(top_docs[0].1, DocAddress(0, 1));
/// assert_eq!(top_docs[1].1, DocAddress(0, 3));
/// assert_eq!(top_docs[0].1, DocAddress::new(0, 1));
/// assert_eq!(top_docs[1].1, DocAddress::new(0, 3));
/// ```
pub struct TopDocs(TopCollector<Score>);
@@ -201,8 +201,8 @@ impl TopDocs {
/// let top_docs = searcher.search(&query, &TopDocs::with_limit(2).and_offset(1)).unwrap();
///
/// assert_eq!(top_docs.len(), 2);
/// assert_eq!(top_docs[0].1, DocAddress(0, 4));
/// assert_eq!(top_docs[1].1, DocAddress(0, 3));
/// assert_eq!(top_docs[0].1, DocAddress::new(0, 4));
/// assert_eq!(top_docs[1].1, DocAddress::new(0, 3));
/// ```
pub fn and_offset(self, offset: usize) -> TopDocs {
TopDocs(self.0.and_offset(offset))
@@ -243,8 +243,8 @@ impl TopDocs {
/// # let query = QueryParser::for_index(&index, vec![title]).parse_query("diary")?;
/// # let top_docs = docs_sorted_by_rating(&reader.searcher(), &query, rating)?;
/// # assert_eq!(top_docs,
/// # vec![(97u64, DocAddress(0u32, 1)),
/// # (80u64, DocAddress(0u32, 3))]);
/// # vec![(97u64, DocAddress::new(0u32, 1)),
/// # (80u64, DocAddress::new(0u32, 3))]);
/// # Ok(())
/// # }
/// /// Searches the document matching the given query, and
@@ -323,8 +323,8 @@ impl TopDocs {
/// # let reader = index.reader()?;
/// # let top_docs = docs_sorted_by_revenue(&reader.searcher(), &AllQuery, rating)?;
/// # assert_eq!(top_docs,
/// # vec![(119_000_000i64, DocAddress(0, 1)),
/// # (92_000_000i64, DocAddress(0, 0))]);
/// # vec![(119_000_000i64, DocAddress::new(0, 1)),
/// # (92_000_000i64, DocAddress::new(0, 0))]);
/// # Ok(())
/// # }
/// /// Searches the document matching the given query, and
@@ -671,7 +671,7 @@ impl Collector for TopDocs {
let fruit = heap
.into_sorted_vec()
.into_iter()
.map(|cid| (cid.feature, DocAddress(segment_ord, cid.doc)))
.map(|cid| (cid.feature, DocAddress::new(segment_ord, cid.doc)))
.collect();
Ok(fruit)
}
@@ -741,9 +741,9 @@ mod tests {
assert_results_equals(
&score_docs,
&[
(0.81221175, DocAddress(0u32, 1)),
(0.5376842, DocAddress(0u32, 2)),
(0.48527452, DocAddress(0, 0)),
(0.81221175, DocAddress::new(0u32, 1)),
(0.5376842, DocAddress::new(0u32, 2)),
(0.48527452, DocAddress::new(0, 0)),
],
);
}
@@ -760,7 +760,7 @@ mod tests {
.searcher()
.search(&text_query, &TopDocs::with_limit(4).and_offset(2))
.unwrap();
assert_results_equals(&score_docs[..], &[(0.48527452, DocAddress(0, 0))]);
assert_results_equals(&score_docs[..], &[(0.48527452, DocAddress::new(0, 0))]);
}
#[test]
@@ -778,8 +778,8 @@ mod tests {
assert_results_equals(
&score_docs,
&[
(0.81221175, DocAddress(0u32, 1)),
(0.5376842, DocAddress(0u32, 2)),
(0.81221175, DocAddress::new(0u32, 1)),
(0.5376842, DocAddress::new(0u32, 2)),
],
);
}
@@ -799,8 +799,8 @@ mod tests {
assert_results_equals(
&score_docs[..],
&[
(0.5376842, DocAddress(0u32, 2)),
(0.48527452, DocAddress(0, 0)),
(0.5376842, DocAddress::new(0u32, 2)),
(0.48527452, DocAddress::new(0, 0)),
],
);
}
@@ -864,9 +864,9 @@ mod tests {
assert_eq!(
&top_docs[..],
&[
(64, DocAddress(0, 1)),
(16, DocAddress(0, 2)),
(12, DocAddress(0, 0))
(64, DocAddress::new(0, 1)),
(16, DocAddress::new(0, 2)),
(12, DocAddress::new(0, 0))
]
);
}
@@ -898,8 +898,8 @@ mod tests {
assert_eq!(
&top_docs[..],
&[
(mr_birthday, DocAddress(0, 1)),
(pr_birthday, DocAddress(0, 0)),
(mr_birthday, DocAddress::new(0, 1)),
(pr_birthday, DocAddress::new(0, 0)),
]
);
Ok(())
@@ -927,7 +927,10 @@ mod tests {
let top_docs: Vec<(i64, DocAddress)> = searcher.search(&AllQuery, &top_collector)?;
assert_eq!(
&top_docs[..],
&[(40i64, DocAddress(0, 1)), (-1i64, DocAddress(0, 0)),]
&[
(40i64, DocAddress::new(0, 1)),
(-1i64, DocAddress::new(0, 0)),
]
);
Ok(())
}
@@ -954,7 +957,10 @@ mod tests {
let top_docs: Vec<(f64, DocAddress)> = searcher.search(&AllQuery, &top_collector)?;
assert_eq!(
&top_docs[..],
&[(40f64, DocAddress(0, 1)), (-1.0f64, DocAddress(0, 0)),]
&[
(40f64, DocAddress::new(0, 1)),
(-1.0f64, DocAddress::new(0, 0)),
]
);
Ok(())
}
@@ -1034,7 +1040,7 @@ mod tests {
assert_eq!(
score_docs,
vec![(1, DocAddress(0, 1)), (0, DocAddress(0, 0)),]
vec![(1, DocAddress::new(0, 1)), (0, DocAddress::new(0, 0)),]
);
}
@@ -1056,7 +1062,7 @@ mod tests {
assert_eq!(
score_docs,
vec![(1, DocAddress(0, 1)), (0, DocAddress(0, 0)),]
vec![(1, DocAddress::new(0, 1)), (0, DocAddress::new(0, 0)),]
);
}


@@ -54,9 +54,8 @@ impl Searcher {
/// The searcher uses the segment ordinal to route the
/// request to the right `Segment`.
pub fn doc(&self, doc_address: DocAddress) -> crate::Result<Document> {
let DocAddress(segment_local_id, doc_id) = doc_address;
let store_reader = &self.store_readers[segment_local_id as usize];
store_reader.get(doc_id)
let store_reader = &self.store_readers[doc_address.segment_ord as usize];
store_reader.get(doc_address.doc_id)
}
/// Access the schema associated to the index of this searcher.
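
On the caller side, fetching a stored document only changes in how the address is built; `Searcher::doc` now reads the named fields to pick the right store reader. A hypothetical helper, sketched against the types shown in this hunk:

    use tantivy::{DocAddress, Document, Searcher};

    // Hypothetical helper: `segment_ord` selects the per-segment store reader,
    // `doc_id` is the segment-local id handed to it.
    fn fetch_stored(searcher: &Searcher, segment_ord: u32, doc_id: u32) -> tantivy::Result<Document> {
        searcher.doc(DocAddress::new(segment_ord, doc_id))
    }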


@@ -56,7 +56,7 @@ mod tests {
fn test_stored_bytes() -> crate::Result<()> {
let searcher = create_index_for_test(STORED)?;
assert_eq!(searcher.num_docs(), 1);
let retrieved_doc = searcher.doc(DocAddress(0u32, 0u32))?;
let retrieved_doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let field = searcher.schema().get_field("string_bytes").unwrap();
let values: Vec<&Value> = retrieved_doc.get_all(field).collect();
assert_eq!(values.len(), 2);
@@ -72,7 +72,7 @@ mod tests {
fn test_non_stored_bytes() -> crate::Result<()> {
let searcher = create_index_for_test(INDEXED)?;
assert_eq!(searcher.num_docs(), 1);
let retrieved_doc = searcher.doc(DocAddress(0u32, 0u32))?;
let retrieved_doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let field = searcher.schema().get_field("string_bytes").unwrap();
assert!(retrieved_doc.get_first(field).is_none());
Ok(())


@@ -105,7 +105,7 @@ mod tests {
let mut facet_ords = Vec::new();
facet_reader.facet_ords(0u32, &mut facet_ords);
assert_eq!(&facet_ords, &[2u64]);
let doc = searcher.doc(DocAddress(0u32, 0u32))?;
let doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let value = doc.get_first(facet_field).and_then(Value::path);
assert_eq!(value, None);
Ok(())
@@ -128,7 +128,7 @@ mod tests {
let mut facet_ords = Vec::new();
facet_reader.facet_ords(0u32, &mut facet_ords);
assert!(facet_ords.is_empty());
let doc = searcher.doc(DocAddress(0u32, 0u32))?;
let doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let value = doc.get_first(facet_field).and_then(Value::path);
assert_eq!(value, Some("/a/b".to_string()));
Ok(())
@@ -151,7 +151,7 @@ mod tests {
let mut facet_ords = Vec::new();
facet_reader.facet_ords(0u32, &mut facet_ords);
assert_eq!(&facet_ords, &[2u64]);
let doc = searcher.doc(DocAddress(0u32, 0u32))?;
let doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let value = doc.get_first(facet_field).and_then(Value::path);
assert_eq!(value, Some("/a/b".to_string()));
Ok(())
@@ -174,7 +174,7 @@ mod tests {
let mut facet_ords = Vec::new();
facet_reader.facet_ords(0u32, &mut facet_ords);
assert!(facet_ords.is_empty());
let doc = searcher.doc(DocAddress(0u32, 0u32))?;
let doc = searcher.doc(DocAddress::new(0u32, 0u32))?;
let value = doc.get_first(facet_field).and_then(Value::path);
assert_eq!(value, None);
Ok(())


@@ -798,49 +798,53 @@ mod tests {
{
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "a")])?,
vec![DocAddress(0, 1), DocAddress(0, 2), DocAddress(0, 4)]
vec![
DocAddress::new(0, 1),
DocAddress::new(0, 2),
DocAddress::new(0, 4)
]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "af")])?,
vec![DocAddress(0, 0), DocAddress(0, 3)]
vec![DocAddress::new(0, 0), DocAddress::new(0, 3)]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "g")])?,
vec![DocAddress(0, 4)]
vec![DocAddress::new(0, 4)]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "b")])?,
vec![
DocAddress(0, 0),
DocAddress(0, 1),
DocAddress(0, 2),
DocAddress(0, 3),
DocAddress(0, 4)
DocAddress::new(0, 0),
DocAddress::new(0, 1),
DocAddress::new(0, 2),
DocAddress::new(0, 3),
DocAddress::new(0, 4)
]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_date(date_field, &curr_time)])?,
vec![DocAddress(0, 0), DocAddress(0, 3)]
vec![DocAddress::new(0, 0), DocAddress::new(0, 3)]
);
}
{
let doc = searcher.doc(DocAddress(0, 0))?;
let doc = searcher.doc(DocAddress::new(0, 0))?;
assert_eq!(doc.get_first(text_field).unwrap().text(), Some("af b"));
}
{
let doc = searcher.doc(DocAddress(0, 1))?;
let doc = searcher.doc(DocAddress::new(0, 1))?;
assert_eq!(doc.get_first(text_field).unwrap().text(), Some("a b c"));
}
{
let doc = searcher.doc(DocAddress(0, 2))?;
let doc = searcher.doc(DocAddress::new(0, 2))?;
assert_eq!(doc.get_first(text_field).unwrap().text(), Some("a b c d"));
}
{
let doc = searcher.doc(DocAddress(0, 3))?;
let doc = searcher.doc(DocAddress::new(0, 3))?;
assert_eq!(doc.get_first(text_field).unwrap().text(), Some("af b"));
}
{
let doc = searcher.doc(DocAddress(0, 4))?;
let doc = searcher.doc(DocAddress::new(0, 4))?;
assert_eq!(doc.get_first(text_field).unwrap().text(), Some("a b c g"));
}
{


@@ -259,15 +259,12 @@ pub type Score = f32;
pub type SegmentLocalId = u32;
impl DocAddress {
/// Return the segment ordinal id that identifies the segment
/// hosting the document in the `Searcher` it is called from.
pub fn segment_ord(self) -> SegmentLocalId {
self.0
}
/// Return the segment-local `DocId`
pub fn doc(self) -> DocId {
self.1
/// Creates a new DocAddress from the segment/docId pair.
pub fn new(segment_ord: SegmentLocalId, doc_id: DocId) -> DocAddress {
DocAddress {
segment_ord,
doc_id,
}
}
}
@@ -280,7 +277,13 @@ impl DocAddress {
/// The id used for the segment is actually an ordinal
/// in the list of `Segment`s held by a `Searcher`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct DocAddress(pub SegmentLocalId, pub DocId);
pub struct DocAddress {
/// The segment ordinal id that identifies the segment
/// hosting the document in the `Searcher` it is called from.
pub segment_ord: SegmentLocalId,
/// The segment-local `DocId`.
pub doc_id: DocId,
}
#[cfg(test)]
mod tests {
@@ -778,30 +781,38 @@ mod tests {
};
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "a")])?,
vec![DocAddress(0, 1), DocAddress(0, 2)]
vec![DocAddress::new(0, 1), DocAddress::new(0, 2)]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "af")])?,
vec![DocAddress(0, 0)]
vec![DocAddress::new(0, 0)]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "b")])?,
vec![DocAddress(0, 0), DocAddress(0, 1), DocAddress(0, 2)]
vec![
DocAddress::new(0, 0),
DocAddress::new(0, 1),
DocAddress::new(0, 2)
]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "c")])?,
vec![DocAddress(0, 1), DocAddress(0, 2)]
vec![DocAddress::new(0, 1), DocAddress::new(0, 2)]
);
assert_eq!(
get_doc_ids(vec![Term::from_field_text(text_field, "d")])?,
vec![DocAddress(0, 2)]
vec![DocAddress::new(0, 2)]
);
assert_eq!(
get_doc_ids(vec![
Term::from_field_text(text_field, "b"),
Term::from_field_text(text_field, "a"),
])?,
vec![DocAddress(0, 0), DocAddress(0, 1), DocAddress(0, 2)]
vec![
DocAddress::new(0, 0),
DocAddress::new(0, 1),
DocAddress::new(0, 2)
]
);
Ok(())
}
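
Because both fields of the expanded struct are public, a struct literal and pattern matching work alongside `DocAddress::new`. A small sketch, assuming the `SegmentLocalId` and `DocId` aliases are exported from the crate root as the hunk above shows:

    use tantivy::{DocAddress, DocId, SegmentLocalId};

    // Destructuring replaces the removed `segment_ord()`/`doc()` accessors.
    fn split(addr: DocAddress) -> (SegmentLocalId, DocId) {
        let DocAddress { segment_ord, doc_id } = addr;
        (segment_ord, doc_id)
    }

    fn main() {
        // A struct literal is equivalent to `DocAddress::new(0, 42)`.
        let addr = DocAddress { segment_ord: 0, doc_id: 42 };
        assert_eq!(split(addr), (0, 42));
    }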


@@ -238,9 +238,9 @@ mod tests {
assert_eq!(
docs,
vec![
DocAddress(0u32, 1u32),
DocAddress(0u32, 2u32),
DocAddress(0u32, 3u32)
DocAddress::new(0u32, 1u32),
DocAddress::new(0u32, 2u32),
DocAddress::new(0u32, 3u32)
]
.into_iter()
.collect()
@@ -264,15 +264,24 @@ mod tests {
BooleanQuery::intersection(vec![term_b.box_clone(), term_c.box_clone()]);
{
let docs = searcher.search(&intersection_ab, &DocSetCollector)?;
assert_eq!(docs, vec![DocAddress(0u32, 2u32)].into_iter().collect());
assert_eq!(
docs,
vec![DocAddress::new(0u32, 2u32)].into_iter().collect()
);
}
{
let docs = searcher.search(&intersection_ac, &DocSetCollector)?;
assert_eq!(docs, vec![DocAddress(0u32, 1u32)].into_iter().collect());
assert_eq!(
docs,
vec![DocAddress::new(0u32, 1u32)].into_iter().collect()
);
}
{
let docs = searcher.search(&intersection_bc, &DocSetCollector)?;
assert_eq!(docs, vec![DocAddress(0u32, 0u32)].into_iter().collect());
assert_eq!(
docs,
vec![DocAddress::new(0u32, 0u32)].into_iter().collect()
);
}
Ok(())
}


@@ -128,7 +128,7 @@ mod tests {
.docs()
.iter()
.cloned()
.map(|doc| doc.1)
.map(|doc| doc.doc_id)
.collect::<Vec<DocId>>()
};
{
@@ -196,8 +196,8 @@ mod tests {
let topdocs_no_excluded = matching_topdocs(&boolean_query_no_excluded);
assert_eq!(topdocs_no_excluded.len(), 2);
let (top_score, top_doc) = topdocs_no_excluded[0];
assert_eq!(top_doc, DocAddress(0, 4));
assert_eq!(topdocs_no_excluded[1].1, DocAddress(0, 3)); // ignore score of doc 3.
assert_eq!(top_doc, DocAddress::new(0, 4));
assert_eq!(topdocs_no_excluded[1].1, DocAddress::new(0, 3)); // ignore score of doc 3.
score_doc_4 = top_score;
}
@@ -210,7 +210,7 @@ mod tests {
let topdocs_excluded = matching_topdocs(&boolean_query_two_excluded);
assert_eq!(topdocs_excluded.len(), 1);
let (top_score, top_doc) = topdocs_excluded[0];
assert_eq!(top_doc, DocAddress(0, 4));
assert_eq!(top_doc, DocAddress::new(0, 4));
assert_eq!(top_score, score_doc_4);
}
}
@@ -309,7 +309,7 @@ mod tests {
IndexRecordOption::Basic,
));
let query = BooleanQuery::from(vec![(Occur::Should, term_a), (Occur::Should, term_b)]);
let explanation = query.explain(&searcher, DocAddress(0, 0u32))?;
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32))?;
assert_nearly_equals!(explanation.value(), 0.6931472);
Ok(())
}


@@ -150,7 +150,7 @@ mod tests {
let reader = index.reader().unwrap();
let searcher = reader.searcher();
let query = BoostQuery::new(Box::new(AllQuery), 0.2);
let explanation = query.explain(&searcher, DocAddress(0, 0u32)).unwrap();
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
"{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of ...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\",\n \"context\": []\n }\n ],\n \"context\": []\n}"


@@ -58,7 +58,7 @@ pub mod tests {
test_fruits
.docs()
.iter()
.map(|docaddr| docaddr.1)
.map(|docaddr| docaddr.doc_id)
.collect::<Vec<_>>()
};
assert_eq!(test_query(vec!["a", "b"]), vec![1, 2, 3, 4]);
@@ -109,7 +109,7 @@ pub mod tests {
test_fruits
.docs()
.iter()
.map(|docaddr| docaddr.1)
.map(|docaddr| docaddr.doc_id)
.collect::<Vec<_>>()
};
assert_eq!(test_query(vec!["a", "b", "c"]), vec![2, 4]);
@@ -206,8 +206,8 @@ pub mod tests {
.docs()
.to_vec()
};
assert_eq!(test_query(vec!["a", "b"]), vec![DocAddress(0, 1)]);
assert_eq!(test_query(vec!["b", "a"]), vec![DocAddress(0, 2)]);
assert_eq!(test_query(vec!["a", "b"]), vec![DocAddress::new(0, 1)]);
assert_eq!(test_query(vec!["b", "a"]), vec![DocAddress::new(0, 2)]);
}
#[test] // motivated by #234
@@ -233,7 +233,7 @@ pub mod tests {
.expect("search should succeed")
.docs()
.iter()
.map(|doc_address| doc_address.1)
.map(|doc_address| doc_address.doc_id)
.collect::<Vec<DocId>>()
};
assert_eq!(test_query(vec![(0, "a"), (1, "b")]), vec![0]);


@@ -51,9 +51,9 @@ pub trait Query: QueryClone + Send + Sync + downcast_rs::Downcast + fmt::Debug {
/// Returns an `Explanation` for the score of the document.
fn explain(&self, searcher: &Searcher, doc_address: DocAddress) -> crate::Result<Explanation> {
let reader = searcher.segment_reader(doc_address.segment_ord());
let reader = searcher.segment_reader(doc_address.segment_ord);
let weight = self.weight(searcher, true)?;
weight.explain(reader, doc_address.doc())
weight.explain(reader, doc_address.doc_id)
}
/// Returns the number of documents matching the query.
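
The default `Query::explain` now routes with `doc_address.segment_ord` and asks the weight about `doc_address.doc_id`. A hypothetical caller-side sketch:

    use tantivy::query::Query;
    use tantivy::{DocAddress, Score, Searcher};

    // Hypothetical helper: explain the score of a single hit and return its value.
    fn explain_hit(query: &dyn Query, searcher: &Searcher, addr: DocAddress) -> tantivy::Result<Score> {
        let explanation = query.explain(searcher, addr)?;
        Ok(explanation.value())
    }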


@@ -196,18 +196,18 @@ mod tests {
let term_query = TermQuery::new(term_a, IndexRecordOption::Basic);
let searcher = index.reader()?.searcher();
{
let explanation = term_query.explain(&searcher, DocAddress(0u32, 1u32))?;
let explanation = term_query.explain(&searcher, DocAddress::new(0u32, 1u32))?;
assert_nearly_equals!(explanation.value(), 0.6931472);
}
{
let explanation_err = term_query.explain(&searcher, DocAddress(0u32, 0u32));
let explanation_err = term_query.explain(&searcher, DocAddress::new(0u32, 0u32));
assert!(matches!(
explanation_err,
Err(crate::TantivyError::InvalidArgument(_msg))
));
}
{
let explanation_err = term_query.explain(&searcher, DocAddress(0u32, 3u32));
let explanation_err = term_query.explain(&searcher, DocAddress::new(0u32, 3u32));
assert!(matches!(
explanation_err,
Err(crate::TantivyError::InvalidArgument(_msg))


@@ -192,7 +192,7 @@ impl<'de> Deserialize<'de> for FieldEntry {
Name,
Type,
Options,
};
}
const FIELDS: &[&str] = &["name", "type", "options"];


@@ -154,7 +154,7 @@ mod tests {
let searcher = reader.searcher();
assert_eq!(searcher.num_docs(), 30);
for i in 0..searcher.num_docs() as u32 {
let _doc = searcher.doc(DocAddress(0u32, i))?;
let _doc = searcher.doc(DocAddress::new(0u32, i))?;
}
Ok(())
}