Renaming .values(DocId) to .values_for_doc(DocId) (#1906)

Paul Masurel
2023-02-27 12:15:13 +09:00
committed by GitHub
parent 5f23bb7e65
commit 06850719dc
14 changed files with 38 additions and 35 deletions
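Only the accessor's name changes in this commit; its signature and behavior are untouched. A minimal before/after sketch of a call site (the crate path and the `sum_for_doc` helper are illustrative, not part of the diff):

```rust
use columnar::{Column, RowId};

// Sums the values stored for one document in a (possibly multivalued)
// fast-field column. Only the method name differs across this commit.
fn sum_for_doc(column: &Column<u64>, row_id: RowId) -> u64 {
    // Before this commit: column.values(row_id).sum()
    column.values_for_doc(row_id).sum()
}
```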

@@ -36,7 +36,7 @@ impl BytesColumn {
     }
 
     pub fn term_ords(&self, row_id: RowId) -> impl Iterator<Item = u64> + '_ {
-        self.term_ord_column.values(row_id)
+        self.term_ord_column.values_for_doc(row_id)
     }
 
     /// Returns the column of ordinals

@@ -64,10 +64,10 @@ impl<T: PartialOrd + Copy + Debug + Send + Sync + 'static> Column<T> {
     }
 
     pub fn first(&self, row_id: RowId) -> Option<T> {
-        self.values(row_id).next()
+        self.values_for_doc(row_id).next()
    }
 
-    pub fn values(&self, row_id: RowId) -> impl Iterator<Item = T> + '_ {
+    pub fn values_for_doc(&self, row_id: RowId) -> impl Iterator<Item = T> + '_ {
         self.value_row_ids(row_id)
             .map(|value_row_id: RowId| self.values.get_val(value_row_id))
     }
@@ -97,7 +97,7 @@ impl<T: PartialOrd + Copy + Debug + Send + Sync + 'static> Column<T> {
     /// This method clears the `output` vector.
     pub fn fill_vals(&self, row_id: RowId, output: &mut Vec<T>) {
         output.clear();
-        output.extend(self.values(row_id));
+        output.extend(self.values_for_doc(row_id));
     }
 
     pub fn first_or_default_col(self, default_value: T) -> Arc<dyn ColumnValues<T>> {
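
As the hunks above show, `first` and `fill_vals` are thin wrappers over the renamed iterator: `first` takes its first element, and `fill_vals` drains it into a caller-supplied buffer. A sketch of the buffer-reuse pattern that `fill_vals` enables (the `max_per_doc` helper and crate path are illustrative):

```rust
use columnar::{Column, RowId};

// Reuse one buffer across documents instead of collecting a fresh Vec per
// call; `fill_vals` clears `buf`, then extends it with the doc's values.
fn max_per_doc(column: &Column<u64>, docs: &[RowId]) -> Vec<Option<u64>> {
    let mut buf: Vec<u64> = Vec::new();
    docs.iter()
        .map(|&doc| {
            column.fill_vals(doc, &mut buf);
            buf.iter().copied().max()
        })
        .collect()
}
```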

@@ -96,7 +96,7 @@ fn compute_term_bitset(column: &BytesColumn, row_bitset: &ReadOnlyBitSet) -> Bit
     let num_terms = column.dictionary().num_terms();
     let mut term_bitset = BitSet::with_max_value(num_terms as u32);
     for row_id in row_bitset.iter() {
-        for term_ord in column.term_ord_column.values(row_id) {
+        for term_ord in column.term_ord_column.values_for_doc(row_id) {
             term_bitset.insert(term_ord as u32);
         }
     }

@@ -105,7 +105,7 @@ impl SegmentCollector for StatsSegmentCollector {
     fn collect(&mut self, doc: u32, _score: Score) {
         // Since we know the values are single value, we could call `first_or_default_col` on the
         // column and fetch single values.
-        for value in self.fast_field_reader.values(doc) {
+        for value in self.fast_field_reader.values_for_doc(doc) {
             let value = value as f64;
             self.stats.count += 1;
             self.stats.sum += value;
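
The comment in this hunk refers to `first_or_default_col` from the `Column<T>` hunk above: a column known to be single-valued can be flattened once up front, so each document costs one `get_val` lookup instead of a fresh iterator. A sketch of that alternative, assuming the same columnar API (the `0u64` default and helper names are illustrative):

```rust
use std::sync::Arc;
use columnar::{Column, ColumnValues, RowId};

// Flatten a single-valued column once; `0u64` stands in for documents
// without a value (illustrative default).
fn flatten(column: Column<u64>) -> Arc<dyn ColumnValues<u64>> {
    column.first_or_default_col(0u64)
}

// Afterwards, reading a stat value is a single positional lookup.
fn stat_value(values: &dyn ColumnValues<u64>, row_id: RowId) -> f64 {
    values.get_val(row_id) as f64
}
```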

@@ -253,7 +253,7 @@ impl SegmentAggregationCollector for SegmentHistogramCollector {
         let get_bucket_pos = |val| (get_bucket_pos_f64(val, interval, offset) as i64);
 
         for doc in docs {
-            for val in accessor.values(*doc) {
+            for val in accessor.values_for_doc(*doc) {
                 let val = self.f64_from_fastfield_u64(val);
                 let bucket_pos = get_bucket_pos(val);

@@ -225,7 +225,7 @@ impl SegmentAggregationCollector for SegmentRangeCollector {
         let sub_aggregation_accessor =
             &agg_with_accessor.buckets.values[self.accessor_idx].sub_aggregation;
 
         for doc in docs {
-            for val in accessor.values(*doc) {
+            for val in accessor.values_for_doc(*doc) {
                 let bucket_pos = self.get_bucket_pos(val);
                 let bucket = &mut self.buckets[bucket_pos];

@@ -319,7 +319,7 @@ impl SegmentAggregationCollector for SegmentTermCollector {
             }
         } else {
             for doc in docs {
-                for term_id in accessor.values(*doc) {
+                for term_id in accessor.values_for_doc(*doc) {
                     let entry = self
                         .term_buckets
                         .entries

@@ -181,7 +181,7 @@ impl SegmentStatsCollector {
                 }
             } else {
                 for doc in docs {
-                    for val in field.values(*doc) {
+                    for val in field.values_for_doc(*doc) {
                         let val1 = f64_from_fastfield_u64(val, &self.field_type);
                         self.stats.collect(val1);
                     }
@@ -234,7 +234,7 @@ impl SegmentAggregationCollector for SegmentStatsCollector {
     ) -> crate::Result<()> {
         let field = &agg_with_accessor.metrics.values[self.accessor_idx].accessor;
 
-        for val in field.values(doc) {
+        for val in field.values_for_doc(doc) {
             let val1 = f64_from_fastfield_u64(val, &self.field_type);
             self.stats.collect(val1);
         }

@@ -200,7 +200,7 @@ impl SegmentCollector for FastFieldSegmentCollector {
     type Fruit = Vec<u64>;
 
     fn collect(&mut self, doc: DocId, _score: Score) {
-        self.vals.extend(self.reader.values(doc));
+        self.vals.extend(self.reader.values_for_doc(doc));
     }
 
     fn harvest(self) -> Vec<u64> {

@@ -51,7 +51,7 @@ impl FacetReader {
     /// Return the list of facet ordinals associated with a document.
     pub fn facet_ords(&self, doc: DocId) -> impl Iterator<Item = u64> + '_ {
-        self.facet_column.ords().values(doc)
+        self.facet_column.ords().values_for_doc(doc)
     }
 
     /// Accessor to the facet dictionary.
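
A short usage sketch of the accessor touched here, collecting one document's facet ordinals (obtaining the `FacetReader` is elided, and the `facet_ords_of` helper is illustrative):

```rust
use tantivy::fastfield::FacetReader;
use tantivy::DocId;

// Gathers the facet ordinals stored for `doc`; internally this now goes
// through `values_for_doc` on the underlying ordinal column.
fn facet_ords_of(reader: &FacetReader, doc: DocId) -> Vec<u64> {
    reader.facet_ords(doc).collect()
}
```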

@@ -467,7 +467,7 @@ mod tests {
     fn get_vals_for_docs(column: &Column<u64>, docs: Range<u32>) -> Vec<u64> {
         docs.into_iter()
-            .flat_map(|doc| column.values(doc))
+            .flat_map(|doc| column.values_for_doc(doc))
             .collect()
     }
@@ -515,11 +515,11 @@ mod tests {
         let segment_reader = searcher.segment_reader(0);
         let fast_fields = segment_reader.fast_fields();
         let str_column = fast_fields.str("text").unwrap().unwrap();
-        assert!(str_column.ords().values(0u32).eq([1, 0]),);
-        assert!(str_column.ords().values(1u32).next().is_none());
-        assert!(str_column.ords().values(2u32).eq([0]),);
-        assert!(str_column.ords().values(3u32).eq([0, 1]),);
-        assert!(str_column.ords().values(4u32).eq([2]),);
+        assert!(str_column.ords().values_for_doc(0u32).eq([1, 0]),);
+        assert!(str_column.ords().values_for_doc(1u32).next().is_none());
+        assert!(str_column.ords().values_for_doc(2u32).eq([0]),);
+        assert!(str_column.ords().values_for_doc(3u32).eq([0, 1]),);
+        assert!(str_column.ords().values_for_doc(4u32).eq([2]),);
         let mut str_term = String::default();
         assert!(str_column.ord_to_str(0, &mut str_term).unwrap());

@@ -338,20 +338,23 @@ mod tests {
             let column_float_opt: Option<Column<f64>> = columns[1].1.open().unwrap().into();
             assert!(column_float_opt
                 .unwrap()
-                .values(0)
+                .values_for_doc(0)
                 .eq([1.02f64].into_iter()));
         }
         {
             assert_eq!(columns[2].0, "nested\u{1}child");
             let column_nest_child_opt: Option<Column<i64>> = columns[2].1.open().unwrap().into();
-            assert!(column_nest_child_opt.unwrap().values(0).eq([3].into_iter()));
+            assert!(column_nest_child_opt
+                .unwrap()
+                .values_for_doc(0)
+                .eq([3].into_iter()));
         }
         {
             assert_eq!(columns[3].0, "nested\u{1}child2");
             let column_nest_child2_opt: Option<Column<i64>> = columns[3].1.open().unwrap().into();
             assert!(column_nest_child2_opt
                 .unwrap()
-                .values(0)
+                .values_for_doc(0)
                 .eq([5].into_iter()));
         }
         {
@@ -390,7 +393,7 @@ mod tests {
         assert_eq!(columns[0].0, "obj");
         let dynamic_column = columns[0].1.open().unwrap();
         let col: Option<Column<i64>> = dynamic_column.into();
-        let vals: Vec<i64> = col.unwrap().values(0).collect();
+        let vals: Vec<i64> = col.unwrap().values_for_doc(0).collect();
         assert_eq!(&vals, &[18, 19])
     }

@@ -1490,7 +1490,7 @@ mod tests {
         let in_order_alive_ids: Vec<u64> = segment_reader
             .doc_ids_alive()
-            .flat_map(|doc| fast_field_reader.values(doc))
+            .flat_map(|doc| fast_field_reader.values_for_doc(doc))
             .collect();
         assert_eq!(&in_order_alive_ids[..], &[9, 8, 7, 6, 5, 4, 1, 0]);
         Ok(())
@@ -1550,7 +1550,7 @@ mod tests {
         let fast_field_reader = segment_reader.fast_fields().u64("id")?;
         let in_order_alive_ids: Vec<u64> = segment_reader
             .doc_ids_alive()
-            .flat_map(|doc| fast_field_reader.values(doc))
+            .flat_map(|doc| fast_field_reader.values_for_doc(doc))
             .collect();
         assert_eq!(&in_order_alive_ids[..], &[9, 8, 7, 6, 5, 4, 2, 0]);
         Ok(())
@@ -1795,7 +1795,7 @@ mod tests {
                 let ff_reader = segment_reader.fast_fields().u64("id").unwrap();
                 segment_reader
                     .doc_ids_alive()
-                    .flat_map(move |doc| ff_reader.values(doc).collect_vec().into_iter())
+                    .flat_map(move |doc| ff_reader.values_for_doc(doc).collect_vec().into_iter())
             })
             .collect();
@@ -1806,7 +1806,7 @@ mod tests {
                 let ff_reader = segment_reader.fast_fields().u64("id").unwrap();
                 segment_reader
                     .doc_ids_alive()
-                    .flat_map(move |doc| ff_reader.values(doc).collect_vec().into_iter())
+                    .flat_map(move |doc| ff_reader.values_for_doc(doc).collect_vec().into_iter())
             })
             .collect();
@@ -1824,7 +1824,7 @@ mod tests {
             .unwrap()
             .unwrap();
         for doc in segment_reader.doc_ids_alive() {
-            all_ips.extend(ip_reader.values(doc));
+            all_ips.extend(ip_reader.values_for_doc(doc));
         }
         num_ips += ip_reader.values.num_vals();
     }
@@ -1883,7 +1883,7 @@ mod tests {
     for reader in searcher.segment_readers() {
         if let Some(ff_reader) = reader.fast_fields().column_opt::<Ipv6Addr>("ips").unwrap() {
             for doc in reader.doc_ids_alive() {
-                ips.extend(ff_reader.values(doc));
+                ips.extend(ff_reader.values_for_doc(doc));
             }
         }
     }
@@ -1915,7 +1915,7 @@ mod tests {
     for reader in searcher.segment_readers() {
         if let Some(ff_reader) = reader.fast_fields().column_opt::<Ipv6Addr>("ips").unwrap() {
             for doc in reader.doc_ids_alive() {
-                ips.extend(ff_reader.values(doc));
+                ips.extend(ff_reader.values_for_doc(doc));
             }
         }
     }
@@ -1935,12 +1935,12 @@ mod tests {
             .unwrap()
             .unwrap();
         for doc in segment_reader.doc_ids_alive() {
-            let vals: Vec<u64> = ff_reader.values(doc).collect();
+            let vals: Vec<u64> = ff_reader.values_for_doc(doc).collect();
             assert_eq!(vals.len(), 2);
             assert_eq!(vals[0], vals[1]);
             assert_eq!(id_reader.first(doc), Some(vals[0]));
 
-            let bool_vals: Vec<bool> = bool_ff_reader.values(doc).collect();
+            let bool_vals: Vec<bool> = bool_ff_reader.values_for_doc(doc).collect();
             assert_eq!(bool_vals.len(), 2);
             assert_ne!(bool_vals[0], bool_vals[1]);
@@ -2236,8 +2236,8 @@ mod tests {
     assert_eq!(val_col.get_cardinality(), Cardinality::Multivalued);
     assert_eq!(id_col.first(0u32), Some(1u64));
     assert_eq!(id_col.first(1u32), Some(2u64));
-    assert!(val_col.values(0u32).eq([1u64, 1u64].into_iter()));
-    assert!(val_col.values(1u32).eq([2u64, 2u64].into_iter()));
+    assert!(val_col.values_for_doc(0u32).eq([1u64, 1u64].into_iter()));
+    assert!(val_col.values_for_doc(1u32).eq([2u64, 2u64].into_iter()));
 }
 
 #[test]

@@ -1497,7 +1497,7 @@ mod tests {
         let mut vals: Vec<u64> = Vec::new();
         let mut test_vals = move |col: &Column<u64>, doc: DocId, expected: &[u64]| {
             vals.clear();
-            vals.extend(col.values(doc));
+            vals.extend(col.values_for_doc(doc));
             assert_eq!(&vals[..], expected);
         };