mirror of https://github.com/quickwit-oss/tantivy.git (synced 2025-12-30 14:02:55 +00:00)

Compare commits (1 commit): columnar_o... → update_exa...

Commit f5a716e827
@@ -15,7 +15,8 @@ rust-version = "1.63"
exclude = ["benches/*.json", "benches/*.txt"]

[dependencies]
oneshot = "0.1.7"
# Switch back to the non-forked oneshot crate once https://github.com/faern/oneshot/pull/35 is merged
oneshot = { git = "https://github.com/fulmicoton/oneshot.git", rev = "b208f49" }
base64 = "0.22.0"
byteorder = "1.4.3"
crc32fast = "1.3.2"
@@ -63,7 +64,7 @@ query-grammar = { version = "0.22.0", path = "./query-grammar", package = "tanti
tantivy-bitpacker = { version = "0.6", path = "./bitpacker" }
common = { version = "0.7", path = "./common/", package = "tantivy-common" }
tokenizer-api = { version = "0.3", path = "./tokenizer-api", package = "tantivy-tokenizer-api" }
sketches-ddsketch = { version = "0.3.0", features = ["use_serde"] }
sketches-ddsketch = { version = "0.2.1", features = ["use_serde"] }
futures-util = { version = "0.3.28", optional = true }
fnv = "1.0.7"

@@ -47,7 +47,6 @@ fn bench_agg(mut group: InputGroup<Index>) {
    register!(group, average_f64);
    register!(group, average_f64_u64);
    register!(group, stats_f64);
    register!(group, extendedstats_f64);
    register!(group, percentiles_f64);
    register!(group, terms_few);
    register!(group, terms_many);
@@ -106,12 +105,7 @@ fn stats_f64(index: &Index) {
    });
    exec_term_with_agg(index, agg_req)
}
fn extendedstats_f64(index: &Index) {
    let agg_req = json!({
        "extendedstats_f64": { "extended_stats": { "field": "score_f64", } }
    });
    exec_term_with_agg(index, agg_req)
}

fn percentiles_f64(index: &Index) {
    let agg_req = json!({
        "mypercentiles": {

@@ -23,12 +23,6 @@ downcast-rs = "1.2.0"
proptest = "1"
more-asserts = "0.3.1"
rand = "0.8"
binggan = "0.8.1"

[[bench]]
name = "bench_merge"
harness = false

[features]
unstable = []

@@ -1,101 +0,0 @@
#![feature(test)]
extern crate test;

use core::fmt;
use std::fmt::{Display, Formatter};

use binggan::{black_box, BenchRunner};
use tantivy_columnar::*;

enum Card {
    Multi,
    Sparse,
    Dense,
}
impl Display for Card {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match self {
            Card::Multi => write!(f, "multi"),
            Card::Sparse => write!(f, "sparse"),
            Card::Dense => write!(f, "dense"),
        }
    }
}

const NUM_DOCS: u32 = 100_000;

fn generate_columnar(card: Card, num_docs: u32) -> ColumnarReader {
    use tantivy_columnar::ColumnarWriter;

    let mut columnar_writer = ColumnarWriter::default();

    match card {
        Card::Multi => {
            columnar_writer.record_numerical(0, "price", 10u64);
            columnar_writer.record_numerical(0, "price", 10u64);
        }
        _ => {}
    }

    for i in 0..num_docs {
        match card {
            Card::Multi | Card::Sparse => {
                if i % 8 == 0 {
                    columnar_writer.record_numerical(i, "price", i as u64);
                }
            }
            Card::Dense => {
                if i % 6 == 0 {
                    columnar_writer.record_numerical(i, "price", i as u64);
                }
            }
        }
    }

    let mut wrt: Vec<u8> = Vec::new();
    columnar_writer.serialize(num_docs, None, &mut wrt).unwrap();

    ColumnarReader::open(wrt).unwrap()
}
fn main() {
    let mut inputs = Vec::new();

    let mut add_combo = |card1: Card, card2: Card| {
        inputs.push((
            format!("merge_{card1}_and_{card2}"),
            vec![
                generate_columnar(card1, NUM_DOCS),
                generate_columnar(card2, NUM_DOCS),
            ],
        ));
    };

    add_combo(Card::Multi, Card::Multi);
    add_combo(Card::Dense, Card::Dense);
    add_combo(Card::Sparse, Card::Sparse);
    add_combo(Card::Sparse, Card::Dense);
    add_combo(Card::Multi, Card::Dense);
    add_combo(Card::Multi, Card::Sparse);

    let runner: BenchRunner = BenchRunner::new();
    let mut group = runner.new_group();
    for (input_name, columnar_readers) in inputs.iter() {
        group.register_with_input(
            input_name,
            columnar_readers,
            move |columnar_readers: &Vec<ColumnarReader>| {
                let mut out = vec![];
                let columnar_readers = columnar_readers.iter().collect::<Vec<_>>();
                let merge_row_order = StackMergeOrder::stack(&columnar_readers[..]);

                let _ = black_box(merge_columnar(
                    &columnar_readers,
                    &[],
                    merge_row_order.into(),
                    &mut out,
                ));
            },
        );
    }
    group.run();
}
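
Side note: since the [[bench]] entry above declares harness = false, binggan drives this file as a plain binary. Assuming the usual cargo setup for this crate, an invocation along the lines of `cargo bench --bench bench_merge` should run every registered cardinality combination; the exact command is an assumption, not part of the diff.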

@@ -150,62 +150,61 @@ mod tests {
        );
    }

    // #[test]
    // fn test_merge_index_multivalued_sorted() {
    //     let column_indexes: Vec<ColumnIndex> = vec![MultiValueIndex::for_test(&[0, 2,
    //     5]).into()]; let merge_row_order: MergeRowOrder = ShuffleMergeOrder::for_test(
    //         &[2],
    //         vec![
    //             RowAddr {
    //                 segment_ord: 0u32,
    //                 row_id: 1u32,
    //             },
    //             RowAddr {
    //                 segment_ord: 0u32,
    //                 row_id: 0u32,
    //             },
    //         ],
    //     )
    //     .into();
    //     let merged_column_index = merge_column_index(&column_indexes[..], &merge_row_order);
    //     let SerializableColumnIndex::Multivalued(serializable_multivalue_index) =
    //         merged_column_index else { panic!("Expected a multivalued index")
    //     };
    //     serializable_multivalue_index.doc_ids_with_values_opt.
    //     let start_indexes: Vec<RowId> = start_index_iterable.boxed_iter().collect();
    //     assert_eq!(&start_indexes, &[0, 3, 5]);
    // }
    #[test]
    fn test_merge_index_multivalued_sorted() {
        let column_indexes: Vec<ColumnIndex> = vec![MultiValueIndex::for_test(&[0, 2, 5]).into()];
        let merge_row_order: MergeRowOrder = ShuffleMergeOrder::for_test(
            &[2],
            vec![
                RowAddr {
                    segment_ord: 0u32,
                    row_id: 1u32,
                },
                RowAddr {
                    segment_ord: 0u32,
                    row_id: 0u32,
                },
            ],
        )
        .into();
        let merged_column_index = merge_column_index(&column_indexes[..], &merge_row_order);
        let SerializableColumnIndex::Multivalued(start_index_iterable) = merged_column_index else {
            panic!("Expected a multivalued index")
        };
        let start_indexes: Vec<RowId> = start_index_iterable.boxed_iter().collect();
        assert_eq!(&start_indexes, &[0, 3, 5]);
    }

    // #[test]
    // fn test_merge_index_multivalued_sorted_several_segment() {
    //     let column_indexes: Vec<ColumnIndex> = vec![
    //         MultiValueIndex::for_test(&[0, 2, 5]).into(),
    //         ColumnIndex::Empty { num_docs: 0 },
    //         MultiValueIndex::for_test(&[0, 1, 4]).into(),
    //     ];
    //     let merge_row_order: MergeRowOrder = ShuffleMergeOrder::for_test(
    //         &[2, 0, 2],
    //         vec![
    //             RowAddr {
    //                 segment_ord: 2u32,
    //                 row_id: 1u32,
    //             },
    //             RowAddr {
    //                 segment_ord: 0u32,
    //                 row_id: 0u32,
    //             },
    //             RowAddr {
    //                 segment_ord: 2u32,
    //                 row_id: 0u32,
    //             },
    //         ],
    //     )
    //     .into();
    //     let merged_column_index = merge_column_index(&column_indexes[..], &merge_row_order);
    //     let SerializableColumnIndex::Multivalued(serializable_multivalue_index) =
    //         merged_column_index else { panic!("Expected a multivalued index")
    //     };
    //     let start_indexes: Vec<RowId> = start_index_iterable.boxed_iter().collect();
    //     assert_eq!(&start_indexes, &[0, 3, 5, 6]);
    // }
    #[test]
    fn test_merge_index_multivalued_sorted_several_segment() {
        let column_indexes: Vec<ColumnIndex> = vec![
            MultiValueIndex::for_test(&[0, 2, 5]).into(),
            ColumnIndex::Empty { num_docs: 0 },
            MultiValueIndex::for_test(&[0, 1, 4]).into(),
        ];
        let merge_row_order: MergeRowOrder = ShuffleMergeOrder::for_test(
            &[2, 0, 2],
            vec![
                RowAddr {
                    segment_ord: 2u32,
                    row_id: 1u32,
                },
                RowAddr {
                    segment_ord: 0u32,
                    row_id: 0u32,
                },
                RowAddr {
                    segment_ord: 2u32,
                    row_id: 0u32,
                },
            ],
        )
        .into();
        let merged_column_index = merge_column_index(&column_indexes[..], &merge_row_order);
        let SerializableColumnIndex::Multivalued(start_index_iterable) = merged_column_index else {
            panic!("Expected a multivalued index")
        };
        let start_indexes: Vec<RowId> = start_index_iterable.boxed_iter().collect();
        assert_eq!(&start_indexes, &[0, 3, 5, 6]);
    }
}

@@ -9,23 +9,22 @@ pub fn merge_column_index_shuffled<'a>(
    cardinality_after_merge: Cardinality,
    shuffle_merge_order: &'a ShuffleMergeOrder,
) -> SerializableColumnIndex<'a> {
    todo!();
    // match cardinality_after_merge {
    //     Cardinality::Full => SerializableColumnIndex::Full,
    //     Cardinality::Optional => {
    //         let non_null_row_ids =
    //             merge_column_index_shuffled_optional(column_indexes, shuffle_merge_order);
    //         SerializableColumnIndex::Optional {
    //             non_null_row_ids,
    //             num_rows: shuffle_merge_order.num_rows(),
    //         }
    //     }
    //     Cardinality::Multivalued => {
    //         let multivalue_start_index =
    //             merge_column_index_shuffled_multivalued(column_indexes, shuffle_merge_order);
    //         SerializableColumnIndex::Multivalued(multivalue_start_index)
    //     }
    // }
    match cardinality_after_merge {
        Cardinality::Full => SerializableColumnIndex::Full,
        Cardinality::Optional => {
            let non_null_row_ids =
                merge_column_index_shuffled_optional(column_indexes, shuffle_merge_order);
            SerializableColumnIndex::Optional {
                non_null_row_ids,
                num_rows: shuffle_merge_order.num_rows(),
            }
        }
        Cardinality::Multivalued => {
            let multivalue_start_index =
                merge_column_index_shuffled_multivalued(column_indexes, shuffle_merge_order);
            SerializableColumnIndex::Multivalued(multivalue_start_index)
        }
    }
}

/// Merge several column indexes into one, ordering rows according to the merge_order passed as
@@ -138,35 +137,35 @@ mod tests {
        assert!(integrate_num_vals([3, 0, 10, 20].into_iter()).eq([0, 3, 3, 13, 33].into_iter()));
    }

    // #[test]
    // fn test_merge_column_index_optional_shuffle() {
    //     let optional_index: ColumnIndex = OptionalIndex::for_test(2, &[0]).into();
    //     let column_indexes = [optional_index, ColumnIndex::Full];
    //     let row_addrs = vec![
    //         RowAddr {
    //             segment_ord: 0u32,
    //             row_id: 1u32,
    //         },
    //         RowAddr {
    //             segment_ord: 1u32,
    //             row_id: 0u32,
    //         },
    //     ];
    //     let shuffle_merge_order = ShuffleMergeOrder::for_test(&[2, 1], row_addrs);
    //     let serializable_index = merge_column_index_shuffled(
    //         &column_indexes[..],
    //         Cardinality::Optional,
    //         &shuffle_merge_order,
    //     );
    //     let SerializableColumnIndex::Optional {
    //         non_null_row_ids,
    //         num_rows,
    //     } = serializable_index
    //     else {
    //         panic!()
    //     };
    //     assert_eq!(num_rows, 2);
    //     let non_null_rows: Vec<RowId> = non_null_row_ids.boxed_iter().collect();
    //     assert_eq!(&non_null_rows, &[1]);
    // }
    #[test]
    fn test_merge_column_index_optional_shuffle() {
        let optional_index: ColumnIndex = OptionalIndex::for_test(2, &[0]).into();
        let column_indexes = [optional_index, ColumnIndex::Full];
        let row_addrs = vec![
            RowAddr {
                segment_ord: 0u32,
                row_id: 1u32,
            },
            RowAddr {
                segment_ord: 1u32,
                row_id: 0u32,
            },
        ];
        let shuffle_merge_order = ShuffleMergeOrder::for_test(&[2, 1], row_addrs);
        let serializable_index = merge_column_index_shuffled(
            &column_indexes[..],
            Cardinality::Optional,
            &shuffle_merge_order,
        );
        let SerializableColumnIndex::Optional {
            non_null_row_ids,
            num_rows,
        } = serializable_index
        else {
            panic!()
        };
        assert_eq!(num_rows, 2);
        let non_null_rows: Vec<RowId> = non_null_row_ids.boxed_iter().collect();
        assert_eq!(&non_null_rows, &[1]);
    }
}

@@ -1,8 +1,6 @@
use std::ops::Range;
use std::iter;

use crate::column_index::multivalued_index::SerializableMultivalueIndex;
use crate::column_index::serialize::SerializableOptionalIndex;
use crate::column_index::SerializableColumnIndex;
use crate::column_index::{SerializableColumnIndex, Set};
use crate::iterable::Iterable;
use crate::{Cardinality, ColumnIndex, RowId, StackMergeOrder};

@@ -17,140 +15,23 @@ pub fn merge_column_index_stacked<'a>(
) -> SerializableColumnIndex<'a> {
    match cardinality_after_merge {
        Cardinality::Full => SerializableColumnIndex::Full,
        Cardinality::Optional => SerializableColumnIndex::Optional(SerializableOptionalIndex {
        Cardinality::Optional => SerializableColumnIndex::Optional {
            non_null_row_ids: Box::new(StackedOptionalIndex {
                columns,
                stack_merge_order,
            }),
            num_rows: stack_merge_order.num_rows(),
        }),
        },
        Cardinality::Multivalued => {
            let serializable_multivalue_index =
                make_serializable_multivalued_index(columns, stack_merge_order);
            SerializableColumnIndex::Multivalued(serializable_multivalue_index)
            let stacked_multivalued_index = StackedMultivaluedIndex {
                columns,
                stack_merge_order,
            };
            SerializableColumnIndex::Multivalued(Box::new(stacked_multivalued_index))
        }
    }
}

struct StackedDocIdsWithValues<'a> {
    column_indexes: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
}

impl Iterable<u32> for StackedDocIdsWithValues<'_> {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = u32> + '_> {
        Box::new((0..self.column_indexes.len()).flat_map(|i| {
            let column_index = &self.column_indexes[i];
            let doc_range = self.stack_merge_order.columnar_range(i);
            get_doc_ids_with_values(column_index, doc_range)
        }))
    }
}

fn get_doc_ids_with_values<'a>(
    column_index: &'a ColumnIndex,
    doc_range: Range<u32>,
) -> Box<dyn Iterator<Item = u32> + 'a> {
    match column_index {
        ColumnIndex::Empty { .. } => Box::new(0..0),
        ColumnIndex::Full => Box::new(doc_range),
        ColumnIndex::Optional(optional_index) => Box::new(
            optional_index
                .iter_rows()
                .map(move |row| row + doc_range.start),
        ),
        ColumnIndex::Multivalued(multivalued_index) => Box::new(
            multivalued_index
                .optional_index
                .iter_rows()
                .map(move |row| row + doc_range.start),
        ),
    }
}

fn stack_doc_ids_with_values<'a>(
    column_indexes: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
) -> SerializableOptionalIndex<'a> {
    let num_rows = stack_merge_order.num_rows();
    SerializableOptionalIndex {
        non_null_row_ids: Box::new(StackedDocIdsWithValues {
            column_indexes,
            stack_merge_order,
        }),
        num_rows,
    }
}

struct StackedStartOffsets<'a> {
    column_indexes: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
}

fn get_num_values_iterator<'a>(
    column_index: &'a ColumnIndex,
    num_docs: u32,
) -> Box<dyn Iterator<Item = u32> + 'a> {
    match column_index {
        ColumnIndex::Empty { .. } => Box::new(std::iter::empty()),
        ColumnIndex::Full => Box::new(std::iter::repeat(1u32).take(num_docs as usize)),
        ColumnIndex::Optional(optional_index) => {
            Box::new(std::iter::repeat(1u32).take(optional_index.num_non_nulls() as usize))
        }
        ColumnIndex::Multivalued(multivalued_index) => {
            let vals: Vec<u32> = multivalued_index.start_index_column.iter().collect();
            Box::new(
                multivalued_index
                    .start_index_column
                    .iter()
                    .scan(0u32, |previous_start_offset, current_start_offset| {
                        let num_vals = current_start_offset - *previous_start_offset;
                        *previous_start_offset = current_start_offset;
                        Some(num_vals)
                    })
                    .skip(1),
            )
        }
    }
}
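
Worked example for the multivalued arm above: with start offsets [0, 2, 5], the scan emits the running differences 0, 2, 3 and the trailing skip(1) drops the leading zero, leaving the per-document value counts [2, 3] (doc 0 holds two values, doc 1 holds three).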

impl<'a> Iterable for StackedStartOffsets<'a> {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
        let num_values_it = (0..self.column_indexes.len()).flat_map(|columnar_id| {
            let num_docs = self.stack_merge_order.columnar_range(columnar_id).len() as u32;
            let column_index = &self.column_indexes[columnar_id];
            get_num_values_iterator(column_index, num_docs)
        });
        Box::new(std::iter::once(0u64).chain(num_values_it.into_iter().scan(
            0u64,
            |cumulated, el| {
                *cumulated += el as u64;
                Some(*cumulated)
            },
        )))
    }
}

fn stack_start_offsets<'a>(
    column_indexes: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
) -> Box<dyn Iterable + 'a> {
    Box::new(StackedStartOffsets {
        column_indexes,
        stack_merge_order,
    })
}

fn make_serializable_multivalued_index<'a>(
    columns: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
) -> SerializableMultivalueIndex<'a> {
    SerializableMultivalueIndex {
        doc_ids_with_values: stack_doc_ids_with_values(columns, stack_merge_order),
        start_offsets: stack_start_offsets(columns, stack_merge_order),
    }
}

struct StackedOptionalIndex<'a> {
    columns: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
@@ -181,3 +62,87 @@ impl<'a> Iterable<RowId> for StackedOptionalIndex<'a> {
        )
    }
}

#[derive(Clone, Copy)]
struct StackedMultivaluedIndex<'a> {
    columns: &'a [ColumnIndex],
    stack_merge_order: &'a StackMergeOrder,
}

fn convert_column_opt_to_multivalued_index<'a>(
    column_index_opt: &'a ColumnIndex,
    num_rows: RowId,
) -> Box<dyn Iterator<Item = RowId> + 'a> {
    match column_index_opt {
        ColumnIndex::Empty { .. } => Box::new(iter::repeat(0u32).take(num_rows as usize + 1)),
        ColumnIndex::Full => Box::new(0..num_rows + 1),
        ColumnIndex::Optional(optional_index) => {
            Box::new(
                (0..num_rows)
                    // TODO optimize
                    .map(|row_id| optional_index.rank(row_id))
                    .chain(std::iter::once(optional_index.num_non_nulls())),
            )
        }
        ColumnIndex::Multivalued(multivalued_index) => multivalued_index.start_index_column.iter(),
    }
}

impl<'a> Iterable<RowId> for StackedMultivaluedIndex<'a> {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = RowId> + '_> {
        let multivalued_indexes =
            self.columns
                .iter()
                .enumerate()
                .map(|(columnar_id, column_opt)| {
                    let num_rows =
                        self.stack_merge_order.columnar_range(columnar_id).len() as RowId;
                    convert_column_opt_to_multivalued_index(column_opt, num_rows)
                });
        stack_multivalued_indexes(multivalued_indexes)
    }
}

// Refactor me
fn stack_multivalued_indexes<'a>(
    mut multivalued_indexes: impl Iterator<Item = Box<dyn Iterator<Item = RowId> + 'a>> + 'a,
) -> Box<dyn Iterator<Item = RowId> + 'a> {
    let mut offset = 0;
    let mut last_row_id = 0;
    let mut current_it = multivalued_indexes.next();
    Box::new(std::iter::from_fn(move || loop {
        if let Some(row_id) = current_it.as_mut()?.next() {
            last_row_id = offset + row_id;
            return Some(last_row_id);
        }
        offset = last_row_id;
        loop {
            current_it = multivalued_indexes.next();
            if current_it.as_mut()?.next().is_some() {
                break;
            }
        }
    }))
}

#[cfg(test)]
mod tests {
    use crate::RowId;

    fn it<'a>(row_ids: &'a [RowId]) -> Box<dyn Iterator<Item = RowId> + 'a> {
        Box::new(row_ids.iter().copied())
    }

    #[test]
    fn test_stack() {
        let columns = [
            it(&[0u32, 0u32]),
            it(&[0u32, 1u32, 1u32, 4u32]),
            it(&[0u32, 3u32, 5u32]),
            it(&[0u32, 4u32]),
        ]
        .into_iter();
        let start_offsets: Vec<RowId> = super::stack_multivalued_indexes(columns).collect();
        assert_eq!(start_offsets, &[0, 0, 1, 1, 4, 7, 9, 13]);
    }
}
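
Why test_stack expects [0, 0, 1, 1, 4, 7, 9, 13]: the first column's offsets [0, 0] pass through unchanged; each later column drops its leading 0 and is shifted by the last offset emitted so far. So [0, 1, 1, 4] contributes [1, 1, 4] (shift 0), [0, 3, 5] contributes [7, 9] (shift 4), and [0, 4] contributes [13] (shift 9).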

@@ -11,11 +11,8 @@ mod serialize;
use std::ops::Range;

pub use merge::merge_column_index;
pub(crate) use multivalued_index::SerializableMultivalueIndex;
pub use optional_index::{OptionalIndex, Set};
pub use serialize::{
    open_column_index, serialize_column_index, SerializableColumnIndex, SerializableOptionalIndex,
};
pub use serialize::{open_column_index, serialize_column_index, SerializableColumnIndex};

use crate::column_index::multivalued_index::MultiValueIndex;
use crate::{Cardinality, DocId, RowId};

@@ -3,64 +3,35 @@ use std::io::Write;
use std::ops::Range;
use std::sync::Arc;

use common::{CountingWriter, OwnedBytes};
use common::OwnedBytes;

use super::optional_index::{open_optional_index, serialize_optional_index};
use super::{OptionalIndex, SerializableOptionalIndex, Set};
use crate::column_values::{
    load_u64_based_column_values, serialize_u64_based_column_values, CodecType, ColumnValues,
};
use crate::iterable::Iterable;
use crate::{DocId, RowId};

pub struct SerializableMultivalueIndex<'a> {
    pub doc_ids_with_values: SerializableOptionalIndex<'a>,
    pub start_offsets: Box<dyn Iterable<u64> + 'a>,
}

pub fn serialize_multivalued_index<'a>(
    multivalued_index: &SerializableMultivalueIndex<'a>,
pub fn serialize_multivalued_index(
    multivalued_index: &dyn Iterable<RowId>,
    output: &mut impl Write,
) -> io::Result<()> {
    let SerializableMultivalueIndex {
        doc_ids_with_values,
        start_offsets,
    } = multivalued_index;
    let mut count_writer = CountingWriter::wrap(output);
    let SerializableOptionalIndex {
        non_null_row_ids,
        num_rows,
    } = doc_ids_with_values;
    serialize_optional_index(&**non_null_row_ids, *num_rows, &mut count_writer)?;
    let optional_len = count_writer.written_bytes() as u32;
    let output = count_writer.finish();
    serialize_u64_based_column_values(
        &**start_offsets,
        multivalued_index,
        &[CodecType::Bitpacked, CodecType::Linear],
        output,
    )?;
    output.write_all(&optional_len.to_le_bytes())?;
    Ok(())
}

pub fn open_multivalued_index(bytes: OwnedBytes) -> io::Result<MultiValueIndex> {
    let (body_bytes, optional_index_len) = bytes.rsplit(4);
    let optional_index_len = u32::from_le_bytes(optional_index_len.as_slice().try_into().unwrap());
    let (optional_index_bytes, start_index_bytes) = body_bytes.split(optional_index_len as usize);
    let optional_index = open_optional_index(optional_index_bytes)?;
    let start_index_column: Arc<dyn ColumnValues<RowId>> =
        load_u64_based_column_values(start_index_bytes)?;
    Ok(MultiValueIndex {
        optional_index,
        start_index_column,
    })
    let start_index_column: Arc<dyn ColumnValues<RowId>> = load_u64_based_column_values(bytes)?;
    Ok(MultiValueIndex { start_index_column })
}
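
For orientation, the two variants visible above use different byte layouts: the rank-based one writes [optional-index bytes][start-offset column bytes][4-byte little-endian length of the optional-index part], which is why open_multivalued_index first peels the length off the end with rsplit(4) and then splits the body; the other variant stores nothing but the start-offset column and loads the whole buffer as column values.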

#[derive(Clone)]
/// Index to resolve value range for given doc_id.
/// Starts at 0.
pub struct MultiValueIndex {
    pub optional_index: OptionalIndex,
    pub start_index_column: Arc<dyn crate::ColumnValues<RowId>>,
}

@@ -72,27 +43,16 @@ impl std::fmt::Debug for MultiValueIndex {
    }
}

impl From<Arc<dyn ColumnValues<RowId>>> for MultiValueIndex {
    fn from(start_index_column: Arc<dyn ColumnValues<RowId>>) -> Self {
        MultiValueIndex { start_index_column }
    }
}

impl MultiValueIndex {
    pub fn for_test(start_offsets: &[RowId]) -> MultiValueIndex {
        assert!(start_offsets.len() > 0);
        assert_eq!(start_offsets[0], 0);
        let mut doc_with_values = Vec::new();
        let mut compact_start_offsets: Vec<u64> = vec![0];
        for doc in 0..start_offsets.len() - 1 {
            if start_offsets[doc] < start_offsets[doc + 1] {
                doc_with_values.push(doc as RowId);
                compact_start_offsets.push(start_offsets[doc + 1] as u64);
            }
        }
        let serializable_multivalued_index = SerializableMultivalueIndex {
            doc_ids_with_values: SerializableOptionalIndex {
                non_null_row_ids: Box::new(&doc_with_values[..]),
                num_rows: start_offsets.len() as u32 - 1,
            },
            start_offsets: Box::new(&compact_start_offsets[..]),
        };
        let mut buffer = Vec::new();
        serialize_multivalued_index(&serializable_multivalued_index, &mut buffer).unwrap();
        serialize_multivalued_index(&start_offsets, &mut buffer).unwrap();
        let bytes = OwnedBytes::new(buffer);
        open_multivalued_index(bytes).unwrap()
    }
@@ -101,19 +61,15 @@ impl MultiValueIndex {
    /// the given document are `start..end`.
    #[inline]
    pub(crate) fn range(&self, doc_id: DocId) -> Range<RowId> {
        let Some(rank) = self.optional_index.rank_if_exists(doc_id) else {
            return 0..0;
        };
        let start = self.start_index_column.get_val(rank);
        let end = self.start_index_column.get_val(rank + 1);
        let start = self.start_index_column.get_val(doc_id);
        let end = self.start_index_column.get_val(doc_id + 1);
        start..end
    }
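
Small worked example for range over a plain start-offset column [0, 2, 3, 3]: doc 0 maps to 0..2 (two values), doc 1 to 2..3 (one value), and doc 2 to 3..3 (no values).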

    /// Returns the number of documents in the index.
    #[inline]
    pub fn num_docs(&self) -> u32 {
        self.optional_index.num_docs()
        // self.start_index_column.num_vals() - 1
        self.start_index_column.num_vals() - 1
    }

    /// Converts a list of ranks (row ids of values) in a 1:n index to the corresponding list of
@@ -152,10 +108,6 @@ impl MultiValueIndex {
            }
        }
        ranks.truncate(write_doc_pos);

        for rank in ranks.iter_mut() {
            *rank = self.optional_index.select(*rank);
        }
    }
}

@@ -182,7 +134,6 @@ mod tests {
        let positions = &[10u32, 11, 15, 20, 21, 22];
        assert_eq!(index_to_pos_helper(&index, 0..5, positions), vec![1, 3, 4]);
        assert_eq!(index_to_pos_helper(&index, 1..5, positions), vec![1, 3, 4]);

        assert_eq!(index_to_pos_helper(&index, 0..5, &[9]), vec![0]);
        assert_eq!(index_to_pos_helper(&index, 1..5, &[10]), vec![1]);
        assert_eq!(index_to_pos_helper(&index, 1..5, &[11]), vec![1]);

@@ -86,14 +86,8 @@ pub struct OptionalIndex {
    block_metas: Arc<[BlockMeta]>,
}

impl<'a> Iterable<u32> for &'a OptionalIndex {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = u32> + '_> {
        Box::new(self.iter_rows())
    }
}

impl std::fmt::Debug for OptionalIndex {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("OptionalIndex")
            .field("num_rows", &self.num_rows)
            .field("num_non_null_rows", &self.num_non_null_rows)
@@ -202,7 +196,6 @@ impl Set<RowId> for OptionalIndex {
        } = row_addr_from_row_id(doc_id);
        let block_meta = self.block_metas[block_id as usize];
        let block = self.block(block_meta);

        let block_offset_row_id = match block {
            Block::Dense(dense_block) => dense_block.rank(in_block_row_id),
            Block::Sparse(sparse_block) => sparse_block.rank(in_block_row_id),
@@ -256,10 +249,6 @@ impl Set<RowId> for OptionalIndex {
}

impl OptionalIndex {
    pub fn new_empty(num_rows: RowId) -> OptionalIndex {
        Self::for_test(num_rows, &[])
    }

    pub fn for_test(num_rows: RowId, row_ids: &[RowId]) -> OptionalIndex {
        assert!(row_ids
            .last()

@@ -3,41 +3,28 @@ use std::io::Write;

use common::{CountingWriter, OwnedBytes};

use super::multivalued_index::SerializableMultivalueIndex;
use super::OptionalIndex;
use crate::column_index::multivalued_index::serialize_multivalued_index;
use crate::column_index::optional_index::serialize_optional_index;
use crate::column_index::ColumnIndex;
use crate::iterable::Iterable;
use crate::{Cardinality, RowId};

pub struct SerializableOptionalIndex<'a> {
    pub non_null_row_ids: Box<dyn Iterable<RowId> + 'a>,
    pub num_rows: RowId,
}

impl<'a> From<&'a OptionalIndex> for SerializableOptionalIndex<'a> {
    fn from(optional_index: &'a OptionalIndex) -> Self {
        SerializableOptionalIndex {
            non_null_row_ids: Box::new(optional_index),
            num_rows: optional_index.num_docs(),
        }
    }
}

pub enum SerializableColumnIndex<'a> {
    Full,
    Optional(SerializableOptionalIndex<'a>),
    Optional {
        non_null_row_ids: Box<dyn Iterable<RowId> + 'a>,
        num_rows: RowId,
    },
    // TODO remove the Arc<dyn> apart from serialization this is not
    // dynamic at all.
    Multivalued(SerializableMultivalueIndex<'a>),
    Multivalued(Box<dyn Iterable<RowId> + 'a>),
}

impl<'a> SerializableColumnIndex<'a> {
    pub fn get_cardinality(&self) -> Cardinality {
        match self {
            SerializableColumnIndex::Full => Cardinality::Full,
            SerializableColumnIndex::Optional(_) => Cardinality::Optional,
            SerializableColumnIndex::Optional { .. } => Cardinality::Optional,
            SerializableColumnIndex::Multivalued(_) => Cardinality::Multivalued,
        }
    }
@@ -53,12 +40,12 @@ pub fn serialize_column_index(
    output.write_all(&[cardinality])?;
    match column_index {
        SerializableColumnIndex::Full => {}
        SerializableColumnIndex::Optional(SerializableOptionalIndex {
        SerializableColumnIndex::Optional {
            non_null_row_ids,
            num_rows,
        }) => serialize_optional_index(non_null_row_ids.as_ref(), num_rows, &mut output)?,
        } => serialize_optional_index(non_null_row_ids.as_ref(), num_rows, &mut output)?,
        SerializableColumnIndex::Multivalued(multivalued_index) => {
            serialize_multivalued_index(&multivalued_index, &mut output)?
            serialize_multivalued_index(&*multivalued_index, &mut output)?
        }
    }
    let column_index_num_bytes = output.written_bytes() as u32;

@@ -8,7 +8,7 @@ const MAGIC_BYTES: [u8; 4] = [2, 113, 119, 66];

pub fn footer() -> [u8; VERSION_FOOTER_NUM_BYTES] {
    let mut footer_bytes = [0u8; VERSION_FOOTER_NUM_BYTES];
    footer_bytes[0..4].copy_from_slice(&Version::V2.to_bytes());
    footer_bytes[0..4].copy_from_slice(&Version::V1.to_bytes());
    footer_bytes[4..8].copy_from_slice(&MAGIC_BYTES[..]);
    footer_bytes
}
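
Concretely, with VERSION_FOOTER_NUM_BYTES = 8 and assuming to_bytes is the little-endian encoding that try_from_bytes below reverses, the footer for Version::V1 comes out as [1, 0, 0, 0, 2, 113, 119, 66]: the version code followed by MAGIC_BYTES.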

@@ -24,7 +24,6 @@ pub fn parse_footer(footer_bytes: [u8; VERSION_FOOTER_NUM_BYTES]) -> Result<Vers
#[repr(u32)]
pub enum Version {
    V1 = 1u32,
    V2 = 2u32,
}

impl Version {
@@ -35,7 +34,7 @@ impl Version {
    fn try_from_bytes(bytes: [u8; 4]) -> Result<Version, InvalidData> {
        let code = u32::from_le_bytes(bytes);
        match code {
            2u32 => Ok(Version::V2),
            1u32 => Ok(Version::V1),
            _ => Err(InvalidData),
        }
    }
@@ -50,7 +49,7 @@ mod tests {
    #[test]
    fn test_footer_deserialization() {
        let parsed_version: Version = parse_footer(footer()).unwrap();
        assert_eq!(Version::V2, parsed_version);
        assert_eq!(Version::V1, parsed_version);
    }

    #[test]
@@ -64,7 +63,7 @@ mod tests {
        for &i in &version_to_tests {
            let version_res = Version::try_from_bytes(i.to_le_bytes());
            if let Ok(version) = version_res {
                assert_eq!(version, Version::V2);
                assert_eq!(version, Version::V1);
                assert_eq!(version.to_bytes(), i.to_le_bytes());
                valid_versions.insert(i);
            }

@@ -12,7 +12,7 @@ use common::CountingWriter;
pub(crate) use serializer::ColumnarSerializer;
use stacker::{Addr, ArenaHashMap, MemoryArena};

use crate::column_index::{SerializableColumnIndex, SerializableOptionalIndex};
use crate::column_index::SerializableColumnIndex;
use crate::column_values::{MonotonicallyMappableToU128, MonotonicallyMappableToU64};
use crate::columnar::column_type::ColumnType;
use crate::columnar::writer::column_writers::{
@@ -20,7 +20,6 @@ use crate::columnar::writer::column_writers::{
};
use crate::columnar::writer::value_index::{IndexBuilder, PreallocatedIndexBuilders};
use crate::dictionary::{DictionaryBuilder, TermIdMapping, UnorderedId};
use crate::iterable::Iterable;
use crate::value::{Coerce, NumericalType, NumericalValue};
use crate::{Cardinality, RowId};

@@ -636,16 +635,16 @@ fn send_to_serialize_column_mappable_to_u128<
            let optional_index_builder = value_index_builders.borrow_optional_index_builder();
            consume_operation_iterator(op_iterator, optional_index_builder, values);
            let optional_index = optional_index_builder.finish(num_rows);
            SerializableColumnIndex::Optional(SerializableOptionalIndex {
            SerializableColumnIndex::Optional {
                num_rows,
                non_null_row_ids: Box::new(optional_index),
            })
            }
        }
        Cardinality::Multivalued => {
            let multivalued_index_builder = value_index_builders.borrow_multivalued_index_builder();
            consume_operation_iterator(op_iterator, multivalued_index_builder, values);
            let serializable_multivalued_index = multivalued_index_builder.finish(num_rows);
            SerializableColumnIndex::Multivalued(serializable_multivalued_index)
            let multivalued_index = multivalued_index_builder.finish(num_rows);
            SerializableColumnIndex::Multivalued(Box::new(multivalued_index))
        }
    };
    crate::column::serialize_column_mappable_to_u128(
@@ -688,21 +687,19 @@ fn send_to_serialize_column_mappable_to_u64(
            let optional_index_builder = value_index_builders.borrow_optional_index_builder();
            consume_operation_iterator(op_iterator, optional_index_builder, values);
            let optional_index = optional_index_builder.finish(num_rows);
            SerializableColumnIndex::Optional(SerializableOptionalIndex {
            SerializableColumnIndex::Optional {
                non_null_row_ids: Box::new(optional_index),
                num_rows,
            })
            }
        }
        Cardinality::Multivalued => {
            let multivalued_index_builder = value_index_builders.borrow_multivalued_index_builder();
            consume_operation_iterator(op_iterator, multivalued_index_builder, values);
            let multivalued_index = multivalued_index_builder.finish(num_rows);
            if sort_values_within_row {
                // not supported in this hack
                todo!()
                // sort_values_within_row_in_place(multivalued_index, values);
                sort_values_within_row_in_place(multivalued_index, values);
            }
            let serializable_multivalued_index = multivalued_index_builder.finish(num_rows);
            SerializableColumnIndex::Multivalued(serializable_multivalued_index)
            SerializableColumnIndex::Multivalued(Box::new(multivalued_index))
        }
    };
    crate::column::serialize_column_mappable_to_u64(

@@ -1,4 +1,3 @@
use crate::column_index::{SerializableMultivalueIndex, SerializableOptionalIndex};
use crate::iterable::Iterable;
use crate::RowId;

@@ -60,50 +59,32 @@ impl IndexBuilder for OptionalIndexBuilder {

#[derive(Default)]
pub struct MultivaluedIndexBuilder {
    doc_with_values: Vec<RowId>,
    start_offsets: Vec<u64>,
    total_num_vals_seen: u64,
    current_row: RowId,
    current_row_has_value: bool,
    start_offsets: Vec<RowId>,
    total_num_vals_seen: u32,
}

impl MultivaluedIndexBuilder {
    pub fn finish(&mut self, num_docs: RowId) -> SerializableMultivalueIndex<'_> {
        self.start_offsets.push(self.total_num_vals_seen as u64);
        let non_null_row_ids: Box<dyn Iterable<RowId>> = Box::new(&self.doc_with_values[..]);
        SerializableMultivalueIndex {
            doc_ids_with_values: SerializableOptionalIndex {
                non_null_row_ids,
                num_rows: num_docs,
            },
            start_offsets: Box::new(&self.start_offsets[..]),
        }
    pub fn finish(&mut self, num_docs: RowId) -> &[u32] {
        self.start_offsets
            .resize(num_docs as usize + 1, self.total_num_vals_seen);
        &self.start_offsets[..]
    }

    fn reset(&mut self) {
        self.doc_with_values.clear();
        self.start_offsets.clear();
        self.start_offsets.push(0u32);
        self.total_num_vals_seen = 0;
        self.current_row = 0;
        self.current_row_has_value = false;
    }
}

impl IndexBuilder for MultivaluedIndexBuilder {
    fn record_row(&mut self, row_id: RowId) {
        self.current_row = row_id;
        self.current_row_has_value = false;
        // self.start_offsets
        //     .resize(row_id as usize + 1, self.total_num_vals_seen);
        self.start_offsets
            .resize(row_id as usize + 1, self.total_num_vals_seen);
    }

    fn record_value(&mut self) {
        if !self.current_row_has_value {
            self.current_row_has_value = true;
            self.doc_with_values.push(self.current_row);
            self.start_offsets.push(self.total_num_vals_seen as u64);
        }
        self.total_num_vals_seen += 1u64;
        self.total_num_vals_seen += 1;
    }
}

@@ -160,32 +141,6 @@ mod tests {
        );
    }

    #[test]
    fn test_multivalued_value_index_builder_simple() {
        let mut multivalued_value_index_builder = MultivaluedIndexBuilder::default();
        {
            multivalued_value_index_builder.record_row(0u32);
            multivalued_value_index_builder.record_value();
            multivalued_value_index_builder.record_value();
            let serialized_multivalue_index = multivalued_value_index_builder.finish(1u32);
            let start_offsets: Vec<u64> = serialized_multivalue_index
                .start_offsets
                .boxed_iter()
                .collect();
            assert_eq!(&start_offsets, &[0, 2]);
        }
        multivalued_value_index_builder.reset();
        multivalued_value_index_builder.record_row(0u32);
        multivalued_value_index_builder.record_value();
        multivalued_value_index_builder.record_value();
        let serialized_multivalue_index = multivalued_value_index_builder.finish(1u32);
        let start_offsets: Vec<u64> = serialized_multivalue_index
            .start_offsets
            .boxed_iter()
            .collect();
        assert_eq!(&start_offsets, &[0, 2]);
    }

    #[test]
    fn test_multivalued_value_index_builder() {
        let mut multivalued_value_index_builder = MultivaluedIndexBuilder::default();
@@ -194,30 +149,17 @@ mod tests {
        multivalued_value_index_builder.record_value();
        multivalued_value_index_builder.record_row(2u32);
        multivalued_value_index_builder.record_value();
        let SerializableMultivalueIndex {
            doc_ids_with_values,
            start_offsets,
        } = multivalued_value_index_builder.finish(4u32);
        assert_eq!(doc_ids_with_values.num_rows, 4u32);
        let doc_ids_with_values: Vec<u32> =
            doc_ids_with_values.non_null_row_ids.boxed_iter().collect();
        assert_eq!(&doc_ids_with_values, &[1u32, 2u32]);
        let start_offsets: Vec<u64> = start_offsets.boxed_iter().collect::<Vec<u64>>();
        assert_eq!(&start_offsets[..], &[0, 2, 3]);
        // assert!(doc_ids_with_values_opt.is_some());

        // assert_eq!(
        //     multivalued_value_index_builder.finish(4u32).to_vec(),
        //     vec![0, 0, 2, 3, 3]
        // );
        // multivalued_value_index_builder.reset();
        // multivalued_value_index_builder.record_row(2u32);
        // multivalued_value_index_builder.record_value();
        // multivalued_value_index_builder.record_value();
        // assert_eq!(
        //     multivalued_value_index_builder.finish(4u32).to_vec(),
        //     vec![0, 0, 0, 2, 2]
        // );
        assert_eq!(
            multivalued_value_index_builder.finish(4u32).to_vec(),
            vec![0, 0, 2, 3, 3]
        );
        multivalued_value_index_builder.reset();
        multivalued_value_index_builder.record_row(2u32);
        multivalued_value_index_builder.record_value();
        multivalued_value_index_builder.record_value();
        assert_eq!(
            multivalued_value_index_builder.finish(4u32).to_vec(),
            vec![0, 0, 0, 2, 2]
        );
    }
}
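
The asserted [0, 0, 2, 3, 3] is consistent with the resize-based finish above, given two values recorded at row 1 (in the context elided by the hunk) and one at row 2: record_row(1) resizes start_offsets to [0, 0], two record_value calls raise total_num_vals_seen to 2, record_row(2) resizes to [0, 0, 2], the third value brings the total to 3, and finish(4) pads the vector to num_docs + 1 entries.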

@@ -1,7 +1,4 @@
use std::ops::Range;
use std::sync::Arc;

use crate::{ColumnValues, RowId};

pub trait Iterable<T = u64> {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = T> + '_>;
@@ -20,9 +17,3 @@ where Range<T>: Iterator<Item = T>
        Box::new(self.clone())
    }
}

impl Iterable for Arc<dyn crate::ColumnValues<RowId>> {
    fn boxed_iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
        Box::new(self.iter().map(|row_id| row_id as u64))
    }
}

@@ -11,7 +11,7 @@ use crate::columnar::{ColumnType, ColumnTypeCategory};
use crate::dynamic_column::{DynamicColumn, DynamicColumnHandle};
use crate::value::{Coerce, NumericalValue};
use crate::{
    BytesColumn, Cardinality, Column, ColumnIndex, ColumnarReader, ColumnarWriter, RowAddr, RowId,
    BytesColumn, Cardinality, Column, ColumnarReader, ColumnarWriter, RowAddr, RowId,
    ShuffleMergeOrder, StackMergeOrder,
};

@@ -79,7 +79,7 @@ fn test_dataframe_writer_u64_multivalued() {
    assert_eq!(columnar.num_columns(), 1);
    let cols: Vec<DynamicColumnHandle> = columnar.read_columns("divisor").unwrap();
    assert_eq!(cols.len(), 1);
    assert_eq!(cols[0].num_bytes(), 50);
    assert_eq!(cols[0].num_bytes(), 29);
    let dyn_i64_col = cols[0].open().unwrap();
    let DynamicColumn::I64(divisor_col) = dyn_i64_col else {
        panic!();
@@ -448,7 +448,6 @@ fn assert_columnar_eq(
    }
}

#[track_caller]
fn assert_column_eq<T: Copy + PartialOrd + Debug + Send + Sync + 'static>(
    left: &Column<T>,
    right: &Column<T>,
@@ -842,27 +841,26 @@ fn columnar_docs_and_remap(
    )
}

// proptest! {
//     #![proptest_config(ProptestConfig::with_cases(1000))]
//     #[test]
//     fn test_columnar_merge_and_remap_proptest((columnar_docs, shuffle_merge_order) in
//     columnar_docs_and_remap()) { let shuffled_rows: Vec<Vec<(&'static str, ColumnValue)>> =
//     shuffle_merge_order.iter() .map(|row_addr| columnar_docs[row_addr.segment_ord as
//     usize][row_addr.row_id as usize].clone()) .collect();
//     let expected_merged_columnar = build_columnar(&shuffled_rows[..]);
//     let columnar_readers: Vec<ColumnarReader> = columnar_docs.iter()
//         .map(|docs| build_columnar(&docs[..]))
//         .collect::<Vec<_>>();
//     let columnar_readers_arr: Vec<&ColumnarReader> = columnar_readers.iter().collect();
//     let mut output: Vec<u8> = Vec::new();
//     let segment_num_rows: Vec<RowId> = columnar_docs.iter().map(|docs| docs.len() as
//     RowId).collect(); let shuffle_merge_order =
//     ShuffleMergeOrder::for_test(&segment_num_rows, shuffle_merge_order);
//     crate::merge_columnar(&columnar_readers_arr[..], &[], shuffle_merge_order.into(), &mut
//     output).unwrap(); let merged_columnar = ColumnarReader::open(output).unwrap();
//     assert_columnar_eq(&merged_columnar, &expected_merged_columnar, true);
//     }
// }
proptest! {
    #![proptest_config(ProptestConfig::with_cases(1000))]
    #[test]
    fn test_columnar_merge_and_remap_proptest((columnar_docs, shuffle_merge_order) in columnar_docs_and_remap()) {
        let shuffled_rows: Vec<Vec<(&'static str, ColumnValue)>> = shuffle_merge_order.iter()
            .map(|row_addr| columnar_docs[row_addr.segment_ord as usize][row_addr.row_id as usize].clone())
            .collect();
        let expected_merged_columnar = build_columnar(&shuffled_rows[..]);
        let columnar_readers: Vec<ColumnarReader> = columnar_docs.iter()
            .map(|docs| build_columnar(&docs[..]))
            .collect::<Vec<_>>();
        let columnar_readers_arr: Vec<&ColumnarReader> = columnar_readers.iter().collect();
        let mut output: Vec<u8> = Vec::new();
        let segment_num_rows: Vec<RowId> = columnar_docs.iter().map(|docs| docs.len() as RowId).collect();
        let shuffle_merge_order = ShuffleMergeOrder::for_test(&segment_num_rows, shuffle_merge_order);
        crate::merge_columnar(&columnar_readers_arr[..], &[], shuffle_merge_order.into(), &mut output).unwrap();
        let merged_columnar = ColumnarReader::open(output).unwrap();
        assert_columnar_eq(&merged_columnar, &expected_merged_columnar, true);
    }
}

#[test]
fn test_columnar_merge_empty() {
@@ -884,64 +882,64 @@ fn test_columnar_merge_empty() {
    assert_eq!(merged_columnar.num_columns(), 0);
}

// #[test]
// fn test_columnar_merge_single_str_column() {
//     let columnar_reader_1 = build_columnar(&[]);
//     let rows: &[Vec<_>] = &[vec![("c1", ColumnValue::Str("a"))]][..];
//     let columnar_reader_2 = build_columnar(rows);
//     let mut output: Vec<u8> = Vec::new();
//     let segment_num_rows: Vec<RowId> = vec![0, 1];
//     let shuffle_merge_order = ShuffleMergeOrder::for_test(
//         &segment_num_rows,
//         vec![RowAddr {
//             segment_ord: 1u32,
//             row_id: 0u32,
//         }],
//     );
//     crate::merge_columnar(
//         &[&columnar_reader_1, &columnar_reader_2],
//         &[],
//         shuffle_merge_order.into(),
//         &mut output,
//     )
//     .unwrap();
//     let merged_columnar = ColumnarReader::open(output).unwrap();
//     assert_eq!(merged_columnar.num_rows(), 1);
//     assert_eq!(merged_columnar.num_columns(), 1);
// }
#[test]
fn test_columnar_merge_single_str_column() {
    let columnar_reader_1 = build_columnar(&[]);
    let rows: &[Vec<_>] = &[vec![("c1", ColumnValue::Str("a"))]][..];
    let columnar_reader_2 = build_columnar(rows);
    let mut output: Vec<u8> = Vec::new();
    let segment_num_rows: Vec<RowId> = vec![0, 1];
    let shuffle_merge_order = ShuffleMergeOrder::for_test(
        &segment_num_rows,
        vec![RowAddr {
            segment_ord: 1u32,
            row_id: 0u32,
        }],
    );
    crate::merge_columnar(
        &[&columnar_reader_1, &columnar_reader_2],
        &[],
        shuffle_merge_order.into(),
        &mut output,
    )
    .unwrap();
    let merged_columnar = ColumnarReader::open(output).unwrap();
    assert_eq!(merged_columnar.num_rows(), 1);
    assert_eq!(merged_columnar.num_columns(), 1);
}

// #[test]
// fn test_delete_decrease_cardinality() {
//     let columnar_reader_1 = build_columnar(&[]);
//     let rows: &[Vec<_>] = &[
//         vec![
//             ("c", ColumnValue::from(0i64)),
//             ("c", ColumnValue::from(0i64)),
//         ],
//         vec![("c", ColumnValue::from(0i64))],
//     ][..];
//     // c is multivalued here
//     let columnar_reader_2 = build_columnar(rows);
//     let mut output: Vec<u8> = Vec::new();
//     let shuffle_merge_order = ShuffleMergeOrder::for_test(
//         &[0, 2],
//         vec![RowAddr {
//             segment_ord: 1u32,
//             row_id: 1u32,
//         }],
//     );
//     crate::merge_columnar(
//         &[&columnar_reader_1, &columnar_reader_2],
//         &[],
//         shuffle_merge_order.into(),
//         &mut output,
//     )
//     .unwrap();
//     let merged_columnar = ColumnarReader::open(output).unwrap();
//     assert_eq!(merged_columnar.num_rows(), 1);
//     assert_eq!(merged_columnar.num_columns(), 1);
//     let cols = merged_columnar.read_columns("c").unwrap();
//     assert_eq!(cols.len(), 1);
//     assert_eq!(cols[0].column_type(), ColumnType::I64);
//     assert_eq!(cols[0].open().unwrap().get_cardinality(), Cardinality::Full);
// }
#[test]
fn test_delete_decrease_cardinality() {
    let columnar_reader_1 = build_columnar(&[]);
    let rows: &[Vec<_>] = &[
        vec![
            ("c", ColumnValue::from(0i64)),
            ("c", ColumnValue::from(0i64)),
        ],
        vec![("c", ColumnValue::from(0i64))],
    ][..];
    // c is multivalued here
    let columnar_reader_2 = build_columnar(rows);
    let mut output: Vec<u8> = Vec::new();
    let shuffle_merge_order = ShuffleMergeOrder::for_test(
        &[0, 2],
        vec![RowAddr {
            segment_ord: 1u32,
            row_id: 1u32,
        }],
    );
    crate::merge_columnar(
        &[&columnar_reader_1, &columnar_reader_2],
        &[],
        shuffle_merge_order.into(),
        &mut output,
    )
    .unwrap();
    let merged_columnar = ColumnarReader::open(output).unwrap();
    assert_eq!(merged_columnar.num_rows(), 1);
    assert_eq!(merged_columnar.num_columns(), 1);
    let cols = merged_columnar.read_columns("c").unwrap();
    assert_eq!(cols.len(), 1);
    assert_eq!(cols[0].column_type(), ColumnType::I64);
    assert_eq!(cols[0].open().unwrap().get_cardinality(), Cardinality::Full);
}

@@ -19,13 +19,14 @@ use tantivy::{doc, Index, IndexWriter, ReloadPolicy};
use tempfile::TempDir;

fn main() -> tantivy::Result<()> {
    // Let's create a temporary directory for the
    // sake of this example
    // Normally you would use `MmapDirectory` instead to persist data on disk.
    // https://docs.rs/tantivy/latest/tantivy/directory/struct.MmapDirectory.html
    // But for this example, we will use a temporary directory `TempDir`.
    let index_path = TempDir::new()?;

    // # Defining the schema
    //
    // The Tantivy index requires a very strict schema.
    // The Tantivy index requires a schema.
    // The schema declares which fields are in the index,
    // and for each field, its type and "the way it should
    // be indexed".

@@ -1,4 +1,3 @@
use std::borrow::Cow;
use std::iter::once;

use nom::branch::alt;
@@ -20,7 +19,7 @@ use crate::Occur;
// Note: the '-' char is only forbidden at the beginning of a field name; it would be clearer to
// add it to the special characters.
const SPECIAL_CHARS: &[char] = &[
    '+', '^', '`', ':', '{', '}', '"', '\'', '[', ']', '(', ')', '!', '\\', '*', ' ',
    '+', '^', '`', ':', '{', '}', '"', '[', ']', '(', ')', '!', '\\', '*', ' ',
];

/// consume a field name followed by colon. Return the field name with escape sequence
@@ -42,92 +41,36 @@ fn field_name(inp: &str) -> IResult<&str, String> {
    )(inp)
}

const ESCAPE_IN_WORD: &[char] = &['^', '`', ':', '{', '}', '"', '\'', '[', ']', '(', ')', '\\'];

fn interpret_escape(source: &str) -> String {
    let mut res = String::with_capacity(source.len());
    let mut in_escape = false;
    let require_escape = |c: char| c.is_whitespace() || ESCAPE_IN_WORD.contains(&c) || c == '-';

    for c in source.chars() {
        if in_escape {
            if !require_escape(c) {
                // we re-add the escape sequence
                res.push('\\');
            }
            res.push(c);
            in_escape = false;
        } else if c == '\\' {
            in_escape = true;
        } else {
            res.push(c);
        }
    }
    res
}
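
Worked example: interpret_escape on the input a\:b\-c\dd returns a:b-c\dd. The backslashes before ':' and '-' are consumed because those characters require escaping, while the one before 'd' is re-emitted since 'd' needs no escape.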
/// Consume a word outside of any context.
|
||||
// TODO should support escape sequences
|
||||
fn word(inp: &str) -> IResult<&str, Cow<str>> {
|
||||
fn word(inp: &str) -> IResult<&str, &str> {
|
||||
map_res(
|
||||
recognize(tuple((
|
||||
alt((
|
||||
preceded(char('\\'), anychar),
|
||||
satisfy(|c| !c.is_whitespace() && !ESCAPE_IN_WORD.contains(&c) && c != '-'),
|
||||
)),
|
||||
many0(alt((
|
||||
preceded(char('\\'), anychar),
|
||||
satisfy(|c: char| !c.is_whitespace() && !ESCAPE_IN_WORD.contains(&c)),
|
||||
))),
|
||||
satisfy(|c| {
|
||||
!c.is_whitespace()
|
||||
&& !['-', '^', '`', ':', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
|
||||
}),
|
||||
many0(satisfy(|c: char| {
|
||||
!c.is_whitespace() && ![':', '^', '{', '}', '"', '[', ']', '(', ')'].contains(&c)
|
||||
})),
|
||||
))),
|
||||
|s| match s {
|
||||
"OR" | "AND" | "NOT" | "IN" => Err(Error::new(inp, ErrorKind::Tag)),
|
||||
s if s.contains('\\') => Ok(Cow::Owned(interpret_escape(s))),
|
||||
s => Ok(Cow::Borrowed(s)),
|
||||
_ => Ok(s),
|
||||
},
|
||||
)(inp)
|
||||
}
|
||||
|
||||
fn word_infallible(
|
||||
delimiter: &str,
|
||||
emit_error: bool,
|
||||
) -> impl Fn(&str) -> JResult<&str, Option<Cow<str>>> + '_ {
|
||||
// emit error is set when receiving an unescaped `:` should emit an error
|
||||
|
||||
move |inp| {
|
||||
map(
|
||||
opt_i_err(
|
||||
preceded(
|
||||
multispace0,
|
||||
recognize(many1(alt((
|
||||
preceded(char::<&str, _>('\\'), anychar),
|
||||
satisfy(|c| !c.is_whitespace() && !delimiter.contains(c)),
|
||||
)))),
|
||||
),
|
||||
"expected word",
|
||||
fn word_infallible(delimiter: &str) -> impl Fn(&str) -> JResult<&str, Option<&str>> + '_ {
|
||||
|inp| {
|
||||
opt_i_err(
|
||||
preceded(
|
||||
multispace0,
|
||||
recognize(many1(satisfy(|c| {
|
||||
!c.is_whitespace() && !delimiter.contains(c)
|
||||
}))),
|
||||
),
|
||||
|(opt_s, mut errors)| match opt_s {
|
||||
Some(s) => {
|
||||
if emit_error
|
||||
&& (s
|
||||
.as_bytes()
|
||||
.windows(2)
|
||||
.any(|window| window[0] != b'\\' && window[1] == b':')
|
||||
|| s.starts_with(':'))
|
||||
{
|
||||
errors.push(LenientErrorInternal {
|
||||
pos: inp.len(),
|
||||
message: "parsed possible invalid field as term".to_string(),
|
||||
});
|
||||
}
|
||||
if s.contains('\\') {
|
||||
(Some(Cow::Owned(interpret_escape(s))), errors)
|
||||
} else {
|
||||
(Some(Cow::Borrowed(s)), errors)
|
||||
}
|
||||
}
|
||||
None => (None, errors),
|
||||
},
|
||||
"expected word",
|
||||
)(inp)
|
||||
}
|
||||
}
|
||||
@@ -216,7 +159,7 @@ fn simple_term_infallible(
|
||||
(value((), char('\'')), simple_quotes),
|
||||
),
|
||||
// numbers are parsed with words in this case, as we allow string starting with a -
|
||||
map(word_infallible(delimiter, true), |(text, errors)| {
|
||||
map(word_infallible(delimiter), |(text, errors)| {
|
||||
(text.map(|text| (Delimiter::None, text.to_string())), errors)
|
||||
}),
|
||||
)(inp)
|
||||
@@ -379,6 +322,15 @@ fn literal_no_group_infallible(inp: &str) -> JResult<&str, Option<UserInputAst>>
        |((field_name, _, leaf), mut errors)| {
            (
                leaf.map(|leaf| {
                    if matches!(&leaf, UserInputLeaf::Literal(literal)
                        if literal.phrase.contains(':') && literal.delimiter == Delimiter::None)
                        && field_name.is_none()
                    {
                        errors.push(LenientErrorInternal {
                            pos: inp.len(),
                            message: "parsed possible invalid field as term".to_string(),
                        });
                    }
                    if matches!(&leaf, UserInputLeaf::Literal(literal)
                        if literal.phrase == "NOT" && literal.delimiter == Delimiter::None)
                        && field_name.is_none()
@@ -497,20 +449,20 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
    tuple_infallible((
        opt_i(anychar),
        space0_infallible,
        word_infallible("]}", false),
        word_infallible("]}"),
        space1_infallible,
        opt_i_err(
            terminated(tag("TO"), alt((value((), multispace1), value((), eof)))),
            "missing keyword TO",
        ),
        word_infallible("]}", false),
        word_infallible("]}"),
        opt_i_err(one_of("]}"), "missing range delimiter"),
    )),
    |(
        (lower_bound_kind, _multispace0, lower, _multispace1, to, upper, upper_bound_kind),
        errs,
    )| {
        let lower_bound = match (lower_bound_kind, lower.as_deref()) {
        let lower_bound = match (lower_bound_kind, lower) {
            (_, Some("*")) => UserInputBound::Unbounded,
            (_, None) => UserInputBound::Unbounded,
            // if it is some, TO was actually the bound (i.e. [TO TO something])
@@ -519,7 +471,7 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
            (Some('{'), Some(bound)) => UserInputBound::Exclusive(bound.to_string()),
            _ => unreachable!("precondition failed, range did not start with [ or {{"),
        };
        let upper_bound = match (upper_bound_kind, upper.as_deref()) {
        let upper_bound = match (upper_bound_kind, upper) {
            (_, Some("*")) => UserInputBound::Unbounded,
            (_, None) => UserInputBound::Unbounded,
            (Some(']'), Some(bound)) => UserInputBound::Inclusive(bound.to_string()),
@@ -536,7 +488,7 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
        (
            (
                value((), tag(">=")),
                map(word_infallible("", false), |(bound, err)| {
                map(word_infallible(""), |(bound, err)| {
                    (
                        (
                            bound
@@ -550,7 +502,7 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
            ),
            (
                value((), tag("<=")),
                map(word_infallible("", false), |(bound, err)| {
                map(word_infallible(""), |(bound, err)| {
                    (
                        (
                            UserInputBound::Unbounded,
@@ -564,7 +516,7 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
            ),
            (
                value((), tag(">")),
                map(word_infallible("", false), |(bound, err)| {
                map(word_infallible(""), |(bound, err)| {
                    (
                        (
                            bound
@@ -578,7 +530,7 @@ fn range_infallible(inp: &str) -> JResult<&str, UserInputLeaf> {
            ),
            (
                value((), tag("<")),
                map(word_infallible("", false), |(bound, err)| {
                map(word_infallible(""), |(bound, err)| {
                    (
                        (
                            UserInputBound::Unbounded,
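Both `word_infallible` signatures feed the same bound-mapping logic: `[` / `]` mark inclusive bounds, `{` / `}` exclusive ones, and `*` (or a missing word) means unbounded. A condensed sketch of the lower-bound arm, with a simplified stand-in for `UserInputBound`:

```rust
enum Bound {
    Inclusive(String),
    Exclusive(String),
    Unbounded,
}

fn lower_bound(kind: Option<char>, word: Option<&str>) -> Bound {
    match (kind, word) {
        (_, Some("*")) | (_, None) => Bound::Unbounded,
        (Some('['), Some(w)) => Bound::Inclusive(w.to_string()),
        (Some('{'), Some(w)) => Bound::Exclusive(w.to_string()),
        // the real parser treats anything else as a precondition failure
        _ => Bound::Unbounded,
    }
}
```

So a query like `[3 TO 7}` maps to an inclusive lower bound of `3` and an exclusive upper bound of `7`, which is exactly what the tests in the next hunk assert.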
@@ -1205,12 +1157,6 @@ mod test {
        test_parse_query_to_ast_helper("weight: <= 70", "\"weight\":{\"*\" TO \"70\"]");

        test_parse_query_to_ast_helper("weight: <= 70.5", "\"weight\":{\"*\" TO \"70.5\"]");

        test_parse_query_to_ast_helper(">a", "{\"a\" TO \"*\"}");
        test_parse_query_to_ast_helper(">=a", "[\"a\" TO \"*\"}");
        test_parse_query_to_ast_helper("<a", "{\"*\" TO \"a\"}");
        test_parse_query_to_ast_helper("<=a", "{\"*\" TO \"a\"]");
        test_parse_query_to_ast_helper("<=bsd", "{\"*\" TO \"bsd\"]");
    }

    #[test]
@@ -1644,21 +1590,5 @@ mod test {
            r#"myfield:'hello\"happy\'tax'"#,
            r#""myfield":'hello"happy'tax'"#,
        );
        // we don't process escape sequences for chars which don't require them
        test_parse_query_to_ast_helper(r#"abc\*"#, r#"abc\*"#);
    }

    #[test]
    fn test_queries_with_colons() {
        test_parse_query_to_ast_helper(r#""abc:def""#, r#""abc:def""#);
        test_parse_query_to_ast_helper(r#"'abc:def'"#, r#"'abc:def'"#);
        test_parse_query_to_ast_helper(r#"abc\:def"#, r#"abc:def"#);
        test_parse_query_to_ast_helper(r#""abc\:def""#, r#""abc:def""#);
        test_parse_query_to_ast_helper(r#"'abc\:def'"#, r#"'abc:def'"#);
    }

    #[test]
    fn test_invalid_field() {
        test_is_parse_err(r#"!bc:def"#, "!bc:def");
    }
}
@@ -34,7 +34,7 @@ use super::bucket::{
    DateHistogramAggregationReq, HistogramAggregation, RangeAggregation, TermsAggregation,
};
use super::metric::{
    AverageAggregation, CountAggregation, ExtendedStatsAggregation, MaxAggregation, MinAggregation,
    AverageAggregation, CountAggregation, MaxAggregation, MinAggregation,
    PercentilesAggregationReq, StatsAggregation, SumAggregation, TopHitsAggregation,
};

@@ -146,11 +146,6 @@ pub enum AggregationVariants {
    /// extracted values.
    #[serde(rename = "stats")]
    Stats(StatsAggregation),
    /// Computes a collection of extended statistics (`min`, `max`, `sum`, `count`, `avg`,
    /// `sum_of_squares`, `variance`, `variance_sampling`, `std_deviation`,
    /// `std_deviation_sampling`) over the extracted values.
    #[serde(rename = "extended_stats")]
    ExtendedStats(ExtendedStatsAggregation),
    /// Computes the sum of the extracted values.
    #[serde(rename = "sum")]
    Sum(SumAggregation),
@@ -175,7 +170,6 @@ impl AggregationVariants {
            AggregationVariants::Max(max) => vec![max.field_name()],
            AggregationVariants::Min(min) => vec![min.field_name()],
            AggregationVariants::Stats(stats) => vec![stats.field_name()],
            AggregationVariants::ExtendedStats(extended_stats) => vec![extended_stats.field_name()],
            AggregationVariants::Sum(sum) => vec![sum.field_name()],
            AggregationVariants::Percentiles(per) => vec![per.field_name()],
            AggregationVariants::TopHits(top_hits) => top_hits.field_names(),
@@ -203,12 +197,6 @@ impl AggregationVariants {
            _ => None,
        }
    }
    pub(crate) fn as_top_hits(&self) -> Option<&TopHitsAggregation> {
        match &self {
            AggregationVariants::TopHits(top_hits) => Some(top_hits),
            _ => None,
        }
    }

    pub(crate) fn as_percentile(&self) -> Option<&PercentilesAggregationReq> {
        match &self {
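For reference, a request hitting the `ExtendedStats` variant removed above is keyed by the serde rename visible in the enum (`"extended_stats"`). A hedged sketch of such a request; the aggregation name and field name here are made up:

```rust
use serde_json::json;

fn main() {
    // Illustrative request against the variant renamed to "extended_stats"
    // above; "my_stats" and "score" are invented names.
    let agg_req = json!({
        "my_stats": { "extended_stats": { "field": "score" } }
    });
    println!("{agg_req}");
}
```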

@@ -11,8 +11,8 @@ use super::bucket::{
    DateHistogramAggregationReq, HistogramAggregation, RangeAggregation, TermsAggregation,
};
use super::metric::{
    AverageAggregation, CountAggregation, ExtendedStatsAggregation, MaxAggregation, MinAggregation,
    StatsAggregation, SumAggregation,
    AverageAggregation, CountAggregation, MaxAggregation, MinAggregation, StatsAggregation,
    SumAggregation,
};
use super::segment_agg_result::AggregationLimits;
use super::VecWithNames;
@@ -276,10 +276,6 @@ impl AggregationWithAccessor {
            field: ref field_name,
            ..
        })
        | ExtendedStats(ExtendedStatsAggregation {
            field: ref field_name,
            ..
        })
        | Sum(SumAggregation {
            field: ref field_name,
            ..
@@ -8,9 +8,7 @@ use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};

use super::bucket::GetDocCount;
use super::metric::{
    ExtendedStats, PercentilesMetricResult, SingleMetricResult, Stats, TopHitsMetricResult,
};
use super::metric::{PercentilesMetricResult, SingleMetricResult, Stats, TopHitsMetricResult};
use super::{AggregationError, Key};
use crate::TantivyError;

@@ -90,8 +88,6 @@ pub enum MetricResult {
    Min(SingleMetricResult),
    /// Stats metric result.
    Stats(Stats),
    /// ExtendedStats metric result.
    ExtendedStats(Box<ExtendedStats>),
    /// Sum metric result.
    Sum(SingleMetricResult),
    /// Percentiles metric result.
@@ -108,7 +104,6 @@ impl MetricResult {
            MetricResult::Max(max) => Ok(max.value),
            MetricResult::Min(min) => Ok(min.value),
            MetricResult::Stats(stats) => stats.get_value(agg_property),
            MetricResult::ExtendedStats(extended_stats) => extended_stats.get_value(agg_property),
            MetricResult::Sum(sum) => Ok(sum.value),
            MetricResult::Percentiles(_) => Err(TantivyError::AggregationError(
                AggregationError::InvalidRequest("percentiles can't be used to order".to_string()),
@@ -19,8 +19,8 @@ use super::bucket::{
    GetDocCount, Order, OrderTarget, RangeAggregation, TermsAggregation,
};
use super::metric::{
    IntermediateAverage, IntermediateCount, IntermediateExtendedStats, IntermediateMax,
    IntermediateMin, IntermediateStats, IntermediateSum, PercentilesCollector, TopHitsTopNComputer,
    IntermediateAverage, IntermediateCount, IntermediateMax, IntermediateMin, IntermediateStats,
    IntermediateSum, PercentilesCollector, TopHitsTopNComputer,
};
use super::segment_agg_result::AggregationLimits;
use super::{format_date, AggregationError, Key, SerializedKey};
@@ -215,9 +215,6 @@ pub(crate) fn empty_from_req(req: &Aggregation) -> IntermediateAggregationResult
        Stats(_) => IntermediateAggregationResult::Metric(IntermediateMetricResult::Stats(
            IntermediateStats::default(),
        )),
        ExtendedStats(_) => IntermediateAggregationResult::Metric(
            IntermediateMetricResult::ExtendedStats(IntermediateExtendedStats::default()),
        ),
        Sum(_) => IntermediateAggregationResult::Metric(IntermediateMetricResult::Sum(
            IntermediateSum::default(),
        )),
@@ -225,7 +222,7 @@ pub(crate) fn empty_from_req(req: &Aggregation) -> IntermediateAggregationResult
            IntermediateMetricResult::Percentiles(PercentilesCollector::default()),
        ),
        TopHits(ref req) => IntermediateAggregationResult::Metric(
            IntermediateMetricResult::TopHits(TopHitsTopNComputer::new(req)),
            IntermediateMetricResult::TopHits(TopHitsTopNComputer::new(req.clone())),
        ),
    }
}
@@ -285,8 +282,6 @@ pub enum IntermediateMetricResult {
    Min(IntermediateMin),
    /// Intermediate stats result.
    Stats(IntermediateStats),
    /// Intermediate extended stats result.
    ExtendedStats(IntermediateExtendedStats),
    /// Intermediate sum result.
    Sum(IntermediateSum),
    /// Intermediate top_hits result
@@ -311,9 +306,6 @@ impl IntermediateMetricResult {
            IntermediateMetricResult::Stats(intermediate_stats) => {
                MetricResult::Stats(intermediate_stats.finalize())
            }
            IntermediateMetricResult::ExtendedStats(intermediate_stats) => {
                MetricResult::ExtendedStats(intermediate_stats.finalize())
            }
            IntermediateMetricResult::Sum(intermediate_sum) => {
                MetricResult::Sum(intermediate_sum.finalize().into())
            }
@@ -354,12 +346,6 @@ impl IntermediateMetricResult {
            ) => {
                stats_left.merge_fruits(stats_right);
            }
            (
                IntermediateMetricResult::ExtendedStats(extended_stats_left),
                IntermediateMetricResult::ExtendedStats(extended_stats_right),
            ) => {
                extended_stats_left.merge_fruits(extended_stats_right);
            }
            (IntermediateMetricResult::Sum(sum_left), IntermediateMetricResult::Sum(sum_right)) => {
                sum_left.merge_fruits(sum_right);
            }
File diff suppressed because it is too large
@@ -18,7 +18,6 @@

mod average;
mod count;
mod extended_stats;
mod max;
mod min;
mod percentiles;
@@ -30,7 +29,6 @@ use std::collections::HashMap;

pub use average::*;
pub use count::*;
pub use extended_stats::*;
pub use max::*;
pub use min::*;
pub use percentiles::*;

@@ -1,5 +1,3 @@
use std::fmt::Debug;

use serde::{Deserialize, Serialize};

use super::*;
@@ -87,15 +85,13 @@ impl Stats {
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntermediateStats {
    /// The number of extracted values.
    pub(crate) count: u64,
    count: u64,
    /// The sum of the extracted values.
    pub(crate) sum: f64,
    /// delta for sum needed for [Kahan algorithm for summation](https://en.wikipedia.org/wiki/Kahan_summation_algorithm)
    pub(crate) delta: f64,
    sum: f64,
    /// The min value.
    pub(crate) min: f64,
    min: f64,
    /// The max value.
    pub(crate) max: f64,
    max: f64,
}

impl Default for IntermediateStats {
@@ -103,7 +99,6 @@ impl Default for IntermediateStats {
        Self {
            count: 0,
            sum: 0.0,
            delta: 0.0,
            min: f64::MAX,
            max: f64::MIN,
        }
@@ -114,13 +109,7 @@ impl IntermediateStats {
    /// Merges the other stats intermediate result into self.
    pub fn merge_fruits(&mut self, other: IntermediateStats) {
        self.count += other.count;

        // kahan algorithm for sum
        let y = other.sum - (self.delta + other.delta);
        let t = self.sum + y;
        self.delta = (t - self.sum) - y;
        self.sum = t;

        self.sum += other.sum;
        self.min = self.min.min(other.min);
        self.max = self.max.max(other.max);
    }
@@ -152,15 +141,9 @@ impl IntermediateStats {
    }

    #[inline]
    pub(in crate::aggregation::metric) fn collect(&mut self, value: f64) {
    fn collect(&mut self, value: f64) {
        self.count += 1;

        // kahan algorithm for sum
        let y = value - self.delta;
        let t = self.sum + y;
        self.delta = (t - self.sum) - y;
        self.sum = t;

        self.sum += value;
        self.min = self.min.min(value);
        self.max = self.max.max(value);
    }
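The removed lines in `merge_fruits` and `collect` implement Kahan-compensated summation: the `delta` field carries the running rounding error so that summing many floats (or merging many per-segment sums) loses far less precision than a naive `+=`. Isolated into a self-contained type for clarity:

```rust
/// Kahan summation as used above: `delta` accumulates the low-order bits
/// lost when a small value is added to a large running sum.
#[derive(Default)]
struct KahanSum {
    sum: f64,
    delta: f64,
}

impl KahanSum {
    fn add(&mut self, value: f64) {
        let y = value - self.delta;
        let t = self.sum + y;
        self.delta = (t - self.sum) - y;
        self.sum = t;
    }
}

fn main() {
    let mut k = KahanSum::default();
    for _ in 0..10_000_000 {
        k.add(0.1);
    }
    // stays much closer to 1_000_000.0 than a plain `sum += 0.1` loop would
    println!("{}", k.sum);
}
```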
@@ -305,6 +288,7 @@ impl SegmentAggregationCollector for SegmentStatsCollector {

#[cfg(test)]
mod tests {

    use serde_json::Value;

    use crate::aggregation::agg_req::{Aggregation, Aggregations};

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::net::Ipv6Addr;

use columnar::{Column, ColumnType, ColumnarReader, DynamicColumn};
use columnar::{ColumnarReader, DynamicColumn};
use common::json_path_writer::JSON_PATH_SEGMENT_SEP_STR;
use common::DateTime;
use regex::Regex;
@@ -443,10 +443,10 @@ impl std::cmp::PartialEq for TopHitsTopNComputer {

impl TopHitsTopNComputer {
    /// Create a new TopHitsCollector
    pub fn new(req: &TopHitsAggregation) -> Self {
    pub fn new(req: TopHitsAggregation) -> Self {
        Self {
            top_n: TopNComputer::new(req.size + req.from.unwrap_or(0)),
            req: req.clone(),
            req,
        }
    }

@@ -491,6 +491,7 @@ impl TopHitsTopNComputer {
pub(crate) struct TopHitsSegmentCollector {
    segment_ordinal: SegmentOrdinal,
    accessor_idx: usize,
    req: TopHitsAggregation,
    top_n: TopNComputer<Vec<DocValueAndOrder>, DocAddress, false>,
}

@@ -501,6 +502,7 @@ impl TopHitsSegmentCollector {
        segment_ordinal: SegmentOrdinal,
    ) -> Self {
        Self {
            req: req.clone(),
            top_n: TopNComputer::new(req.size + req.from.unwrap_or(0)),
            segment_ordinal,
            accessor_idx,
@@ -509,13 +511,14 @@ impl TopHitsSegmentCollector {
    fn into_top_hits_collector(
        self,
        value_accessors: &HashMap<String, Vec<DynamicColumn>>,
        req: &TopHitsAggregation,
    ) -> TopHitsTopNComputer {
        let mut top_hits_computer = TopHitsTopNComputer::new(req);
        let mut top_hits_computer = TopHitsTopNComputer::new(self.req.clone());
        let top_results = self.top_n.into_vec();

        for res in top_results {
            let doc_value_fields = req.get_document_field_data(value_accessors, res.doc.doc_id);
            let doc_value_fields = self
                .req
                .get_document_field_data(value_accessors, res.doc.doc_id);
            top_hits_computer.collect(
                DocSortValuesAndFields {
                    sorts: res.feature,
@@ -527,15 +530,34 @@ impl TopHitsSegmentCollector {

        top_hits_computer
    }
}

/// TODO add a specialized variant for a single sort field
fn collect_with(
impl SegmentAggregationCollector for TopHitsSegmentCollector {
    fn add_intermediate_aggregation_result(
        self: Box<Self>,
        agg_with_accessor: &crate::aggregation::agg_req_with_accessor::AggregationsWithAccessor,
        results: &mut crate::aggregation::intermediate_agg_result::IntermediateAggregationResults,
    ) -> crate::Result<()> {
        let name = agg_with_accessor.aggs.keys[self.accessor_idx].to_string();

        let value_accessors = &agg_with_accessor.aggs.values[self.accessor_idx].value_accessors;

        let intermediate_result =
            IntermediateMetricResult::TopHits(self.into_top_hits_collector(value_accessors));
        results.push(
            name,
            IntermediateAggregationResult::Metric(intermediate_result),
        )
    }

    fn collect(
        &mut self,
        doc_id: crate::DocId,
        req: &TopHitsAggregation,
        accessors: &[(Column<u64>, ColumnType)],
        agg_with_accessor: &mut crate::aggregation::agg_req_with_accessor::AggregationsWithAccessor,
    ) -> crate::Result<()> {
        let sorts: Vec<DocValueAndOrder> = req
        let accessors = &agg_with_accessor.aggs.values[self.accessor_idx].accessors;
        let sorts: Vec<DocValueAndOrder> = self
            .req
            .sort
            .iter()
            .enumerate()
@@ -560,62 +582,15 @@ impl TopHitsSegmentCollector {
        );
        Ok(())
    }
}

impl SegmentAggregationCollector for TopHitsSegmentCollector {
    fn add_intermediate_aggregation_result(
        self: Box<Self>,
        agg_with_accessor: &crate::aggregation::agg_req_with_accessor::AggregationsWithAccessor,
        results: &mut crate::aggregation::intermediate_agg_result::IntermediateAggregationResults,
    ) -> crate::Result<()> {
        let name = agg_with_accessor.aggs.keys[self.accessor_idx].to_string();

        let value_accessors = &agg_with_accessor.aggs.values[self.accessor_idx].value_accessors;
        let tophits_req = &agg_with_accessor.aggs.values[self.accessor_idx]
            .agg
            .agg
            .as_top_hits()
            .expect("aggregation request must be of type top hits");

        let intermediate_result = IntermediateMetricResult::TopHits(
            self.into_top_hits_collector(value_accessors, tophits_req),
        );
        results.push(
            name,
            IntermediateAggregationResult::Metric(intermediate_result),
        )
    }

    /// TODO: Consider a caching layer to reduce the call overhead
    fn collect(
        &mut self,
        doc_id: crate::DocId,
        agg_with_accessor: &mut crate::aggregation::agg_req_with_accessor::AggregationsWithAccessor,
    ) -> crate::Result<()> {
        let tophits_req = &agg_with_accessor.aggs.values[self.accessor_idx]
            .agg
            .agg
            .as_top_hits()
            .expect("aggregation request must be of type top hits");
        let accessors = &agg_with_accessor.aggs.values[self.accessor_idx].accessors;
        self.collect_with(doc_id, tophits_req, accessors)?;
        Ok(())
    }

    fn collect_block(
        &mut self,
        docs: &[crate::DocId],
        agg_with_accessor: &mut crate::aggregation::agg_req_with_accessor::AggregationsWithAccessor,
    ) -> crate::Result<()> {
        let tophits_req = &agg_with_accessor.aggs.values[self.accessor_idx]
            .agg
            .agg
            .as_top_hits()
            .expect("aggregation request must be of type top hits");
        let accessors = &agg_with_accessor.aggs.values[self.accessor_idx].accessors;
        // TODO: Consider getting fields with the column block accessor.
        for doc in docs {
            self.collect_with(*doc, tophits_req, accessors)?;
            self.collect(*doc, agg_with_accessor)?;
        }
        Ok(())
    }
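Both constructors above size their computer as `req.size + req.from.unwrap_or(0)`. The reason: to serve the result page `[from, from + size)`, the collector must retain the best `from + size` candidates and only discard the first `from` at the end. A toy illustration of that sizing rule (a full sort stands in for the real top-N heap; names are invented):

```rust
fn page<T: Ord>(mut candidates: Vec<T>, from: usize, size: usize) -> Vec<T> {
    // order best-first, then apply offset + limit; the real computer only
    // ever retains `from + size` entries instead of sorting everything.
    candidates.sort_unstable_by(|a, b| b.cmp(a));
    candidates.into_iter().skip(from).take(size).collect()
}

fn main() {
    assert_eq!(page(vec![3, 9, 1, 7, 5], 1, 2), vec![7, 5]);
}
```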

@@ -11,12 +11,12 @@ use super::agg_req_with_accessor::{AggregationWithAccessor, AggregationsWithAcce
use super::bucket::{SegmentHistogramCollector, SegmentRangeCollector, SegmentTermCollector};
use super::intermediate_agg_result::IntermediateAggregationResults;
use super::metric::{
    AverageAggregation, CountAggregation, ExtendedStatsAggregation, MaxAggregation, MinAggregation,
    AverageAggregation, CountAggregation, MaxAggregation, MinAggregation,
    SegmentPercentilesCollector, SegmentStatsCollector, SegmentStatsType, StatsAggregation,
    SumAggregation,
};
use crate::aggregation::bucket::TermMissingAgg;
use crate::aggregation::metric::{SegmentExtendedStatsCollector, TopHitsSegmentCollector};
use crate::aggregation::metric::TopHitsSegmentCollector;

pub(crate) trait SegmentAggregationCollector: CollectorClone + Debug {
    fn add_intermediate_aggregation_result(
@@ -148,9 +148,6 @@ pub(crate) fn build_single_agg_segment_collector(
            accessor_idx,
            *missing,
        ))),
        ExtendedStats(ExtendedStatsAggregation { missing, sigma, .. }) => Ok(Box::new(
            SegmentExtendedStatsCollector::from_req(req.field_type, *sigma, accessor_idx, *missing),
        )),
        Sum(SumAggregation { missing, .. }) => Ok(Box::new(SegmentStatsCollector::from_req(
            req.field_type,
            SegmentStatsType::Sum,
@@ -643,30 +643,30 @@ mod tests {
        facet_collector.add_facet(Facet::from("/country/europe"));
    }

    // #[test]
    // fn test_doc_unsorted_multifacet() -> crate::Result<()> {
    //     let mut schema_builder = Schema::builder();
    //     let facet_field = schema_builder.add_facet_field("facets", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    //     let mut index_writer = index.writer_for_tests()?;
    //     index_writer.add_document(doc!(
    //         facet_field => Facet::from_text(&"/subjects/A/a").unwrap(),
    //         facet_field => Facet::from_text(&"/subjects/B/a").unwrap(),
    //         facet_field => Facet::from_text(&"/subjects/A/b").unwrap(),
    //         facet_field => Facet::from_text(&"/subjects/B/b").unwrap(),
    //     ))?;
    //     index_writer.commit()?;
    //     let reader = index.reader()?;
    //     let searcher = reader.searcher();
    //     assert_eq!(searcher.num_docs(), 1);
    //     let mut facet_collector = FacetCollector::for_field("facets");
    //     facet_collector.add_facet("/subjects");
    //     let counts = searcher.search(&AllQuery, &facet_collector)?;
    //     let facets: Vec<(&Facet, u64)> = counts.get("/subjects").collect();
    //     assert_eq!(facets[0].1, 1);
    //     Ok(())
    // }
    #[test]
    fn test_doc_unsorted_multifacet() -> crate::Result<()> {
        let mut schema_builder = Schema::builder();
        let facet_field = schema_builder.add_facet_field("facets", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_for_tests()?;
        index_writer.add_document(doc!(
            facet_field => Facet::from_text(&"/subjects/A/a").unwrap(),
            facet_field => Facet::from_text(&"/subjects/B/a").unwrap(),
            facet_field => Facet::from_text(&"/subjects/A/b").unwrap(),
            facet_field => Facet::from_text(&"/subjects/B/b").unwrap(),
        ))?;
        index_writer.commit()?;
        let reader = index.reader()?;
        let searcher = reader.searcher();
        assert_eq!(searcher.num_docs(), 1);
        let mut facet_collector = FacetCollector::for_field("facets");
        facet_collector.add_facet("/subjects");
        let counts = searcher.search(&AllQuery, &facet_collector)?;
        let facets: Vec<(&Facet, u64)> = counts.get("/subjects").collect();
        assert_eq!(facets[0].1, 1);
        Ok(())
    }

    #[test]
    fn test_doc_search_by_facet() -> crate::Result<()> {
@@ -725,99 +725,99 @@ mod tests {
        facet_collector.add_facet(Facet::from("/countryeurope"));
    }

    // #[test]
    // fn test_facet_collector_topk() {
    //     let mut schema_builder = Schema::builder();
    //     let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    #[test]
    fn test_facet_collector_topk() {
        let mut schema_builder = Schema::builder();
        let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);

    //     let uniform = Uniform::new_inclusive(1, 100_000);
    //     let mut docs: Vec<TantivyDocument> =
    //         vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
    //             .into_iter()
    //             .flat_map(|(c, count)| {
    //                 let facet = Facet::from(&format!("/facet/{c}"));
    //                 let doc = doc!(facet_field => facet);
    //                 iter::repeat(doc).take(count)
    //             })
    //             .map(|mut doc| {
    //                 doc.add_facet(
    //                     facet_field,
    //                     &format!("/facet/{}", thread_rng().sample(uniform)),
    //                 );
    //                 doc
    //             })
    //             .collect();
    //     docs[..].shuffle(&mut thread_rng());
        let uniform = Uniform::new_inclusive(1, 100_000);
        let mut docs: Vec<TantivyDocument> =
            vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
                .into_iter()
                .flat_map(|(c, count)| {
                    let facet = Facet::from(&format!("/facet/{c}"));
                    let doc = doc!(facet_field => facet);
                    iter::repeat(doc).take(count)
                })
                .map(|mut doc| {
                    doc.add_facet(
                        facet_field,
                        &format!("/facet/{}", thread_rng().sample(uniform)),
                    );
                    doc
                })
                .collect();
        docs[..].shuffle(&mut thread_rng());

    //     let mut index_writer: IndexWriter = index.writer_for_tests().unwrap();
    //     for doc in docs {
    //         index_writer.add_document(doc).unwrap();
    //     }
    //     index_writer.commit().unwrap();
    //     let searcher = index.reader().unwrap().searcher();
        let mut index_writer: IndexWriter = index.writer_for_tests().unwrap();
        for doc in docs {
            index_writer.add_document(doc).unwrap();
        }
        index_writer.commit().unwrap();
        let searcher = index.reader().unwrap().searcher();

    //     let mut facet_collector = FacetCollector::for_field("facet");
    //     facet_collector.add_facet("/facet");
    //     let counts: FacetCounts = searcher.search(&AllQuery, &facet_collector).unwrap();
        let mut facet_collector = FacetCollector::for_field("facet");
        facet_collector.add_facet("/facet");
        let counts: FacetCounts = searcher.search(&AllQuery, &facet_collector).unwrap();

    //     {
    //         let facets: Vec<(&Facet, u64)> = counts.top_k("/facet", 3);
    //         assert_eq!(
    //             facets,
    //             vec![
    //                 (&Facet::from("/facet/b"), 100),
    //                 (&Facet::from("/facet/e"), 21),
    //                 (&Facet::from("/facet/d"), 12),
    //             ]
    //         );
    //     }
    // }
        {
            let facets: Vec<(&Facet, u64)> = counts.top_k("/facet", 3);
            assert_eq!(
                facets,
                vec![
                    (&Facet::from("/facet/b"), 100),
                    (&Facet::from("/facet/e"), 21),
                    (&Facet::from("/facet/d"), 12),
                ]
            );
        }
    }

    // #[test]
    // fn test_facet_collector_topk_tie_break() -> crate::Result<()> {
    //     let mut schema_builder = Schema::builder();
    //     let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    #[test]
    fn test_facet_collector_topk_tie_break() -> crate::Result<()> {
        let mut schema_builder = Schema::builder();
        let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);

    //     let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
    //         .into_iter()
    //         .flat_map(|(c, count)| {
    //             let facet = Facet::from(&format!("/facet/{c}"));
    //             let doc = doc!(facet_field => facet);
    //             iter::repeat(doc).take(count)
    //         })
    //         .collect();
        let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
            .into_iter()
            .flat_map(|(c, count)| {
                let facet = Facet::from(&format!("/facet/{c}"));
                let doc = doc!(facet_field => facet);
                iter::repeat(doc).take(count)
            })
            .collect();

    //     let mut index_writer = index.writer_for_tests()?;
    //     for doc in docs {
    //         index_writer.add_document(doc)?;
    //     }
    //     index_writer.commit()?;
        let mut index_writer = index.writer_for_tests()?;
        for doc in docs {
            index_writer.add_document(doc)?;
        }
        index_writer.commit()?;

    //     let searcher = index.reader()?.searcher();
    //     let mut facet_collector = FacetCollector::for_field("facet");
    //     facet_collector.add_facet("/facet");
    //     let counts: FacetCounts = searcher.search(&AllQuery, &facet_collector)?;
        let searcher = index.reader()?.searcher();
        let mut facet_collector = FacetCollector::for_field("facet");
        facet_collector.add_facet("/facet");
        let counts: FacetCounts = searcher.search(&AllQuery, &facet_collector)?;

    //     let facets: Vec<(&Facet, u64)> = counts.top_k("/facet", 2);
    //     assert_eq!(
    //         facets,
    //         vec![(&Facet::from("/facet/c"), 4), (&Facet::from("/facet/a"), 2)]
    //     );
    //     Ok(())
    // }
        let facets: Vec<(&Facet, u64)> = counts.top_k("/facet", 2);
        assert_eq!(
            facets,
            vec![(&Facet::from("/facet/c"), 4), (&Facet::from("/facet/a"), 2)]
        );
        Ok(())
    }

    // #[test]
    // fn is_child_facet() {
    //     assert!(super::is_child_facet(&b"foo"[..], &b"foo\0bar"[..]));
    //     assert!(super::is_child_facet(&b""[..], &b"foo\0bar"[..]));
    //     assert!(super::is_child_facet(&b""[..], &b"foo"[..]));
    //     assert!(!super::is_child_facet(&b"foo\0bar"[..], &b"foo"[..]));
    //     assert!(!super::is_child_facet(&b"foo"[..], &b"foobar\0baz"[..]));
    // }
    #[test]
    fn is_child_facet() {
        assert!(super::is_child_facet(&b"foo"[..], &b"foo\0bar"[..]));
        assert!(super::is_child_facet(&b""[..], &b"foo\0bar"[..]));
        assert!(super::is_child_facet(&b""[..], &b"foo"[..]));
        assert!(!super::is_child_facet(&b"foo\0bar"[..], &b"foo"[..]));
        assert!(!super::is_child_facet(&b"foo"[..], &b"foobar\0baz"[..]));
    }
}
#[cfg(all(test, feature = "unstable"))]

@@ -871,10 +871,7 @@ mod tests {
    use crate::schema::{Field, Schema, FAST, STORED, TEXT};
    use crate::time::format_description::well_known::Rfc3339;
    use crate::time::OffsetDateTime;
    use crate::{
        assert_nearly_equals, DateTime, DocAddress, DocId, Index, IndexWriter, Order, Score,
        SegmentReader,
    };
    use crate::{DateTime, DocAddress, DocId, Index, IndexWriter, Order, Score, SegmentReader};

    fn make_index() -> crate::Result<Index> {
        let mut schema_builder = Schema::builder();

@@ -195,7 +195,7 @@ mod tests {
        let (tx, rx) = crossbeam_channel::bounded::<()>(0);
        let rx = Arc::new(rx);
        let executor = Executor::multi_thread(3, "search-test").unwrap();
        for _ in 0..1000 {
        for i in 0..1000 {
            let counter_clone: Arc<AtomicU64> = counter.clone();
            let other_counter_clone: Arc<AtomicU64> = other_counter.clone();

@@ -203,18 +203,18 @@ mod tests {
            let rx_clone2 = rx.clone();
            let fut = executor.spawn_blocking(move || {
                counter_clone.fetch_add(1, Ordering::SeqCst);
                let _ = rx_clone.recv();
                let () = rx_clone.recv().unwrap();
            });
            futures.push(fut);
            let other_fut = executor.spawn_blocking(move || {
                other_counter_clone.fetch_add(1, Ordering::SeqCst);
                let _ = rx_clone2.recv();
                let () = rx_clone2.recv().unwrap();
            });
            other_futures.push(other_fut);
        }

        // We execute 100 futures.
        for _ in 0..100 {
        for i in 0..100 {
            tx.send(()).unwrap();
        }

@@ -226,7 +226,7 @@ mod tests {
        drop(other_futures);

        // We execute 100 futures.
        for _ in 0..100 {
        for i in 0..100 {
            tx.send(()).unwrap();
        }

@@ -94,102 +94,102 @@ mod tests {
        assert_eq!(value, None);
    }

    // #[test]
    // fn test_facet_several_facets_sorted() {
    //     let mut schema_builder = SchemaBuilder::default();
    //     let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    //     let mut index_writer: IndexWriter = index.writer_for_tests().unwrap();
    //     index_writer
    //         .add_document(doc!(facet_field=>Facet::from_text("/parent/child1").unwrap()))
    //         .unwrap();
    //     index_writer
    //         .add_document(doc!(
    //             facet_field=>Facet::from_text("/parent/child2").unwrap(),
    //             facet_field=>Facet::from_text("/parent/child1/blop").unwrap(),
    //         ))
    //         .unwrap();
    //     index_writer.commit().unwrap();
    //     let searcher = index.reader().unwrap().searcher();
    //     let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
    //     let mut facet_ords = Vec::new();
    #[test]
    fn test_facet_several_facets_sorted() {
        let mut schema_builder = SchemaBuilder::default();
        let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer: IndexWriter = index.writer_for_tests().unwrap();
        index_writer
            .add_document(doc!(facet_field=>Facet::from_text("/parent/child1").unwrap()))
            .unwrap();
        index_writer
            .add_document(doc!(
                facet_field=>Facet::from_text("/parent/child2").unwrap(),
                facet_field=>Facet::from_text("/parent/child1/blop").unwrap(),
            ))
            .unwrap();
        index_writer.commit().unwrap();
        let searcher = index.reader().unwrap().searcher();
        let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
        let mut facet_ords = Vec::new();

    //     facet_ords.extend(facet_reader.facet_ords(0u32));
    //     assert_eq!(&facet_ords, &[0u64]);
        facet_ords.extend(facet_reader.facet_ords(0u32));
        assert_eq!(&facet_ords, &[0u64]);

    //     facet_ords.clear();
    //     facet_ords.extend(facet_reader.facet_ords(1u32));
    //     assert_eq!(&facet_ords, &[1u64, 2u64]);
        facet_ords.clear();
        facet_ords.extend(facet_reader.facet_ords(1u32));
        assert_eq!(&facet_ords, &[1u64, 2u64]);

    //     assert_eq!(facet_reader.num_facets(), 3);
    //     let mut facet = Facet::default();
    //     facet_reader.facet_from_ord(0, &mut facet).unwrap();
    //     assert_eq!(facet.to_path_string(), "/parent/child1");
    //     facet_reader.facet_from_ord(1, &mut facet).unwrap();
    //     assert_eq!(facet.to_path_string(), "/parent/child1/blop");
    //     facet_reader.facet_from_ord(2, &mut facet).unwrap();
    //     assert_eq!(facet.to_path_string(), "/parent/child2");
    // }
        assert_eq!(facet_reader.num_facets(), 3);
        let mut facet = Facet::default();
        facet_reader.facet_from_ord(0, &mut facet).unwrap();
        assert_eq!(facet.to_path_string(), "/parent/child1");
        facet_reader.facet_from_ord(1, &mut facet).unwrap();
        assert_eq!(facet.to_path_string(), "/parent/child1/blop");
        facet_reader.facet_from_ord(2, &mut facet).unwrap();
        assert_eq!(facet.to_path_string(), "/parent/child2");
    }

    // #[test]
    // fn test_facet_stored_and_indexed() -> crate::Result<()> {
    //     let mut schema_builder = SchemaBuilder::default();
    //     let facet_field = schema_builder.add_facet_field("facet", STORED);
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    //     let mut index_writer = index.writer_for_tests()?;
    //     index_writer.add_document(doc!(facet_field=>Facet::from_text("/a/b").unwrap()))?;
    //     index_writer.commit()?;
    //     let searcher = index.reader()?.searcher();
    //     let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
    //     let mut facet_ords = Vec::new();
    //     facet_ords.extend(facet_reader.facet_ords(0u32));
    //     assert_eq!(&facet_ords, &[0u64]);
    //     let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0u32, 0u32))?;
    //     let value: Option<Facet> = doc
    //         .get_first(facet_field)
    //         .and_then(|v| v.as_facet())
    //         .map(|facet| Facet::from_encoded_string(facet.to_string()));
    //     assert_eq!(value, Facet::from_text("/a/b").ok());
    //     Ok(())
    // }
    #[test]
    fn test_facet_stored_and_indexed() -> crate::Result<()> {
        let mut schema_builder = SchemaBuilder::default();
        let facet_field = schema_builder.add_facet_field("facet", STORED);
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_for_tests()?;
        index_writer.add_document(doc!(facet_field=>Facet::from_text("/a/b").unwrap()))?;
        index_writer.commit()?;
        let searcher = index.reader()?.searcher();
        let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
        let mut facet_ords = Vec::new();
        facet_ords.extend(facet_reader.facet_ords(0u32));
        assert_eq!(&facet_ords, &[0u64]);
        let doc = searcher.doc::<TantivyDocument>(DocAddress::new(0u32, 0u32))?;
        let value: Option<Facet> = doc
            .get_first(facet_field)
            .and_then(|v| v.as_facet())
            .map(|facet| Facet::from_encoded_string(facet.to_string()));
        assert_eq!(value, Facet::from_text("/a/b").ok());
        Ok(())
    }

    // #[test]
    // fn test_facet_not_populated_for_all_docs() -> crate::Result<()> {
    //     let mut schema_builder = SchemaBuilder::default();
    //     let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    //     let mut index_writer = index.writer_for_tests()?;
    //     index_writer.add_document(doc!(facet_field=>Facet::from_text("/a/b").unwrap()))?;
    //     index_writer.add_document(TantivyDocument::default())?;
    //     index_writer.commit()?;
    //     let searcher = index.reader()?.searcher();
    //     let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
    //     let mut facet_ords = Vec::new();
    //     facet_ords.extend(facet_reader.facet_ords(0u32));
    //     assert_eq!(&facet_ords, &[0u64]);
    //     facet_ords.clear();
    //     facet_ords.extend(facet_reader.facet_ords(1u32));
    //     assert!(facet_ords.is_empty());
    //     Ok(())
    // }
    #[test]
    fn test_facet_not_populated_for_all_docs() -> crate::Result<()> {
        let mut schema_builder = SchemaBuilder::default();
        let facet_field = schema_builder.add_facet_field("facet", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_for_tests()?;
        index_writer.add_document(doc!(facet_field=>Facet::from_text("/a/b").unwrap()))?;
        index_writer.add_document(TantivyDocument::default())?;
        index_writer.commit()?;
        let searcher = index.reader()?.searcher();
        let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
        let mut facet_ords = Vec::new();
        facet_ords.extend(facet_reader.facet_ords(0u32));
        assert_eq!(&facet_ords, &[0u64]);
        facet_ords.clear();
        facet_ords.extend(facet_reader.facet_ords(1u32));
        assert!(facet_ords.is_empty());
        Ok(())
    }

    // #[test]
    // fn test_facet_not_populated_for_any_docs() -> crate::Result<()> {
    //     let mut schema_builder = SchemaBuilder::default();
    //     schema_builder.add_facet_field("facet", FacetOptions::default());
    //     let schema = schema_builder.build();
    //     let index = Index::create_in_ram(schema);
    //     let mut index_writer = index.writer_for_tests()?;
    //     index_writer.add_document(TantivyDocument::default())?;
    //     index_writer.add_document(TantivyDocument::default())?;
    //     index_writer.commit()?;
    //     let searcher = index.reader()?.searcher();
    //     let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
    //     assert!(facet_reader.facet_ords(0u32).next().is_none());
    //     assert!(facet_reader.facet_ords(1u32).next().is_none());
    //     Ok(())
    // }
    #[test]
    fn test_facet_not_populated_for_any_docs() -> crate::Result<()> {
        let mut schema_builder = SchemaBuilder::default();
        schema_builder.add_facet_field("facet", FacetOptions::default());
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_for_tests()?;
        index_writer.add_document(TantivyDocument::default())?;
        index_writer.add_document(TantivyDocument::default())?;
        index_writer.commit()?;
        let searcher = index.reader()?.searcher();
        let facet_reader = searcher.segment_reader(0u32).facet_reader("facet").unwrap();
        assert!(facet_reader.facet_ords(0u32).next().is_none());
        assert!(facet_reader.facet_ords(1u32).next().is_none());
        Ok(())
    }
}

@@ -787,8 +787,6 @@ impl IndexMerger {
mod tests {

    use columnar::Column;
    use proptest::prop_oneof;
    use proptest::strategy::Strategy;
    use schema::FAST;

    use crate::collector::tests::{
@@ -796,7 +794,6 @@ mod tests {
    };
    use crate::collector::{Count, FacetCollector};
    use crate::index::{Index, SegmentId};
    use crate::indexer::NoMergePolicy;
    use crate::query::{AllQuery, BooleanQuery, EnableScoring, Scorer, TermQuery};
    use crate::schema::{
        Facet, FacetOptions, IndexRecordOption, NumericOptions, TantivyDocument, Term,
@@ -1534,112 +1531,6 @@ mod tests {
        Ok(())
    }

    #[derive(Debug, Clone, Copy, Eq, PartialEq)]
    enum IndexingOp {
        ZeroVal,
        OneVal { val: u64 },
        TwoVal { val: u64 },
        Commit,
    }

    fn balanced_operation_strategy() -> impl Strategy<Value = IndexingOp> {
        prop_oneof![
            (0u64..1u64).prop_map(|_| IndexingOp::ZeroVal),
            (0u64..1u64).prop_map(|val| IndexingOp::OneVal { val }),
            (0u64..1u64).prop_map(|val| IndexingOp::TwoVal { val }),
            (0u64..1u64).prop_map(|_| IndexingOp::Commit),
        ]
    }

    use proptest::prelude::*;
    proptest! {
        #[test]
        fn test_merge_columnar_int_proptest(ops in proptest::collection::vec(balanced_operation_strategy(), 1..20)) {
            assert!(test_merge_int_fields(&ops[..]).is_ok());
        }
    }
    fn test_merge_int_fields(ops: &[IndexingOp]) -> crate::Result<()> {
        if ops.iter().all(|op| *op == IndexingOp::Commit) {
            return Ok(());
        }
        let expected_doc_and_vals: Vec<(u32, Vec<u64>)> = ops
            .iter()
            .filter(|op| *op != &IndexingOp::Commit)
            .map(|op| match op {
                IndexingOp::ZeroVal => vec![],
                IndexingOp::OneVal { val } => vec![*val],
                IndexingOp::TwoVal { val } => vec![*val, *val],
                IndexingOp::Commit => unreachable!(),
            })
            .enumerate()
            .map(|(id, val)| (id as u32, val))
            .collect();

        let mut schema_builder = schema::Schema::builder();
        let int_options = NumericOptions::default().set_fast().set_indexed();
        let int_field = schema_builder.add_u64_field("intvals", int_options);
        let index = Index::create_in_ram(schema_builder.build());
        {
            let mut index_writer = index.writer_for_tests()?;
            index_writer.set_merge_policy(Box::new(NoMergePolicy));
            let index_doc = |index_writer: &mut IndexWriter, int_vals: &[u64]| {
                let mut doc = TantivyDocument::default();
                for &val in int_vals {
                    doc.add_u64(int_field, val);
                }
                index_writer.add_document(doc).unwrap();
            };

            for op in ops {
                match op {
                    IndexingOp::ZeroVal => index_doc(&mut index_writer, &[]),
                    IndexingOp::OneVal { val } => index_doc(&mut index_writer, &[*val]),
                    IndexingOp::TwoVal { val } => index_doc(&mut index_writer, &[*val, *val]),
                    IndexingOp::Commit => {
                        index_writer.commit().expect("commit failed");
                    }
                }
            }
            index_writer.commit().expect("commit failed");
        }
        {
            let mut segment_ids = index.searchable_segment_ids()?;
            segment_ids.sort();
            let mut index_writer: IndexWriter = index.writer_for_tests()?;
            index_writer.merge(&segment_ids).wait()?;
            index_writer.wait_merging_threads()?;
        }
        let reader = index.reader()?;
        reader.reload()?;

        let mut vals: Vec<u64> = Vec::new();
        let mut test_vals = move |col: &Column<u64>, doc: DocId, expected: &[u64]| {
            vals.clear();
            vals.extend(col.values_for_doc(doc));
            assert_eq!(&vals[..], expected);
        };

        let mut test_col = move |col: &Column<u64>, column_expected: &[(u32, Vec<u64>)]| {
            for (doc_id, vals) in column_expected.iter() {
                test_vals(col, *doc_id, vals);
            }
        };

        {
            let searcher = reader.searcher();
            let segment = searcher.segment_reader(0u32);
            let col = segment
                .fast_fields()
                .column_opt::<u64>("intvals")
                .unwrap()
                .unwrap();

            test_col(&col, &expected_doc_and_vals);
        }

        Ok(())
    }
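The `balanced_operation_strategy` above gives every operation equal weight by mapping dummy one-value ranges into `prop_oneof!`. A more conventional spelling of the same idea uses `Just` for the payload-free cases; everything below (names, ranges, the assertion) is an illustrative sketch, not code from the crate:

```rust
use proptest::prelude::*;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Op {
    ZeroVal,
    OneVal(u64),
    Commit,
}

fn op_strategy() -> impl Strategy<Value = Op> {
    prop_oneof![
        Just(Op::ZeroVal),
        (0u64..100).prop_map(Op::OneVal),
        Just(Op::Commit),
    ]
}

proptest! {
    #[test]
    fn generated_ops_are_nonempty(ops in proptest::collection::vec(op_strategy(), 1..20)) {
        prop_assert!(!ops.is_empty());
    }
}
```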

    #[test]
    fn test_merge_multivalued_int_fields_simple() -> crate::Result<()> {
        let mut schema_builder = schema::Schema::builder();

src/lib.rs (20 changed lines)
@@ -397,20 +397,16 @@ pub mod tests {
    #[macro_export]
    macro_rules! assert_nearly_equals {
        ($left:expr, $right:expr) => {{
            assert_nearly_equals!($left, $right, 0.0005);
        }};
        ($left:expr, $right:expr, $epsilon:expr) => {{
            match (&$left, &$right, &$epsilon) {
                (left_val, right_val, epsilon_val) => {
        ($left:expr, $right:expr) => {{
            match (&$left, &$right) {
                (left_val, right_val) => {
                    let diff = (left_val - right_val).abs();

                    if diff > *epsilon_val {
                    let add = left_val.abs() + right_val.abs();
                    if diff > 0.0005 * add {
                        panic!(
                            r#"assertion failed: `abs(left-right)>epsilon`
 left: `{:?}`,
 right: `{:?}`,
 epsilon: `{:?}`"#,
                            &*left_val, &*right_val, &*epsilon_val
                            r#"assertion failed: `(left ~= right)`
  left: `{:?}`,
 right: `{:?}`"#,
                            &*left_val, &*right_val
                        )
                    }
                }
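The rewritten macro swaps the explicit-epsilon form for a relative tolerance: the two sides must agree within `0.0005 * (|left| + |right|)`. A hedged illustration, assuming the macro above is in scope (values picked to sit inside and outside the band):

```rust
fn main() {
    let left = 1000.0f32;
    // diff = 0.3, tolerance = 0.0005 * 2000.3 ≈ 1.0 — this would pass
    assert_nearly_equals!(left, 1000.3f32);
    // diff = 1.5 exceeds the ≈1.0 tolerance — this would panic if uncommented
    // assert_nearly_equals!(left, 1001.5f32);
}
```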

@@ -2,7 +2,7 @@ use crate::docset::{DocSet, TERMINATED};
use crate::fieldnorm::FieldNormReader;
use crate::postings::Postings;
use crate::query::bm25::Bm25Weight;
use crate::query::phrase_query::{intersection_count, intersection_exists, PhraseScorer};
use crate::query::phrase_query::{intersection_count, PhraseScorer};
use crate::query::Scorer;
use crate::{DocId, Score};

@@ -92,17 +92,14 @@ impl<TPostings: Postings> Scorer for PhraseKind<TPostings> {
    }
}

pub struct PhrasePrefixScorer<TPostings: Postings, const SCORING_ENABLED: bool> {
pub struct PhrasePrefixScorer<TPostings: Postings> {
    phrase_scorer: PhraseKind<TPostings>,
    suffixes: Vec<TPostings>,
    suffix_offset: u32,
    phrase_count: u32,
    suffix_position_buffer: Vec<u32>,
}

impl<TPostings: Postings, const SCORING_ENABLED: bool>
    PhrasePrefixScorer<TPostings, SCORING_ENABLED>
{
impl<TPostings: Postings> PhrasePrefixScorer<TPostings> {
    // If similarity_weight is None, then scoring is disabled.
    pub fn new(
        mut term_postings: Vec<(usize, TPostings)>,
@@ -110,7 +107,7 @@ impl<TPostings: Postings, const SCORING_ENABLED: bool>
        fieldnorm_reader: FieldNormReader,
        suffixes: Vec<TPostings>,
        suffix_pos: usize,
    ) -> PhrasePrefixScorer<TPostings, SCORING_ENABLED> {
    ) -> PhrasePrefixScorer<TPostings> {
        // correct indices so we can merge with our suffix term the PhraseScorer doesn't know about
        let max_offset = term_postings
            .iter()
@@ -143,7 +140,6 @@ impl<TPostings: Postings, const SCORING_ENABLED: bool>
            suffixes,
            suffix_offset: (max_offset - suffix_pos) as u32,
            phrase_count: 0,
            suffix_position_buffer: Vec::with_capacity(100),
        };
        if phrase_prefix_scorer.doc() != TERMINATED && !phrase_prefix_scorer.matches_prefix() {
            phrase_prefix_scorer.advance();
@@ -157,6 +153,7 @@ impl<TPostings: Postings, const SCORING_ENABLED: bool>

    fn matches_prefix(&mut self) -> bool {
        let mut count = 0;
        let mut positions = Vec::new();
        let current_doc = self.doc();
        let pos_matching = self.phrase_scorer.get_intersection();
        for suffix in &mut self.suffixes {
@@ -165,27 +162,16 @@ impl<TPostings: Postings, const SCORING_ENABLED: bool>
            }
            let doc = suffix.seek(current_doc);
            if doc == current_doc {
                suffix.positions_with_offset(self.suffix_offset, &mut self.suffix_position_buffer);
                if SCORING_ENABLED {
                    count += intersection_count(pos_matching, &self.suffix_position_buffer);
                } else {
                    if intersection_exists(pos_matching, &self.suffix_position_buffer) {
                        return true;
                    }
                }
                suffix.positions_with_offset(self.suffix_offset, &mut positions);
                count += intersection_count(pos_matching, &positions);
            }
        }
        if !SCORING_ENABLED {
            return false;
        }
        self.phrase_count = count as u32;
        count != 0
    }
}

impl<TPostings: Postings, const SCORING_ENABLED: bool> DocSet
    for PhrasePrefixScorer<TPostings, SCORING_ENABLED>
{
impl<TPostings: Postings> DocSet for PhrasePrefixScorer<TPostings> {
    fn advance(&mut self) -> DocId {
        loop {
            let doc = self.phrase_scorer.advance();
@@ -212,15 +198,9 @@ impl<TPostings: Postings, const SCORING_ENABLED: bool> DocSet
    }
}

impl<TPostings: Postings, const SCORING_ENABLED: bool> Scorer
    for PhrasePrefixScorer<TPostings, SCORING_ENABLED>
{
impl<TPostings: Postings> Scorer for PhrasePrefixScorer<TPostings> {
    fn score(&mut self) -> Score {
        if SCORING_ENABLED {
            self.phrase_scorer.score()
        } else {
            1.0f32
        }
        // TODO modify score??
        self.phrase_scorer.score()
    }
}
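The removed `const SCORING_ENABLED: bool` parameter is a common const-generic specialization trick: because the flag is a compile-time constant, each monomorphized scorer keeps only one branch, and the existence-only variant can bail out of the suffix scan early with no counting work at all. A minimal standalone illustration (not the crate's types):

```rust
struct Matcher<const SCORING_ENABLED: bool> {
    count: u32,
}

impl<const SCORING_ENABLED: bool> Matcher<SCORING_ENABLED> {
    /// Returns true when the scan can stop early.
    fn on_match(&mut self) -> bool {
        if SCORING_ENABLED {
            self.count += 1; // keep exact counts for scoring
            false // must visit every candidate
        } else {
            true // existence is enough: bail out immediately
        }
    }
}

fn main() {
    let mut scoring = Matcher::<true> { count: 0 };
    let mut existence = Matcher::<false> { count: 0 };
    assert!(!scoring.on_match());
    assert!(existence.on_match());
}
```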

@@ -42,11 +42,11 @@ impl PhrasePrefixWeight {
        Ok(FieldNormReader::constant(reader.max_doc(), 1))
    }

    pub(crate) fn phrase_prefix_scorer<const SCORING_ENABLED: bool>(
    pub(crate) fn phrase_scorer(
        &self,
        reader: &SegmentReader,
        boost: Score,
    ) -> crate::Result<Option<PhrasePrefixScorer<SegmentPostings, SCORING_ENABLED>>> {
    ) -> crate::Result<Option<PhrasePrefixScorer<SegmentPostings>>> {
        let similarity_weight_opt = self
            .similarity_weight_opt
            .as_ref()
@@ -128,20 +128,15 @@ impl PhrasePrefixWeight {

impl Weight for PhrasePrefixWeight {
    fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
        if self.similarity_weight_opt.is_some() {
            if let Some(scorer) = self.phrase_prefix_scorer::<true>(reader, boost)? {
                return Ok(Box::new(scorer));
            }
        if let Some(scorer) = self.phrase_scorer(reader, boost)? {
            Ok(Box::new(scorer))
        } else {
            if let Some(scorer) = self.phrase_prefix_scorer::<false>(reader, boost)? {
                return Ok(Box::new(scorer));
            }
            Ok(Box::new(EmptyScorer))
        }
        Ok(Box::new(EmptyScorer))
    }

    fn explain(&self, reader: &SegmentReader, doc: DocId) -> crate::Result<Explanation> {
        let scorer_opt = self.phrase_prefix_scorer::<true>(reader, 1.0)?;
        let scorer_opt = self.phrase_scorer(reader, 1.0)?;
        if scorer_opt.is_none() {
            return Err(does_not_match(doc));
        }
@@ -205,7 +200,7 @@ mod tests {
            .unwrap()
            .unwrap();
        let mut phrase_scorer = phrase_weight
            .phrase_prefix_scorer::<true>(searcher.segment_reader(0u32), 1.0)?
            .phrase_scorer(searcher.segment_reader(0u32), 1.0)?
            .unwrap();
        assert_eq!(phrase_scorer.doc(), 1);
        assert_eq!(phrase_scorer.phrase_count(), 2);
@@ -216,38 +211,6 @@ mod tests {
        Ok(())
    }

    #[test]
    pub fn test_phrase_no_count() -> crate::Result<()> {
        let index = create_index(&[
            "aa bb dd cc",
            "aa aa bb c dd aa bb cc aa bb dc",
            " aa bb cd",
        ])?;
        let schema = index.schema();
        let text_field = schema.get_field("text").unwrap();
        let searcher = index.reader()?.searcher();
        let phrase_query = PhrasePrefixQuery::new(vec![
            Term::from_field_text(text_field, "aa"),
            Term::from_field_text(text_field, "bb"),
            Term::from_field_text(text_field, "c"),
        ]);
        let enable_scoring = EnableScoring::enabled_from_searcher(&searcher);
        let phrase_weight = phrase_query
            .phrase_prefix_query_weight(enable_scoring)
            .unwrap()
            .unwrap();
        let mut phrase_scorer = phrase_weight
            .phrase_prefix_scorer::<false>(searcher.segment_reader(0u32), 1.0)?
            .unwrap();
        assert_eq!(phrase_scorer.doc(), 1);
        assert_eq!(phrase_scorer.phrase_count(), 0);
        assert_eq!(phrase_scorer.advance(), 2);
        assert_eq!(phrase_scorer.doc(), 2);
        assert_eq!(phrase_scorer.phrase_count(), 0);
        assert_eq!(phrase_scorer.advance(), TERMINATED);
        Ok(())
    }

    #[test]
    pub fn test_phrase_count_mid() -> crate::Result<()> {
        let index = create_index(&["aa dd cc", "aa aa bb c dd aa bb cc aa dc", " aa bb cd"])?;
@@ -264,7 +227,7 @@ mod tests {
            .unwrap()
            .unwrap();
        let mut phrase_scorer = phrase_weight
            .phrase_prefix_scorer::<true>(searcher.segment_reader(0u32), 1.0)?
            .phrase_scorer(searcher.segment_reader(0u32), 1.0)?
            .unwrap();
        assert_eq!(phrase_scorer.doc(), 1);
        assert_eq!(phrase_scorer.phrase_count(), 2);

@@ -3,8 +3,8 @@ mod phrase_scorer;
mod phrase_weight;

pub use self::phrase_query::PhraseQuery;
pub(crate) use self::phrase_scorer::intersection_count;
pub use self::phrase_scorer::PhraseScorer;
pub(crate) use self::phrase_scorer::{intersection_count, intersection_exists};
pub use self::phrase_weight::PhraseWeight;

#[cfg(test)]

@@ -58,7 +58,7 @@ pub struct PhraseScorer<TPostings: Postings> {
}

/// Returns true if and only if the two sorted arrays contain a common element
pub(crate) fn intersection_exists(left: &[u32], right: &[u32]) -> bool {
fn intersection_exists(left: &[u32], right: &[u32]) -> bool {
    let mut left_index = 0;
    let mut right_index = 0;
    while left_index < left.len() && right_index < right.len() {
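The hunk above is cut off mid-function; for reference, a self-contained version of the two-pointer walk it begins (equivalent logic, assuming both slices are sorted ascending):

```rust
fn intersection_exists(left: &[u32], right: &[u32]) -> bool {
    let mut left_index = 0;
    let mut right_index = 0;
    while left_index < left.len() && right_index < right.len() {
        match left[left_index].cmp(&right[right_index]) {
            std::cmp::Ordering::Less => left_index += 1,
            std::cmp::Ordering::Greater => right_index += 1,
            std::cmp::Ordering::Equal => return true, // common element found
        }
    }
    false
}
```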