Compare commits

...

3 Commits

Author        SHA1        Message                      Date
Paul Masurel  1553901f51  Introducing a column trait   2022-08-27 22:48:05 +02:00
Paul Masurel  e8a6e123ae  Small refactoring estimate.  2022-08-27 21:53:46 +02:00
Paul Masurel  43a4c8287c  Removing Deserializer trait  2022-08-27 21:11:54 +02:00
                          And renaming the `Serializer` trait `FastFieldCodec`.
29 changed files with 553 additions and 580 deletions

View File

@@ -7,10 +7,11 @@
// Of course, you can have a look at tantivy's built-in collectors
// such as the `CountCollector` for more examples.
use fastfield_codecs::Column;
// ---
// Importing tantivy...
use tantivy::collector::{Collector, SegmentCollector};
use tantivy::fastfield::{DynamicFastFieldReader, FastFieldReader};
use tantivy::fastfield::DynamicFastFieldReader;
use tantivy::query::QueryParser;
use tantivy::schema::{Field, Schema, FAST, INDEXED, TEXT};
use tantivy::{doc, Index, Score, SegmentReader};
@@ -103,7 +104,7 @@ impl SegmentCollector for StatsSegmentCollector {
type Fruit = Option<Stats>;
fn collect(&mut self, doc: u32, _score: Score) {
let value = self.fast_field_reader.get(doc) as f64;
let value = self.fast_field_reader.get_val(doc as u64) as f64;
self.stats.count += 1;
self.stats.sum += value;
self.stats.squared_sum += value * value;
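The collector above only keeps three running sums; everything else can be derived at harvest time. A minimal sketch of that finalization step, assuming a `Stats` struct shaped like the fields used above (the `mean`/`variance` helpers are illustrative and not part of this diff):

```rust
// Running aggregates kept by the collector (field types assumed).
struct Stats {
    count: usize,
    sum: f64,
    squared_sum: f64,
}

impl Stats {
    fn mean(&self) -> f64 {
        self.sum / self.count as f64
    }
    // Population variance: E[X^2] - E[X]^2.
    fn variance(&self) -> f64 {
        self.squared_sum / self.count as f64 - self.mean() * self.mean()
    }
}

fn main() {
    // Values 1.0, 2.0, 3.0 collected: count = 3, sum = 6, squared_sum = 14.
    let stats = Stats { count: 3, sum: 6.0, squared_sum: 14.0 };
    assert_eq!(stats.mean(), 2.0);
    assert!((stats.variance() - 2.0 / 3.0).abs() < 1e-12);
}
```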

View File

@@ -2,8 +2,8 @@ use std::cmp::Reverse;
use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock, Weak};
use fastfield_codecs::Column;
use tantivy::collector::TopDocs;
use tantivy::fastfield::FastFieldReader;
use tantivy::query::QueryParser;
use tantivy::schema::{Field, Schema, FAST, TEXT};
use tantivy::{
@@ -52,7 +52,7 @@ impl Warmer for DynamicPriceColumn {
let product_id_reader = segment.fast_fields().u64(self.field)?;
let product_ids: Vec<ProductId> = segment
.doc_ids_alive()
.map(|doc| product_id_reader.get(doc))
.map(|doc| product_id_reader.get_val(doc as u64))
.collect();
let mut prices_it = self.price_fetcher.fetch_prices(&product_ids).into_iter();
let mut price_vals: Vec<Price> = Vec::new();

View File

@@ -4,9 +4,9 @@ extern crate test;
#[cfg(test)]
mod tests {
use fastfield_codecs::bitpacked::{BitpackedReader, BitpackedSerializer};
use fastfield_codecs::blockwise_linear::{BlockwiseLinearReader, BlockwiseLinearSerializer};
use fastfield_codecs::linear::{LinearReader, LinearSerializer};
use fastfield_codecs::bitpacked::BitpackedCodec;
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::*;
fn get_data() -> Vec<u64> {
@@ -25,16 +25,10 @@ mod tests {
fn value_iter() -> impl Iterator<Item = u64> {
0..20_000
}
fn bench_get<
S: FastFieldCodecSerializer,
R: FastFieldCodecDeserializer + FastFieldDataAccess,
>(
b: &mut Bencher,
data: &[u64],
) {
fn bench_get<Codec: FastFieldCodec>(b: &mut Bencher, data: &[u64]) {
let mut bytes = vec![];
S::serialize(&mut bytes, &data).unwrap();
let reader = R::open_from_bytes(OwnedBytes::new(bytes)).unwrap();
Codec::serialize(&mut bytes, &data).unwrap();
let reader = Codec::open_from_bytes(OwnedBytes::new(bytes)).unwrap();
b.iter(|| {
let mut sum = 0u64;
for pos in value_iter() {
@@ -45,10 +39,11 @@ mod tests {
sum
});
}
fn bench_create<S: FastFieldCodecSerializer>(b: &mut Bencher, data: &[u64]) {
let mut bytes = vec![];
fn bench_create<Codec: FastFieldCodec>(b: &mut Bencher, data: &[u64]) {
let mut bytes = Vec::new();
b.iter(|| {
S::serialize(&mut bytes, &data).unwrap();
bytes.clear();
Codec::serialize(&mut bytes, &data).unwrap();
});
}
@@ -57,32 +52,32 @@ mod tests {
#[bench]
fn bench_fastfield_bitpack_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<BitpackedSerializer>(b, &data);
bench_create::<BitpackedCodec>(b, &data);
}
#[bench]
fn bench_fastfield_linearinterpol_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<LinearSerializer>(b, &data);
bench_create::<LinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_multilinearinterpol_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<BlockwiseLinearSerializer>(b, &data);
bench_create::<BlockwiseLinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_bitpack_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<BitpackedSerializer, BitpackedReader>(b, &data);
bench_get::<BitpackedCodec>(b, &data);
}
#[bench]
fn bench_fastfield_linearinterpol_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<LinearSerializer, LinearReader>(b, &data);
bench_get::<LinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_multilinearinterpol_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<BlockwiseLinearSerializer, BlockwiseLinearReader>(b, &data);
bench_get::<BlockwiseLinearCodec>(b, &data);
}
pub fn stats_from_vec(data: &[u64]) -> FastFieldStats {
let min_value = data.iter().cloned().min().unwrap_or(0);
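The bench helpers illustrate the core of this refactoring: the old pair of type parameters (a `FastFieldCodecSerializer` plus a matching `FastFieldCodecDeserializer`) collapses into one `FastFieldCodec` with an associated `Reader` type. A self-contained sketch of that shape, with a toy `UncompressedCodec` standing in for the real codecs and simplified signatures (not the real `fastfield_codecs` definitions):

```rust
use std::io;

trait Column {
    fn get_val(&self, idx: u64) -> u64;
}

impl Column for Vec<u64> {
    fn get_val(&self, idx: u64) -> u64 {
        self[idx as usize]
    }
}

// One trait now carries both halves: the serializer and its reader type.
trait FastFieldCodec {
    type Reader: Column;
    fn serialize(write: &mut Vec<u8>, data: &[u64]) -> io::Result<()>;
    fn open_from_bytes(bytes: &[u8]) -> io::Result<Self::Reader>;
}

/// Toy codec: little-endian u64s, no compression.
struct UncompressedCodec;

impl FastFieldCodec for UncompressedCodec {
    type Reader = Vec<u64>;

    fn serialize(write: &mut Vec<u8>, data: &[u64]) -> io::Result<()> {
        for &val in data {
            write.extend_from_slice(&val.to_le_bytes());
        }
        Ok(())
    }

    fn open_from_bytes(bytes: &[u8]) -> io::Result<Self::Reader> {
        let mut vals = Vec::with_capacity(bytes.len() / 8);
        for chunk in bytes.chunks_exact(8) {
            let mut buf = [0u8; 8];
            buf.copy_from_slice(chunk);
            vals.push(u64::from_le_bytes(buf));
        }
        Ok(vals)
    }
}

// A single type parameter replaces the old <S: Serializer, R: Reader> pair.
fn roundtrip<C: FastFieldCodec>(data: &[u64]) -> io::Result<C::Reader> {
    let mut bytes = Vec::new();
    C::serialize(&mut bytes, data)?;
    C::open_from_bytes(&bytes)
}

fn main() -> io::Result<()> {
    let reader = roundtrip::<UncompressedCodec>(&[1, 2, 3])?;
    assert_eq!(reader.get_val(2), 3);
    Ok(())
}
```

This is exactly why `bench_get` above shrinks from two type parameters to one: the associated type guarantees that a codec is always opened with its own reader.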

View File

@@ -4,9 +4,7 @@ use common::BinarySerializable;
use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::{
FastFieldCodecDeserializer, FastFieldCodecSerializer, FastFieldCodecType, FastFieldDataAccess,
};
use crate::{Column, FastFieldCodec, FastFieldCodecType};
/// Depending on the field type, a different
/// fast field is required.
@@ -14,32 +12,12 @@ use crate::{
pub struct BitpackedReader {
data: OwnedBytes,
bit_unpacker: BitUnpacker,
pub min_value_u64: u64,
pub max_value_u64: u64,
pub num_vals: u64,
min_value_u64: u64,
max_value_u64: u64,
num_vals: u64,
}
impl FastFieldCodecDeserializer for BitpackedReader {
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self> {
let footer_offset = bytes.len() - 24;
let (data, mut footer) = bytes.split(footer_offset);
let min_value = u64::deserialize(&mut footer)?;
let amplitude = u64::deserialize(&mut footer)?;
let num_vals = u64::deserialize(&mut footer)?;
let max_value = min_value + amplitude;
let num_bits = compute_num_bits(amplitude);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(BitpackedReader {
data,
bit_unpacker,
min_value_u64: min_value,
max_value_u64: max_value,
num_vals,
})
}
}
impl FastFieldDataAccess for BitpackedReader {
impl Column for BitpackedReader {
#[inline]
fn get_val(&self, doc: u64) -> u64 {
self.min_value_u64 + self.bit_unpacker.get(doc, &self.data)
@@ -111,12 +89,33 @@ impl<'a, W: Write> BitpackedSerializerLegacy<'a, W> {
}
}
pub struct BitpackedSerializer {}
pub struct BitpackedCodec;
impl FastFieldCodecSerializer for BitpackedSerializer {
impl FastFieldCodec for BitpackedCodec {
/// The CODEC_TYPE is an enum value used for serialization.
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::Bitpacked;
type Reader = BitpackedReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_offset = bytes.len() - 24;
let (data, mut footer) = bytes.split(footer_offset);
let min_value = u64::deserialize(&mut footer)?;
let amplitude = u64::deserialize(&mut footer)?;
let num_vals = u64::deserialize(&mut footer)?;
let max_value = min_value + amplitude;
let num_bits = compute_num_bits(amplitude);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(BitpackedReader {
data,
bit_unpacker,
min_value_u64: min_value,
max_value_u64: max_value,
num_vals,
})
}
/// Serializes data with the BitpackedFastFieldSerializer.
///
/// The serializer in fact encodes the values by bitpacking
@@ -125,10 +124,7 @@ impl FastFieldCodecSerializer for BitpackedSerializer {
/// It requires a `min_value` and a `max_value` to compute
/// the minimum number of bits required to encode
/// values.
fn serialize(
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
fn serialize(write: &mut impl Write, fastfield_accessor: &dyn Column) -> io::Result<()> {
let mut serializer = BitpackedSerializerLegacy::open(
write,
fastfield_accessor.min_value(),
@@ -142,29 +138,27 @@ impl FastFieldCodecSerializer for BitpackedSerializer {
Ok(())
}
fn is_applicable(_fastfield_accessor: &impl FastFieldDataAccess) -> bool {
true
}
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
fn estimate(fastfield_accessor: &impl Column) -> Option<f32> {
let amplitude = fastfield_accessor.max_value() - fastfield_accessor.min_value();
let num_bits = compute_num_bits(amplitude);
let num_bits_uncompressed = 64;
num_bits as f32 / num_bits_uncompressed as f32
Some(num_bits as f32 / num_bits_uncompressed as f32)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::get_codec_test_data_sets;
use crate::tests::get_codec_test_datasets;
fn create_and_validate(data: &[u64], name: &str) {
crate::tests::create_and_validate::<BitpackedSerializer, BitpackedReader>(data, name);
crate::tests::create_and_validate::<BitpackedCodec>(data, name);
}
#[test]
fn test_with_codec_data_sets() {
let data_sets = get_codec_test_data_sets();
let data_sets = get_codec_test_datasets();
for (mut data, name) in data_sets {
create_and_validate(&data, name);
data.reverse();
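The bitpacked estimate above is simply bits-per-value over the 64 bits of an uncompressed `u64`. A small worked example, with a stand-in for `tantivy_bitpacker::compute_num_bits` (assumed here to return the minimal bit width that can hold `amplitude`):

```rust
/// Stand-in for tantivy_bitpacker::compute_num_bits (assumed behavior):
/// the minimal number of bits that can represent `amplitude`.
fn compute_num_bits(amplitude: u64) -> u8 {
    (64 - amplitude.leading_zeros()) as u8
}

fn main() {
    let (min_value, max_value) = (1_000u64, 1_031u64);
    let amplitude = max_value - min_value; // 31
    let num_bits = compute_num_bits(amplitude); // 5 bits per value
    let estimate = num_bits as f32 / 64.0;
    assert_eq!(estimate, 0.078125); // roughly 13x smaller than raw u64s
}
```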

View File

@@ -18,9 +18,7 @@ use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::linear::{get_calculated_value, get_slope};
use crate::{
FastFieldCodecDeserializer, FastFieldCodecSerializer, FastFieldCodecType, FastFieldDataAccess,
};
use crate::{Column, FastFieldCodec, FastFieldCodecType};
const CHUNK_SIZE: u64 = 512;
@@ -148,18 +146,7 @@ fn get_interpolation_function(doc: u64, interpolations: &[Function]) -> &Functio
&interpolations[get_interpolation_position(doc)]
}
impl FastFieldCodecDeserializer for BlockwiseLinearReader {
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self> {
let footer_len: u32 = (&bytes[bytes.len() - 4..]).deserialize()?;
let footer_offset = bytes.len() - 4 - footer_len as usize;
let (data, mut footer) = bytes.split(footer_offset);
let footer = BlockwiseLinearFooter::deserialize(&mut footer)?;
Ok(BlockwiseLinearReader { data, footer })
}
}
impl FastFieldDataAccess for BlockwiseLinearReader {
impl Column for BlockwiseLinearReader {
#[inline]
fn get_val(&self, idx: u64) -> u64 {
let interpolation = get_interpolation_function(idx, &self.footer.interpolations);
@@ -191,15 +178,24 @@ impl FastFieldDataAccess for BlockwiseLinearReader {
}
/// Same as LinearSerializer, but working on chunks of CHUNK_SIZE elements.
pub struct BlockwiseLinearSerializer {}
pub struct BlockwiseLinearCodec;
impl FastFieldCodecSerializer for BlockwiseLinearSerializer {
impl FastFieldCodec for BlockwiseLinearCodec {
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::BlockwiseLinear;
type Reader = BlockwiseLinearReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_len: u32 = (&bytes[bytes.len() - 4..]).deserialize()?;
let footer_offset = bytes.len() - 4 - footer_len as usize;
let (data, mut footer) = bytes.split(footer_offset);
let footer = BlockwiseLinearFooter::deserialize(&mut footer)?;
Ok(BlockwiseLinearReader { data, footer })
}
/// Creates a new fast field serializer.
fn serialize(
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
fn serialize(write: &mut impl Write, fastfield_accessor: &dyn Column) -> io::Result<()> {
assert!(fastfield_accessor.min_value() <= fastfield_accessor.max_value());
let first_val = fastfield_accessor.get_val(0);
@@ -290,10 +286,14 @@ impl FastFieldCodecSerializer for BlockwiseLinearSerializer {
Ok(())
}
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool {
if fastfield_accessor.num_vals() < 5_000 {
return false;
/// Estimation for linear interpolation is hard because you don't know
/// where the local maxima for the deviation of the calculated value are,
/// and the offset is also unknown.
fn estimate(fastfield_accessor: &impl Column) -> Option<f32> {
if fastfield_accessor.num_vals() < 10 * CHUNK_SIZE {
return None;
}
// On serialization the offset is added to the actual value.
// We need to make sure this won't run into overflow calculation issues.
// For this we take the maximum theoretical offset and add this to the max value.
@@ -305,14 +305,9 @@ impl FastFieldCodecSerializer for BlockwiseLinearSerializer {
.checked_add(theorethical_maximum_offset)
.is_none()
{
return false;
return None;
}
true
}
/// Estimation for linear interpolation is hard because you don't know
/// where the local maxima for the deviation of the calculated value are,
/// and the offset is also unknown.
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
let first_val_in_first_block = fastfield_accessor.get_val(0);
let last_elem_in_first_chunk = CHUNK_SIZE.min(fastfield_accessor.num_vals());
let last_val_in_first_block =
@@ -351,7 +346,7 @@ impl FastFieldCodecSerializer for BlockwiseLinearSerializer {
// function metadata per block
+ 29 * (fastfield_accessor.num_vals() / CHUNK_SIZE);
let num_bits_uncompressed = 64 * fastfield_accessor.num_vals();
num_bits as f32 / num_bits_uncompressed as f32
Some(num_bits as f32 / num_bits_uncompressed as f32)
}
}
@@ -366,12 +361,10 @@ fn distance<T: Sub<Output = T> + Ord>(x: T, y: T) -> T {
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::get_codec_test_data_sets;
use crate::tests::get_codec_test_datasets;
fn create_and_validate(data: &[u64], name: &str) -> (f32, f32) {
crate::tests::create_and_validate::<BlockwiseLinearSerializer, BlockwiseLinearReader>(
data, name,
)
fn create_and_validate(data: &[u64], name: &str) -> Option<(f32, f32)> {
crate::tests::create_and_validate::<BlockwiseLinearCodec>(data, name)
}
const HIGHEST_BIT: u64 = 1 << 63;
@@ -385,7 +378,7 @@ mod tests {
.map(i64_to_u64)
.collect::<Vec<_>>();
let (estimate, actual_compression) =
create_and_validate(&data, "simple monotonically large i64");
create_and_validate(&data, "simple monotonically large i64").unwrap();
assert!(actual_compression < 0.2);
assert!(estimate < 0.20);
assert!(estimate > 0.15);
@@ -396,7 +389,7 @@ mod tests {
fn test_compression() {
let data = (10..=6_000_u64).collect::<Vec<_>>();
let (estimate, actual_compression) =
create_and_validate(&data, "simple monotonically large");
create_and_validate(&data, "simple monotonically large").unwrap();
assert!(actual_compression < 0.2);
assert!(estimate < 0.20);
assert!(estimate > 0.15);
@@ -405,7 +398,7 @@ mod tests {
#[test]
fn test_with_codec_data_sets() {
let data_sets = get_codec_test_data_sets();
let data_sets = get_codec_test_datasets();
for (mut data, name) in data_sets {
create_and_validate(&data, name);
data.reverse();
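As the hunks above show, `is_applicable` is gone: `estimate` now returns `Option<f32>`, with `None` meaning the codec is disabled for the given data. A condensed sketch of the merged control flow (parameter names are illustrative):

```rust
const CHUNK_SIZE: u64 = 512;

// `None` now plays the role of the removed `is_applicable() == false`.
fn estimate(num_vals: u64, max_value: u64, max_offset: u64, ratio: f32) -> Option<f32> {
    if num_vals < 10 * CHUNK_SIZE {
        return None; // not enough values for blockwise interpolation
    }
    if max_value.checked_add(max_offset).is_none() {
        return None; // the offset shift could overflow u64
    }
    Some(ratio) // previously: estimate() after a successful applicability check
}

fn main() {
    assert_eq!(estimate(100, 0, 0, 0.2), None);
    assert_eq!(estimate(10_000, u64::MAX, 1, 0.2), None);
    assert_eq!(estimate(10_000, 1_000, 1, 0.2), Some(0.2));
}
```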

View File

@@ -0,0 +1,49 @@
pub trait Column<T = u64> {
/// Return the value associated with the given `idx`.
///
/// This accessor should return as fast as possible.
///
/// # Panics
///
/// May panic if `idx` is greater than the column length.
fn get_val(&self, idx: u64) -> T;
/// Fills an output buffer with the fast field values
/// associated with the `DocId` going from
/// `start` to `start + output.len()`.
///
/// Regardless of the type of `Item`, this method works by:
/// - transmuting the output array
/// - extracting the `Item`s as if they were `u64`
/// - possibly converting the `u64` value to the right type.
///
/// # Panics
///
/// May panic if `start + output.len()` is greater than
/// the segment's `maxdoc`.
fn get_range(&self, start: u64, output: &mut [T]) {
for (out, idx) in output.iter_mut().zip(start..) {
*out = self.get_val(idx);
}
}
/// Returns the minimum value for this fast field.
///
/// The min value does not take possibly deleted documents into account,
/// and should be considered a lower bound of the actual minimum value.
fn min_value(&self) -> T;
/// Returns the maximum value for this fast field.
///
/// The max value does not take possibly deleted documents into account,
/// and should be considered an upper bound of the actual maximum value.
fn max_value(&self) -> T;
fn num_vals(&self) -> u64;
/// Returns an iterator over the data
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = T> + 'a> {
Box::new((0..self.num_vals()).map(|idx| self.get_val(idx)))
}
}
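Only the four required methods have to be provided by an implementor; `get_range` and `iter` fall out of the defaults. A standalone sketch (the trait is restated in abbreviated form so the snippet compiles on its own; `VecColumn` is illustrative, not part of the commit):

```rust
trait Column<T = u64> {
    fn get_val(&self, idx: u64) -> T;
    fn min_value(&self) -> T;
    fn max_value(&self) -> T;
    fn num_vals(&self) -> u64;

    // Default: one random access per index.
    fn get_range(&self, start: u64, output: &mut [T]) {
        for (out, idx) in output.iter_mut().zip(start..) {
            *out = self.get_val(idx);
        }
    }

    // Default: iterate the whole column.
    fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = T> + 'a> {
        Box::new((0..self.num_vals()).map(move |idx| self.get_val(idx)))
    }
}

struct VecColumn(Vec<u64>);

impl Column for VecColumn {
    fn get_val(&self, idx: u64) -> u64 {
        self.0[idx as usize]
    }
    fn min_value(&self) -> u64 {
        self.0.iter().copied().min().unwrap_or(0)
    }
    fn max_value(&self) -> u64 {
        self.0.iter().copied().max().unwrap_or(0)
    }
    fn num_vals(&self) -> u64 {
        self.0.len() as u64
    }
}

fn main() {
    let col = VecColumn(vec![3, 1, 4, 1, 5]);
    let mut buf = [0u64; 3];
    col.get_range(1, &mut buf); // default implementation
    assert_eq!(buf, [1, 4, 1]);
    assert_eq!(col.iter().sum::<u64>(), 14);
    assert_eq!((col.min_value(), col.max_value()), (1, 5));
}
```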

View File

@@ -12,22 +12,10 @@ pub mod bitpacked;
pub mod blockwise_linear;
pub mod linear;
pub trait FastFieldCodecDeserializer: Sized {
/// Reads the metadata and returns the CodecReader
fn open_from_bytes(bytes: OwnedBytes) -> std::io::Result<Self>
where Self: FastFieldDataAccess;
}
mod column;
pub use self::column::Column;
pub trait FastFieldDataAccess {
fn get_val(&self, doc: u64) -> u64;
fn min_value(&self) -> u64;
fn max_value(&self) -> u64;
fn num_vals(&self) -> u64;
/// Returns an iterator over the data
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = u64> + 'a> {
Box::new((0..self.num_vals()).map(|idx| self.get_val(idx)))
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)]
#[repr(u8)]
@@ -69,29 +57,30 @@ impl FastFieldCodecType {
/// The FastFieldSerializerEstimate trait is required on all variants
/// of fast field compressions, to decide which one to choose.
pub trait FastFieldCodecSerializer {
pub trait FastFieldCodec {
/// A codec needs to provide a unique name and id, which is
/// used for debugging and de/serialization.
const CODEC_TYPE: FastFieldCodecType;
/// Check if the Codec is able to compress the data
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool;
type Reader: Column<u64>;
/// Returns an estimate of the compression ratio.
/// The baseline is uncompressed 64bit data.
///
/// It could make sense to also return a value representing
/// computational complexity.
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32;
/// Reads the metadata and returns the CodecReader
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader>;
/// Serializes the data using the serializer into write.
///
/// The fastfield_accessor's iterator should be preferred over random access
/// via fastfield_accessor for performance reasons.
fn serialize(
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()>;
fn serialize(write: &mut impl Write, fastfield_accessor: &dyn Column<u64>) -> io::Result<()>;
/// Returns an estimate of the compression ratio.
/// If the codec is not applicable, returns `None`.
///
/// The baseline is uncompressed 64bit data.
///
/// It could make sense to also return a value representing
/// computational complexity.
fn estimate(fastfield_accessor: &impl Column) -> Option<f32>;
}
#[derive(Debug, Clone)]
@@ -102,7 +91,7 @@ pub struct FastFieldStats {
pub num_vals: u64,
}
impl<'a> FastFieldDataAccess for &'a [u64] {
impl<'a> Column for &'a [u64] {
fn get_val(&self, position: u64) -> u64 {
self[position as usize]
}
@@ -124,7 +113,7 @@ impl<'a> FastFieldDataAccess for &'a [u64] {
}
}
impl FastFieldDataAccess for Vec<u64> {
impl Column for Vec<u64> {
fn get_val(&self, position: u64) -> u64 {
self[position as usize]
}
@@ -149,61 +138,55 @@ mod tests {
use proptest::arbitrary::any;
use proptest::proptest;
use crate::bitpacked::{BitpackedReader, BitpackedSerializer};
use crate::blockwise_linear::{BlockwiseLinearReader, BlockwiseLinearSerializer};
use crate::linear::{LinearReader, LinearSerializer};
use crate::bitpacked::BitpackedCodec;
use crate::blockwise_linear::BlockwiseLinearCodec;
use crate::linear::LinearCodec;
pub fn create_and_validate<
S: FastFieldCodecSerializer,
R: FastFieldCodecDeserializer + FastFieldDataAccess,
>(
pub fn create_and_validate<Codec: FastFieldCodec>(
data: &[u64],
name: &str,
) -> (f32, f32) {
if !S::is_applicable(&data) {
return (f32::MAX, 0.0);
}
let estimation = S::estimate(&data);
) -> Option<(f32, f32)> {
let estimation = Codec::estimate(&data)?;
let mut out: Vec<u8> = Vec::new();
S::serialize(&mut out, &data).unwrap();
Codec::serialize(&mut out, &data).unwrap();
let actual_compression = out.len() as f32 / (data.len() as f32 * 8.0);
let reader = R::open_from_bytes(OwnedBytes::new(out)).unwrap();
let reader = Codec::open_from_bytes(OwnedBytes::new(out)).unwrap();
assert_eq!(reader.num_vals(), data.len() as u64);
for (doc, orig_val) in data.iter().enumerate() {
for (doc, orig_val) in data.iter().copied().enumerate() {
let val = reader.get_val(doc as u64);
if val != *orig_val {
panic!(
"val {val:?} does not match orig_val {orig_val:?}, in data set {name}, data \
{data:?}",
);
}
assert_eq!(
val, orig_val,
"val `{val}` does not match orig_val {orig_val:?}, in data set {name}, data \
`{data:?}`",
);
}
(estimation, actual_compression)
Some((estimation, actual_compression))
}
proptest! {
#[test]
fn test_proptest_small(data in proptest::collection::vec(any::<u64>(), 1..10)) {
create_and_validate::<LinearSerializer, LinearReader>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearSerializer, BlockwiseLinearReader>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedSerializer, BitpackedReader>(&data, "proptest bitpacked");
create_and_validate::<LinearCodec>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearCodec>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedCodec>(&data, "proptest bitpacked");
}
#[test]
fn test_proptest_large(data in proptest::collection::vec(any::<u64>(), 1..6000)) {
create_and_validate::<LinearSerializer, LinearReader>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearSerializer, BlockwiseLinearReader>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedSerializer, BitpackedReader>(&data, "proptest bitpacked");
create_and_validate::<LinearCodec>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearCodec>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedCodec>(&data, "proptest bitpacked");
}
}
pub fn get_codec_test_data_sets() -> Vec<(Vec<u64>, &'static str)> {
pub fn get_codec_test_datasets() -> Vec<(Vec<u64>, &'static str)> {
let mut data_and_names = vec![];
let data = (10..=20_u64).collect::<Vec<_>>();
let data = (10..=10_000_u64).collect::<Vec<_>>();
data_and_names.push((data, "simple monotonically increasing"));
data_and_names.push((
@@ -216,32 +199,30 @@ mod tests {
data_and_names
}
fn test_codec<
S: FastFieldCodecSerializer,
R: FastFieldDataAccess + FastFieldCodecDeserializer,
>() {
let codec_name = format!("{:?}", S::CODEC_TYPE);
for (data, dataset_name) in get_codec_test_data_sets() {
let (estimate, actual) = crate::tests::create_and_validate::<S, R>(&data, dataset_name);
let result = if estimate == f32::MAX {
"Disabled".to_string()
} else {
fn test_codec<C: FastFieldCodec>() {
let codec_name = format!("{:?}", C::CODEC_TYPE);
for (data, dataset_name) in get_codec_test_datasets() {
let estimate_actual_opt: Option<(f32, f32)> =
crate::tests::create_and_validate::<C>(&data, dataset_name);
let result = if let Some((estimate, actual)) = estimate_actual_opt {
format!("Estimate `{estimate}` Actual `{actual}`")
} else {
"Disabled".to_string()
};
println!("Codec {codec_name}, DataSet {dataset_name}, {result}");
}
}
#[test]
fn test_codec_bitpacking() {
test_codec::<BitpackedSerializer, BitpackedReader>();
test_codec::<BitpackedCodec>();
}
#[test]
fn test_codec_interpolation() {
test_codec::<LinearSerializer, LinearReader>();
test_codec::<LinearCodec>();
}
#[test]
fn test_codec_multi_interpolation() {
test_codec::<BlockwiseLinearSerializer, BlockwiseLinearReader>();
test_codec::<BlockwiseLinearCodec>();
}
use super::*;
@@ -250,37 +231,37 @@ mod tests {
fn estimation_good_interpolation_case() {
let data = (10..=20000_u64).collect::<Vec<_>>();
let linear_interpol_estimation = LinearSerializer::estimate(&data);
let linear_interpol_estimation = LinearCodec::estimate(&data).unwrap();
assert_le!(linear_interpol_estimation, 0.01);
let multi_linear_interpol_estimation = BlockwiseLinearSerializer::estimate(&data);
let multi_linear_interpol_estimation = BlockwiseLinearCodec::estimate(&data).unwrap();
assert_le!(multi_linear_interpol_estimation, 0.2);
assert_le!(linear_interpol_estimation, multi_linear_interpol_estimation);
let bitpacked_estimation = BitpackedSerializer::estimate(&data);
let bitpacked_estimation = BitpackedCodec::estimate(&data).unwrap();
assert_le!(linear_interpol_estimation, bitpacked_estimation);
}
#[test]
fn estimation_test_bad_interpolation_case() {
let data = vec![200, 10, 10, 10, 10, 1000, 20];
let linear_interpol_estimation = LinearSerializer::estimate(&data);
let linear_interpol_estimation = LinearCodec::estimate(&data).unwrap();
assert_le!(linear_interpol_estimation, 0.32);
let bitpacked_estimation = BitpackedSerializer::estimate(&data);
let bitpacked_estimation = BitpackedCodec::estimate(&data).unwrap();
assert_le!(bitpacked_estimation, linear_interpol_estimation);
}
#[test]
fn estimation_test_bad_interpolation_case_monotonically_increasing() {
let mut data = (200..=20000_u64).collect::<Vec<_>>();
let mut data: Vec<u64> = (200..=20000_u64).collect();
data.push(1_000_000);
// in this case the linear interpolation can't, in fact, be worse than bitpacking,
// but the estimator adds some threshold, which leads to a worse estimate
let linear_interpol_estimation = LinearSerializer::estimate(&data);
let linear_interpol_estimation = LinearCodec::estimate(&data).unwrap();
assert_le!(linear_interpol_estimation, 0.35);
let bitpacked_estimation = BitpackedSerializer::estimate(&data);
let bitpacked_estimation = BitpackedCodec::estimate(&data).unwrap();
assert_le!(bitpacked_estimation, 0.32);
assert_le!(bitpacked_estimation, linear_interpol_estimation);
}
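With estimates now being `Option<f32>`, codec selection composes naturally: disabled codecs simply drop out of the comparison, as the benchmark binary further below also does with `flatten()`. A hedged sketch of that selection logic (codec names are placeholders for the three real implementations):

```rust
// Pick the codec with the smallest estimated compression ratio,
// ignoring codecs that returned None (i.e. are disabled).
fn pick_best<'a>(estimates: &[(&'a str, Option<f32>)]) -> Option<&'a str> {
    estimates
        .iter()
        .filter_map(|&(name, est)| est.map(|e| (name, e)))
        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap())
        .map(|(name, _)| name)
}

fn main() {
    let estimates = [
        ("Bitpacked", Some(0.25)),
        ("Linear", Some(0.01)),
        ("BlockwiseLinear", None), // too few values: codec disabled
    ];
    assert_eq!(pick_best(&estimates), Some("Linear"));
}
```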

View File

@@ -5,9 +5,7 @@ use common::{BinarySerializable, FixedSize};
use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::{
FastFieldCodecDeserializer, FastFieldCodecSerializer, FastFieldCodecType, FastFieldDataAccess,
};
use crate::{Column, FastFieldCodec, FastFieldCodecType};
/// Depending on the field type, a different
/// fast field is required.
@@ -59,25 +57,7 @@ impl FixedSize for LinearFooter {
const SIZE_IN_BYTES: usize = 56;
}
impl FastFieldCodecDeserializer for LinearReader {
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self> {
let footer_offset = bytes.len() - LinearFooter::SIZE_IN_BYTES;
let (data, mut footer) = bytes.split(footer_offset);
let footer = LinearFooter::deserialize(&mut footer)?;
let slope = get_slope(footer.first_val, footer.last_val, footer.num_vals);
let num_bits = compute_num_bits(footer.relative_max_value);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(LinearReader {
data,
bit_unpacker,
footer,
slope,
})
}
}
impl FastFieldDataAccess for LinearReader {
impl Column for LinearReader {
#[inline]
fn get_val(&self, doc: u64) -> u64 {
let calculated_value = get_calculated_value(self.footer.first_val, doc, self.slope);
@@ -100,7 +80,7 @@ impl FastFieldDataAccess for LinearReader {
/// Fastfield serializer, which tries to guess values by linear interpolation
/// and stores the difference bitpacked.
pub struct LinearSerializer {}
pub struct LinearCodec;
#[inline]
pub(crate) fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
@@ -141,14 +121,29 @@ pub fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
}
}
impl FastFieldCodecSerializer for LinearSerializer {
impl FastFieldCodec for LinearCodec {
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::Linear;
type Reader = LinearReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_offset = bytes.len() - LinearFooter::SIZE_IN_BYTES;
let (data, mut footer) = bytes.split(footer_offset);
let footer = LinearFooter::deserialize(&mut footer)?;
let slope = get_slope(footer.first_val, footer.last_val, footer.num_vals);
let num_bits = compute_num_bits(footer.relative_max_value);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(LinearReader {
data,
bit_unpacker,
footer,
slope,
})
}
/// Creates a new fast field serializer.
fn serialize(
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
fn serialize(write: &mut impl Write, fastfield_accessor: &dyn Column) -> io::Result<()> {
assert!(fastfield_accessor.min_value() <= fastfield_accessor.max_value());
let first_val = fastfield_accessor.get_val(0);
@@ -194,10 +189,15 @@ impl FastFieldCodecSerializer for LinearSerializer {
footer.serialize(write)?;
Ok(())
}
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool {
/// Estimation for linear interpolation is hard because you don't know
/// where the local maxima for the deviation of the calculated value are,
/// and the offset to shift all values to >=0 is also unknown.
fn estimate(fastfield_accessor: &impl Column) -> Option<f32> {
if fastfield_accessor.num_vals() < 3 {
return false; // disable compressor for this case
return None; // disable compressor for this case
}
// On serialization the offset is added to the actual value.
// We need to make sure this won't run into overflow calculation issues.
// For this we take the maximum theoretical offset and add this to the max value.
@@ -209,14 +209,9 @@ impl FastFieldCodecSerializer for LinearSerializer {
.checked_add(theorethical_maximum_offset)
.is_none()
{
return false;
return None;
}
true
}
/// Estimation for linear interpolation is hard because you don't know
/// where the local maxima for the deviation of the calculated value are,
/// and the offset to shift all values to >=0 is also unknown.
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
let first_val = fastfield_accessor.get_val(0);
let last_val = fastfield_accessor.get_val(fastfield_accessor.num_vals() as u64 - 1);
let slope = get_slope(first_val, last_val, fastfield_accessor.num_vals());
@@ -248,7 +243,7 @@ impl FastFieldCodecSerializer for LinearSerializer {
* fastfield_accessor.num_vals()
+ LinearFooter::SIZE_IN_BYTES as u64;
let num_bits_uncompressed = 64 * fastfield_accessor.num_vals();
num_bits as f32 / num_bits_uncompressed as f32
Some(num_bits as f32 / num_bits_uncompressed as f32)
}
}
@@ -264,10 +259,10 @@ fn distance<T: Sub<Output = T> + Ord>(x: T, y: T) -> T {
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::get_codec_test_data_sets;
use crate::tests::get_codec_test_datasets;
fn create_and_validate(data: &[u64], name: &str) -> (f32, f32) {
crate::tests::create_and_validate::<LinearSerializer, LinearReader>(data, name)
fn create_and_validate(data: &[u64], name: &str) -> Option<(f32, f32)> {
crate::tests::create_and_validate::<LinearCodec>(data, name)
}
#[test]
@@ -294,15 +289,15 @@ mod tests {
fn test_compression() {
let data = (10..=6_000_u64).collect::<Vec<_>>();
let (estimate, actual_compression) =
create_and_validate(&data, "simple monotonically large");
create_and_validate(&data, "simple monotonically large").unwrap();
assert!(actual_compression < 0.01);
assert!(estimate < 0.01);
}
#[test]
fn test_with_codec_data_sets() {
let data_sets = get_codec_test_data_sets();
fn test_with_codec_datasets() {
let data_sets = get_codec_test_datasets();
for (mut data, name) in data_sets {
create_and_validate(&data, name);
data.reverse();
@@ -339,9 +334,10 @@ mod tests {
#[test]
fn linear_interpol_fast_field_rand() {
for _ in 0..5000 {
let mut data = (0..50).map(|_| rand::random::<u64>()).collect::<Vec<_>>();
let mut data = (0..10_000)
.map(|_| rand::random::<u64>())
.collect::<Vec<_>>();
create_and_validate(&data, "random");
data.reverse();
create_and_validate(&data, "random");
}
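For readers unfamiliar with this codec: it predicts each value as `first_val + pos * slope` and bitpacks only the deviation from that prediction. The bodies of `get_slope` and `get_calculated_value` are not part of this diff; the versions below are plausible stand-ins used purely to illustrate the idea:

```rust
// Stand-in: slope of the line through the first and last value (assumed).
fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
    if num_vals <= 1 {
        return 0.0;
    }
    ((last_val as f64 - first_val as f64) / (num_vals as f64 - 1.0)) as f32
}

// Stand-in: the predicted value at position `pos` (assumed).
fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
    (first_val as i64 + (pos as f32 * slope) as i64) as u64
}

fn main() {
    // A perfectly linear sequence deviates from the prediction by 0,
    // so the bitpacked deviations take (almost) no space.
    let data: Vec<u64> = (10..=20).collect();
    let slope = get_slope(data[0], *data.last().unwrap(), data.len() as u64);
    for (pos, &val) in data.iter().enumerate() {
        let predicted = get_calculated_value(data[0], pos as u64, slope);
        assert_eq!(predicted, val);
    }
}
```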

View File

@@ -1,8 +1,9 @@
#[macro_use]
extern crate prettytable;
use fastfield_codecs::blockwise_linear::BlockwiseLinearSerializer;
use fastfield_codecs::linear::LinearSerializer;
use fastfield_codecs::{FastFieldCodecSerializer, FastFieldCodecType, FastFieldStats};
use fastfield_codecs::bitpacked::BitpackedCodec;
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::{FastFieldCodec, FastFieldCodecType, FastFieldStats};
use prettytable::{Cell, Row, Table};
fn main() {
@@ -12,37 +13,30 @@ fn main() {
table.add_row(row!["", "Compression Ratio", "Compression Estimation"]);
for (data, data_set_name) in get_codec_test_data_sets() {
let mut results = vec![];
let res = serialize_with_codec::<LinearSerializer>(&data);
results.push(res);
let res = serialize_with_codec::<BlockwiseLinearSerializer>(&data);
results.push(res);
let res = serialize_with_codec::<fastfield_codecs::bitpacked::BitpackedSerializer>(&data);
results.push(res);
// let best_estimation_codec = results
//.iter()
//.min_by(|res1, res2| res1.partial_cmp(&res2).unwrap())
//.unwrap();
let results: Vec<(f32, f32, FastFieldCodecType)> = [
serialize_with_codec::<LinearCodec>(&data),
serialize_with_codec::<BlockwiseLinearCodec>(&data),
serialize_with_codec::<BitpackedCodec>(&data),
]
.into_iter()
.flatten()
.collect();
let best_compression_ratio_codec = results
.iter()
.min_by(|res1, res2| res1.partial_cmp(res2).unwrap())
.min_by(|&res1, &res2| res1.partial_cmp(res2).unwrap())
.cloned()
.unwrap();
table.add_row(Row::new(vec![Cell::new(data_set_name).style_spec("Bbb")]));
for (is_applicable, est, comp, codec_type) in results {
let (est_cell, ratio_cell) = if !is_applicable {
("Codec Disabled".to_string(), "".to_string())
} else {
(est.to_string(), comp.to_string())
};
for (est, comp, codec_type) in results {
let est_cell = est.to_string();
let ratio_cell = comp.to_string();
let style = if comp == best_compression_ratio_codec.1 {
"Fb"
} else {
""
};
table.add_row(Row::new(vec![
Cell::new(&format!("{codec_type:?}")).style_spec("bFg"),
Cell::new(&ratio_cell).style_spec(style),
@@ -89,19 +83,14 @@ pub fn get_codec_test_data_sets() -> Vec<(Vec<u64>, &'static str)> {
data_and_names
}
pub fn serialize_with_codec<S: FastFieldCodecSerializer>(
pub fn serialize_with_codec<C: FastFieldCodec>(
data: &[u64],
) -> (bool, f32, f32, FastFieldCodecType) {
let is_applicable = S::is_applicable(&data);
if !is_applicable {
return (false, 0.0, 0.0, S::CODEC_TYPE);
}
let estimation = S::estimate(&data);
let mut out = vec![];
S::serialize(&mut out, &data).unwrap();
) -> Option<(f32, f32, FastFieldCodecType)> {
let estimation = C::estimate(&data)?;
let mut out = Vec::new();
C::serialize(&mut out, &data).unwrap();
let actual_compression = out.len() as f32 / (data.len() * 8) as f32;
(true, estimation, actual_compression, S::CODEC_TYPE)
Some((estimation, actual_compression, C::CODEC_TYPE))
}
pub fn stats_from_vec(data: &[u64]) -> FastFieldStats {

View File

@@ -1,6 +1,7 @@
use std::cmp::Ordering;
use std::fmt::Display;
use fastfield_codecs::Column;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
@@ -14,7 +15,7 @@ use crate::aggregation::intermediate_agg_result::{
IntermediateAggregationResults, IntermediateBucketResult, IntermediateHistogramBucketEntry,
};
use crate::aggregation::segment_agg_result::SegmentAggregationResultsCollector;
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::fastfield::DynamicFastFieldReader;
use crate::schema::Type;
use crate::{DocId, TantivyError};
@@ -331,10 +332,10 @@ impl SegmentHistogramCollector {
.expect("unexpected fast field cardinatility");
let mut iter = doc.chunks_exact(4);
for docs in iter.by_ref() {
let val0 = self.f64_from_fastfield_u64(accessor.get(docs[0]));
let val1 = self.f64_from_fastfield_u64(accessor.get(docs[1]));
let val2 = self.f64_from_fastfield_u64(accessor.get(docs[2]));
let val3 = self.f64_from_fastfield_u64(accessor.get(docs[3]));
let val0 = self.f64_from_fastfield_u64(accessor.get_val(docs[0] as u64));
let val1 = self.f64_from_fastfield_u64(accessor.get_val(docs[1] as u64));
let val2 = self.f64_from_fastfield_u64(accessor.get_val(docs[2] as u64));
let val3 = self.f64_from_fastfield_u64(accessor.get_val(docs[3] as u64));
let bucket_pos0 = get_bucket_num(val0);
let bucket_pos1 = get_bucket_num(val1);
@@ -370,8 +371,8 @@ impl SegmentHistogramCollector {
&bucket_with_accessor.sub_aggregation,
)?;
}
for doc in iter.remainder() {
let val = f64_from_fastfield_u64(accessor.get(*doc), &self.field_type);
for &doc in iter.remainder() {
let val = f64_from_fastfield_u64(accessor.get_val(doc as u64), &self.field_type);
if !bounds.contains(val) {
continue;
}
@@ -382,7 +383,7 @@ impl SegmentHistogramCollector {
self.buckets[bucket_pos].key,
get_bucket_val(val, self.interval, self.offset) as f64
);
self.increment_bucket(bucket_pos, *doc, &bucket_with_accessor.sub_aggregation)?;
self.increment_bucket(bucket_pos, doc, &bucket_with_accessor.sub_aggregation)?;
}
if force_flush {
if let Some(sub_aggregations) = self.sub_aggregations.as_mut() {

View File

@@ -1,6 +1,7 @@
use std::fmt::Debug;
use std::ops::Range;
use fastfield_codecs::Column;
use fnv::FnvHashMap;
use serde::{Deserialize, Serialize};
@@ -12,7 +13,6 @@ use crate::aggregation::intermediate_agg_result::{
};
use crate::aggregation::segment_agg_result::{BucketCount, SegmentAggregationResultsCollector};
use crate::aggregation::{f64_from_fastfield_u64, f64_to_fastfield_u64, Key, SerializedKey};
use crate::fastfield::FastFieldReader;
use crate::schema::Type;
use crate::{DocId, TantivyError};
@@ -264,10 +264,10 @@ impl SegmentRangeCollector {
.as_single()
.expect("unexpected fast field cardinatility");
for docs in iter.by_ref() {
let val1 = accessor.get(docs[0]);
let val2 = accessor.get(docs[1]);
let val3 = accessor.get(docs[2]);
let val4 = accessor.get(docs[3]);
let val1 = accessor.get_val(docs[0] as u64);
let val2 = accessor.get_val(docs[1] as u64);
let val3 = accessor.get_val(docs[2] as u64);
let val4 = accessor.get_val(docs[3] as u64);
let bucket_pos1 = self.get_bucket_pos(val1);
let bucket_pos2 = self.get_bucket_pos(val2);
let bucket_pos3 = self.get_bucket_pos(val3);
@@ -278,10 +278,10 @@ impl SegmentRangeCollector {
self.increment_bucket(bucket_pos3, docs[2], &bucket_with_accessor.sub_aggregation)?;
self.increment_bucket(bucket_pos4, docs[3], &bucket_with_accessor.sub_aggregation)?;
}
for doc in iter.remainder() {
let val = accessor.get(*doc);
for &doc in iter.remainder() {
let val = accessor.get_val(doc as u64);
let bucket_pos = self.get_bucket_pos(val);
self.increment_bucket(bucket_pos, *doc, &bucket_with_accessor.sub_aggregation)?;
self.increment_bucket(bucket_pos, doc, &bucket_with_accessor.sub_aggregation)?;
}
if force_flush {
for bucket in &mut self.buckets {

View File

@@ -1,9 +1,10 @@
use std::fmt::Debug;
use fastfield_codecs::Column;
use serde::{Deserialize, Serialize};
use crate::aggregation::f64_from_fastfield_u64;
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::fastfield::DynamicFastFieldReader;
use crate::schema::Type;
use crate::DocId;
@@ -60,10 +61,10 @@ impl SegmentAverageCollector {
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &DynamicFastFieldReader<u64>) {
let mut iter = doc.chunks_exact(4);
for docs in iter.by_ref() {
let val1 = field.get(docs[0]);
let val2 = field.get(docs[1]);
let val3 = field.get(docs[2]);
let val4 = field.get(docs[3]);
let val1 = field.get_val(docs[0] as u64);
let val2 = field.get_val(docs[1] as u64);
let val3 = field.get_val(docs[2] as u64);
let val4 = field.get_val(docs[3] as u64);
let val1 = f64_from_fastfield_u64(val1, &self.field_type);
let val2 = f64_from_fastfield_u64(val2, &self.field_type);
let val3 = f64_from_fastfield_u64(val3, &self.field_type);
@@ -73,8 +74,8 @@ impl SegmentAverageCollector {
self.data.collect(val3);
self.data.collect(val4);
}
for doc in iter.remainder() {
let val = field.get(*doc);
for &doc in iter.remainder() {
let val = field.get_val(doc as u64);
let val = f64_from_fastfield_u64(val, &self.field_type);
self.data.collect(val);
}
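All four aggregation collectors touched by this commit share the same access pattern: doc ids are consumed in blocks of four via `chunks_exact(4)` so the fast field lookups can be pipelined, and `remainder()` covers the trailing zero to three docs. A generic sketch of that pattern (a plain slice stands in for the fast field column):

```rust
// Process doc ids in blocks of four, then handle the remainder.
fn collect_block(docs: &[u32], column: &[u64], out: &mut Vec<u64>) {
    let mut iter = docs.chunks_exact(4);
    for chunk in iter.by_ref() {
        // Four independent lookups per iteration help the compiler
        // unroll and overlap the memory accesses.
        out.push(column[chunk[0] as usize]);
        out.push(column[chunk[1] as usize]);
        out.push(column[chunk[2] as usize]);
        out.push(column[chunk[3] as usize]);
    }
    for &doc in iter.remainder() {
        out.push(column[doc as usize]);
    }
}

fn main() {
    let column = vec![10, 11, 12, 13, 14, 15];
    let mut out = Vec::new();
    collect_block(&[0, 1, 2, 3, 5], &column, &mut out);
    assert_eq!(out, vec![10, 11, 12, 13, 15]);
}
```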

View File

@@ -1,7 +1,8 @@
use fastfield_codecs::Column;
use serde::{Deserialize, Serialize};
use crate::aggregation::f64_from_fastfield_u64;
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::fastfield::DynamicFastFieldReader;
use crate::schema::Type;
use crate::{DocId, TantivyError};
@@ -166,10 +167,10 @@ impl SegmentStatsCollector {
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &DynamicFastFieldReader<u64>) {
let mut iter = doc.chunks_exact(4);
for docs in iter.by_ref() {
let val1 = field.get(docs[0]);
let val2 = field.get(docs[1]);
let val3 = field.get(docs[2]);
let val4 = field.get(docs[3]);
let val1 = field.get_val(docs[0] as u64);
let val2 = field.get_val(docs[1] as u64);
let val3 = field.get_val(docs[2] as u64);
let val4 = field.get_val(docs[3] as u64);
let val1 = f64_from_fastfield_u64(val1, &self.field_type);
let val2 = f64_from_fastfield_u64(val2, &self.field_type);
let val3 = f64_from_fastfield_u64(val3, &self.field_type);
@@ -179,8 +180,8 @@ impl SegmentStatsCollector {
self.stats.collect(val3);
self.stats.collect(val4);
}
for doc in iter.remainder() {
let val = field.get(*doc);
for &doc in iter.remainder() {
let val = field.get_val(doc as u64);
let val = f64_from_fastfield_u64(val, &self.field_type);
self.stats.collect(val);
}

View File

@@ -11,8 +11,10 @@
// Importing tantivy...
use std::marker::PhantomData;
use fastfield_codecs::Column;
use crate::collector::{Collector, SegmentCollector};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastValue};
use crate::schema::Field;
use crate::{Score, SegmentReader, TantivyError};
@@ -174,7 +176,7 @@ where
type Fruit = TSegmentCollector::Fruit;
fn collect(&mut self, doc: u32, score: Score) {
let value = self.fast_field_reader.get(doc);
let value = self.fast_field_reader.get_val(doc as u64);
if (self.predicate)(value) {
self.segment_collector.collect(doc, score)
}

View File

@@ -1,7 +1,8 @@
use fastdivide::DividerU64;
use fastfield_codecs::Column;
use crate::collector::{Collector, SegmentCollector};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastValue};
use crate::schema::{Field, Type};
use crate::{DocId, Score};
@@ -91,7 +92,7 @@ impl SegmentCollector for SegmentHistogramCollector {
type Fruit = Vec<u64>;
fn collect(&mut self, doc: DocId, _score: Score) {
let value = self.ff_reader.get(doc);
let value = self.ff_reader.get_val(doc as u64);
self.histogram_computer.add_value(value);
}

View File

@@ -1,7 +1,9 @@
use fastfield_codecs::Column;
use super::*;
use crate::collector::{Count, FilterCollector, TopDocs};
use crate::core::SegmentReader;
use crate::fastfield::{BytesFastFieldReader, DynamicFastFieldReader, FastFieldReader};
use crate::fastfield::{BytesFastFieldReader, DynamicFastFieldReader};
use crate::query::{AllQuery, QueryParser};
use crate::schema::{Field, Schema, FAST, TEXT};
use crate::time::format_description::well_known::Rfc3339;
@@ -197,7 +199,7 @@ impl SegmentCollector for FastFieldSegmentCollector {
type Fruit = Vec<u64>;
fn collect(&mut self, doc: DocId, _score: Score) {
let val = self.reader.get(doc);
let val = self.reader.get_val(doc as u64);
self.vals.push(val);
}

View File

@@ -2,6 +2,8 @@ use std::collections::BinaryHeap;
use std::fmt;
use std::marker::PhantomData;
use fastfield_codecs::Column;
use super::Collector;
use crate::collector::custom_score_top_collector::CustomScoreTopCollector;
use crate::collector::top_collector::{ComparableDoc, TopCollector, TopSegmentCollector};
@@ -9,7 +11,7 @@ use crate::collector::tweak_score_top_collector::TweakedScoreTopCollector;
use crate::collector::{
CustomScorer, CustomSegmentScorer, ScoreSegmentTweaker, ScoreTweaker, SegmentCollector,
};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastValue};
use crate::query::Weight;
use crate::schema::Field;
use crate::{DocAddress, DocId, Score, SegmentOrdinal, SegmentReader, TantivyError};
@@ -134,7 +136,7 @@ struct ScorerByFastFieldReader {
impl CustomSegmentScorer<u64> for ScorerByFastFieldReader {
fn score(&mut self, doc: DocId) -> u64 {
self.ff_reader.get(doc)
self.ff_reader.get_val(doc as u64)
}
}

View File

@@ -1,5 +1,7 @@
use fastfield_codecs::Column;
use crate::directory::{FileSlice, OwnedBytes};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, MultiValueLength};
use crate::fastfield::{DynamicFastFieldReader, MultiValueLength};
use crate::DocId;
/// Reader for byte array fast fields
@@ -28,8 +30,9 @@ impl BytesFastFieldReader {
}
fn range(&self, doc: DocId) -> (usize, usize) {
let start = self.idx_reader.get(doc) as usize;
let stop = self.idx_reader.get(doc + 1) as usize;
let idx = doc as u64;
let start = self.idx_reader.get_val(idx) as usize;
let stop = self.idx_reader.get_val(idx + 1) as usize;
(start, stop)
}
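The bytes fast field stores one offset column with `num_docs + 1` entries; a document's payload is the slice between consecutive offsets, which is why `range` reads both `idx` and `idx + 1`. A sketch with a `Vec<u64>` standing in for the `idx_reader` column:

```rust
// Offsets column: entry i is where doc i's payload starts,
// entry i + 1 is where it ends.
fn range(idx_reader: &[u64], doc: u32) -> (usize, usize) {
    let idx = doc as usize;
    (idx_reader[idx] as usize, idx_reader[idx + 1] as usize)
}

fn main() {
    // Offsets for three documents with payload lengths 3, 0 and 2.
    let idx_reader = vec![0u64, 3, 3, 5];
    assert_eq!(range(&idx_reader, 0), (0, 3));
    assert_eq!(range(&idx_reader, 1), (3, 3)); // empty payload
    assert_eq!(range(&idx_reader, 2), (3, 5));
}
```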

View File

@@ -3,7 +3,7 @@ use std::num::NonZeroU64;
use common::BinarySerializable;
use fastdivide::DividerU64;
use fastfield_codecs::{FastFieldCodecDeserializer, FastFieldDataAccess};
use fastfield_codecs::{Column, FastFieldCodec};
use ownedbytes::OwnedBytes;
pub const GCD_DEFAULT: u64 = 1;
@@ -12,50 +12,70 @@ pub const GCD_DEFAULT: u64 = 1;
///
/// Holds the data and the codec to read the data.
#[derive(Clone)]
pub struct GCDFastFieldCodec<CodecReader> {
gcd: u64,
min_value: u64,
num_vals: u64,
pub struct GCDReader<CodecReader: Column> {
gcd_params: GCDParams,
reader: CodecReader,
}
impl<C: FastFieldDataAccess + FastFieldCodecDeserializer + Clone> FastFieldCodecDeserializer
for GCDFastFieldCodec<C>
{
fn open_from_bytes(bytes: OwnedBytes) -> std::io::Result<Self> {
let footer_offset = bytes.len() - 24;
let (body, mut footer) = bytes.split(footer_offset);
let gcd = u64::deserialize(&mut footer)?;
let min_value = u64::deserialize(&mut footer)?;
let num_vals = u64::deserialize(&mut footer)?;
let reader = C::open_from_bytes(body)?;
Ok(GCDFastFieldCodec {
#[derive(Debug, Clone, Copy)]
struct GCDParams {
gcd: u64,
min_value: u64,
num_vals: u64,
}
impl GCDParams {
pub fn eval(&self, val: u64) -> u64 {
self.min_value + self.gcd * val
}
}
impl BinarySerializable for GCDParams {
fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()> {
self.gcd.serialize(writer)?;
self.min_value.serialize(writer)?;
self.num_vals.serialize(writer)?;
Ok(())
}
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let gcd: u64 = u64::deserialize(reader)?;
let min_value: u64 = u64::deserialize(reader)?;
let num_vals: u64 = u64::deserialize(reader)?;
Ok(Self {
gcd,
min_value,
num_vals,
reader,
})
}
}
impl<C: FastFieldDataAccess + Clone> FastFieldDataAccess for GCDFastFieldCodec<C> {
pub fn open_gcd_from_bytes<WrappedCodec: FastFieldCodec>(
bytes: OwnedBytes,
) -> io::Result<GCDReader<WrappedCodec::Reader>> {
let footer_offset = bytes.len() - 24;
let (body, mut footer) = bytes.split(footer_offset);
let gcd_params = GCDParams::deserialize(&mut footer)?;
let reader: WrappedCodec::Reader = WrappedCodec::open_from_bytes(body)?;
Ok(GCDReader { gcd_params, reader })
}
impl<C: Column + Clone> Column for GCDReader<C> {
#[inline]
fn get_val(&self, doc: u64) -> u64 {
let mut data = self.reader.get_val(doc);
data *= self.gcd;
data += self.min_value;
data
let val = self.reader.get_val(doc);
self.gcd_params.eval(val)
}
fn min_value(&self) -> u64 {
self.min_value + self.reader.min_value() * self.gcd
self.gcd_params.eval(self.reader.min_value())
}
fn max_value(&self) -> u64 {
self.min_value + self.reader.max_value() * self.gcd
self.gcd_params.eval(self.reader.max_value())
}
fn num_vals(&self) -> u64 {
self.num_vals
self.gcd_params.num_vals
}
}
@@ -117,6 +137,7 @@ mod tests {
use std::time::{Duration, SystemTime};
use common::HasLen;
use fastfield_codecs::Column;
use crate::directory::{CompositeFile, RamDirectory, WritePtr};
use crate::fastfield::gcd::compute_gcd;
@@ -124,7 +145,7 @@ mod tests {
use crate::fastfield::tests::{FIELD, FIELDI64, SCHEMA, SCHEMAI64};
use crate::fastfield::{
find_gcd, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldCodecType,
FastFieldReader, FastFieldsWriter, ALL_CODECS,
FastFieldsWriter, ALL_CODECS,
};
use crate::schema::{Cardinality, Schema};
use crate::{DateOptions, DatePrecision, DateTime, Directory};
@@ -168,9 +189,9 @@ mod tests {
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<i64>::open(file)?;
assert_eq!(fast_field_reader.get(0), -4000i64);
assert_eq!(fast_field_reader.get(1), -3000i64);
assert_eq!(fast_field_reader.get(2), -2000i64);
assert_eq!(fast_field_reader.get_val(0), -4000i64);
assert_eq!(fast_field_reader.get_val(1), -3000i64);
assert_eq!(fast_field_reader.get_val(2), -2000i64);
assert_eq!(fast_field_reader.max_value(), (num_vals as i64 - 5) * 1000);
assert_eq!(fast_field_reader.min_value(), -4000i64);
let file = directory.open_read(path).unwrap();
@@ -209,9 +230,9 @@ mod tests {
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(file)?;
assert_eq!(fast_field_reader.get(0), 1000u64);
assert_eq!(fast_field_reader.get(1), 2000u64);
assert_eq!(fast_field_reader.get(2), 3000u64);
assert_eq!(fast_field_reader.get_val(0), 1000u64);
assert_eq!(fast_field_reader.get_val(1), 2000u64);
assert_eq!(fast_field_reader.get_val(2), 3000u64);
assert_eq!(fast_field_reader.max_value(), num_vals as u64 * 1000);
assert_eq!(fast_field_reader.min_value(), 1000u64);
let file = directory.open_read(path).unwrap();
@@ -238,9 +259,9 @@ mod tests {
#[test]
pub fn test_fastfield2() {
let test_fastfield = DynamicFastFieldReader::<u64>::from(vec![100, 200, 300]);
assert_eq!(test_fastfield.get(0), 100);
assert_eq!(test_fastfield.get(1), 200);
assert_eq!(test_fastfield.get(2), 300);
assert_eq!(test_fastfield.get_val(0), 100);
assert_eq!(test_fastfield.get_val(1), 200);
assert_eq!(test_fastfield.get_val(2), 300);
}
#[test]
@@ -305,9 +326,9 @@ mod tests {
let len = file.len();
let test_fastfield = DynamicFastFieldReader::<DateTime>::open(file)?;
assert_eq!(test_fastfield.get(0), time1.truncate(precision));
assert_eq!(test_fastfield.get(1), time2.truncate(precision));
assert_eq!(test_fastfield.get(2), time3.truncate(precision));
assert_eq!(test_fastfield.get_val(0), time1.truncate(precision));
assert_eq!(test_fastfield.get_val(1), time2.truncate(precision));
assert_eq!(test_fastfield.get_val(2), time3.truncate(precision));
Ok(len)
}
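The GCD wrapper factors every value as `min_value + gcd * stored`, so the wrapped codec only ever sees the small `stored` integers. A self-contained sketch of that round trip, reusing the `GCDParams::eval` formula from the diff:

```rust
struct GCDParams {
    gcd: u64,
    min_value: u64,
}

impl GCDParams {
    /// Decoding: the formula from the diff above.
    fn eval(&self, stored: u64) -> u64 {
        self.min_value + self.gcd * stored
    }
}

fn main() {
    let params = GCDParams { gcd: 1_000, min_value: 1_000 };
    let values = [1_000u64, 2_000, 3_000];
    for (stored, &val) in values.iter().enumerate() {
        // Encoding: (val - min_value) / gcd is what the wrapped codec
        // bitpacks; here the stored values are simply 0, 1, 2.
        assert_eq!((val - params.min_value) / params.gcd, stored as u64);
        // Decoding restores the original value.
        assert_eq!(params.eval(stored as u64), val);
    }
}
```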

View File

@@ -26,12 +26,12 @@ pub use self::alive_bitset::{intersect_alive_bitsets, write_alive_bitset, AliveB
pub use self::bytes::{BytesFastFieldReader, BytesFastFieldWriter};
pub use self::error::{FastFieldNotAvailableError, Result};
pub use self::facet_reader::FacetReader;
pub(crate) use self::gcd::{find_gcd, GCDFastFieldCodec, GCD_DEFAULT};
pub(crate) use self::gcd::{find_gcd, GCDReader, GCD_DEFAULT};
pub use self::multivalued::{MultiValuedFastFieldReader, MultiValuedFastFieldWriter};
pub use self::reader::{DynamicFastFieldReader, FastFieldReader};
pub use self::reader::DynamicFastFieldReader;
pub use self::readers::FastFieldReaders;
pub(crate) use self::readers::{type_and_cardinality, FastType};
pub use self::serializer::{CompositeFastFieldSerializer, FastFieldDataAccess, FastFieldStats};
pub use self::serializer::{Column, CompositeFastFieldSerializer, FastFieldStats};
pub use self::writer::{FastFieldsWriter, IntFastFieldWriter};
use crate::schema::{Cardinality, FieldType, Type, Value};
use crate::{DateTime, DocId};
@@ -298,9 +298,9 @@ mod tests {
#[test]
pub fn test_fastfield() {
let test_fastfield = DynamicFastFieldReader::<u64>::from(vec![100, 200, 300]);
assert_eq!(test_fastfield.get(0), 100);
assert_eq!(test_fastfield.get(1), 200);
assert_eq!(test_fastfield.get(2), 300);
assert_eq!(test_fastfield.get_val(0u64), 100);
assert_eq!(test_fastfield.get_val(1u64), 200);
assert_eq!(test_fastfield.get_val(2u64), 300);
}
#[test]
@@ -330,9 +330,9 @@ mod tests {
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(file)?;
assert_eq!(fast_field_reader.get(0), 13u64);
assert_eq!(fast_field_reader.get(1), 14u64);
assert_eq!(fast_field_reader.get(2), 2u64);
assert_eq!(fast_field_reader.get_val(0), 13u64);
assert_eq!(fast_field_reader.get_val(1), 14u64);
assert_eq!(fast_field_reader.get_val(2), 2u64);
Ok(())
}
@@ -362,15 +362,15 @@ mod tests {
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 4u64);
assert_eq!(fast_field_reader.get(1), 14_082_001u64);
assert_eq!(fast_field_reader.get(2), 3_052u64);
assert_eq!(fast_field_reader.get(3), 9002u64);
assert_eq!(fast_field_reader.get(4), 15_001u64);
assert_eq!(fast_field_reader.get(5), 777u64);
assert_eq!(fast_field_reader.get(6), 1_002u64);
assert_eq!(fast_field_reader.get(7), 1_501u64);
assert_eq!(fast_field_reader.get(8), 215u64);
assert_eq!(fast_field_reader.get_val(0), 4u64);
assert_eq!(fast_field_reader.get_val(1), 14_082_001u64);
assert_eq!(fast_field_reader.get_val(2), 3_052u64);
assert_eq!(fast_field_reader.get_val(3), 9002u64);
assert_eq!(fast_field_reader.get_val(4), 15_001u64);
assert_eq!(fast_field_reader.get_val(5), 777u64);
assert_eq!(fast_field_reader.get_val(6), 1_002u64);
assert_eq!(fast_field_reader.get_val(7), 1_501u64);
assert_eq!(fast_field_reader.get_val(8), 215u64);
}
Ok(())
}
@@ -399,7 +399,7 @@ mod tests {
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
for doc in 0..10_000 {
assert_eq!(fast_field_reader.get(doc), 100_000u64);
assert_eq!(fast_field_reader.get_val(doc), 100_000u64);
}
}
Ok(())
@@ -430,10 +430,10 @@ mod tests {
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 0u64);
assert_eq!(fast_field_reader.get_val(0), 0u64);
for doc in 1..10_001 {
assert_eq!(
fast_field_reader.get(doc),
fast_field_reader.get_val(doc),
5_000_000_000_000_000_000u64 + doc as u64 - 1u64
);
}
@@ -475,7 +475,7 @@ mod tests {
assert_eq!(fast_field_reader.min_value(), -100i64);
assert_eq!(fast_field_reader.max_value(), 9_999i64);
for (doc, i) in (-100i64..10_000i64).enumerate() {
assert_eq!(fast_field_reader.get(doc as u32), i);
assert_eq!(fast_field_reader.get_val(doc as u64), i);
}
let mut buffer = vec![0i64; 100];
fast_field_reader.get_range(53, &mut buffer[..]);
@@ -511,7 +511,7 @@ mod tests {
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(i64_field).unwrap();
let fast_field_reader = DynamicFastFieldReader::<i64>::open(data)?;
assert_eq!(fast_field_reader.get(0u32), 0i64);
assert_eq!(fast_field_reader.get_val(0), 0i64);
}
Ok(())
}
@@ -551,7 +551,7 @@ mod tests {
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
for a in 0..n {
assert_eq!(fast_field_reader.get(a as u32), permutation[a as usize]);
assert_eq!(fast_field_reader.get_val(a as u64), permutation[a as usize]);
}
}
Ok(())
@@ -842,19 +842,19 @@ mod tests {
let dates_fast_field = fast_fields.dates(multi_date_field).unwrap();
let mut dates = vec![];
{
assert_eq!(date_fast_field.get(0u32).into_timestamp_micros(), 1i64);
assert_eq!(date_fast_field.get_val(0).into_timestamp_micros(), 1i64);
dates_fast_field.get_vals(0u32, &mut dates);
assert_eq!(dates.len(), 2);
assert_eq!(dates[0].into_timestamp_micros(), 2i64);
assert_eq!(dates[1].into_timestamp_micros(), 3i64);
}
{
assert_eq!(date_fast_field.get(1u32).into_timestamp_micros(), 4i64);
assert_eq!(date_fast_field.get_val(1).into_timestamp_micros(), 4i64);
dates_fast_field.get_vals(1u32, &mut dates);
assert!(dates.is_empty());
}
{
assert_eq!(date_fast_field.get(2u32).into_timestamp_micros(), 0i64);
assert_eq!(date_fast_field.get_val(2).into_timestamp_micros(), 0i64);
dates_fast_field.get_vals(2u32, &mut dates);
assert_eq!(dates.len(), 2);
assert_eq!(dates[0].into_timestamp_micros(), 5i64);
@@ -866,10 +866,10 @@ mod tests {
#[test]
pub fn test_fastfield_bool() {
let test_fastfield = DynamicFastFieldReader::<bool>::from(vec![true, false, true, false]);
assert_eq!(test_fastfield.get(0), true);
assert_eq!(test_fastfield.get(1), false);
assert_eq!(test_fastfield.get(2), true);
assert_eq!(test_fastfield.get(3), false);
assert_eq!(test_fastfield.get_val(0), true);
assert_eq!(test_fastfield.get_val(1), false);
assert_eq!(test_fastfield.get_val(2), true);
assert_eq!(test_fastfield.get_val(3), false);
}
#[test]
@@ -900,10 +900,10 @@ mod tests {
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
assert_eq!(fast_field_reader.get(0), true);
assert_eq!(fast_field_reader.get(1), false);
assert_eq!(fast_field_reader.get(2), true);
assert_eq!(fast_field_reader.get(3), false);
assert_eq!(fast_field_reader.get_val(0), true);
assert_eq!(fast_field_reader.get_val(1), false);
assert_eq!(fast_field_reader.get_val(2), true);
assert_eq!(fast_field_reader.get_val(3), false);
Ok(())
}
@@ -937,8 +937,8 @@ mod tests {
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
for i in 0..25 {
assert_eq!(fast_field_reader.get(i * 2), true);
assert_eq!(fast_field_reader.get(i * 2 + 1), false);
assert_eq!(fast_field_reader.get_val(i * 2), true);
assert_eq!(fast_field_reader.get_val(i * 2 + 1), false);
}
Ok(())
@@ -970,7 +970,7 @@ mod tests {
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
assert_eq!(fast_field_reader.get(0), false);
assert_eq!(fast_field_reader.get_val(0), false);
Ok(())
}
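
The hunks above are the mechanical half of this change set: the old `FastFieldReader::get(doc: DocId)` accessor becomes `Column::get_val(idx: u64)`, so call sites that index by `DocId` (a `u32`) now cast to `u64`. A minimal sketch of the new call-site shape, assuming only the `Column` import used throughout these hunks:

use fastfield_codecs::Column;

// Sum the fast field values of a set of documents through the new accessor.
// `docs` stands in for any iterator of DocIds, e.g. `SegmentReader::doc_ids_alive()`.
fn sum_vals(reader: &impl Column<u64>, docs: impl Iterator<Item = u32>) -> u64 {
    docs.map(|doc| reader.get_val(doc as u64)) // the index is now u64, not DocId
        .sum()
}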

View File

@@ -1,6 +1,8 @@
use std::ops::Range;
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue, MultiValueLength};
use fastfield_codecs::Column;
use crate::fastfield::{DynamicFastFieldReader, FastValue, MultiValueLength};
use crate::DocId;
/// Reader for a multivalued `u64` fast field.
@@ -31,8 +33,9 @@ impl<Item: FastValue> MultiValuedFastFieldReader<Item> {
/// to the given document are `start..end`.
#[inline]
fn range(&self, doc: DocId) -> Range<u64> {
let start = self.idx_reader.get(doc);
let end = self.idx_reader.get(doc + 1);
let idx = doc as u64;
let start = self.idx_reader.get_val(idx);
let end = self.idx_reader.get_val(idx + 1);
start..end
}
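
The multivalued reader keeps its two-column layout; only the index type changes, with `range` now addressing the offset column by `u64`. A hedged sketch of how a full lookup composes the offset column and the value column (`idx_reader` follows the field name above; `vals_reader` and the helper itself are hypothetical):

use fastfield_codecs::Column;

// Fetch all values of `doc`: read its offset range from the index column,
// then scan the flattened value column over that range.
fn get_vals_sketch(
    idx_reader: &impl Column<u64>,
    vals_reader: &impl Column<u64>,
    doc: u32,
) -> Vec<u64> {
    let idx = doc as u64;
    let start = idx_reader.get_val(idx);
    let end = idx_reader.get_val(idx + 1);
    (start..end).map(|pos| vals_reader.get_val(pos)).collect()
}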

View File

@@ -3,58 +3,17 @@ use std::marker::PhantomData;
use std::path::Path;
use common::BinarySerializable;
use fastfield_codecs::bitpacked::BitpackedReader;
use fastfield_codecs::blockwise_linear::BlockwiseLinearReader;
use fastfield_codecs::linear::LinearReader;
use fastfield_codecs::{FastFieldCodecDeserializer, FastFieldCodecType, FastFieldDataAccess};
use fastfield_codecs::bitpacked::{BitpackedCodec, BitpackedReader};
use fastfield_codecs::blockwise_linear::{BlockwiseLinearCodec, BlockwiseLinearReader};
use fastfield_codecs::linear::{LinearCodec, LinearReader};
use fastfield_codecs::{Column, FastFieldCodec, FastFieldCodecType};
use super::{FastValue, GCDFastFieldCodec};
use super::gcd::open_gcd_from_bytes;
use super::FastValue;
use crate::directory::{CompositeFile, Directory, FileSlice, OwnedBytes, RamDirectory, WritePtr};
use crate::error::DataCorruption;
use crate::fastfield::{CompositeFastFieldSerializer, FastFieldsWriter};
use crate::fastfield::{CompositeFastFieldSerializer, FastFieldsWriter, GCDReader};
use crate::schema::{Schema, FAST};
use crate::DocId;
/// FastFieldReader is the trait to access fast field data.
pub trait FastFieldReader<Item: FastValue>: Clone {
/// Return the value associated to the given document.
///
/// This accessor should return as fast as possible.
///
/// # Panics
///
/// May panic if `doc` is greater than the segment
fn get(&self, doc: DocId) -> Item;
/// Fills an output buffer with the fast field values
/// associated with the `DocId` going from
/// `start` to `start + output.len()`.
///
/// Regardless of the type of `Item`, this method works
/// - transmuting the output array
/// - extracting the `Item`s as if they were `u64`
/// - possibly converting the `u64` value to the right type.
///
/// # Panics
///
/// May panic if `start + output.len()` is greater than
/// the segment's `maxdoc`.
fn get_range(&self, start: u64, output: &mut [Item]);
/// Returns the minimum value for this fast field.
///
/// The min value does not take in account of possible
/// deleted document, and should be considered as a lower bound
/// of the actual minimum value.
fn min_value(&self) -> Item;
/// Returns the maximum value for this fast field.
///
/// The max value does not take in account of possible
/// deleted document, and should be considered as an upper bound
/// of the actual maximum value.
fn max_value(&self) -> Item;
}
#[derive(Clone)]
/// DynamicFastFieldReader wraps different readers to access
@@ -68,11 +27,11 @@ pub enum DynamicFastFieldReader<Item: FastValue> {
BlockwiseLinear(FastFieldReaderCodecWrapper<Item, BlockwiseLinearReader>),
/// GCD and Bitpacked compressed fastfield data.
BitpackedGCD(FastFieldReaderCodecWrapper<Item, GCDFastFieldCodec<BitpackedReader>>),
BitpackedGCD(FastFieldReaderCodecWrapper<Item, GCDReader<BitpackedReader>>),
/// GCD and Linear interpolated values + bitpacked
LinearGCD(FastFieldReaderCodecWrapper<Item, GCDFastFieldCodec<LinearReader>>),
LinearGCD(FastFieldReaderCodecWrapper<Item, GCDReader<LinearReader>>),
/// GCD and Blockwise linear interpolated values + bitpacked
BlockwiseLinearGCD(FastFieldReaderCodecWrapper<Item, GCDFastFieldCodec<BlockwiseLinearReader>>),
BlockwiseLinearGCD(FastFieldReaderCodecWrapper<Item, GCDReader<BlockwiseLinearReader>>),
}
impl<Item: FastValue> DynamicFastFieldReader<Item> {
@@ -83,46 +42,27 @@ impl<Item: FastValue> DynamicFastFieldReader<Item> {
) -> crate::Result<DynamicFastFieldReader<Item>> {
let reader = match codec_type {
FastFieldCodecType::Bitpacked => {
DynamicFastFieldReader::Bitpacked(FastFieldReaderCodecWrapper::<
Item,
BitpackedReader,
>::open_from_bytes(bytes)?)
DynamicFastFieldReader::Bitpacked(BitpackedCodec::open_from_bytes(bytes)?.into())
}
FastFieldCodecType::Linear => DynamicFastFieldReader::Linear(
FastFieldReaderCodecWrapper::<Item, LinearReader>::open_from_bytes(bytes)?,
FastFieldCodecType::Linear => {
DynamicFastFieldReader::Linear(LinearCodec::open_from_bytes(bytes)?.into())
}
FastFieldCodecType::BlockwiseLinear => DynamicFastFieldReader::BlockwiseLinear(
BlockwiseLinearCodec::open_from_bytes(bytes)?.into(),
),
FastFieldCodecType::BlockwiseLinear => {
DynamicFastFieldReader::BlockwiseLinear(FastFieldReaderCodecWrapper::<
Item,
BlockwiseLinearReader,
>::open_from_bytes(bytes)?)
}
FastFieldCodecType::Gcd => {
let codec_type = FastFieldCodecType::deserialize(&mut bytes)?;
match codec_type {
FastFieldCodecType::Bitpacked => {
DynamicFastFieldReader::BitpackedGCD(FastFieldReaderCodecWrapper::<
Item,
GCDFastFieldCodec<BitpackedReader>,
>::open_from_bytes(
bytes
)?)
}
FastFieldCodecType::Linear => {
DynamicFastFieldReader::LinearGCD(FastFieldReaderCodecWrapper::<
Item,
GCDFastFieldCodec<LinearReader>,
>::open_from_bytes(
bytes
)?)
}
FastFieldCodecType::Bitpacked => DynamicFastFieldReader::BitpackedGCD(
open_gcd_from_bytes::<BitpackedCodec>(bytes)?.into(),
),
FastFieldCodecType::Linear => DynamicFastFieldReader::LinearGCD(
open_gcd_from_bytes::<LinearCodec>(bytes)?.into(),
),
FastFieldCodecType::BlockwiseLinear => {
DynamicFastFieldReader::BlockwiseLinearGCD(FastFieldReaderCodecWrapper::<
Item,
GCDFastFieldCodec<BlockwiseLinearReader>,
>::open_from_bytes(
bytes
)?)
DynamicFastFieldReader::BlockwiseLinearGCD(
open_gcd_from_bytes::<BlockwiseLinearCodec>(bytes)?.into(),
)
}
FastFieldCodecType::Gcd => {
return Err(DataCorruption::comment_only(
@@ -145,16 +85,16 @@ impl<Item: FastValue> DynamicFastFieldReader<Item> {
}
}
impl<Item: FastValue> FastFieldReader<Item> for DynamicFastFieldReader<Item> {
impl<Item: FastValue> Column<Item> for DynamicFastFieldReader<Item> {
#[inline]
fn get(&self, doc: DocId) -> Item {
fn get_val(&self, idx: u64) -> Item {
match self {
Self::Bitpacked(reader) => reader.get(doc),
Self::Linear(reader) => reader.get(doc),
Self::BlockwiseLinear(reader) => reader.get(doc),
Self::BitpackedGCD(reader) => reader.get(doc),
Self::LinearGCD(reader) => reader.get(doc),
Self::BlockwiseLinearGCD(reader) => reader.get(doc),
Self::Bitpacked(reader) => reader.get_val(idx),
Self::Linear(reader) => reader.get_val(idx),
Self::BlockwiseLinear(reader) => reader.get_val(idx),
Self::BitpackedGCD(reader) => reader.get_val(idx),
Self::LinearGCD(reader) => reader.get_val(idx),
Self::BlockwiseLinearGCD(reader) => reader.get_val(idx),
}
}
#[inline]
@@ -188,6 +128,17 @@ impl<Item: FastValue> FastFieldReader<Item> for DynamicFastFieldReader<Item> {
Self::BlockwiseLinearGCD(reader) => reader.max_value(),
}
}
fn num_vals(&self) -> u64 {
match self {
Self::Bitpacked(reader) => reader.num_vals(),
Self::Linear(reader) => reader.num_vals(),
Self::BlockwiseLinear(reader) => reader.num_vals(),
Self::BitpackedGCD(reader) => reader.num_vals(),
Self::LinearGCD(reader) => reader.num_vals(),
Self::BlockwiseLinearGCD(reader) => reader.num_vals(),
}
}
}
/// Wrapper for accessing a fastfield.
@@ -199,36 +150,21 @@ pub struct FastFieldReaderCodecWrapper<Item: FastValue, CodecReader> {
_phantom: PhantomData<Item>,
}
impl<Item: FastValue, C: FastFieldDataAccess + FastFieldCodecDeserializer>
FastFieldReaderCodecWrapper<Item, C>
impl<Item: FastValue, CodecReader> From<CodecReader>
for FastFieldReaderCodecWrapper<Item, CodecReader>
{
/// Opens a fast field given a file.
pub fn open(file: FileSlice) -> crate::Result<Self> {
let mut bytes = file.read_bytes()?;
let codec_code = bytes.read_u8();
let codec_type = FastFieldCodecType::from_code(codec_code).ok_or_else(|| {
DataCorruption::comment_only("Unknown codec code does not exist `{codec_code}`")
})?;
assert_eq!(
FastFieldCodecType::Bitpacked,
codec_type,
"Tried to open fast field as bitpacked encoded (id=1), but got serializer with \
different id"
);
Self::open_from_bytes(bytes)
}
/// Opens a fast field given the bytes.
pub fn open_from_bytes(bytes: OwnedBytes) -> crate::Result<Self> {
let reader = C::open_from_bytes(bytes)?;
Ok(FastFieldReaderCodecWrapper {
fn from(reader: CodecReader) -> Self {
FastFieldReaderCodecWrapper {
reader,
_phantom: PhantomData,
})
}
}
}
impl<Item: FastValue, D: Column> FastFieldReaderCodecWrapper<Item, D> {
#[inline]
pub(crate) fn get_u64(&self, doc: u64) -> Item {
let data = self.reader.get_val(doc);
pub(crate) fn get_u64(&self, idx: u64) -> Item {
let data = self.reader.get_val(idx);
Item::from_u64(data)
}
@@ -251,9 +187,7 @@ impl<Item: FastValue, C: FastFieldDataAccess + FastFieldCodecDeserializer>
}
}
impl<Item: FastValue, C: FastFieldDataAccess + FastFieldCodecDeserializer + Clone>
FastFieldReader<Item> for FastFieldReaderCodecWrapper<Item, C>
{
impl<Item: FastValue, C: Column + Clone> Column<Item> for FastFieldReaderCodecWrapper<Item, C> {
/// Return the value associated to the given document.
///
/// This accessor should return as fast as possible.
@@ -262,8 +196,8 @@ impl<Item: FastValue, C: FastFieldDataAccess + FastFieldCodecDeserializer + Clon
///
/// May panic if `doc` is greater than the segment
/// `maxdoc`.
fn get(&self, doc: DocId) -> Item {
self.get_u64(u64::from(doc))
fn get_val(&self, idx: u64) -> Item {
self.get_u64(idx)
}
/// Fills an output buffer with the fast field values
@@ -300,6 +234,10 @@ impl<Item: FastValue, C: FastFieldDataAccess + FastFieldCodecDeserializer + Clon
fn max_value(&self) -> Item {
Item::from_u64(self.reader.max_value())
}
fn num_vals(&self) -> u64 {
self.reader.num_vals()
}
}
impl<Item: FastValue> From<Vec<Item>> for DynamicFastFieldReader<Item> {
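
This file is the heart of the refactoring: the crate-local `FastFieldReader` trait is deleted, `DynamicFastFieldReader` and the codec wrapper now implement the shared `Column` trait (gaining `num_vals`), and codec readers are obtained via `Codec::open_from_bytes` and lifted into the wrapper by the blanket `From<CodecReader>` impl, which is what the `.into()` calls above go through. A simplified sketch of the trait shape these hunks rely on (the real definition lives in `fastfield_codecs` and also carries the iteration hooks used at serialization time):

pub trait Column<T = u64> {
    // Positional accessor; replaces FastFieldReader::get(doc).
    fn get_val(&self, idx: u64) -> T;
    // Lower/upper bounds, not adjusted for deleted documents.
    fn min_value(&self) -> T;
    fn max_value(&self) -> T;
    // Number of values stored in the column.
    fn num_vals(&self) -> u64;
}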

View File

@@ -3,11 +3,11 @@ use std::num::NonZeroU64;
use common::{BinarySerializable, CountingWriter};
use fastdivide::DividerU64;
pub use fastfield_codecs::bitpacked::{BitpackedSerializer, BitpackedSerializerLegacy};
use fastfield_codecs::blockwise_linear::BlockwiseLinearSerializer;
use fastfield_codecs::linear::LinearSerializer;
pub use fastfield_codecs::bitpacked::{BitpackedCodec, BitpackedSerializerLegacy};
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::FastFieldCodecType;
pub use fastfield_codecs::{FastFieldCodecSerializer, FastFieldDataAccess, FastFieldStats};
pub use fastfield_codecs::{Column, FastFieldCodec, FastFieldStats};
use super::{find_gcd, ALL_CODECS, GCD_DEFAULT};
use crate::directory::{CompositeWrite, WritePtr};
@@ -64,15 +64,13 @@ impl From<FastFieldCodecType> for FastFieldCodecEnableCheck {
// use this once explicit_generic_args_with_impl_trait is merged and stabilized
// https://github.com/rust-lang/rust/pull/86176
fn codec_estimation<T: FastFieldCodecSerializer, A: FastFieldDataAccess>(
fastfield_accessor: &A,
fn codec_estimation<C: FastFieldCodec>(
fastfield_accessor: &impl Column,
estimations: &mut Vec<(f32, FastFieldCodecType)>,
) {
if !T::is_applicable(fastfield_accessor) {
return;
if let Some(ratio) = C::estimate(fastfield_accessor) {
estimations.push((ratio, C::CODEC_TYPE));
}
let ratio = T::estimate(fastfield_accessor);
estimations.push((ratio, T::CODEC_TYPE));
}
impl CompositeFastFieldSerializer {
@@ -99,7 +97,7 @@ impl CompositeFastFieldSerializer {
pub fn create_auto_detect_u64_fast_field(
&mut self,
field: Field,
fastfield_accessor: impl FastFieldDataAccess,
fastfield_accessor: impl Column,
) -> io::Result<()> {
self.create_auto_detect_u64_fast_field_with_idx(field, fastfield_accessor, 0)
}
@@ -119,7 +117,7 @@ impl CompositeFastFieldSerializer {
pub fn create_auto_detect_u64_fast_field_with_idx(
&mut self,
field: Field,
fastfield_accessor: impl FastFieldDataAccess,
fastfield_accessor: impl Column,
idx: usize,
) -> io::Result<()> {
let min_value = fastfield_accessor.min_value();
@@ -138,7 +136,7 @@ impl CompositeFastFieldSerializer {
}
Self::write_header(field_write, FastFieldCodecType::Gcd)?;
struct GCDWrappedFFAccess<T: FastFieldDataAccess> {
struct GCDWrappedFFAccess<T: Column> {
fastfield_accessor: T,
base_value: u64,
max_value: u64,
@@ -146,7 +144,7 @@ impl CompositeFastFieldSerializer {
gcd: DividerU64,
}
impl<T: FastFieldDataAccess> FastFieldDataAccess for GCDWrappedFFAccess<T> {
impl<T: Column> Column for GCDWrappedFFAccess<T> {
fn get_val(&self, position: u64) -> u64 {
self.gcd
.divide(self.fastfield_accessor.get_val(position) - self.base_value)
@@ -199,18 +197,18 @@ impl CompositeFastFieldSerializer {
codec_enable_checker: FastFieldCodecEnableCheck,
field: Field,
field_write: &mut CountingWriter<W>,
fastfield_accessor: impl FastFieldDataAccess,
fastfield_accessor: impl Column,
) -> io::Result<()> {
let mut estimations = vec![];
if codec_enable_checker.is_enabled(FastFieldCodecType::Bitpacked) {
codec_estimation::<BitpackedSerializer, _>(&fastfield_accessor, &mut estimations);
codec_estimation::<BitpackedCodec>(&fastfield_accessor, &mut estimations);
}
if codec_enable_checker.is_enabled(FastFieldCodecType::Linear) {
codec_estimation::<LinearSerializer, _>(&fastfield_accessor, &mut estimations);
codec_estimation::<LinearCodec>(&fastfield_accessor, &mut estimations);
}
if codec_enable_checker.is_enabled(FastFieldCodecType::BlockwiseLinear) {
codec_estimation::<BlockwiseLinearSerializer, _>(&fastfield_accessor, &mut estimations);
codec_estimation::<BlockwiseLinearCodec>(&fastfield_accessor, &mut estimations);
}
if let Some(broken_estimation) = estimations.iter().find(|estimation| estimation.0.is_nan())
{
@@ -229,13 +227,13 @@ impl CompositeFastFieldSerializer {
Self::write_header(field_write, codec_type)?;
match codec_type {
FastFieldCodecType::Bitpacked => {
BitpackedSerializer::serialize(field_write, &fastfield_accessor)?;
BitpackedCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::Linear => {
LinearSerializer::serialize(field_write, &fastfield_accessor)?;
LinearCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::BlockwiseLinear => {
BlockwiseLinearSerializer::serialize(field_write, &fastfield_accessor)?;
BlockwiseLinearCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::Gcd => {
return Err(io::Error::new(
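
The estimation API is also folded into the new trait: `FastFieldCodec::estimate` returns `Option<f32>`, merging the old `is_applicable` check and `estimate` call, so an inapplicable codec simply yields `None` and never enters `estimations`. A hedged sketch of the selection step that follows the loop (the helper `pick_best` is hypothetical; it mirrors the min-by-ratio choice made after NaN estimates are reported):

use fastfield_codecs::FastFieldCodecType;

// Pick the codec with the smallest estimated compression ratio.
fn pick_best(estimations: &[(f32, FastFieldCodecType)]) -> Option<FastFieldCodecType> {
    estimations
        .iter()
        .filter(|(ratio, _)| !ratio.is_nan()) // broken estimations are skipped
        .min_by(|(a, _), (b, _)| a.partial_cmp(b).expect("NaN filtered out"))
        .map(|(_, codec_type)| *codec_type)
}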

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::io;
use common;
use fastfield_codecs::FastFieldDataAccess;
use fastfield_codecs::Column;
use fnv::FnvHashMap;
use tantivy_bitpacker::BlockedBitpacker;
@@ -384,7 +384,7 @@ struct WriterFastFieldAccessProvider<'map, 'bitp> {
vals: &'bitp BlockedBitpacker,
stats: FastFieldStats,
}
impl<'map, 'bitp> FastFieldDataAccess for WriterFastFieldAccessProvider<'map, 'bitp> {
impl<'map, 'bitp> Column for WriterFastFieldAccessProvider<'map, 'bitp> {
/// Return the value associated to the given doc.
///
/// Whenever possible use the Iterator passed to the fastfield creation instead, for performance
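
Since the write path now feeds codecs through the same abstraction as the read path, any in-memory accessor can stand in as a column. A toy implementation against the simplified `Column` shape sketched earlier (`VecColumn` is hypothetical, not part of this change set):

// Backs a column with a plain Vec<u64>; a mental model for
// WriterFastFieldAccessProvider above.
struct VecColumn(Vec<u64>);

impl Column for VecColumn {
    fn get_val(&self, idx: u64) -> u64 {
        self.0[idx as usize]
    }
    fn min_value(&self) -> u64 {
        self.0.iter().copied().min().unwrap_or(0)
    }
    fn max_value(&self) -> u64 {
        self.0.iter().copied().max().unwrap_or(0)
    }
    fn num_vals(&self) -> u64 {
        self.0.len() as u64
    }
}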

View File

@@ -143,8 +143,9 @@ pub(crate) fn get_doc_id_mapping_from_field(
#[cfg(test)]
mod tests_indexsorting {
use fastfield_codecs::Column;
use crate::collector::TopDocs;
use crate::fastfield::FastFieldReader;
use crate::indexer::doc_id_mapping::DocIdMapping;
use crate::query::QueryParser;
use crate::schema::{Schema, *};
@@ -464,9 +465,9 @@ mod tests_indexsorting {
let my_number = index.schema().get_field("my_number").unwrap();
let fast_field = fast_fields.u64(my_number).unwrap();
assert_eq!(fast_field.get(0u32), 10u64);
assert_eq!(fast_field.get(1u32), 20u64);
assert_eq!(fast_field.get(2u32), 30u64);
assert_eq!(fast_field.get_val(0), 10u64);
assert_eq!(fast_field.get_val(1), 20u64);
assert_eq!(fast_field.get_val(2), 30u64);
let multi_numbers = index.schema().get_field("multi_numbers").unwrap();
let multifield = fast_fields.u64s(multi_numbers).unwrap();

View File

@@ -777,6 +777,7 @@ impl Drop for IndexWriter {
mod tests {
use std::collections::{HashMap, HashSet};
use fastfield_codecs::Column;
use proptest::prelude::*;
use proptest::prop_oneof;
use proptest::strategy::Strategy;
@@ -785,7 +786,6 @@ mod tests {
use crate::collector::TopDocs;
use crate::directory::error::LockError;
use crate::error::*;
use crate::fastfield::FastFieldReader;
use crate::indexer::NoMergePolicy;
use crate::query::{QueryParser, TermQuery};
use crate::schema::{
@@ -1327,7 +1327,7 @@ mod tests {
let fast_field_reader = segment_reader.fast_fields().u64(id_field)?;
let in_order_alive_ids: Vec<u64> = segment_reader
.doc_ids_alive()
.map(|doc| fast_field_reader.get(doc))
.map(|doc| fast_field_reader.get_val(doc as u64))
.collect();
assert_eq!(&in_order_alive_ids[..], &[9, 8, 7, 6, 5, 4, 1, 0]);
Ok(())
@@ -1493,7 +1493,7 @@ mod tests {
let ff_reader = segment_reader.fast_fields().u64(id_field).unwrap();
segment_reader
.doc_ids_alive()
.map(move |doc| ff_reader.get(doc))
.map(move |doc| ff_reader.get_val(doc as u64))
})
.collect();
@@ -1504,7 +1504,7 @@ mod tests {
let ff_reader = segment_reader.fast_fields().u64(id_field).unwrap();
segment_reader
.doc_ids_alive()
.map(move |doc| ff_reader.get(doc))
.map(move |doc| ff_reader.get_val(doc as u64))
})
.collect();
@@ -1622,7 +1622,7 @@ mod tests {
facet_reader
.facet_from_ord(facet_ords[0], &mut facet)
.unwrap();
let id = ff_reader.get(doc_id);
let id = ff_reader.get_val(doc_id as u64);
let facet_expected = Facet::from(&("/cola/".to_string() + &id.to_string()));
assert_eq!(facet, facet_expected);

View File

@@ -4,14 +4,13 @@ use std::sync::Arc;
use itertools::Itertools;
use measure_time::debug_time;
use tantivy_bitpacker::minmax;
use crate::core::{Segment, SegmentReader};
use crate::docset::{DocSet, TERMINATED};
use crate::error::DataCorruption;
use crate::fastfield::{
AliveBitSet, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldDataAccess,
FastFieldReader, FastFieldStats, MultiValueLength, MultiValuedFastFieldReader,
AliveBitSet, Column, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldStats,
MultiValueLength, MultiValuedFastFieldReader,
};
use crate::fieldnorm::{FieldNormReader, FieldNormReaders, FieldNormsSerializer, FieldNormsWriter};
use crate::indexer::doc_id_mapping::{expect_field_id_for_sort_field, SegmentDocIdMapping};
@@ -88,7 +87,7 @@ pub struct IndexMerger {
}
fn compute_min_max_val(
u64_reader: &impl FastFieldReader<u64>,
u64_reader: &impl Column<u64>,
segment_reader: &SegmentReader,
) -> Option<(u64, u64)> {
if segment_reader.max_doc() == 0 {
@@ -102,11 +101,11 @@ fn compute_min_max_val(
}
// some deleted documents,
// we need to recompute the max / min
minmax(
segment_reader
.doc_ids_alive()
.map(|doc_id| u64_reader.get(doc_id)),
)
segment_reader
.doc_ids_alive()
.map(|doc_id| u64_reader.get_val(doc_id as u64))
.minmax()
.into_option()
}
struct TermOrdinalMapping {
@@ -376,13 +375,13 @@ impl IndexMerger {
fast_field_readers: &'a Vec<DynamicFastFieldReader<u64>>,
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for SortedDocIdFieldAccessProvider<'a> {
impl<'a> Column for SortedDocIdFieldAccessProvider<'a> {
fn get_val(&self, doc: u64) -> u64 {
let DocAddress {
doc_id,
segment_ord,
} = self.doc_id_mapping.get_old_doc_addr(doc as u32);
self.fast_field_readers[segment_ord as usize].get(doc_id)
self.fast_field_readers[segment_ord as usize].get_val(doc_id as u64)
}
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
@@ -392,7 +391,7 @@ impl IndexMerger {
.map(|old_doc_addr| {
let fast_field_reader =
&self.fast_field_readers[old_doc_addr.segment_ord as usize];
fast_field_reader.get(old_doc_addr.doc_id)
fast_field_reader.get_val(old_doc_addr.doc_id as u64)
}),
)
}
@@ -429,7 +428,7 @@ impl IndexMerger {
let everything_is_in_order = reader_ordinal_and_field_accessors
.into_iter()
.map(|reader| reader.1)
.map(|(_, col)| Arc::new(col))
.tuple_windows()
.all(|(field_accessor1, field_accessor2)| {
if sort_by_field.order.is_asc() {
@@ -444,7 +443,7 @@ impl IndexMerger {
pub(crate) fn get_sort_field_accessor(
reader: &SegmentReader,
sort_by_field: &IndexSortByField,
) -> crate::Result<impl FastFieldReader<u64>> {
) -> crate::Result<impl Column> {
let field_id = expect_field_id_for_sort_field(reader.schema(), sort_by_field)?; // for now expect fastfield, but not strictly required
let value_accessor = reader.fast_fields().u64_lenient(field_id)?;
Ok(value_accessor)
@@ -453,7 +452,7 @@ impl IndexMerger {
pub(crate) fn get_reader_with_sort_field_accessor(
&self,
sort_by_field: &IndexSortByField,
) -> crate::Result<Vec<(SegmentOrdinal, impl FastFieldReader<u64> + Clone)>> {
) -> crate::Result<Vec<(SegmentOrdinal, impl Column)>> {
let reader_ordinal_and_field_accessors = self
.readers
.iter()
@@ -506,8 +505,8 @@ impl IndexMerger {
doc_id_reader_pair
.into_iter()
.kmerge_by(|a, b| {
let val1 = a.2.get(a.0);
let val2 = b.2.get(b.0);
let val1 = a.2.get_val(a.0 as u64);
let val2 = b.2.get_val(b.0 as u64);
if sort_by_field.order == Order::Asc {
val1 < val2
} else {
@@ -578,7 +577,7 @@ impl IndexMerger {
offsets: &'a [u64],
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for FieldIndexAccessProvider<'a> {
impl<'a> Column for FieldIndexAccessProvider<'a> {
fn get_val(&self, doc: u64) -> u64 {
self.offsets[doc as usize]
}
@@ -778,7 +777,7 @@ impl IndexMerger {
offsets: Vec<u64>,
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for SortedDocIdMultiValueAccessProvider<'a> {
impl<'a> Column for SortedDocIdMultiValueAccessProvider<'a> {
fn get_val(&self, pos: u64) -> u64 {
// use the offsets index to find the doc_id which will contain the position.
// the offsets are strictly increasing so we can do a simple search on it.
@@ -1200,6 +1199,7 @@ impl IndexMerger {
#[cfg(test)]
mod tests {
use byteorder::{BigEndian, ReadBytesExt};
use fastfield_codecs::Column;
use schema::FAST;
use crate::collector::tests::{
@@ -1207,7 +1207,6 @@ mod tests {
};
use crate::collector::{Count, FacetCollector};
use crate::core::Index;
use crate::fastfield::FastFieldReader;
use crate::query::{AllQuery, BooleanQuery, Scorer, TermQuery};
use crate::schema::{
Cardinality, Document, Facet, FacetOptions, IndexRecordOption, NumericOptions, Term,
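
In the merger, the min/max recomputation over alive documents now goes through `Itertools::minmax` rather than the `tantivy_bitpacker::minmax` helper, and every per-document read uses `get_val(doc as u64)`. A sketch of that path, assuming an alive-docs iterator and the `Column` bound used by `compute_min_max_val` above:

use fastfield_codecs::Column;
use itertools::Itertools;

// `minmax().into_option()` yields None for an empty iterator and
// Some((min, max)) otherwise, matching the Option return above.
fn min_max_alive(
    reader: &impl Column<u64>,
    alive_docs: impl Iterator<Item = u32>,
) -> Option<(u64, u64)> {
    alive_docs
        .map(|doc| reader.get_val(doc as u64))
        .minmax()
        .into_option()
}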

View File

@@ -1,8 +1,10 @@
#[cfg(test)]
mod tests {
use fastfield_codecs::Column;
use crate::collector::TopDocs;
use crate::core::Index;
use crate::fastfield::{AliveBitSet, FastFieldReader, MultiValuedFastFieldReader};
use crate::fastfield::{AliveBitSet, MultiValuedFastFieldReader};
use crate::query::QueryParser;
use crate::schema::{
self, BytesOptions, Cardinality, Facet, FacetOptions, IndexRecordOption, NumericOptions,
@@ -186,17 +188,17 @@ mod tests {
let fast_fields = segment_reader.fast_fields();
let fast_field = fast_fields.u64(int_field).unwrap();
assert_eq!(fast_field.get(5u32), 1u64);
assert_eq!(fast_field.get(4u32), 2u64);
assert_eq!(fast_field.get(3u32), 3u64);
assert_eq!(fast_field.get_val(5), 1u64);
assert_eq!(fast_field.get_val(4), 2u64);
assert_eq!(fast_field.get_val(3), 3u64);
if force_disjunct_segment_sort_values {
assert_eq!(fast_field.get(2u32), 20u64);
assert_eq!(fast_field.get(1u32), 100u64);
assert_eq!(fast_field.get_val(2u64), 20u64);
assert_eq!(fast_field.get_val(1u64), 100u64);
} else {
assert_eq!(fast_field.get(2u32), 10u64);
assert_eq!(fast_field.get(1u32), 20u64);
assert_eq!(fast_field.get_val(2u64), 10u64);
assert_eq!(fast_field.get_val(1u64), 20u64);
}
assert_eq!(fast_field.get(0u32), 1_000u64);
assert_eq!(fast_field.get_val(0u64), 1_000u64);
// test new field norm mapping
{
@@ -373,12 +375,12 @@ mod tests {
let fast_fields = segment_reader.fast_fields();
let fast_field = fast_fields.u64(int_field).unwrap();
assert_eq!(fast_field.get(0u32), 1u64);
assert_eq!(fast_field.get(1u32), 2u64);
assert_eq!(fast_field.get(2u32), 3u64);
assert_eq!(fast_field.get(3u32), 10u64);
assert_eq!(fast_field.get(4u32), 20u64);
assert_eq!(fast_field.get(5u32), 1_000u64);
assert_eq!(fast_field.get_val(0), 1u64);
assert_eq!(fast_field.get_val(1), 2u64);
assert_eq!(fast_field.get_val(2), 3u64);
assert_eq!(fast_field.get_val(3), 10u64);
assert_eq!(fast_field.get_val(4), 20u64);
assert_eq!(fast_field.get_val(5), 1_000u64);
let get_vals = |fast_field: &MultiValuedFastFieldReader<u64>, doc_id: u32| -> Vec<u64> {
let mut vals = vec![];

View File

@@ -421,6 +421,7 @@ pub struct DocAddress {
#[cfg(test)]
pub mod tests {
use common::{BinarySerializable, FixedSize};
use fastfield_codecs::Column;
use rand::distributions::{Bernoulli, Uniform};
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
@@ -429,7 +430,6 @@ pub mod tests {
use crate::collector::tests::TEST_COLLECTOR_WITH_SCORE;
use crate::core::SegmentReader;
use crate::docset::{DocSet, TERMINATED};
use crate::fastfield::FastFieldReader;
use crate::merge_policy::NoMergePolicy;
use crate::query::BooleanQuery;
use crate::schema::*;
@@ -1036,21 +1036,21 @@ pub mod tests {
let fast_field_reader_opt = segment_reader.fast_fields().u64(fast_field_unsigned);
assert!(fast_field_reader_opt.is_ok());
let fast_field_reader = fast_field_reader_opt.unwrap();
assert_eq!(fast_field_reader.get(0), 4u64)
assert_eq!(fast_field_reader.get_val(0), 4u64)
}
{
let fast_field_reader_res = segment_reader.fast_fields().i64(fast_field_signed);
assert!(fast_field_reader_res.is_ok());
let fast_field_reader = fast_field_reader_res.unwrap();
assert_eq!(fast_field_reader.get(0), 4i64)
assert_eq!(fast_field_reader.get_val(0), 4i64)
}
{
let fast_field_reader_res = segment_reader.fast_fields().f64(fast_field_float);
assert!(fast_field_reader_res.is_ok());
let fast_field_reader = fast_field_reader_res.unwrap();
assert_eq!(fast_field_reader.get(0), 4f64)
assert_eq!(fast_field_reader.get_val(0), 4f64)
}
Ok(())
}