Compare commits


53 Commits

Author SHA1 Message Date
Paul Masurel
e765706487 Removing Deserializer trait
And renaming the `Serializer` trait to `FastFieldCodec`.
2022-08-27 21:02:02 +02:00
Pascal Seitz
fdd0f63787 merge traits 2022-08-27 17:01:41 +02:00
Pascal Seitz
fd60e6fe08 rename get_u64 to get_val 2022-08-27 17:01:41 +02:00
Pascal Seitz
02c3252d1e split open_from_bytes to own trait 2022-08-27 17:01:39 +02:00
Pascal Seitz
4a6f36937c num_vals to FastFieldCodecReader 2022-08-27 17:00:55 +02:00
Paul Masurel
3a9727aa91 Pleasing Clippy 2022-08-27 11:33:03 +02:00
UEDA Akira
17093e8ffe Collapse overlapped highlighted ranges (#1473) 2022-08-26 14:37:08 +09:00
Paul Masurel
03e4630cd8 Mark the CI as successful regardless of whether uploading to Coverall fails. 2022-08-26 07:35:29 +02:00
Paul Masurel
4ae0317d68 Cargo fmt 2022-08-26 00:50:07 +02:00
Paul Masurel
107b19855f Fixing the fastfield codec benchmark (#1484) 2022-08-26 05:54:14 +09:00
Paul Masurel
d8f66ba07e Rename fastfield codecs (#1483) 2022-08-26 01:19:30 +09:00
Paul Masurel
f908549245 Argument missing in bench 2022-08-25 15:42:59 +02:00
Paul Masurel
3673a5df9b Homogeneous codec names. (#1481) 2022-08-25 05:51:37 +09:00
Paul Masurel
298b5dd726 GCD wrapper uses DividerU64 (#1478) 2022-08-25 02:29:13 +09:00
Paul Masurel
8bbb22e9bf Minor refactoring. Introducing a codec type enum. (#1477) 2022-08-25 02:21:41 +09:00
PSeitz
513f68209d Merge pull request #1476 from quickwit-oss/fix_interpol
add proptest to ff codecs
2022-08-24 08:01:36 -07:00
Pascal Seitz
91f2f7e722 add proptest to ff codecs 2022-08-24 16:42:40 +02:00
PSeitz
c476b530cf Merge pull request #1432 from quickwit-oss/gcd_encoding
add gcd test for DateTime
2022-08-24 06:50:34 -07:00
PSeitz
77dd202e19 Merge pull request #1475 from quickwit-oss/extend_ff_access
move fastfield stats to trait
2022-08-24 06:44:57 -07:00
Pascal Seitz
00ebff3c16 move fastfield stats to trait 2022-08-24 15:29:55 +02:00
Paul Masurel
9a6d37c42c Apply suggestions from code review 2022-08-24 21:20:17 +09:00
PSeitz
bb01e99e05 Fixes race condition in Searcher (#1464)
Fixes a race condition in Searcher by avoiding repeated calls to open_segment_readers and passing the segment readers as an argument instead

Closes #1461
2022-08-24 21:17:37 +09:00
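The pattern behind this fix, sketched below with hypothetical stand-in types (this is not tantivy's actual Searcher code): fetching the shared segment-reader list once and passing that snapshot as an argument removes the window in which two separate reads can disagree.

```rust
use std::sync::{Arc, RwLock};

// Hypothetical stand-in for the list of segment readers; not tantivy's type.
type Readers = Arc<Vec<String>>;

struct Index {
    readers: RwLock<Readers>,
}

impl Index {
    fn open_readers(&self) -> Readers {
        self.readers.read().unwrap().clone()
    }
}

// Racy pattern: two separate calls may observe two different generations.
fn racy(index: &Index) -> (usize, usize) {
    let a = index.open_readers().len();
    let b = index.open_readers().len(); // the list may have changed in between
    (a, b)
}

// Fixed pattern: fetch once, pass the snapshot along as an argument.
fn consistent(readers: &Readers) -> (usize, usize) {
    (readers.len(), readers.len()) // always agrees with itself
}

fn main() {
    let index = Index {
        readers: RwLock::new(Arc::new(vec!["segment_0".to_string()])),
    };
    let snapshot = index.open_readers();
    assert_eq!(consistent(&snapshot), (1, 1));
    let _ = racy(&index);
}
```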
PSeitz
535f1a5d83 Merge pull request #1471 from adamreichold/ci-no-nightly-no-cry
Split test into check and test CI jobs
2022-08-24 04:41:42 -07:00
Pascal Seitz
625f9174a7 check for size 2022-08-24 10:32:45 +02:00
Adam Reichold
11a4d97cf5 Use a job matrix to further split and deduplicate the test CI job. 2022-08-24 10:27:57 +02:00
Adam Reichold
1c3d39677a Split checking and testing to allow a bit more parallelism in the CI. 2022-08-24 10:27:57 +02:00
Pascal Seitz
6f65995cfd remove gcd from api 2022-08-24 10:24:09 +02:00
Pascal Seitz
e2e4190571 add gcd test for DateTime 2022-08-24 10:24:09 +02:00
PSeitz
82209c58aa reuse get_calculated_value (#1472) 2022-08-24 17:16:25 +09:00
Paul Masurel
21519788ea Build fix (#1470) 2022-08-24 07:16:38 +09:00
Shikhar Bhushan
4c6c6e4a9c ConstScoreQuery (#1463) 2022-08-24 06:37:34 +09:00
Adam Reichold
df0ac9e901 Extend facet deserialization to handle owned in addition to borrowed strings. (#1466) 2022-08-24 06:37:13 +09:00
Adam Reichold
71ab482720 RFC: Use a more general but still object-safe signature for Query::query_terms. (#1468)
* Use a more general but still object-safe signature for Query::query_terms.

* Further constrain the generalized Query::query_terms signature to allow extracting references to terms.
2022-08-24 06:34:07 +09:00
Adam Reichold
2ae383e452 Cache dependencies in CI to speed up build times. (#1469)
* Cache dependencies in CI to speed up build times.

* Give cargo-nextest a try.
2022-08-24 06:27:29 +09:00
PSeitz
8b3a6f6231 Merge pull request #1439 from quickwit-oss/fix_value_range
fix get calculated value
2022-08-23 10:15:13 -07:00
PSeitz
11edd6bd59 fix for api change (#1467) 2022-08-24 01:10:12 +09:00
Pascal Seitz
193a3c21f4 fix neg slope calculated value 2022-08-23 13:42:09 +02:00
PSeitz
998b1263f6 Merge pull request #1460 from quickwit-oss/merge_ff_access_iterator
move iter to FastFieldDataAccess
2022-08-23 02:58:10 -07:00
Pascal Seitz
72272bdf81 fix variable name 2022-08-23 11:38:27 +02:00
Pascal Seitz
c39c2d79da move iter to FastFieldDataAccess 2022-08-23 11:26:47 +02:00
Paul Masurel
67d94f5bd2 Getting rid of the gcd dependency and using NonZeroU64 in gcd. (#1459) 2022-08-23 07:25:26 +09:00
Paul Masurel
abbd934ac9 Embeds OwnedBytes into the FastFieldCodecReader. (#1458) 2022-08-23 00:02:31 +09:00
Paul Masurel
7f9ba0ee50 Minor readability refactoring in the SegmentDocIdMapping (#1451) 2022-08-22 22:44:36 +09:00
PSeitz
8edcd6f958 Merge pull request #1428 from izihawa/feature/dismax
[feat] Implement `DisjunctionMaxQuery` and refactor `ScoreCombiner`
2022-08-22 06:15:30 -07:00
Pasha Podolsky
f50700835d [fix] Fn -> FnOnce 2022-08-22 15:57:30 +03:00
PSeitz
494e92ca59 fix issue in composite (#1456)
The file offsets were recorded incorrectly in some cases, e.g. when the recording looked like this: [(Field 1, Index 0, Offset 0), (Field 1, Index 1, Offset 14), (Field 0, Index 0, Offset 14)]. The last entry means the data for Field 0 spans from offset 14 to the end of the file. But the data was converted to a vec and sorted, which makes the last entry belong to Field 1 instead.
2022-08-22 17:52:12 +09:00
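The effect described above can be reproduced with a minimal sketch (plain tuples standing in for the composite-file bookkeeping): after sorting the recorded (field, index, offset) tuples, the last entry no longer belongs to Field 0, so any "last entry runs to end-of-file" logic attributes that range to the wrong field.

```rust
fn main() {
    // Recorded in write order: (field, index, offset), as in the commit message.
    let recorded = vec![(1u32, 0u32, 0u64), (1, 1, 14), (0, 0, 14)];

    // The last recorded entry is Field 0, whose data spans offset 14..end-of-file.
    assert_eq!(recorded.last().unwrap().0, 0);

    // Converting to a vec and sorting reorders the tuples...
    let mut sorted = recorded.clone();
    sorted.sort();

    // ...so the last entry now belongs to Field 1, and the end-of-file range
    // would be attributed to the wrong field.
    assert_eq!(sorted.last().unwrap().0, 1);
}
```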
Paul Masurel
4a3169011d clippy (#1452) 2022-08-20 20:01:33 +09:00
Pascal Seitz
050fc5dde9 add comment for diff dance 2022-08-20 08:56:03 +02:00
Pascal Seitz
f01cb7d3aa remove cast 2022-08-12 19:50:06 +02:00
Pascal Seitz
9811d15657 improve slope calculation by delaying f64 cast 2022-08-11 13:32:10 +02:00
Pascal Seitz
31ba5a3c16 fix get calculated value
fix get calculated value by delaying cast
2022-08-11 09:44:20 +02:00
Pasha Podolsky
71041b2314 [fix] Fix bench 2022-07-28 21:36:28 +03:00
Pasha Podolsky
09aae134e6 [feat] Implement DisjunctionMaxQuery and refactor ScoreCombiner 2022-07-28 20:47:20 +03:00
59 changed files with 2411 additions and 1475 deletions

View File

@@ -12,12 +12,14 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Install Rust
run: rustup toolchain install nightly --component llvm-tools-preview
run: rustup toolchain install nightly --profile minimal --component llvm-tools-preview
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@cargo-llvm-cov
- name: Generate code coverage
run: cargo +nightly llvm-cov --all-features --workspace --lcov --output-path lcov.info
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
files: lcov.info

View File

@@ -19,11 +19,10 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
components: rustfmt, clippy
- name: Run indexing_unsorted
run: cargo test indexing_unsorted -- --ignored
- name: Run indexing_sorted
run: cargo test indexing_sorted -- --ignored

View File

@@ -10,34 +10,27 @@ env:
CARGO_TERM_COLOR: always
jobs:
test:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install latest nightly to test also against unstable feature flag
- name: Install nightly
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
profile: minimal
components: rustfmt
- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
components: rustfmt, clippy
profile: minimal
components: clippy
- name: Build
run: cargo build --verbose --workspace
- name: Run tests
run: cargo +stable test --features mmap,brotli-compression,lz4-compression,snappy-compression,zstd-compression,failpoints --verbose --workspace
- name: Run tests quickwit feature
run: cargo +stable test --features mmap,quickwit,failpoints --verbose --workspace
- uses: Swatinem/rust-cache@v2
- name: Check Formatting
run: cargo +nightly fmt --all -- --check
@@ -48,3 +41,34 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
args: --tests
test:
runs-on: ubuntu-latest
strategy:
matrix:
features: [
{ label: "all", flags: "mmap,brotli-compression,lz4-compression,snappy-compression,zstd-compression,failpoints" },
{ label: "quickwit", flags: "mmap,quickwit,failpoints" }
]
name: test-${{ matrix.features.label}}
steps:
- uses: actions/checkout@v3
- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- uses: taiki-e/install-action@nextest
- uses: Swatinem/rust-cache@v2
- name: Run tests
run: cargo +stable nextest run --features ${{ matrix.features.flags }} --verbose --workspace
- name: Run doctests
run: cargo +stable test --doc --features ${{ matrix.features.flags }} --verbose --workspace

View File

@@ -82,14 +82,16 @@ impl BitUnpacker {
}
}
pub fn bit_width(&self) -> u8 {
self.num_bits as u8
}
#[inline]
pub fn get(&self, idx: u64, data: &[u8]) -> u64 {
if self.num_bits == 0 {
return 0u64;
}
let num_bits = self.num_bits;
let mask = self.mask;
let addr_in_bits = idx * num_bits;
let addr_in_bits = idx * self.num_bits;
let addr = addr_in_bits >> 3;
let bit_shift = addr_in_bits & 7;
debug_assert!(
@@ -101,7 +103,7 @@ impl BitUnpacker {
.unwrap();
let val_unshifted_unmasked: u64 = u64::from_le_bytes(bytes);
let val_shifted = (val_unshifted_unmasked >> bit_shift) as u64;
val_shifted & mask
val_shifted & self.mask
}
}
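For readers following the hunk above, here is a hedged standalone sketch of the same bit-address arithmetic (byte address = bit address >> 3, in-byte shift = bit address & 7, then mask); like the real reader it assumes num_bits < 64 and enough trailing padding to read a full 8-byte window.

```rust
// Standalone sketch of the bit-extraction arithmetic from BitUnpacker::get.
fn get(idx: u64, num_bits: u8, data: &[u8]) -> u64 {
    if num_bits == 0 {
        return 0;
    }
    let mask = (1u64 << num_bits) - 1;
    let addr_in_bits = idx * num_bits as u64;
    let addr = (addr_in_bits >> 3) as usize; // byte containing the first bit
    let bit_shift = (addr_in_bits & 7) as u32; // offset of that bit inside the byte
    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(&data[addr..addr + 8]);
    (u64::from_le_bytes(bytes) >> bit_shift) & mask
}

fn main() {
    // Pack the values 0..8 with 3 bits each (24 bits of payload), plus padding.
    let mut packed: u32 = 0;
    for i in 0..8u32 {
        packed |= i << (3 * i);
    }
    let mut data = vec![0u8; 12];
    data[..4].copy_from_slice(&packed.to_le_bytes());
    for i in 0..8u64 {
        assert_eq!(get(i, 3, &data), i);
    }
}
```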

View File

@@ -14,7 +14,6 @@ pub struct BlockedBitpacker {
buffer: Vec<u64>,
offset_and_bits: Vec<BlockedBitpackerEntryMetaData>,
}
impl Default for BlockedBitpacker {
fn default() -> Self {
BlockedBitpacker::new()
@@ -59,13 +58,18 @@ fn metadata_test() {
assert_eq!(meta.num_bits(), 6);
}
fn mem_usage<T>(items: &Vec<T>) -> usize {
items.capacity() * std::mem::size_of::<T>()
}
impl BlockedBitpacker {
pub fn new() -> Self {
let compressed_blocks = vec![0u8; 8];
let mut compressed_blocks = vec![];
compressed_blocks.resize(8, 0);
Self {
compressed_blocks,
buffer: Vec::new(),
offset_and_bits: Vec::new(),
buffer: vec![],
offset_and_bits: vec![],
}
}
@@ -73,10 +77,8 @@ impl BlockedBitpacker {
pub fn mem_usage(&self) -> usize {
std::mem::size_of::<BlockedBitpacker>()
+ self.compressed_blocks.capacity()
+ self.offset_and_bits.capacity()
* std::mem::size_of_val(&self.offset_and_bits.get(0).cloned().unwrap_or_default())
+ self.buffer.capacity()
* std::mem::size_of_val(&self.buffer.get(0).cloned().unwrap_or_default())
+ mem_usage(&self.offset_and_bits)
+ mem_usage(&self.buffer)
}
#[inline]
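A hedged sketch of the capacity-based estimate the new mem_usage helper computes; as in the diff, allocator bookkeeping is ignored and capacity, not length, drives the number.

```rust
// Approximate heap usage of a Vec<T>: allocated capacity times element size,
// mirroring the helper introduced above.
fn mem_usage<T>(items: &Vec<T>) -> usize {
    items.capacity() * std::mem::size_of::<T>()
}

fn main() {
    let mut values: Vec<u64> = Vec::with_capacity(16);
    values.push(42);
    // The estimate tracks the 16 allocated slots, not the single stored element.
    assert!(mem_usage(&values) >= 16 * std::mem::size_of::<u64>());
    assert!(mem_usage(&values) > values.len() * std::mem::size_of::<u64>());
}
```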

View File

@@ -10,7 +10,7 @@
// ---
// Importing tantivy...
use tantivy::collector::{Collector, SegmentCollector};
use tantivy::fastfield::{FastFieldReader, FastFieldReaderImpl};
use tantivy::fastfield::{DynamicFastFieldReader, FastFieldReader};
use tantivy::query::QueryParser;
use tantivy::schema::{Field, Schema, FAST, INDEXED, TEXT};
use tantivy::{doc, Index, Score, SegmentReader};
@@ -95,7 +95,7 @@ impl Collector for StatsCollector {
}
struct StatsSegmentCollector {
fast_field_reader: FastFieldReaderImpl<u64>,
fast_field_reader: DynamicFastFieldReader<u64>,
stats: Stats,
}

View File

@@ -14,10 +14,10 @@ tantivy-bitpacker = { version="0.2", path = "../bitpacker/" }
ownedbytes = { version = "0.3.0", path = "../ownedbytes" }
prettytable-rs = {version="0.9.0", optional= true}
rand = {version="0.8.3", optional= true}
fastdivide = "0.4"
[dev-dependencies]
more-asserts = "0.3.0"
proptest = "1.0.0"
rand = "0.8.3"
[features]

View File

@@ -4,11 +4,9 @@ extern crate test;
#[cfg(test)]
mod tests {
use fastfield_codecs::bitpacked::{BitpackedFastFieldCodec, BitpackedFastFieldReader};
use fastfield_codecs::linearinterpol::{LinearInterpolCodec, LinearInterpolFastFieldReader};
use fastfield_codecs::multilinearinterpol::{
MultiLinearInterpolFastFieldCodec, MultiLinearInterpolFastFieldReader,
};
use fastfield_codecs::bitpacked::BitpackedCodec;
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::*;
fn get_data() -> Vec<u64> {
@@ -27,69 +25,59 @@ mod tests {
fn value_iter() -> impl Iterator<Item = u64> {
0..20_000
}
fn bench_get<S: FastFieldCodec, R: FastFieldCodecReader>(b: &mut Bencher, data: &[u64]) {
fn bench_get<Codec: FastFieldCodec>(b: &mut Bencher, data: &[u64]) {
let mut bytes = vec![];
S::serialize(
&mut bytes,
&data,
stats_from_vec(data),
data.iter().cloned(),
data.iter().cloned(),
)
.unwrap();
let reader = R::open_from_bytes(&bytes).unwrap();
Codec::serialize(&mut bytes, &data).unwrap();
let reader = Codec::open_from_bytes(OwnedBytes::new(bytes)).unwrap();
b.iter(|| {
let mut sum = 0u64;
for pos in value_iter() {
reader.get_u64(pos as u64, &bytes);
let val = reader.get_val(pos as u64);
debug_assert_eq!(data[pos as usize], val);
sum = sum.wrapping_add(val);
}
sum
});
}
fn bench_create<S: FastFieldCodec>(b: &mut Bencher, data: &[u64]) {
let mut bytes = vec![];
fn bench_create<Codec: FastFieldCodec>(b: &mut Bencher, data: &[u64]) {
let mut bytes = Vec::new();
b.iter(|| {
S::serialize(
&mut bytes,
&data,
stats_from_vec(data),
data.iter().cloned(),
data.iter().cloned(),
)
.unwrap();
bytes.clear();
Codec::serialize(&mut bytes, &data).unwrap();
});
}
use ownedbytes::OwnedBytes;
use test::Bencher;
#[bench]
fn bench_fastfield_bitpack_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<BitpackedFastFieldCodec>(b, &data);
bench_create::<BitpackedCodec>(b, &data);
}
#[bench]
fn bench_fastfield_linearinterpol_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<LinearInterpolCodec>(b, &data);
bench_create::<LinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_multilinearinterpol_create(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_create::<MultiLinearInterpolFastFieldCodec>(b, &data);
bench_create::<BlockwiseLinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_bitpack_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<BitpackedFastFieldCodec, BitpackedFastFieldReader>(b, &data);
bench_get::<BitpackedCodec>(b, &data);
}
#[bench]
fn bench_fastfield_linearinterpol_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<LinearInterpolCodec, LinearInterpolFastFieldReader>(b, &data);
bench_get::<LinearCodec>(b, &data);
}
#[bench]
fn bench_fastfield_multilinearinterpol_get(b: &mut Bencher) {
let data: Vec<_> = get_data();
bench_get::<MultiLinearInterpolFastFieldCodec, MultiLinearInterpolFastFieldReader>(
b, &data,
);
bench_get::<BlockwiseLinearCodec>(b, &data);
}
pub fn stats_from_vec(data: &[u64]) -> FastFieldStats {
let min_value = data.iter().cloned().min().unwrap_or(0);
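The bench_get closure above accumulates a wrapping sum and returns it so the measured reads cannot be optimized away; below is a hedged standalone sketch of that pattern without the nightly Bencher.

```rust
// Sum every value and return the result, giving the loop an observable effect
// so the compiler cannot elide the reads being measured.
fn measured_reads(data: &[u64]) -> u64 {
    let mut sum = 0u64;
    for pos in 0..data.len() {
        let val = data[pos];
        debug_assert_eq!(data[pos], val);
        sum = sum.wrapping_add(val);
    }
    sum
}

fn main() {
    let data: Vec<u64> = (0..20_000u64).collect();
    println!("checksum: {}", measured_reads(&data));
}
```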

View File

@@ -4,21 +4,22 @@ use common::BinarySerializable;
use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::{FastFieldCodec, FastFieldCodecReader, FastFieldStats};
use crate::{FastFieldCodec, FastFieldCodecType, FastFieldDataAccess};
/// Depending on the field type, a different
/// fast field is required.
#[derive(Clone)]
pub struct BitpackedFastFieldReader {
pub struct BitpackedReader {
data: OwnedBytes,
bit_unpacker: BitUnpacker,
pub min_value_u64: u64,
pub max_value_u64: u64,
min_value_u64: u64,
max_value_u64: u64,
num_vals: u64,
}
impl FastFieldCodecReader for BitpackedFastFieldReader {
impl FastFieldDataAccess for BitpackedReader {
#[inline]
fn get_u64(&self, doc: u64) -> u64 {
fn get_val(&self, doc: u64) -> u64 {
self.min_value_u64 + self.bit_unpacker.get(doc, &self.data)
}
#[inline]
@@ -29,16 +30,21 @@ impl FastFieldCodecReader for BitpackedFastFieldReader {
fn max_value(&self) -> u64 {
self.max_value_u64
}
#[inline]
fn num_vals(&self) -> u64 {
self.num_vals
}
}
pub struct BitpackedFastFieldSerializerLegacy<'a, W: 'a + Write> {
pub struct BitpackedSerializerLegacy<'a, W: 'a + Write> {
bit_packer: BitPacker,
write: &'a mut W,
min_value: u64,
num_vals: u64,
amplitude: u64,
num_bits: u8,
}
impl<'a, W: Write> BitpackedFastFieldSerializerLegacy<'a, W> {
impl<'a, W: Write> BitpackedSerializerLegacy<'a, W> {
/// Creates a new fast field serializer.
///
/// The serializer in fact encode the values by bitpacking
@@ -51,15 +57,16 @@ impl<'a, W: Write> BitpackedFastFieldSerializerLegacy<'a, W> {
write: &'a mut W,
min_value: u64,
max_value: u64,
) -> io::Result<BitpackedFastFieldSerializerLegacy<'a, W>> {
) -> io::Result<BitpackedSerializerLegacy<'a, W>> {
assert!(min_value <= max_value);
let amplitude = max_value - min_value;
let num_bits = compute_num_bits(amplitude);
let bit_packer = BitPacker::new();
Ok(BitpackedFastFieldSerializerLegacy {
Ok(BitpackedSerializerLegacy {
bit_packer,
write,
min_value,
num_vals: 0,
amplitude,
num_bits,
})
@@ -70,37 +77,42 @@ impl<'a, W: Write> BitpackedFastFieldSerializerLegacy<'a, W> {
let val_to_write: u64 = val - self.min_value;
self.bit_packer
.write(val_to_write, self.num_bits, &mut self.write)?;
self.num_vals += 1;
Ok(())
}
pub fn close_field(mut self) -> io::Result<()> {
self.bit_packer.close(&mut self.write)?;
self.min_value.serialize(&mut self.write)?;
self.amplitude.serialize(&mut self.write)?;
self.num_vals.serialize(&mut self.write)?;
Ok(())
}
}
pub struct BitpackedFastFieldCodec;
pub struct BitpackedCodec;
impl FastFieldCodec for BitpackedFastFieldCodec {
const NAME: &'static str = "Bitpacked";
impl FastFieldCodec for BitpackedCodec {
/// The CODEC_TYPE is an enum value used for serialization.
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::Bitpacked;
type Reader = BitpackedFastFieldReader;
type Reader = BitpackedReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_offset = bytes.len() - 16;
let footer_offset = bytes.len() - 24;
let (data, mut footer) = bytes.split(footer_offset);
let min_value = u64::deserialize(&mut footer)?;
let amplitude = u64::deserialize(&mut footer)?;
let num_vals = u64::deserialize(&mut footer)?;
let max_value = min_value + amplitude;
let num_bits = compute_num_bits(amplitude);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(BitpackedFastFieldReader {
Ok(BitpackedReader {
data,
bit_unpacker,
min_value_u64: min_value,
max_value_u64: max_value,
bit_unpacker,
num_vals,
})
}
@@ -113,26 +125,27 @@ impl FastFieldCodec for BitpackedFastFieldCodec {
/// compute the minimum number of bits required to encode
/// values.
fn serialize(
&self,
write: &mut impl io::Write,
vals: &[u64],
stats: FastFieldStats,
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
let mut serializer =
BitpackedFastFieldSerializerLegacy::open(write, stats.min_value, stats.max_value)?;
let mut serializer = BitpackedSerializerLegacy::open(
write,
fastfield_accessor.min_value(),
fastfield_accessor.max_value(),
)?;
for &val in vals {
for val in fastfield_accessor.iter() {
serializer.add_val(val)?;
}
serializer.close_field()?;
Ok(())
}
fn is_applicable(_vals: &[u64], _stats: FastFieldStats) -> bool {
fn is_applicable(_fastfield_accessor: &impl FastFieldDataAccess) -> bool {
true
}
fn estimate(_vals: &[u64], stats: FastFieldStats) -> f32 {
let amplitude = stats.max_value - stats.min_value;
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
let amplitude = fastfield_accessor.max_value() - fastfield_accessor.min_value();
let num_bits = compute_num_bits(amplitude);
let num_bits_uncompressed = 64;
num_bits as f32 / num_bits_uncompressed as f32
@@ -145,7 +158,7 @@ mod tests {
use crate::tests::get_codec_test_data_sets;
fn create_and_validate(data: &[u64], name: &str) {
crate::tests::create_and_validate(&BitpackedFastFieldCodec, data, name);
crate::tests::create_and_validate::<BitpackedCodec>(data, name);
}
#[test]
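A hedged, self-contained sketch of the delta-plus-bit-width scheme used by BitpackedCodec above: each value is stored as val - min_value, and the bit width follows from the amplitude max - min. compute_num_bits here is a local stand-in for the tantivy_bitpacker helper, not its exact implementation.

```rust
// Local stand-in: number of bits needed to represent `amplitude` (0 needs 0 bits).
fn compute_num_bits(amplitude: u64) -> u8 {
    (64 - amplitude.leading_zeros()) as u8
}

fn main() {
    let vals = [1000u64, 1003, 1001, 1007];
    let min = *vals.iter().min().unwrap();
    let max = *vals.iter().max().unwrap();
    let amplitude = max - min; // 7
    let num_bits = compute_num_bits(amplitude); // 3 bits per value instead of 64
    assert_eq!(num_bits, 3);

    // Values are encoded relative to min_value, as in BitpackedSerializerLegacy.
    let deltas: Vec<u64> = vals.iter().map(|&v| v - min).collect();
    assert_eq!(deltas, vec![0, 3, 1, 7]);

    // Reading adds min_value back, as in BitpackedReader::get_val.
    let decoded: Vec<u64> = deltas.iter().map(|&d| min + d).collect();
    assert_eq!(decoded, vals);
}
```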

View File

@@ -1,4 +1,4 @@
//! MultiLinearInterpol compressor uses linear interpolation to guess a values and stores the
//! The BlockwiseLinear codec uses linear interpolation to guess a values and stores the
//! offset, but in blocks of 512.
//!
//! With a CHUNK_SIZE of 512 and 29 byte metadata per block, we get a overhead for metadata of 232 /
@@ -17,16 +17,17 @@ use common::{BinarySerializable, CountingWriter, DeserializeFrom};
use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::{FastFieldCodec, FastFieldCodecReader, FastFieldStats};
use crate::linear::{get_calculated_value, get_slope};
use crate::{FastFieldCodec, FastFieldCodecType, FastFieldDataAccess};
const CHUNK_SIZE: usize = 512;
const CHUNK_SIZE: u64 = 512;
/// Depending on the field type, a different
/// fast field is required.
#[derive(Clone)]
pub struct MultiLinearInterpolFastFieldReader {
pub struct BlockwiseLinearReader {
data: OwnedBytes,
pub footer: MultiLinearInterpolFooter,
pub footer: BlockwiseLinearFooter,
}
#[derive(Clone, Debug, Default)]
@@ -101,14 +102,14 @@ impl BinarySerializable for Function {
}
#[derive(Clone, Debug)]
pub struct MultiLinearInterpolFooter {
pub struct BlockwiseLinearFooter {
pub num_vals: u64,
pub min_value: u64,
pub max_value: u64,
interpolations: Vec<Function>,
}
impl BinarySerializable for MultiLinearInterpolFooter {
impl BinarySerializable for BlockwiseLinearFooter {
fn serialize<W: Write>(&self, write: &mut W) -> io::Result<()> {
let mut out = vec![];
self.num_vals.serialize(&mut out)?;
@@ -120,35 +121,45 @@ impl BinarySerializable for MultiLinearInterpolFooter {
Ok(())
}
fn deserialize<R: Read>(reader: &mut R) -> io::Result<MultiLinearInterpolFooter> {
let mut footer = MultiLinearInterpolFooter {
fn deserialize<R: Read>(reader: &mut R) -> io::Result<BlockwiseLinearFooter> {
let mut footer = BlockwiseLinearFooter {
num_vals: u64::deserialize(reader)?,
min_value: u64::deserialize(reader)?,
max_value: u64::deserialize(reader)?,
interpolations: Vec::<Function>::deserialize(reader)?,
};
for (num, interpol) in footer.interpolations.iter_mut().enumerate() {
interpol.start_pos = (CHUNK_SIZE * num) as u64;
interpol.start_pos = CHUNK_SIZE * num as u64;
}
Ok(footer)
}
}
#[inline]
fn get_interpolation_function(doc: u64, interpolations: &[Function]) -> &Function {
&interpolations[doc as usize / CHUNK_SIZE]
fn get_interpolation_position(doc: u64) -> usize {
let index = doc / CHUNK_SIZE;
index as usize
}
impl FastFieldCodecReader for MultiLinearInterpolFastFieldReader {
#[inline]
fn get_interpolation_function(doc: u64, interpolations: &[Function]) -> &Function {
&interpolations[get_interpolation_position(doc)]
}
impl FastFieldDataAccess for BlockwiseLinearReader {
#[inline]
fn get_u64(&self, doc: u64) -> u64 {
let interpolation = get_interpolation_function(doc, &self.footer.interpolations);
let doc = doc - interpolation.start_pos;
let calculated_value =
get_calculated_value(interpolation.value_start_pos, doc, interpolation.slope);
let diff = interpolation
.bit_unpacker
.get(doc, &self.data[interpolation.data_start_offset as usize..]);
fn get_val(&self, idx: u64) -> u64 {
let interpolation = get_interpolation_function(idx, &self.footer.interpolations);
let in_block_idx = idx - interpolation.start_pos;
let calculated_value = get_calculated_value(
interpolation.value_start_pos,
in_block_idx,
interpolation.slope,
);
let diff = interpolation.bit_unpacker.get(
in_block_idx,
&self.data[interpolation.data_start_offset as usize..],
);
(calculated_value + diff) - interpolation.positive_val_offset
}
@@ -160,49 +171,41 @@ impl FastFieldCodecReader for MultiLinearInterpolFastFieldReader {
fn max_value(&self) -> u64 {
self.footer.max_value
}
#[inline]
fn num_vals(&self) -> u64 {
self.footer.num_vals
}
}
#[inline]
fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
((last_val as f64 - first_val as f64) / (num_vals as u64 - 1) as f64) as f32
}
/// Same as LinearSerializer, but working on chunks of CHUNK_SIZE elements.
pub struct BlockwiseLinearCodec;
#[inline]
fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
(first_val as i64 + (pos as f32 * slope) as i64) as u64
}
impl FastFieldCodec for BlockwiseLinearCodec {
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::BlockwiseLinear;
/// Same as LinearInterpolFastFieldSerializer, but working on chunks of CHUNK_SIZE elements.
pub struct MultiLinearInterpolFastFieldCodec;
impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
const NAME: &'static str = "MultiLinearInterpol";
type Reader = MultiLinearInterpolFastFieldReader;
type Reader = BlockwiseLinearReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_len: u32 = (&bytes[bytes.len() - 4..]).deserialize()?;
let footer_offset = bytes.len() - 4 - footer_len as usize;
let (data, mut footer) = bytes.split(footer_offset);
let footer = MultiLinearInterpolFooter::deserialize(&mut footer)?;
Ok(MultiLinearInterpolFastFieldReader { data, footer })
let footer = BlockwiseLinearFooter::deserialize(&mut footer)?;
Ok(BlockwiseLinearReader { data, footer })
}
/// Creates a new fast field serializer.
fn serialize(
&self,
write: &mut impl io::Write,
vals: &[u64],
stats: FastFieldStats,
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
assert!(stats.min_value <= stats.max_value);
assert!(fastfield_accessor.min_value() <= fastfield_accessor.max_value());
let first_val = vals[0];
let last_val = vals[vals.len() - 1];
let first_val = fastfield_accessor.get_val(0);
let last_val = fastfield_accessor.get_val(fastfield_accessor.num_vals() as u64 - 1);
let mut first_function = Function {
end_pos: stats.num_vals,
end_pos: fastfield_accessor.num_vals(),
value_start_pos: first_val,
value_end_pos: last_val,
..Default::default()
@@ -210,11 +213,16 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
first_function.calc_slope();
let mut interpolations = vec![first_function];
// Since we potentially apply multiple passes over the data, the data is cached.
// Multiple iteration can be expensive (merge with index sorting can add lot of overhead per
// iteration)
let data = fastfield_accessor.iter().collect::<Vec<_>>();
//// let's split this into chunks of CHUNK_SIZE
for vals_pos in (0..vals.len()).step_by(CHUNK_SIZE).skip(1) {
for data_pos in (0..data.len() as u64).step_by(CHUNK_SIZE as usize).skip(1) {
let new_fun = {
let current_interpolation = interpolations.last_mut().unwrap();
current_interpolation.split(vals_pos as u64, vals[vals_pos])
current_interpolation.split(data_pos, data[data_pos as usize])
};
interpolations.push(new_fun);
}
@@ -222,7 +230,7 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
for interpolation in &mut interpolations {
let mut offset = 0;
let mut rel_positive_max = 0;
for (pos, actual_value) in vals
for (pos, actual_value) in data
[interpolation.start_pos as usize..interpolation.end_pos as usize]
.iter()
.cloned()
@@ -253,7 +261,7 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
for interpolation in &mut interpolations {
interpolation.data_start_offset = write.written_bytes();
let num_bits = interpolation.num_bits;
for (pos, actual_value) in vals
for (pos, actual_value) in data
[interpolation.start_pos as usize..interpolation.end_pos as usize]
.iter()
.cloned()
@@ -271,27 +279,28 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
}
bit_packer.close(write)?;
let footer = MultiLinearInterpolFooter {
num_vals: stats.num_vals,
min_value: stats.min_value,
max_value: stats.max_value,
let footer = BlockwiseLinearFooter {
num_vals: fastfield_accessor.num_vals(),
min_value: fastfield_accessor.min_value(),
max_value: fastfield_accessor.max_value(),
interpolations,
};
footer.serialize(write)?;
Ok(())
}
fn is_applicable(_vals: &[u64], stats: FastFieldStats) -> bool {
if stats.num_vals < 5_000 {
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool {
if fastfield_accessor.num_vals() < 5_000 {
return false;
}
// On serialization the offset is added to the actual value.
// We need to make sure this won't run into overflow calculation issues.
// For this we take the maximum theroretical offset and add this to the max value.
// If this doesn't overflow the algorithm should be fine
let theorethical_maximum_offset = stats.max_value - stats.min_value;
if stats
.max_value
let theorethical_maximum_offset =
fastfield_accessor.max_value() - fastfield_accessor.min_value();
if fastfield_accessor
.max_value()
.checked_add(theorethical_maximum_offset)
.is_none()
{
@@ -302,15 +311,15 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
/// estimation for linear interpolation is hard because, you don't know
/// where the local maxima are for the deviation of the calculated value and
/// the offset is also unknown.
fn estimate(vals: &[u64], stats: FastFieldStats) -> f32 {
// TODO simplify now that we have a vals array.
let first_val_in_first_block = vals[0];
let last_elem_in_first_chunk = CHUNK_SIZE.min(vals.len());
let last_val_in_first_block = vals[last_elem_in_first_chunk - 1];
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
let first_val_in_first_block = fastfield_accessor.get_val(0);
let last_elem_in_first_chunk = CHUNK_SIZE.min(fastfield_accessor.num_vals());
let last_val_in_first_block =
fastfield_accessor.get_val(last_elem_in_first_chunk as u64 - 1);
let slope = get_slope(
first_val_in_first_block,
last_val_in_first_block,
stats.num_vals,
fastfield_accessor.num_vals(),
);
// let's sample at 0%, 5%, 10% .. 95%, 100%, but for the first block only
@@ -320,11 +329,10 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
let max_distance = sample_positions
.iter()
.copied()
.map(|pos| {
let calculated_value =
get_calculated_value(first_val_in_first_block, pos as u64, slope);
let actual_value = vals[pos];
get_calculated_value(first_val_in_first_block, *pos as u64, slope);
let actual_value = fastfield_accessor.get_val(*pos as u64);
distance(calculated_value, actual_value)
})
.max()
@@ -338,10 +346,10 @@ impl FastFieldCodec for MultiLinearInterpolFastFieldCodec {
//
let relative_max_value = (max_distance as f32 * 1.5) * 2.0;
let num_bits = compute_num_bits(relative_max_value as u64) as u64 * stats.num_vals as u64
let num_bits = compute_num_bits(relative_max_value as u64) as u64 * fastfield_accessor.num_vals() as u64
// function metadata per block
+ 29 * (stats.num_vals / CHUNK_SIZE as u64);
let num_bits_uncompressed = 64 * stats.num_vals;
+ 29 * (fastfield_accessor.num_vals() / CHUNK_SIZE);
let num_bits_uncompressed = 64 * fastfield_accessor.num_vals();
num_bits as f32 / num_bits_uncompressed as f32
}
}
@@ -360,7 +368,25 @@ mod tests {
use crate::tests::get_codec_test_data_sets;
fn create_and_validate(data: &[u64], name: &str) -> (f32, f32) {
crate::tests::create_and_validate(&MultiLinearInterpolFastFieldCodec, data, name)
crate::tests::create_and_validate::<BlockwiseLinearCodec, BlockwiseLinearReader>(data, name)
}
const HIGHEST_BIT: u64 = 1 << 63;
pub fn i64_to_u64(val: i64) -> u64 {
(val as u64) ^ HIGHEST_BIT
}
#[test]
fn test_compression_i64() {
let data = (i64::MAX - 600_000..=i64::MAX - 550_000)
.map(i64_to_u64)
.collect::<Vec<_>>();
let (estimate, actual_compression) =
create_and_validate(&data, "simple monotonically large i64");
assert!(actual_compression < 0.2);
assert!(estimate < 0.20);
assert!(estimate > 0.15);
assert!(actual_compression > 0.01);
}
#[test]
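The test above relies on the monotone i64-to-u64 mapping ((val as u64) ^ HIGHEST_BIT). A hedged sketch of that mapping and its inverse, showing why values near i64::MAX end up near u64::MAX after the mapping:

```rust
const HIGHEST_BIT: u64 = 1 << 63;

// Flipping the sign bit maps i64::MIN to 0 and i64::MAX to u64::MAX,
// preserving order.
fn i64_to_u64(val: i64) -> u64 {
    (val as u64) ^ HIGHEST_BIT
}

// Inverse mapping (sketch; mirrors the forward XOR).
fn u64_to_i64(val: u64) -> i64 {
    (val ^ HIGHEST_BIT) as i64
}

fn main() {
    assert_eq!(i64_to_u64(i64::MIN), 0);
    assert_eq!(i64_to_u64(0), HIGHEST_BIT);
    assert_eq!(i64_to_u64(i64::MAX), u64::MAX);

    // The mapping is monotone, so sorted i64 values stay sorted as u64.
    let vals = vec![-5i64, 0, 3, i64::MAX - 1];
    let mapped: Vec<u64> = vals.iter().map(|&v| i64_to_u64(v)).collect();
    assert!(mapped.windows(2).all(|w| w[0] < w[1]));

    // Round trip.
    for &v in &vals {
        assert_eq!(u64_to_i64(i64_to_u64(v)), v);
    }
}
```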

View File

@@ -1,254 +0,0 @@
// Copyright (C) 2022 Quickwit, Inc.
//
// Quickwit is offered under the AGPL v3.0 and as commercial software.
// For commercial licensing, contact us at hello@quickwit.io.
//
// AGPL:
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
use std::io;
use std::num::NonZeroU64;
use std::sync::Arc;
use common::BinarySerializable;
use fastdivide::DividerU64;
use ownedbytes::OwnedBytes;
use crate::bitpacked::BitpackedFastFieldCodec;
use crate::gcd::{find_gcd, GCDFastFieldCodecReader, GCDParams};
use crate::linearinterpol::LinearInterpolCodec;
use crate::multilinearinterpol::MultiLinearInterpolFastFieldCodec;
use crate::{FastFieldCodec, FastFieldCodecReader, FastFieldStats};
pub struct DynamicFastFieldCodec;
impl FastFieldCodec for DynamicFastFieldCodec {
const NAME: &'static str = "dynamic";
type Reader = DynamicFastFieldReader;
fn is_applicable(_vals: &[u64], _stats: crate::FastFieldStats) -> bool {
true
}
fn estimate(_vals: &[u64], _stats: crate::FastFieldStats) -> f32 {
0f32
}
fn serialize(
&self,
wrt: &mut impl io::Write,
vals: &[u64],
stats: crate::FastFieldStats,
) -> io::Result<()> {
let gcd: NonZeroU64 = find_gcd(vals.iter().copied().map(|val| val - stats.min_value))
.unwrap_or(unsafe { NonZeroU64::new_unchecked(1) });
if gcd.get() > 1 {
let gcd_divider = DividerU64::divide_by(gcd.get());
let scaled_vals: Vec<u64> = vals
.iter()
.copied()
.map(|val| gcd_divider.divide(val - stats.min_value))
.collect();
<CodecType as BinarySerializable>::serialize(&CodecType::Gcd, wrt)?;
let gcd_params = GCDParams {
min_value: stats.min_value,
gcd,
};
gcd_params.serialize(wrt)?;
let codec_type = choose_codec(stats, &scaled_vals);
<CodecType as BinarySerializable>::serialize(&codec_type, wrt)?;
let scaled_stats = FastFieldStats::compute(&scaled_vals);
codec_type.serialize(wrt, &scaled_vals, scaled_stats)?;
} else {
let codec_type = choose_codec(stats, vals);
wrt.write_all(&[codec_type.to_code()])?;
codec_type.serialize(wrt, vals, stats)?;
}
Ok(())
}
fn open_from_bytes(mut bytes: OwnedBytes) -> io::Result<Self::Reader> {
let codec_code = bytes.read_u8();
let codec_type = CodecType::from_code(codec_code).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("Unknown codec code `{codec_code}`"),
)
})?;
let fast_field_reader: Arc<dyn FastFieldCodecReader> = match codec_type {
CodecType::Bitpacked => Arc::new(BitpackedFastFieldCodec::open_from_bytes(bytes)?),
CodecType::LinearInterpol => Arc::new(LinearInterpolCodec::open_from_bytes(bytes)?),
CodecType::MultiLinearInterpol => {
Arc::new(MultiLinearInterpolFastFieldCodec::open_from_bytes(bytes)?)
}
CodecType::Gcd => {
let gcd_params = GCDParams::deserialize(&mut bytes)?;
let inner_codec_type = <CodecType as BinarySerializable>::deserialize(&mut bytes)?;
match inner_codec_type {
CodecType::Bitpacked => Arc::new(GCDFastFieldCodecReader {
params: gcd_params,
reader: BitpackedFastFieldCodec::open_from_bytes(bytes)?,
}),
CodecType::LinearInterpol => Arc::new(GCDFastFieldCodecReader {
params: gcd_params,
reader: LinearInterpolCodec::open_from_bytes(bytes)?,
}),
CodecType::MultiLinearInterpol => Arc::new(GCDFastFieldCodecReader {
params: gcd_params,
reader: MultiLinearInterpolFastFieldCodec::open_from_bytes(bytes)?,
}),
CodecType::Gcd => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"A GCD codec may not wrap another GCD codec.",
));
}
}
}
};
Ok(DynamicFastFieldReader(fast_field_reader))
}
}
#[derive(Clone)]
/// DynamicFastFieldReader wraps different readers to access
/// the various encoded fastfield data
pub struct DynamicFastFieldReader(Arc<dyn FastFieldCodecReader>);
#[repr(u8)]
#[derive(Debug, Clone, Copy)]
pub enum CodecType {
Bitpacked = 0,
LinearInterpol = 1,
MultiLinearInterpol = 2,
Gcd = 3,
}
impl BinarySerializable for CodecType {
fn serialize<W: io::Write>(&self, wrt: &mut W) -> io::Result<()> {
wrt.write_all(&[self.to_code()])?;
Ok(())
}
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let codec_code = u8::deserialize(reader)?;
let codec_type = CodecType::from_code(codec_code).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("Invalid codec type code {codec_code}"),
)
})?;
Ok(codec_type)
}
}
impl CodecType {
pub fn from_code(code: u8) -> Option<Self> {
match code {
0 => Some(CodecType::Bitpacked),
1 => Some(CodecType::LinearInterpol),
2 => Some(CodecType::MultiLinearInterpol),
3 => Some(CodecType::Gcd),
_ => None,
}
}
pub fn to_code(self) -> u8 {
self as u8
}
fn codec_estimation(
&self,
stats: FastFieldStats,
vals: &[u64],
estimations: &mut Vec<(f32, CodecType)>,
) {
let estimate_opt: Option<f32> = match self {
CodecType::Bitpacked => codec_estimation::<BitpackedFastFieldCodec>(stats, vals),
CodecType::LinearInterpol => codec_estimation::<LinearInterpolCodec>(stats, vals),
CodecType::MultiLinearInterpol => {
codec_estimation::<MultiLinearInterpolFastFieldCodec>(stats, vals)
}
CodecType::Gcd => None,
};
if let Some(estimate) = estimate_opt {
if !estimate.is_nan() && estimate.is_finite() {
estimations.push((estimate, *self));
}
}
}
fn serialize(
&self,
wrt: &mut impl io::Write,
fastfield_accessor: &[u64],
stats: FastFieldStats,
) -> io::Result<()> {
match self {
CodecType::Bitpacked => {
BitpackedFastFieldCodec.serialize(wrt, fastfield_accessor, stats)?;
}
CodecType::LinearInterpol => {
LinearInterpolCodec.serialize(wrt, fastfield_accessor, stats)?;
}
CodecType::MultiLinearInterpol => {
MultiLinearInterpolFastFieldCodec.serialize(wrt, fastfield_accessor, stats)?;
}
CodecType::Gcd => {
panic!("GCD should never be called that way.");
}
}
Ok(())
}
}
impl FastFieldCodecReader for DynamicFastFieldReader {
fn get_u64(&self, doc: u64) -> u64 {
self.0.get_u64(doc)
}
fn min_value(&self) -> u64 {
self.0.min_value()
}
fn max_value(&self) -> u64 {
self.0.max_value()
}
}
fn codec_estimation<T: FastFieldCodec>(stats: FastFieldStats, vals: &[u64]) -> Option<f32> {
if !T::is_applicable(vals, stats.clone()) {
return None;
}
let ratio = T::estimate(vals, stats);
Some(ratio)
}
const CODEC_TYPES: [CodecType; 3] = [
CodecType::Bitpacked,
CodecType::LinearInterpol,
CodecType::MultiLinearInterpol,
];
fn choose_codec(stats: FastFieldStats, vals: &[u64]) -> CodecType {
let mut estimations = Vec::new();
for codec_type in &CODEC_TYPES {
codec_type.codec_estimation(stats, vals, &mut estimations);
}
estimations.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
let (_ratio, codec_type) = estimations[0];
codec_type
}
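The deleted choose_codec above picks the codec with the smallest estimated compression ratio and discards NaN or infinite estimates. A hedged sketch of that selection, with precomputed (ratio, label) pairs standing in for the per-codec estimation calls:

```rust
// Pick the candidate with the smallest finite estimated ratio.
fn choose_codec(estimations: &[(f32, &'static str)]) -> &'static str {
    let mut candidates: Vec<(f32, &'static str)> = estimations
        .iter()
        .copied()
        .filter(|(ratio, _)| ratio.is_finite()) // drops NaN and infinite estimates
        .collect();
    candidates.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
    candidates[0].1
}

fn main() {
    // Hypothetical estimates for three codecs on some data set.
    let estimations = [
        (0.45f32, "Bitpacked"),
        (0.02f32, "Linear"),
        (f32::NAN, "BlockwiseLinear"), // e.g. not applicable to this data
    ];
    assert_eq!(choose_codec(&estimations), "Linear");
}
```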

View File

@@ -1,247 +0,0 @@
use std::io::{self, Write};
use std::num::NonZeroU64;
use common::BinarySerializable;
use fastdivide::DividerU64;
use crate::FastFieldCodecReader;
/// Wrapper for accessing a fastfield.
///
/// Holds the data and the codec to the read the data.
#[derive(Clone)]
pub struct GCDFastFieldCodecReader<CodecReader> {
pub params: GCDParams,
pub reader: CodecReader,
}
impl<C: FastFieldCodecReader> FastFieldCodecReader for GCDFastFieldCodecReader<C> {
#[inline]
fn get_u64(&self, doc: u64) -> u64 {
self.params.min_value + self.params.gcd.get() * self.reader.get_u64(doc)
}
fn min_value(&self) -> u64 {
self.params.min_value + self.params.gcd.get() * self.reader.min_value()
}
fn max_value(&self) -> u64 {
self.params.min_value + self.params.gcd.get() * self.reader.max_value()
}
}
#[derive(Debug, Copy, Clone)]
pub struct GCDParams {
pub min_value: u64,
pub gcd: NonZeroU64,
}
impl BinarySerializable for GCDParams {
fn serialize<W: Write>(&self, wrt: &mut W) -> io::Result<()> {
self.gcd.get().serialize(wrt)?;
self.min_value.serialize(wrt)?;
Ok(())
}
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let gcd = NonZeroU64::new(u64::deserialize(reader)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "GCD=0 is invalid."))?;
let min_value = u64::deserialize(reader)?;
Ok(GCDParams { min_value, gcd })
}
}
fn compute_gcd(mut left: u64, mut right: u64) -> u64 {
while right != 0 {
(left, right) = (right, left % right);
}
left
}
// Find GCD for iterator of numbers
//
// If all numbers are '0' (or if there are not numbers, return None).
pub fn find_gcd(numbers: impl Iterator<Item = u64>) -> Option<NonZeroU64> {
let mut numbers = numbers.filter(|n| *n != 0);
let mut gcd = numbers.next()?;
if gcd == 1 {
return NonZeroU64::new(gcd);
}
let mut gcd_divider = DividerU64::divide_by(gcd);
for val in numbers {
let remainder = val - gcd_divider.divide(val) * gcd;
if remainder == 0 {
continue;
}
gcd = compute_gcd(gcd, val);
if gcd == 1 {
return NonZeroU64::new(1);
}
gcd_divider = DividerU64::divide_by(gcd);
}
NonZeroU64::new(gcd)
}
#[cfg(test)]
mod tests {
// TODO Move test
//
// use std::collections::HashMap;
// use std::path::Path;
//
// use crate::directory::{CompositeFile, RamDirectory, WritePtr};
// use crate::fastfield::serializer::FastFieldCodecEnableCheck;
// use crate::fastfield::tests::{FIELD, FIELDI64, SCHEMA, SCHEMAI64};
// use super::{
// find_gcd, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldCodecName,
// FastFieldReader, FastFieldsWriter, ALL_CODECS,
// };
// use crate::schema::Schema;
// use crate::Directory;
//
// fn get_index(
// docs: &[crate::Document],
// schema: &Schema,
// codec_enable_checker: FastFieldCodecEnableCheck,
// ) -> crate::Result<RamDirectory> {
// let directory: RamDirectory = RamDirectory::create();
// {
// let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
// let mut serializer =
// CompositeFastFieldSerializer::from_write_with_codec(write, codec_enable_checker)
// .unwrap();
// let mut fast_field_writers = FastFieldsWriter::from_schema(schema);
// for doc in docs {
// fast_field_writers.add_document(doc);
// }
// fast_field_writers
// .serialize(&mut serializer, &HashMap::new(), None)
// .unwrap();
// serializer.close().unwrap();
// }
// Ok(directory)
// }
//
// fn test_fastfield_gcd_i64_with_codec(
// codec_name: FastFieldCodecName,
// num_vals: usize,
// ) -> crate::Result<()> {
// let path = Path::new("test");
// let mut docs = vec![];
// for i in 1..=num_vals {
// let val = i as i64 * 1000i64;
// docs.push(doc!(*FIELDI64=>val));
// }
// let directory = get_index(&docs, &SCHEMAI64, codec_name.clone().into())?;
// let file = directory.open_read(path).unwrap();
// assert_eq!(file.len(), 118);
// let composite_file = CompositeFile::open(&file)?;
// let file = composite_file.open_read(*FIELD).unwrap();
// let fast_field_reader = DynamicFastFieldReader::<i64>::open(file)?;
// assert_eq!(fast_field_reader.get(0), 1000i64);
// assert_eq!(fast_field_reader.get(1), 2000i64);
// assert_eq!(fast_field_reader.get(2), 3000i64);
// assert_eq!(fast_field_reader.max_value(), num_vals as i64 * 1000);
// assert_eq!(fast_field_reader.min_value(), 1000i64);
// let file = directory.open_read(path).unwrap();
//
// Can't apply gcd
// let path = Path::new("test");
// docs.pop();
// docs.push(doc!(*FIELDI64=>2001i64));
// let directory = get_index(&docs, &SCHEMAI64, codec_name.into())?;
// let file2 = directory.open_read(path).unwrap();
// assert!(file2.len() > file.len());
//
// Ok(())
// }
//
// #[test]
// fn test_fastfield_gcd_i64() -> crate::Result<()> {
// for codec_name in ALL_CODECS {
// test_fastfield_gcd_i64_with_codec(codec_name.clone(), 5005)?;
// }
// Ok(())
// }
//
// fn test_fastfield_gcd_u64_with_codec(
// codec_name: FastFieldCodecName,
// num_vals: usize,
// ) -> crate::Result<()> {
// let path = Path::new("test");
// let mut docs = vec![];
// for i in 1..=num_vals {
// let val = i as u64 * 1000u64;
// docs.push(doc!(*FIELD=>val));
// }
// let directory = get_index(&docs, &SCHEMA, codec_name.clone().into())?;
// let file = directory.open_read(path).unwrap();
// assert_eq!(file.len(), 118);
// let composite_file = CompositeFile::open(&file)?;
// let file = composite_file.open_read(*FIELD).unwrap();
// let fast_field_reader = DynamicFastFieldReader::<u64>::open(file)?;
// assert_eq!(fast_field_reader.get(0), 1000u64);
// assert_eq!(fast_field_reader.get(1), 2000u64);
// assert_eq!(fast_field_reader.get(2), 3000u64);
// assert_eq!(fast_field_reader.max_value(), num_vals as u64 * 1000);
// assert_eq!(fast_field_reader.min_value(), 1000u64);
// let file = directory.open_read(path).unwrap();
//
// Can't apply gcd
// let path = Path::new("test");
// docs.pop();
// docs.push(doc!(*FIELDI64=>2001u64));
// let directory = get_index(&docs, &SCHEMA, codec_name.into())?;
// let file2 = directory.open_read(path).unwrap();
// assert!(file2.len() > file.len());
//
// Ok(())
// }
//
// #[test]
// fn test_fastfield_gcd_u64() -> crate::Result<()> {
// for codec_name in ALL_CODECS {
// test_fastfield_gcd_u64_with_codec(codec_name.clone(), 5005)?;
// }
// Ok(())
// }
//
// #[test]
// pub fn test_fastfield2() {
// let test_fastfield = DynamicFastFieldReader::<u64>::from(vec![100, 200, 300]);
// assert_eq!(test_fastfield.get(0), 100);
// assert_eq!(test_fastfield.get(1), 200);
// assert_eq!(test_fastfield.get(2), 300);
// }
use std::num::NonZeroU64;
use crate::gcd::{compute_gcd, find_gcd};
#[test]
fn test_compute_gcd() {
assert_eq!(compute_gcd(0, 0), 0);
assert_eq!(compute_gcd(4, 0), 4);
assert_eq!(compute_gcd(0, 4), 4);
assert_eq!(compute_gcd(1, 4), 1);
assert_eq!(compute_gcd(4, 1), 1);
assert_eq!(compute_gcd(4, 2), 2);
assert_eq!(compute_gcd(10, 25), 5);
assert_eq!(compute_gcd(25, 10), 5);
assert_eq!(compute_gcd(25, 25), 25);
}
#[test]
fn find_gcd_test() {
assert_eq!(find_gcd([0].into_iter()), None);
assert_eq!(find_gcd([0, 10].into_iter()), NonZeroU64::new(10));
assert_eq!(find_gcd([10, 0].into_iter()), NonZeroU64::new(10));
assert_eq!(find_gcd([].into_iter()), None);
assert_eq!(find_gcd([15, 30, 5, 10].into_iter()), NonZeroU64::new(5));
assert_eq!(find_gcd([15, 16, 10].into_iter()), NonZeroU64::new(1));
assert_eq!(find_gcd([0, 5, 5, 5].into_iter()), NonZeroU64::new(5));
assert_eq!(find_gcd([0, 0].into_iter()), None);
}
}
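A hedged sketch of the arithmetic behind the GCD wrapper and find_gcd above: values are stored as (val - min_value) / gcd and reconstructed as min_value + gcd * stored, which is what GCDFastFieldCodecReader::get_u64 computes.

```rust
// Euclid's algorithm, as in compute_gcd above.
fn compute_gcd(mut left: u64, mut right: u64) -> u64 {
    while right != 0 {
        (left, right) = (right, left % right);
    }
    left
}

fn main() {
    let vals = [1000u64, 3000, 7000, 2000];
    let min_value = *vals.iter().min().unwrap();

    // GCD of the min-shifted values; zeros are skipped, mirroring find_gcd.
    let gcd = vals
        .iter()
        .map(|&v| v - min_value)
        .filter(|&d| d != 0)
        .fold(0, compute_gcd);
    assert_eq!(gcd, 1000);

    // Values are stored divided by the GCD...
    let stored: Vec<u64> = vals.iter().map(|&v| (v - min_value) / gcd).collect();
    assert_eq!(stored, vec![0, 2, 6, 1]);

    // ...and reconstructed as min_value + gcd * stored, as in the reader above.
    let restored: Vec<u64> = stored.iter().map(|&s| min_value + gcd * s).collect();
    assert_eq!(restored, vals);
}
```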

View File

@@ -3,121 +3,196 @@
extern crate more_asserts;
use std::io;
use std::io::Write;
use common::BinarySerializable;
use ownedbytes::OwnedBytes;
pub mod bitpacked;
pub mod dynamic;
pub mod gcd;
pub mod linearinterpol;
pub mod multilinearinterpol;
pub mod blockwise_linear;
pub mod linear;
// Unify with FastFieldReader
pub trait FastFieldCodecReader {
/// reads the metadata and returns the CodecReader
fn get_u64(&self, doc: u64) -> u64;
pub trait FastFieldDataAccess {
fn get_val(&self, doc: u64) -> u64;
fn min_value(&self) -> u64;
fn max_value(&self) -> u64;
fn num_vals(&self) -> u64;
/// Returns a iterator over the data
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = u64> + 'a> {
Box::new((0..self.num_vals()).map(|idx| self.get_val(idx)))
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)]
#[repr(u8)]
pub enum FastFieldCodecType {
Bitpacked = 1,
Linear = 2,
BlockwiseLinear = 3,
Gcd = 4,
}
impl BinarySerializable for FastFieldCodecType {
fn serialize<W: Write>(&self, wrt: &mut W) -> io::Result<()> {
self.to_code().serialize(wrt)
}
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let code = u8::deserialize(reader)?;
let codec_type: Self = Self::from_code(code)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "Unknown code `{code}.`"))?;
Ok(codec_type)
}
}
impl FastFieldCodecType {
pub fn to_code(self) -> u8 {
self as u8
}
pub fn from_code(code: u8) -> Option<Self> {
match code {
1 => Some(Self::Bitpacked),
2 => Some(Self::Linear),
3 => Some(Self::BlockwiseLinear),
4 => Some(Self::Gcd),
_ => None,
}
}
}
/// The FastFieldSerializerEstimate trait is required on all variants
/// of fast field compressions, to decide which one to choose.
pub trait FastFieldCodec {
/// A codex needs to provide a unique name used for debugging.
const NAME: &'static str;
/// A codex needs to provide a unique name and id, which is
/// used for debugging and de/serialization.
const CODEC_TYPE: FastFieldCodecType;
type Reader: FastFieldCodecReader;
type Reader: FastFieldDataAccess;
/// Reads the metadata and returns the CodecReader
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader>;
/// Serializes the data using the serializer into write.
///
/// The fastfield_accessor iterator should be preferred over using fastfield_accessor for
/// performance reasons.
fn serialize(
write: &mut impl Write,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()>;
/// Check if the Codec is able to compress the data
fn is_applicable(vals: &[u64], stats: FastFieldStats) -> bool;
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool;
/// Returns an estimate of the compression ratio.
/// The baseline is uncompressed 64bit data.
///
/// It could make sense to also return a value representing
/// computational complexity.
fn estimate(vals: &[u64], stats: FastFieldStats) -> f32;
/// Serializes the data using the serializer into write.
/// There are multiple iterators, in case the codec needs to read the data multiple times.
/// The iterators should be preferred over using fastfield_accessor for performance reasons.
fn serialize(
&self,
write: &mut impl io::Write,
vals: &[u64],
stats: FastFieldStats,
) -> io::Result<()>;
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader>;
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32;
}
#[derive(Debug, Clone)]
/// Statistics are used in codec detection and stored in the fast field footer.
#[derive(Clone, Copy, Default, Debug)]
pub struct FastFieldStats {
pub min_value: u64,
pub max_value: u64,
pub num_vals: u64,
}
impl FastFieldStats {
pub fn compute(vals: &[u64]) -> Self {
if vals.is_empty() {
return FastFieldStats::default();
}
let first_val = vals[0];
let mut fast_field_stats = FastFieldStats {
min_value: first_val,
max_value: first_val,
num_vals: 1,
};
for &val in &vals[1..] {
fast_field_stats.record(val);
}
fast_field_stats
impl<'a> FastFieldDataAccess for &'a [u64] {
fn get_val(&self, position: u64) -> u64 {
self[position as usize]
}
pub fn record(&mut self, val: u64) {
self.num_vals += 1;
self.min_value = self.min_value.min(val);
self.max_value = self.max_value.max(val);
fn iter<'b>(&'b self) -> Box<dyn Iterator<Item = u64> + 'b> {
Box::new((self as &[u64]).iter().cloned())
}
fn min_value(&self) -> u64 {
self.iter().min().unwrap_or(0)
}
fn max_value(&self) -> u64 {
self.iter().max().unwrap_or(0)
}
fn num_vals(&self) -> u64 {
self.len() as u64
}
}
impl FastFieldDataAccess for Vec<u64> {
fn get_val(&self, position: u64) -> u64 {
self[position as usize]
}
fn iter<'b>(&'b self) -> Box<dyn Iterator<Item = u64> + 'b> {
Box::new((self as &[u64]).iter().cloned())
}
fn min_value(&self) -> u64 {
self.iter().min().unwrap_or(0)
}
fn max_value(&self) -> u64 {
self.iter().max().unwrap_or(0)
}
fn num_vals(&self) -> u64 {
self.len() as u64
}
}
#[cfg(test)]
mod tests {
use crate::bitpacked::BitpackedFastFieldCodec;
use crate::linearinterpol::LinearInterpolCodec;
use crate::multilinearinterpol::MultiLinearInterpolFastFieldCodec;
use proptest::arbitrary::any;
use proptest::proptest;
pub fn create_and_validate<S: FastFieldCodec>(
codec: &S,
data: &[u64],
name: &str,
) -> (f32, f32) {
if !S::is_applicable(&data, crate::tests::stats_from_vec(data)) {
use crate::bitpacked::BitpackedCodec;
use crate::blockwise_linear::BlockwiseLinearCodec;
use crate::linear::LinearCodec;
pub fn create_and_validate<Codec: FastFieldCodec>(data: &[u64], name: &str) -> (f32, f32) {
if !Codec::is_applicable(&data) {
return (f32::MAX, 0.0);
}
let estimation = S::estimate(&data, crate::tests::stats_from_vec(data));
let estimation = Codec::estimate(&data);
let mut out: Vec<u8> = Vec::new();
codec
.serialize(&mut out, &data, crate::tests::stats_from_vec(data))
.unwrap();
Codec::serialize(&mut out, &data).unwrap();
let actual_compression = out.len() as f32 / (data.len() as f32 * 8.0);
let reader = S::open_from_bytes(OwnedBytes::new(out)).unwrap();
let reader = Codec::open_from_bytes(OwnedBytes::new(out)).unwrap();
assert_eq!(reader.num_vals(), data.len() as u64);
for (doc, orig_val) in data.iter().enumerate() {
let val = reader.get_u64(doc as u64);
let val = reader.get_val(doc as u64);
if val != *orig_val {
panic!(
"val {:?} does not match orig_val {:?}, in data set {}, data {:?}",
val, orig_val, name, data
"val {val:?} does not match orig_val {orig_val:?}, in data set {name}, data \
{data:?}",
);
}
}
(estimation, actual_compression)
}
proptest! {
#[test]
fn test_proptest_small(data in proptest::collection::vec(any::<u64>(), 1..10)) {
create_and_validate::<LinearCodec>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearCodec>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedCodec>(&data, "proptest bitpacked");
}
#[test]
fn test_proptest_large(data in proptest::collection::vec(any::<u64>(), 1..6000)) {
create_and_validate::<LinearCodec>(&data, "proptest linearinterpol");
create_and_validate::<BlockwiseLinearCodec>(&data, "proptest multilinearinterpol");
create_and_validate::<BitpackedCodec>(&data, "proptest bitpacked");
}
}
pub fn get_codec_test_data_sets() -> Vec<(Vec<u64>, &'static str)> {
let mut data_and_names = vec![];
@@ -134,70 +209,55 @@ mod tests {
data_and_names
}
fn test_codec<C: FastFieldCodec>(codec: &C) {
let codec_name = C::NAME;
for (data, data_set_name) in get_codec_test_data_sets() {
let (estimate, actual) = crate::tests::create_and_validate(codec, &data, data_set_name);
fn test_codec<C: FastFieldCodec>() {
let codec_name = format!("{:?}", C::CODEC_TYPE);
for (data, dataset_name) in get_codec_test_data_sets() {
let (estimate, actual) = crate::tests::create_and_validate::<C>(&data, dataset_name);
let result = if estimate == f32::MAX {
"Disabled".to_string()
} else {
format!("Estimate {:?} Actual {:?} ", estimate, actual)
format!("Estimate `{estimate}` Actual `{actual}`")
};
println!(
"Codec {}, DataSet {}, {}",
codec_name, data_set_name, result
);
println!("Codec {codec_name}, DataSet {dataset_name}, {result}");
}
}
#[test]
fn test_codec_bitpacking() {
test_codec(&BitpackedFastFieldCodec);
test_codec::<BitpackedCodec>();
}
#[test]
fn test_codec_interpolation() {
test_codec(&LinearInterpolCodec);
test_codec::<LinearCodec>();
}
#[test]
fn test_codec_multi_interpolation() {
test_codec(&MultiLinearInterpolFastFieldCodec);
test_codec::<BlockwiseLinearCodec>();
}
use super::*;
pub fn stats_from_vec(data: &[u64]) -> FastFieldStats {
let min_value = data.iter().cloned().min().unwrap_or(0);
let max_value = data.iter().cloned().max().unwrap_or(0);
FastFieldStats {
min_value,
max_value,
num_vals: data.len() as u64,
}
}
#[test]
fn estimation_good_interpolation_case() {
let data = (10..=20000_u64).collect::<Vec<_>>();
let linear_interpol_estimation =
LinearInterpolCodec::estimate(&data, stats_from_vec(&data));
let linear_interpol_estimation = LinearCodec::estimate(&data);
assert_le!(linear_interpol_estimation, 0.01);
let multi_linear_interpol_estimation =
MultiLinearInterpolFastFieldCodec::estimate(&&data[..], stats_from_vec(&data));
let multi_linear_interpol_estimation = BlockwiseLinearCodec::estimate(&data);
assert_le!(multi_linear_interpol_estimation, 0.2);
assert_le!(linear_interpol_estimation, multi_linear_interpol_estimation);
let bitpacked_estimation = BitpackedFastFieldCodec::estimate(&data, stats_from_vec(&data));
let bitpacked_estimation = BitpackedCodec::estimate(&data);
assert_le!(linear_interpol_estimation, bitpacked_estimation);
}
#[test]
fn estimation_test_bad_interpolation_case() {
let data = vec![200, 10, 10, 10, 10, 1000, 20];
let linear_interpol_estimation =
LinearInterpolCodec::estimate(&data, stats_from_vec(&data));
let linear_interpol_estimation = LinearCodec::estimate(&data);
assert_le!(linear_interpol_estimation, 0.32);
let bitpacked_estimation = BitpackedFastFieldCodec::estimate(&data, stats_from_vec(&data));
let bitpacked_estimation = BitpackedCodec::estimate(&data);
assert_le!(bitpacked_estimation, linear_interpol_estimation);
}
#[test]
@@ -207,12 +267,23 @@ mod tests {
// in this case the linear interpolation can't in fact not be worse than bitpacking,
// but the estimator adds some threshold, which leads to estimated worse behavior
let linear_interpol_estimation =
LinearInterpolCodec::estimate(&data, stats_from_vec(&data));
let linear_interpol_estimation = LinearCodec::estimate(&data);
assert_le!(linear_interpol_estimation, 0.35);
let bitpacked_estimation = BitpackedFastFieldCodec::estimate(&data, stats_from_vec(&data));
let bitpacked_estimation = BitpackedCodec::estimate(&data);
assert_le!(bitpacked_estimation, 0.32);
assert_le!(bitpacked_estimation, linear_interpol_estimation);
}
#[test]
fn test_fast_field_codec_type_to_code() {
let mut count_codec = 0;
for code in 0..=255 {
if let Some(codec_type) = FastFieldCodecType::from_code(code) {
assert_eq!(codec_type.to_code(), code);
count_codec += 1;
}
}
assert_eq!(count_codec, 4);
}
}
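Putting the merged trait together, a round trip based on the signatures shown in this diff might look like the hedged sketch below; the crate and module paths follow the bench imports earlier in this compare view and are assumptions, not verified against a released version.

```rust
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::{FastFieldCodec, FastFieldDataAccess};
use ownedbytes::OwnedBytes;

fn main() -> std::io::Result<()> {
    // Vec<u64> implements FastFieldDataAccess (see the impl above), so the data
    // can be handed to the codec directly.
    let data: Vec<u64> = (10..=10_000u64).collect();

    if LinearCodec::is_applicable(&data) {
        let estimate = LinearCodec::estimate(&data);
        println!("estimated compression ratio: {estimate}");

        let mut bytes: Vec<u8> = Vec::new();
        LinearCodec::serialize(&mut bytes, &data)?;

        let reader = LinearCodec::open_from_bytes(OwnedBytes::new(bytes))?;
        assert_eq!(reader.num_vals(), data.len() as u64);
        assert_eq!(reader.get_val(5), data[5]);
        assert_eq!(reader.min_value(), 10);
    }
    Ok(())
}
```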

View File

@@ -5,20 +5,20 @@ use common::{BinarySerializable, FixedSize};
use ownedbytes::OwnedBytes;
use tantivy_bitpacker::{compute_num_bits, BitPacker, BitUnpacker};
use crate::{FastFieldCodec, FastFieldCodecReader, FastFieldStats};
use crate::{FastFieldCodec, FastFieldCodecType, FastFieldDataAccess};
/// Depending on the field type, a different
/// fast field is required.
#[derive(Clone)]
pub struct LinearInterpolFastFieldReader {
pub struct LinearReader {
data: OwnedBytes,
bit_unpacker: BitUnpacker,
pub footer: LinearInterpolFooter,
pub footer: LinearFooter,
pub slope: f32,
}
#[derive(Clone, Debug)]
pub struct LinearInterpolFooter {
pub struct LinearFooter {
pub relative_max_value: u64,
pub offset: u64,
pub first_val: u64,
@@ -28,7 +28,7 @@ pub struct LinearInterpolFooter {
pub max_value: u64,
}
impl BinarySerializable for LinearInterpolFooter {
impl BinarySerializable for LinearFooter {
fn serialize<W: Write>(&self, write: &mut W) -> io::Result<()> {
self.relative_max_value.serialize(write)?;
self.offset.serialize(write)?;
@@ -40,8 +40,8 @@ impl BinarySerializable for LinearInterpolFooter {
Ok(())
}
fn deserialize<R: Read>(reader: &mut R) -> io::Result<LinearInterpolFooter> {
Ok(LinearInterpolFooter {
fn deserialize<R: Read>(reader: &mut R) -> io::Result<LinearFooter> {
Ok(LinearFooter {
relative_max_value: u64::deserialize(reader)?,
offset: u64::deserialize(reader)?,
first_val: u64::deserialize(reader)?,
@@ -53,13 +53,13 @@ impl BinarySerializable for LinearInterpolFooter {
}
}
impl FixedSize for LinearInterpolFooter {
impl FixedSize for LinearFooter {
const SIZE_IN_BYTES: usize = 56;
}
impl FastFieldCodecReader for LinearInterpolFastFieldReader {
impl FastFieldDataAccess for LinearReader {
#[inline]
fn get_u64(&self, doc: u64) -> u64 {
fn get_val(&self, doc: u64) -> u64 {
let calculated_value = get_calculated_value(self.footer.first_val, doc, self.slope);
(calculated_value + self.bit_unpacker.get(doc, &self.data)) - self.footer.offset
}
@@ -72,41 +72,69 @@ impl FastFieldCodecReader for LinearInterpolFastFieldReader {
fn max_value(&self) -> u64 {
self.footer.max_value
}
#[inline]
fn num_vals(&self) -> u64 {
self.footer.num_vals
}
}
/// Fastfield serializer, which tries to guess values by linear interpolation
/// and stores the difference bitpacked.
pub struct LinearInterpolCodec;
pub struct LinearCodec;
#[inline]
fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
pub(crate) fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
if num_vals <= 1 {
return 0.0;
}
// We calculate the slope with f64 high precision and use the result in lower precision f32
// This is done in order to handle estimations for very large values like i64::MAX
((last_val as f64 - first_val as f64) / (num_vals as u64 - 1) as f64) as f32
let diff = diff(last_val, first_val);
(diff / (num_vals - 1) as f64) as f32
}
/// Delay the cast, to improve precision for very large u64 values.
///
/// Since i64 is mapped monotonically to u64 space, 0i64 ends up right above i64::MAX after the mapping.
/// So very large values are not uncommon.
///
/// ```rust
/// let val1 = i64::MAX;
/// let val2 = i64::MAX - 100;
/// assert_eq!(val1 - val2, 100);
/// assert_eq!(val1 as f64 - val2 as f64, 0.0);
/// ```
fn diff(val1: u64, val2: u64) -> f64 {
if val1 >= val2 {
(val1 - val2) as f64
} else {
(val2 - val1) as f64 * -1.0
}
}
#[inline]
fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
first_val + (pos as f32 * slope) as u64
pub fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
if slope < 0.0 {
first_val - (pos as f32 * -slope) as u64
} else {
first_val + (pos as f32 * slope) as u64
}
}
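Taken together, the slope computation and the negative-slope branch keep the whole prediction in u64 arithmetic, which matters for values near i64::MAX after the i64-to-u64 mapping. A hedged standalone sketch that mirrors the shape of the two functions above (the constants in main are only illustrative):

fn get_slope(first_val: u64, last_val: u64, num_vals: u64) -> f32 {
    if num_vals <= 1 {
        return 0.0;
    }
    // Subtract in u64 first, then cast: f64 cannot tell i64::MAX and i64::MAX - 100 apart.
    let diff = if last_val >= first_val {
        (last_val - first_val) as f64
    } else {
        -((first_val - last_val) as f64)
    };
    (diff / (num_vals - 1) as f64) as f32
}

fn get_calculated_value(first_val: u64, pos: u64, slope: f32) -> u64 {
    if slope < 0.0 {
        // A negative prediction has to be applied as a subtraction to stay in u64.
        first_val - (pos as f32 * -slope) as u64
    } else {
        first_val + (pos as f32 * slope) as u64
    }
}

fn main() {
    let first = i64::MAX as u64;
    let last = first - 1_000;
    let slope = get_slope(first, last, 101);
    assert_eq!(slope, -10.0);
    // Prediction for position 10 in a sequence that decreases by 10 per step.
    assert_eq!(get_calculated_value(first, 10, slope), first - 100);
}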
impl FastFieldCodec for LinearInterpolCodec {
const NAME: &'static str = "LinearInterpol";
impl FastFieldCodec for LinearCodec {
const CODEC_TYPE: FastFieldCodecType = FastFieldCodecType::Linear;
type Reader = LinearInterpolFastFieldReader;
type Reader = LinearReader;
/// Opens a fast field given a file.
fn open_from_bytes(bytes: OwnedBytes) -> io::Result<Self::Reader> {
let footer_offset = bytes.len() - LinearInterpolFooter::SIZE_IN_BYTES;
let footer_offset = bytes.len() - LinearFooter::SIZE_IN_BYTES;
let (data, mut footer) = bytes.split(footer_offset);
let footer = LinearInterpolFooter::deserialize(&mut footer)?;
let footer = LinearFooter::deserialize(&mut footer)?;
let slope = get_slope(footer.first_val, footer.last_val, footer.num_vals);
let num_bits = compute_num_bits(footer.relative_max_value);
let bit_unpacker = BitUnpacker::new(num_bits);
Ok(LinearInterpolFastFieldReader {
Ok(LinearReader {
data,
bit_unpacker,
footer,
@@ -116,21 +144,18 @@ impl FastFieldCodec for LinearInterpolCodec {
/// Creates a new fast field serializer.
fn serialize(
&self,
write: &mut impl Write,
vals: &[u64],
stats: FastFieldStats,
fastfield_accessor: &dyn FastFieldDataAccess,
) -> io::Result<()> {
assert!(stats.min_value <= stats.max_value);
assert!(fastfield_accessor.min_value() <= fastfield_accessor.max_value());
let first_val = vals[0];
let last_val = vals[vals.len() - 1];
let slope = get_slope(first_val, last_val, stats.num_vals);
let first_val = fastfield_accessor.get_val(0);
let last_val = fastfield_accessor.get_val(fastfield_accessor.num_vals() as u64 - 1);
let slope = get_slope(first_val, last_val, fastfield_accessor.num_vals());
// calculate offset to ensure all values are positive
let mut offset = 0;
let mut rel_positive_max = 0;
for (pos, actual_value) in vals.iter().copied().enumerate() {
for (pos, actual_value) in fastfield_accessor.iter().enumerate() {
let calculated_value = get_calculated_value(first_val, pos as u64, slope);
if calculated_value > actual_value {
// negative value we need to apply an offset
@@ -148,36 +173,37 @@ impl FastFieldCodec for LinearInterpolCodec {
let num_bits = compute_num_bits(relative_max_value);
let mut bit_packer = BitPacker::new();
for (pos, val) in vals.iter().copied().enumerate() {
for (pos, val) in fastfield_accessor.iter().enumerate() {
let calculated_value = get_calculated_value(first_val, pos as u64, slope);
let diff = (val + offset) - calculated_value;
bit_packer.write(diff, num_bits, write)?;
}
bit_packer.close(write)?;
let footer = LinearInterpolFooter {
let footer = LinearFooter {
relative_max_value,
offset,
first_val,
last_val,
num_vals: stats.num_vals,
min_value: stats.min_value,
max_value: stats.max_value,
num_vals: fastfield_accessor.num_vals(),
min_value: fastfield_accessor.min_value(),
max_value: fastfield_accessor.max_value(),
};
footer.serialize(write)?;
Ok(())
}
fn is_applicable(_vals: &[u64], stats: FastFieldStats) -> bool {
if stats.num_vals < 3 {
fn is_applicable(fastfield_accessor: &impl FastFieldDataAccess) -> bool {
if fastfield_accessor.num_vals() < 3 {
return false; // disable compressor for this case
}
// On serialisation the offset is added to the actual value.
// We need to make sure this won't run into overflow calculation issues.
// For this we take the maximum theoretical offset and add it to the max value.
// If this doesn't overflow, the algorithm should be fine.
let theorethical_maximum_offset = stats.max_value - stats.min_value;
if stats
.max_value
let theorethical_maximum_offset =
fastfield_accessor.max_value() - fastfield_accessor.min_value();
if fastfield_accessor
.max_value()
.checked_add(theorethical_maximum_offset)
.is_none()
{
@@ -188,22 +214,22 @@ impl FastFieldCodec for LinearInterpolCodec {
/// estimation for linear interpolation is hard because you don't know
/// where the local maxima for the deviation of the calculated value are and
/// the offset to shift all values to >=0 is also unknown.
fn estimate(vals: &[u64], stats: FastFieldStats) -> f32 {
let first_val = vals[0];
let last_val = vals[vals.len() - 1];
let slope = get_slope(first_val, last_val, stats.num_vals);
fn estimate(fastfield_accessor: &impl FastFieldDataAccess) -> f32 {
let first_val = fastfield_accessor.get_val(0);
let last_val = fastfield_accessor.get_val(fastfield_accessor.num_vals() as u64 - 1);
let slope = get_slope(first_val, last_val, fastfield_accessor.num_vals());
// let's sample at 0%, 5%, 10% .. 95%, 100%
let num_vals = stats.num_vals as f32 / 100.0;
let sample_positions: Vec<usize> = (0..20)
let num_vals = fastfield_accessor.num_vals() as f32 / 100.0;
let sample_positions = (0..20)
.map(|pos| (num_vals * pos as f32 * 5.0) as usize)
.collect::<Vec<_>>();
let max_distance = sample_positions
.into_iter()
.iter()
.map(|pos| {
let calculated_value = get_calculated_value(first_val, pos as u64, slope);
let actual_value = vals[pos];
let calculated_value = get_calculated_value(first_val, *pos as u64, slope);
let actual_value = fastfield_accessor.get_val(*pos as u64);
distance(calculated_value, actual_value)
})
.max()
@@ -216,9 +242,10 @@ impl FastFieldCodec for LinearInterpolCodec {
//
let relative_max_value = (max_distance as f32 * 1.5) * 2.0;
let num_bits = compute_num_bits(relative_max_value as u64) as u64 * stats.num_vals as u64
+ LinearInterpolFooter::SIZE_IN_BYTES as u64;
let num_bits_uncompressed = 64 * stats.num_vals;
let num_bits = compute_num_bits(relative_max_value as u64) as u64
* fastfield_accessor.num_vals()
+ LinearFooter::SIZE_IN_BYTES as u64;
let num_bits_uncompressed = 64 * fastfield_accessor.num_vals();
num_bits as f32 / num_bits_uncompressed as f32
}
}
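Note that the estimate only samples 20 positions instead of scanning every value, then derives the bit width from the worst sampled deviation. A hedged standalone sketch of that sampling idea over a plain slice; the exact sample points and the 1.5 safety factor used by the real codec are omitted:

// Hedged sketch: sample ~20 evenly spaced positions and keep the worst
// absolute deviation from the straight line between first and last value.
fn max_sampled_deviation(vals: &[u64]) -> u64 {
    let n = vals.len();
    assert!(n >= 3);
    let first = vals[0] as f64;
    let last = vals[n - 1] as f64;
    let slope = (last - first) / (n - 1) as f64;
    (0..20)
        .map(|i| i * (n - 1) / 19) // 0%, ~5%, ..., 100% of the positions
        .map(|pos| {
            let predicted = first + slope * pos as f64;
            (vals[pos] as f64 - predicted).abs() as u64
        })
        .max()
        .unwrap_or(0)
}

fn main() {
    // A perfectly linear sequence deviates by (almost) nothing from the interpolation.
    let data: Vec<u64> = (10..=20_000).collect();
    assert!(max_sampled_deviation(&data) <= 1);
}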
@@ -238,7 +265,27 @@ mod tests {
use crate::tests::get_codec_test_data_sets;
fn create_and_validate(data: &[u64], name: &str) -> (f32, f32) {
crate::tests::create_and_validate(&LinearInterpolCodec, data, name)
crate::tests::create_and_validate::<LinearCodec, LinearReader>(data, name)
}
#[test]
fn get_calculated_value_test() {
// pos slope
assert_eq!(get_calculated_value(100, 10, 5.0), 150);
// neg slope
assert_eq!(get_calculated_value(100, 10, -5.0), 50);
// pos slope, very high values
assert_eq!(
get_calculated_value(i64::MAX as u64, 10, 5.0),
i64::MAX as u64 + 50
);
// neg slope, very high values
assert_eq!(
get_calculated_value(i64::MAX as u64, 10, -5.0),
i64::MAX as u64 - 50
);
}
#[test]

View File

@@ -1,9 +1,8 @@
#[macro_use]
extern crate prettytable;
// use fastfield_codecs::linearinterpol::LinearInterpolFastFieldSerializer;
// use fastfield_codecs::multilinearinterpol::MultiLinearInterpolFastFieldSerializer;
use fastfield_codecs::bitpacked::BitpackedFastFieldCodec;
use fastfield_codecs::{FastFieldCodec, FastFieldStats};
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::{FastFieldCodec, FastFieldCodecType, FastFieldStats};
use prettytable::{Cell, Row, Table};
fn main() {
@@ -13,12 +12,12 @@ fn main() {
table.add_row(row!["", "Compression Ratio", "Compression Estimation"]);
for (data, data_set_name) in get_codec_test_data_sets() {
let mut results = Vec::new();
// let res = serialize_with_codec::<LinearInterpolFastFieldSerializer>(&data);
// results.push(res);
// let res = serialize_with_codec::<MultiLinearInterpolFastFieldSerializer>(&data);
// results.push(res);
let res = serialize_with_codec(&BitpackedFastFieldCodec, &data);
let mut results = vec![];
let res = serialize_with_codec::<LinearCodec>(&data);
results.push(res);
let res = serialize_with_codec::<BlockwiseLinearCodec>(&data);
results.push(res);
let res = serialize_with_codec::<fastfield_codecs::bitpacked::BitpackedCodec>(&data);
results.push(res);
// let best_estimation_codec = results
@@ -32,7 +31,7 @@ fn main() {
.unwrap();
table.add_row(Row::new(vec![Cell::new(data_set_name).style_spec("Bbb")]));
for (is_applicable, est, comp, name) in results {
for (is_applicable, est, comp, codec_type) in results {
let (est_cell, ratio_cell) = if !is_applicable {
("Codec Disabled".to_string(), "".to_string())
} else {
@@ -45,7 +44,7 @@ fn main() {
};
table.add_row(Row::new(vec![
Cell::new(name).style_spec("bFg"),
Cell::new(&format!("{codec_type:?}")).style_spec("bFg"),
Cell::new(&ratio_cell).style_spec(style),
Cell::new(&est_cell).style_spec(""),
]));
@@ -90,22 +89,19 @@ pub fn get_codec_test_data_sets() -> Vec<(Vec<u64>, &'static str)> {
data_and_names
}
pub fn serialize_with_codec<S: FastFieldCodec>(
codec: &S,
pub fn serialize_with_codec<C: FastFieldCodec>(
data: &[u64],
) -> (bool, f32, f32, &'static str) {
let is_applicable = S::is_applicable(&data, stats_from_vec(data));
) -> (bool, f32, f32, FastFieldCodecType) {
let is_applicable = C::is_applicable(&data);
if !is_applicable {
return (false, 0.0, 0.0, S::NAME);
return (false, 0.0, 0.0, C::CODEC_TYPE);
}
let estimation = S::estimate(&data, stats_from_vec(data));
let estimation = C::estimate(&data);
let mut out = vec![];
codec
.serialize(&mut out, &data, stats_from_vec(data))
.unwrap();
C::serialize(&mut out, &data).unwrap();
let actual_compression = out.len() as f32 / (data.len() * 8) as f32;
(true, estimation, actual_compression, S::NAME)
(true, estimation, actual_compression, C::CODEC_TYPE)
}
pub fn stats_from_vec(data: &[u64]) -> FastFieldStats {

View File

@@ -1,3 +1,5 @@
#![allow(clippy::derive_partial_eq_without_eq)]
mod occur;
mod query_grammar;
mod user_input_ast;

View File

@@ -10,7 +10,7 @@ use super::metric::{AverageAggregation, StatsAggregation};
use super::segment_agg_result::BucketCount;
use super::VecWithNames;
use crate::fastfield::{
type_and_cardinality, FastFieldReaderImpl, FastType, MultiValuedFastFieldReader,
type_and_cardinality, DynamicFastFieldReader, FastType, MultiValuedFastFieldReader,
};
use crate::schema::{Cardinality, Type};
use crate::{InvertedIndexReader, SegmentReader, TantivyError};
@@ -37,10 +37,10 @@ impl AggregationsWithAccessor {
#[derive(Clone)]
pub(crate) enum FastFieldAccessor {
Multi(MultiValuedFastFieldReader<u64>),
Single(FastFieldReaderImpl<u64>),
Single(DynamicFastFieldReader<u64>),
}
impl FastFieldAccessor {
pub fn as_single(&self) -> Option<&FastFieldReaderImpl<u64>> {
pub fn as_single(&self) -> Option<&DynamicFastFieldReader<u64>> {
match self {
FastFieldAccessor::Multi(_) => None,
FastFieldAccessor::Single(reader) => Some(reader),
@@ -118,7 +118,7 @@ impl BucketAggregationWithAccessor {
pub struct MetricAggregationWithAccessor {
pub metric: MetricAggregation,
pub field_type: Type,
pub accessor: FastFieldReaderImpl<u64>,
pub accessor: DynamicFastFieldReader<u64>,
}
impl MetricAggregationWithAccessor {

View File

@@ -14,7 +14,7 @@ use crate::aggregation::intermediate_agg_result::{
IntermediateAggregationResults, IntermediateBucketResult, IntermediateHistogramBucketEntry,
};
use crate::aggregation::segment_agg_result::SegmentAggregationResultsCollector;
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::schema::Type;
use crate::{DocId, TantivyError};
@@ -263,7 +263,7 @@ impl SegmentHistogramCollector {
req: &HistogramAggregation,
sub_aggregation: &AggregationsWithAccessor,
field_type: Type,
accessor: &FastFieldReaderImpl<u64>,
accessor: &DynamicFastFieldReader<u64>,
) -> crate::Result<Self> {
req.validate()?;
let min = f64_from_fastfield_u64(accessor.min_value(), &field_type);

View File

@@ -3,7 +3,7 @@ use std::fmt::Debug;
use serde::{Deserialize, Serialize};
use crate::aggregation::f64_from_fastfield_u64;
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::schema::Type;
use crate::DocId;
@@ -43,7 +43,7 @@ pub(crate) struct SegmentAverageCollector {
}
impl Debug for SegmentAverageCollector {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AverageCollector")
.field("data", &self.data)
.finish()
@@ -57,7 +57,7 @@ impl SegmentAverageCollector {
data: Default::default(),
}
}
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &FastFieldReaderImpl<u64>) {
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &DynamicFastFieldReader<u64>) {
let mut iter = doc.chunks_exact(4);
for docs in iter.by_ref() {
let val1 = field.get(docs[0]);

View File

@@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
use crate::aggregation::f64_from_fastfield_u64;
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::schema::Type;
use crate::{DocId, TantivyError};
@@ -163,7 +163,7 @@ impl SegmentStatsCollector {
stats: IntermediateStats::default(),
}
}
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &FastFieldReaderImpl<u64>) {
pub(crate) fn collect_block(&mut self, doc: &[DocId], field: &DynamicFastFieldReader<u64>) {
let mut iter = doc.chunks_exact(4);
for docs in iter.by_ref() {
let val1 = field.get(docs[0]);

View File

@@ -12,7 +12,7 @@
use std::marker::PhantomData;
use crate::collector::{Collector, SegmentCollector};
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::schema::Field;
use crate::{Score, SegmentReader, TantivyError};
@@ -158,7 +158,7 @@ where
TPredicate: 'static,
TPredicateValue: FastValue,
{
fast_field_reader: FastFieldReaderImpl<TPredicateValue>,
fast_field_reader: DynamicFastFieldReader<TPredicateValue>,
segment_collector: TSegmentCollector,
predicate: TPredicate,
t_predicate_value: PhantomData<TPredicateValue>,

View File

@@ -1,7 +1,7 @@
use fastdivide::DividerU64;
use crate::collector::{Collector, SegmentCollector};
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::schema::{Field, Type};
use crate::{DocId, Score};
@@ -84,7 +84,7 @@ impl HistogramComputer {
}
pub struct SegmentHistogramCollector {
histogram_computer: HistogramComputer,
ff_reader: FastFieldReaderImpl<u64>,
ff_reader: DynamicFastFieldReader<u64>,
}
impl SegmentCollector for SegmentHistogramCollector {

View File

@@ -1,7 +1,7 @@
use super::*;
use crate::collector::{Count, FilterCollector, TopDocs};
use crate::core::SegmentReader;
use crate::fastfield::{BytesFastFieldReader, FastFieldReader, FastFieldReaderImpl};
use crate::fastfield::{BytesFastFieldReader, DynamicFastFieldReader, FastFieldReader};
use crate::query::{AllQuery, QueryParser};
use crate::schema::{Field, Schema, FAST, TEXT};
use crate::time::format_description::well_known::Rfc3339;
@@ -156,7 +156,7 @@ pub struct FastFieldTestCollector {
pub struct FastFieldSegmentCollector {
vals: Vec<u64>,
reader: FastFieldReaderImpl<u64>,
reader: DynamicFastFieldReader<u64>,
}
impl FastFieldTestCollector {

View File

@@ -9,7 +9,7 @@ use crate::collector::tweak_score_top_collector::TweakedScoreTopCollector;
use crate::collector::{
CustomScorer, CustomSegmentScorer, ScoreSegmentTweaker, ScoreTweaker, SegmentCollector,
};
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, FastValue};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue};
use crate::query::Weight;
use crate::schema::Field;
use crate::{DocAddress, DocId, Score, SegmentOrdinal, SegmentReader, TantivyError};
@@ -129,7 +129,7 @@ impl fmt::Debug for TopDocs {
}
struct ScorerByFastFieldReader {
ff_reader: FastFieldReaderImpl<u64>,
ff_reader: DynamicFastFieldReader<u64>,
}
impl CustomSegmentScorer<u64> for ScorerByFastFieldReader {

View File

@@ -247,6 +247,14 @@ impl SearcherInner {
generation: TrackedObject<SearcherGeneration>,
doc_store_cache_size: usize,
) -> io::Result<SearcherInner> {
assert_eq!(
&segment_readers
.iter()
.map(|reader| (reader.segment_id(), reader.delete_opstamp()))
.collect::<BTreeMap<_, _>>(),
generation.segments(),
"Set of segments referenced by this Searcher and its SearcherGeneration must match"
);
let store_readers: Vec<StoreReader> = segment_readers
.iter()
.map(|segment_reader| segment_reader.get_store_reader(doc_store_cache_size))

View File

@@ -38,7 +38,7 @@ impl BinarySerializable for FileAddr {
/// A `CompositeWrite` is used to write a `CompositeFile`.
pub struct CompositeWrite<W = WritePtr> {
write: CountingWriter<W>,
offsets: HashMap<FileAddr, u64>,
offsets: Vec<(FileAddr, u64)>,
}
impl<W: TerminatingWrite + Write> CompositeWrite<W> {
@@ -47,7 +47,7 @@ impl<W: TerminatingWrite + Write> CompositeWrite<W> {
pub fn wrap(w: W) -> CompositeWrite<W> {
CompositeWrite {
write: CountingWriter::wrap(w),
offsets: HashMap::new(),
offsets: Vec::new(),
}
}
@@ -60,8 +60,8 @@ impl<W: TerminatingWrite + Write> CompositeWrite<W> {
pub fn for_field_with_idx(&mut self, field: Field, idx: usize) -> &mut CountingWriter<W> {
let offset = self.write.written_bytes();
let file_addr = FileAddr::new(field, idx);
assert!(!self.offsets.contains_key(&file_addr));
self.offsets.insert(file_addr, offset);
assert!(!self.offsets.iter().any(|el| el.0 == file_addr));
self.offsets.push((file_addr, offset));
&mut self.write
}
@@ -73,16 +73,8 @@ impl<W: TerminatingWrite + Write> CompositeWrite<W> {
let footer_offset = self.write.written_bytes();
VInt(self.offsets.len() as u64).serialize(&mut self.write)?;
let mut offset_fields: Vec<_> = self
.offsets
.iter()
.map(|(file_addr, offset)| (*offset, *file_addr))
.collect();
offset_fields.sort();
let mut prev_offset = 0;
for (offset, file_addr) in offset_fields {
for (file_addr, offset) in self.offsets {
VInt((offset - prev_offset) as u64).serialize(&mut self.write)?;
file_addr.serialize(&mut self.write)?;
prev_offset = offset;
@@ -106,6 +98,14 @@ pub struct CompositeFile {
offsets_index: HashMap<FileAddr, Range<usize>>,
}
impl std::fmt::Debug for CompositeFile {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("CompositeFile")
.field("offsets_index", &self.offsets_index)
.finish()
}
}
impl CompositeFile {
/// Opens a composite file stored in a given
/// `FileSlice`.
@@ -233,4 +233,56 @@ mod test {
}
Ok(())
}
#[test]
fn test_composite_file_bug() -> crate::Result<()> {
let path = Path::new("test_path");
let directory = RamDirectory::create();
{
let w = directory.open_write(path).unwrap();
let mut composite_write = CompositeWrite::wrap(w);
let mut write = composite_write.for_field_with_idx(Field::from_field_id(1u32), 0);
VInt(32431123u64).serialize(&mut write)?;
write.flush()?;
let write = composite_write.for_field_with_idx(Field::from_field_id(1u32), 1);
write.flush()?;
let mut write = composite_write.for_field_with_idx(Field::from_field_id(0u32), 0);
VInt(1_000_000).serialize(&mut write)?;
write.flush()?;
composite_write.close()?;
}
{
let r = directory.open_read(path)?;
let composite_file = CompositeFile::open(&r)?;
{
let file = composite_file
.open_read_with_idx(Field::from_field_id(1u32), 0)
.unwrap()
.read_bytes()?;
let mut file0_buf = file.as_slice();
let payload_0 = VInt::deserialize(&mut file0_buf)?.0;
assert_eq!(file0_buf.len(), 0);
assert_eq!(payload_0, 32431123u64);
}
{
let file = composite_file
.open_read_with_idx(Field::from_field_id(1u32), 1)
.unwrap()
.read_bytes()?;
let file = file.as_slice();
assert_eq!(file.len(), 0);
}
{
let file = composite_file
.open_read_with_idx(Field::from_field_id(0u32), 0)
.unwrap()
.read_bytes()?;
let file = file.as_slice();
assert_eq!(file.len(), 3);
}
}
Ok(())
}
}
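The close() loop above writes each offset as a VInt-encoded delta from the previous one, relying on the insertion-ordered Vec so that deltas never go negative. A hedged standalone sketch of that delta encoding and its inverse, using plain u64 instead of VInt (names are illustrative):

// Offsets grow monotonically because file components are appended in write order.
fn to_deltas(offsets: &[u64]) -> Vec<u64> {
    let mut prev = 0;
    offsets
        .iter()
        .map(|&off| {
            let delta = off - prev;
            prev = off;
            delta
        })
        .collect()
}

fn from_deltas(deltas: &[u64]) -> Vec<u64> {
    let mut acc = 0;
    deltas
        .iter()
        .map(|&d| {
            acc += d;
            acc
        })
        .collect()
}

fn main() {
    // The two equal offsets model an empty file component, as in test_composite_file_bug.
    let offsets = vec![0u64, 9, 9, 14];
    let deltas = to_deltas(&offsets);
    assert_eq!(deltas, vec![0, 9, 0, 5]);
    assert_eq!(from_deltas(&deltas), offsets);
}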

View File

@@ -1,5 +1,5 @@
use crate::directory::{FileSlice, OwnedBytes};
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, MultiValueLength};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, MultiValueLength};
use crate::DocId;
/// Reader for byte array fast fields
@@ -14,13 +14,13 @@ use crate::DocId;
/// and the start index for the next document, and keeping the bytes in between.
#[derive(Clone)]
pub struct BytesFastFieldReader {
idx_reader: FastFieldReaderImpl<u64>,
idx_reader: DynamicFastFieldReader<u64>,
values: OwnedBytes,
}
impl BytesFastFieldReader {
pub(crate) fn open(
idx_reader: FastFieldReaderImpl<u64>,
idx_reader: DynamicFastFieldReader<u64>,
values_file: FileSlice,
) -> crate::Result<BytesFastFieldReader> {
let values = values_file.read_bytes()?;

src/fastfield/gcd.rs (new file, 360 lines added)
View File

@@ -0,0 +1,360 @@
use std::io::{self, Write};
use std::num::NonZeroU64;
use common::BinarySerializable;
use fastdivide::DividerU64;
use fastfield_codecs::{FastFieldCodec, FastFieldDataAccess};
use ownedbytes::OwnedBytes;
pub const GCD_DEFAULT: u64 = 1;
/// Wrapper for accessing a fastfield.
///
/// Holds the data and the codec to read the data.
#[derive(Clone)]
pub struct GCDReader<CodecReader: FastFieldDataAccess> {
gcd_params: GCDParams,
reader: CodecReader,
}
#[derive(Debug, Clone, Copy)]
struct GCDParams {
gcd: u64,
min_value: u64,
num_vals: u64,
}
impl GCDParams {
pub fn eval(&self, val: u64) -> u64 {
self.min_value + self.gcd * val
}
}
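eval() implies that the wrapped codec only has to store (val - min_value) / gcd, i.e. small, dense values; the original value is reconstructed on read. A hedged standalone sketch of that round trip (the struct mirrors GCDParams, everything else is illustrative):

struct Params {
    gcd: u64,
    min_value: u64,
}

impl Params {
    // Mirrors GCDParams::eval: undo the division by gcd and the shift by min_value.
    fn eval(&self, stored: u64) -> u64 {
        self.min_value + self.gcd * stored
    }
}

fn main() {
    let vals = [1_000u64, 4_000, 2_000, 7_000];
    let min_value = 1_000;
    let gcd = 1_000;
    // What the wrapped codec actually has to encode: small, dense values.
    let stored: Vec<u64> = vals.iter().map(|v| (v - min_value) / gcd).collect();
    assert_eq!(stored, vec![0, 3, 1, 6]);
    let params = Params { gcd, min_value };
    let restored: Vec<u64> = stored.iter().map(|&s| params.eval(s)).collect();
    assert_eq!(restored, vals.to_vec());
}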
impl BinarySerializable for GCDParams {
fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()> {
self.gcd.serialize(writer)?;
self.min_value.serialize(writer)?;
self.num_vals.serialize(writer)?;
Ok(())
}
fn deserialize<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let gcd: u64 = u64::deserialize(reader)?;
let min_value: u64 = u64::deserialize(reader)?;
let num_vals: u64 = u64::deserialize(reader)?;
Ok(Self {
gcd,
min_value,
num_vals,
})
}
}
pub fn open_gcd_from_bytes<WrappedCodec: FastFieldCodec>(
bytes: OwnedBytes,
) -> io::Result<GCDReader<WrappedCodec::Reader>> {
let footer_offset = bytes.len() - 24;
let (body, mut footer) = bytes.split(footer_offset);
let gcd_params = GCDParams::deserialize(&mut footer)?;
let reader: WrappedCodec::Reader = WrappedCodec::open_from_bytes(body)?;
Ok(GCDReader { gcd_params, reader })
}
impl<C: FastFieldDataAccess + Clone> FastFieldDataAccess for GCDReader<C> {
#[inline]
fn get_val(&self, doc: u64) -> u64 {
let val = self.reader.get_val(doc);
self.gcd_params.eval(val)
}
fn min_value(&self) -> u64 {
self.gcd_params.eval(self.reader.min_value())
}
fn max_value(&self) -> u64 {
self.gcd_params.eval(self.reader.max_value())
}
fn num_vals(&self) -> u64 {
self.gcd_params.num_vals
}
}
pub fn write_gcd_header<W: Write>(
field_write: &mut W,
min_value: u64,
gcd: u64,
num_vals: u64,
) -> io::Result<()> {
gcd.serialize(field_write)?;
min_value.serialize(field_write)?;
num_vals.serialize(field_write)?;
Ok(())
}
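Despite its name, this header is read back by open_gcd_from_bytes as a footer: three u64 fields, hence the hard-coded 24-byte offset there. A hedged standalone sketch of that split; the byte order and fixed-width encoding are assumptions for illustration, the real code goes through BinarySerializable:

use std::convert::TryInto;

// Hypothetical layout: [wrapped codec body][gcd: u64][min_value: u64][num_vals: u64]
fn write_footer(out: &mut Vec<u8>, gcd: u64, min_value: u64, num_vals: u64) {
    out.extend_from_slice(&gcd.to_le_bytes());
    out.extend_from_slice(&min_value.to_le_bytes());
    out.extend_from_slice(&num_vals.to_le_bytes());
}

fn read_footer(bytes: &[u8]) -> (&[u8], u64, u64, u64) {
    // Same split as open_gcd_from_bytes: the last 24 bytes hold the three u64 fields.
    let footer_offset = bytes.len() - 24;
    let (body, footer) = bytes.split_at(footer_offset);
    let u64_at = |i: usize| u64::from_le_bytes(footer[i..i + 8].try_into().unwrap());
    (body, u64_at(0), u64_at(8), u64_at(16))
}

fn main() {
    let mut data = vec![0xAB_u8; 10]; // stand-in for the wrapped codec's body
    write_footer(&mut data, 1_000, 5_000, 3);
    let (body, gcd, min_value, num_vals) = read_footer(&data);
    assert_eq!((body.len(), gcd, min_value, num_vals), (10, 1_000, 5_000, 3));
}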
/// Compute the gcd of two non-zero numbers.
///
/// It is recommended, but not required, to feed values such that `large >= small`.
fn compute_gcd(mut large: NonZeroU64, mut small: NonZeroU64) -> NonZeroU64 {
loop {
let rem: u64 = large.get() % small;
if let Some(new_small) = NonZeroU64::new(rem) {
(large, small) = (small, new_small);
} else {
return small;
}
}
}
// Find the GCD for an iterator of numbers.
pub fn find_gcd(numbers: impl Iterator<Item = u64>) -> Option<NonZeroU64> {
let mut numbers = numbers.flat_map(NonZeroU64::new);
let mut gcd: NonZeroU64 = numbers.next()?;
if gcd.get() == 1 {
return Some(gcd);
}
let mut gcd_divider = DividerU64::divide_by(gcd.get());
for val in numbers {
let remainder = val.get() - (gcd_divider.divide(val.get())) * gcd.get();
if remainder == 0 {
continue;
}
gcd = compute_gcd(val, gcd);
if gcd.get() == 1 {
return Some(gcd);
}
gcd_divider = DividerU64::divide_by(gcd.get());
}
Some(gcd)
}
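For intuition, here is a hedged standalone reduction of the same idea without the fastdivide fast path: fold the running GCD over the non-zero values and stop early once it collapses to 1.

use std::num::NonZeroU64;

fn gcd(mut a: NonZeroU64, mut b: NonZeroU64) -> NonZeroU64 {
    // Euclid's algorithm on NonZeroU64, as in compute_gcd above.
    loop {
        match NonZeroU64::new(a.get() % b.get()) {
            Some(rem) => {
                a = b;
                b = rem;
            }
            None => return b,
        }
    }
}

fn find_gcd_simple(numbers: impl Iterator<Item = u64>) -> Option<NonZeroU64> {
    let mut numbers = numbers.filter_map(NonZeroU64::new);
    let mut acc = numbers.next()?;
    for n in numbers {
        if acc.get() == 1 {
            break; // the gcd cannot get any smaller
        }
        acc = gcd(n, acc);
    }
    Some(acc)
}

fn main() {
    assert_eq!(find_gcd_simple([15, 30, 5, 10].into_iter()), NonZeroU64::new(5));
    assert_eq!(find_gcd_simple([1_000, 2_000, 3_000].into_iter()), NonZeroU64::new(1_000));
    assert_eq!(find_gcd_simple([0, 0].into_iter()), None);
}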
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::num::NonZeroU64;
use std::path::Path;
use std::time::{Duration, SystemTime};
use common::HasLen;
use crate::directory::{CompositeFile, RamDirectory, WritePtr};
use crate::fastfield::gcd::compute_gcd;
use crate::fastfield::serializer::FastFieldCodecEnableCheck;
use crate::fastfield::tests::{FIELD, FIELDI64, SCHEMA, SCHEMAI64};
use crate::fastfield::{
find_gcd, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldCodecType,
FastFieldReader, FastFieldsWriter, ALL_CODECS,
};
use crate::schema::{Cardinality, Schema};
use crate::{DateOptions, DatePrecision, DateTime, Directory};
fn get_index(
docs: &[crate::Document],
schema: &Schema,
codec_enable_checker: FastFieldCodecEnableCheck,
) -> crate::Result<RamDirectory> {
let directory: RamDirectory = RamDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer =
CompositeFastFieldSerializer::from_write_with_codec(write, codec_enable_checker)
.unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(schema);
for doc in docs {
fast_field_writers.add_document(doc);
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new(), None)
.unwrap();
serializer.close().unwrap();
}
Ok(directory)
}
fn test_fastfield_gcd_i64_with_codec(
code_type: FastFieldCodecType,
num_vals: usize,
) -> crate::Result<()> {
let path = Path::new("test");
let mut docs = vec![];
for i in 1..=num_vals {
let val = (i as i64 - 5) * 1000i64;
docs.push(doc!(*FIELDI64=>val));
}
let directory = get_index(&docs, &SCHEMAI64, code_type.into())?;
let file = directory.open_read(path).unwrap();
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<i64>::open(file)?;
assert_eq!(fast_field_reader.get(0), -4000i64);
assert_eq!(fast_field_reader.get(1), -3000i64);
assert_eq!(fast_field_reader.get(2), -2000i64);
assert_eq!(fast_field_reader.max_value(), (num_vals as i64 - 5) * 1000);
assert_eq!(fast_field_reader.min_value(), -4000i64);
let file = directory.open_read(path).unwrap();
// Can't apply gcd
let path = Path::new("test");
docs.pop();
docs.push(doc!(*FIELDI64=>2001i64));
let directory = get_index(&docs, &SCHEMAI64, code_type.into())?;
let file2 = directory.open_read(path).unwrap();
assert!(file2.len() > file.len());
Ok(())
}
#[test]
fn test_fastfield_gcd_i64() -> crate::Result<()> {
for &code_type in ALL_CODECS {
test_fastfield_gcd_i64_with_codec(code_type, 5005)?;
}
Ok(())
}
fn test_fastfield_gcd_u64_with_codec(
code_type: FastFieldCodecType,
num_vals: usize,
) -> crate::Result<()> {
let path = Path::new("test");
let mut docs = vec![];
for i in 1..=num_vals {
let val = i as u64 * 1000u64;
docs.push(doc!(*FIELD=>val));
}
let directory = get_index(&docs, &SCHEMA, code_type.into())?;
let file = directory.open_read(path).unwrap();
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = DynamicFastFieldReader::<u64>::open(file)?;
assert_eq!(fast_field_reader.get(0), 1000u64);
assert_eq!(fast_field_reader.get(1), 2000u64);
assert_eq!(fast_field_reader.get(2), 3000u64);
assert_eq!(fast_field_reader.max_value(), num_vals as u64 * 1000);
assert_eq!(fast_field_reader.min_value(), 1000u64);
let file = directory.open_read(path).unwrap();
// Can't apply gcd
let path = Path::new("test");
docs.pop();
docs.push(doc!(*FIELDI64=>2001u64));
let directory = get_index(&docs, &SCHEMA, code_type.into())?;
let file2 = directory.open_read(path).unwrap();
assert!(file2.len() > file.len());
Ok(())
}
#[test]
fn test_fastfield_gcd_u64() -> crate::Result<()> {
for &code_type in ALL_CODECS {
test_fastfield_gcd_u64_with_codec(code_type, 5005)?;
}
Ok(())
}
#[test]
pub fn test_fastfield2() {
let test_fastfield = DynamicFastFieldReader::<u64>::from(vec![100, 200, 300]);
assert_eq!(test_fastfield.get(0), 100);
assert_eq!(test_fastfield.get(1), 200);
assert_eq!(test_fastfield.get(2), 300);
}
#[test]
pub fn test_gcd_date() -> crate::Result<()> {
let size_prec_sec =
test_gcd_date_with_codec(FastFieldCodecType::Bitpacked, DatePrecision::Seconds)?;
let size_prec_micro =
test_gcd_date_with_codec(FastFieldCodecType::Bitpacked, DatePrecision::Microseconds)?;
assert!(size_prec_sec < size_prec_micro);
let size_prec_sec =
test_gcd_date_with_codec(FastFieldCodecType::Linear, DatePrecision::Seconds)?;
let size_prec_micro =
test_gcd_date_with_codec(FastFieldCodecType::Linear, DatePrecision::Microseconds)?;
assert!(size_prec_sec < size_prec_micro);
Ok(())
}
fn test_gcd_date_with_codec(
codec_type: FastFieldCodecType,
precision: DatePrecision,
) -> crate::Result<usize> {
let time1 = DateTime::from_timestamp_micros(
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs() as i64,
);
let time2 = DateTime::from_timestamp_micros(
SystemTime::now()
.checked_sub(Duration::from_micros(4111))
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs() as i64,
);
let time3 = DateTime::from_timestamp_micros(
SystemTime::now()
.checked_sub(Duration::from_millis(2000))
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs() as i64,
);
let mut schema_builder = Schema::builder();
let date_options = DateOptions::default()
.set_fast(Cardinality::SingleValue)
.set_precision(precision);
let field = schema_builder.add_date_field("field", date_options);
let schema = schema_builder.build();
let docs = vec![doc!(field=>time1), doc!(field=>time2), doc!(field=>time3)];
let directory = get_index(&docs, &schema, codec_type.into())?;
let path = Path::new("test");
let file = directory.open_read(path).unwrap();
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let len = file.len();
let test_fastfield = DynamicFastFieldReader::<DateTime>::open(file)?;
assert_eq!(test_fastfield.get(0), time1.truncate(precision));
assert_eq!(test_fastfield.get(1), time2.truncate(precision));
assert_eq!(test_fastfield.get(2), time3.truncate(precision));
Ok(len)
}
#[test]
fn test_compute_gcd() {
let test_compute_gcd_aux = |large, small, expected| {
let large = NonZeroU64::new(large).unwrap();
let small = NonZeroU64::new(small).unwrap();
let expected = NonZeroU64::new(expected).unwrap();
assert_eq!(compute_gcd(small, large), expected);
assert_eq!(compute_gcd(large, small), expected);
};
test_compute_gcd_aux(1, 4, 1);
test_compute_gcd_aux(2, 4, 2);
test_compute_gcd_aux(10, 25, 5);
test_compute_gcd_aux(25, 25, 25);
}
#[test]
fn find_gcd_test() {
assert_eq!(find_gcd([0].into_iter()), None);
assert_eq!(find_gcd([0, 10].into_iter()), NonZeroU64::new(10));
assert_eq!(find_gcd([10, 0].into_iter()), NonZeroU64::new(10));
assert_eq!(find_gcd([].into_iter()), None);
assert_eq!(find_gcd([15, 30, 5, 10].into_iter()), NonZeroU64::new(5));
assert_eq!(find_gcd([15, 16, 10].into_iter()), NonZeroU64::new(1));
assert_eq!(find_gcd([0, 5, 5, 5].into_iter()), NonZeroU64::new(5));
assert_eq!(find_gcd([0, 0].into_iter()), None);
}
}
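The size comparisons in these GCD tests come down to the bit width needed per value once the common divisor has been factored out. A rough, hedged back-of-the-envelope calculation for the bitpacked case only (the constants follow the 5005-value test above):

fn bits_needed(max_relative_value: u64) -> u32 {
    64 - max_relative_value.leading_zeros()
}

fn main() {
    let num_vals = 5_005u64;
    // Multiples of 1000: after dividing out the GCD, the largest stored value is 5004.
    let with_gcd = bits_needed(5_004) as u64 * num_vals;
    // A stray value of 2001 breaks the GCD, so deltas up to roughly 5_003_000 get bitpacked directly.
    let without_gcd = bits_needed(5_003_000) as u64 * num_vals;
    assert!(with_gcd < without_gcd);
    println!("{} bits with gcd vs {} bits without", with_gcd, without_gcd);
}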

View File

@@ -20,18 +20,18 @@
//!
//! Read access performance is comparable to that of an array lookup.
use fastfield_codecs::dynamic::DynamicFastFieldCodec;
use fastfield_codecs::FastFieldCodecType;
pub use self::alive_bitset::{intersect_alive_bitsets, write_alive_bitset, AliveBitSet};
pub use self::bytes::{BytesFastFieldReader, BytesFastFieldWriter};
pub use self::error::{FastFieldNotAvailableError, Result};
pub use self::facet_reader::FacetReader;
pub(crate) use self::gcd::{find_gcd, GCDReader, GCD_DEFAULT};
pub use self::multivalued::{MultiValuedFastFieldReader, MultiValuedFastFieldWriter};
pub use self::reader::FastFieldReader;
pub use self::reader::{DynamicFastFieldReader, FastFieldReader};
pub use self::readers::FastFieldReaders;
pub(crate) use self::readers::{type_and_cardinality, FastType};
pub use self::serializer::{CompositeFastFieldSerializer, FastFieldStats};
pub use self::wrapper::FastFieldReaderWrapper;
pub use self::serializer::{CompositeFastFieldSerializer, FastFieldDataAccess, FastFieldStats};
pub use self::writer::{FastFieldsWriter, IntFastFieldWriter};
use crate::schema::{Cardinality, FieldType, Type, Value};
use crate::{DateTime, DocId};
@@ -40,13 +40,19 @@ mod alive_bitset;
mod bytes;
mod error;
mod facet_reader;
mod gcd;
mod multivalued;
mod reader;
mod readers;
mod serializer;
mod wrapper;
mod writer;
pub(crate) const ALL_CODECS: &[FastFieldCodecType; 3] = &[
FastFieldCodecType::Bitpacked,
FastFieldCodecType::Linear,
FastFieldCodecType::BlockwiseLinear,
];
/// Trait for `BytesFastFieldReader` and `MultiValuedFastFieldReader` to return the length of data
/// for a doc_id
pub trait MultiValueLength {
@@ -116,9 +122,6 @@ impl FastValue for u64 {
}
}
// TODO rename
pub type FastFieldReaderImpl<V> = FastFieldReaderWrapper<V, DynamicFastFieldCodec>;
impl FastValue for i64 {
fn from_u64(val: u64) -> Self {
common::u64_to_i64(val)
@@ -283,11 +286,18 @@ mod tests {
schema_builder.build()
});
pub static SCHEMAI64: Lazy<Schema> = Lazy::new(|| {
let mut schema_builder = Schema::builder();
schema_builder.add_i64_field("field", FAST);
schema_builder.build()
});
pub static FIELD: Lazy<Field> = Lazy::new(|| SCHEMA.get_field("field").unwrap());
pub static FIELDI64: Lazy<Field> = Lazy::new(|| SCHEMAI64.get_field("field").unwrap());
#[test]
pub fn test_fastfield() {
let test_fastfield = FastFieldReaderImpl::<u64>::from(&[100, 200, 300]);
let test_fastfield = DynamicFastFieldReader::<u64>::from(vec![100, 200, 300]);
assert_eq!(test_fastfield.get(0), 100);
assert_eq!(test_fastfield.get(1), 200);
assert_eq!(test_fastfield.get(2), 300);
@@ -316,10 +326,10 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 37);
assert_eq!(file.len(), 45);
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReaderImpl::<u64>::open(file)?;
let fast_field_reader = DynamicFastFieldReader::<u64>::open(file)?;
assert_eq!(fast_field_reader.get(0), 13u64);
assert_eq!(fast_field_reader.get(1), 14u64);
assert_eq!(fast_field_reader.get(2), 2u64);
@@ -347,11 +357,11 @@ mod tests {
serializer.close()?;
}
let file = directory.open_read(path)?;
assert_eq!(file.len(), 62);
assert_eq!(file.len(), 70);
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReaderImpl::<u64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 4u64);
assert_eq!(fast_field_reader.get(1), 14_082_001u64);
assert_eq!(fast_field_reader.get(2), 3_052u64);
@@ -383,11 +393,11 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 35);
assert_eq!(file.len(), 43);
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReaderImpl::<u64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
for doc in 0..10_000 {
assert_eq!(fast_field_reader.get(doc), 100_000u64);
}
@@ -415,11 +425,11 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 80043);
assert_eq!(file.len(), 80051);
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReaderImpl::<u64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 0u64);
for doc in 1..10_001 {
assert_eq!(
@@ -455,11 +465,12 @@ mod tests {
}
let file = directory.open_read(path).unwrap();
// assert_eq!(file.len(), 17710 as usize); //bitpacked size
assert_eq!(file.len(), 10175_usize); // linear interpol size
// assert_eq!(file.len(), 10175_usize); // linear interpol size
assert_eq!(file.len(), 75_usize); // linear interpol size after calc improvement
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(i64_field).unwrap();
let fast_field_reader = FastFieldReaderImpl::<i64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<i64>::open(data)?;
assert_eq!(fast_field_reader.min_value(), -100i64);
assert_eq!(fast_field_reader.max_value(), 9_999i64);
@@ -499,7 +510,7 @@ mod tests {
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(i64_field).unwrap();
let fast_field_reader = FastFieldReaderImpl::<i64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<i64>::open(data)?;
assert_eq!(fast_field_reader.get(0u32), 0i64);
}
Ok(())
@@ -537,7 +548,7 @@ mod tests {
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReaderImpl::<u64>::open(data)?;
let fast_field_reader = DynamicFastFieldReader::<u64>::open(data)?;
for a in 0..n {
assert_eq!(fast_field_reader.get(a as u32), permutation[a as usize]);
@@ -854,7 +865,7 @@ mod tests {
#[test]
pub fn test_fastfield_bool() {
let test_fastfield = FastFieldReaderImpl::<bool>::from(&[true, false, true, false]);
let test_fastfield = DynamicFastFieldReader::<bool>::from(vec![true, false, true, false]);
assert_eq!(test_fastfield.get(0), true);
assert_eq!(test_fastfield.get(1), false);
assert_eq!(test_fastfield.get(2), true);
@@ -885,10 +896,10 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 36);
assert_eq!(file.len(), 44);
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = FastFieldReaderImpl::<bool>::open(file)?;
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
assert_eq!(fast_field_reader.get(0), true);
assert_eq!(fast_field_reader.get(1), false);
assert_eq!(fast_field_reader.get(2), true);
@@ -921,10 +932,10 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 48);
assert_eq!(file.len(), 56);
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = FastFieldReaderImpl::<bool>::open(file)?;
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
for i in 0..25 {
assert_eq!(fast_field_reader.get(i * 2), true);
assert_eq!(fast_field_reader.get(i * 2 + 1), false);
@@ -955,10 +966,10 @@ mod tests {
serializer.close().unwrap();
}
let file = directory.open_read(path).unwrap();
assert_eq!(file.len(), 35);
assert_eq!(file.len(), 43);
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(field).unwrap();
let fast_field_reader = FastFieldReaderImpl::<bool>::open(file)?;
let fast_field_reader = DynamicFastFieldReader::<bool>::open(file)?;
assert_eq!(fast_field_reader.get(0), false);
Ok(())

View File

@@ -346,32 +346,26 @@ mod tests {
assert!(test_multivalued_no_panic(&ops[..]).is_ok());
}
}
#[test]
fn test_proptest_merge_multivalued_bug() {
use IndexingOp::*;
let ops = &[AddDoc { id: 7 }, AddDoc { id: 4 }, Merge];
assert!(test_multivalued_no_panic(ops).is_ok());
}
#[test]
fn test_multivalued_proptest_gcd() {
use IndexingOp::*;
let ops = &[AddDoc { id: 9 }, AddDoc { id: 9 }, Merge];
assert!(test_multivalued_no_panic(ops).is_ok());
let ops = [AddDoc { id: 9 }, AddDoc { id: 9 }, Merge];
assert!(test_multivalued_no_panic(&ops[..]).is_ok());
}
#[test]
fn test_multivalued_proptest_off_by_one_bug_1151() {
use IndexingOp::*;
let ops = &[
let ops = [
AddDoc { id: 3 },
AddDoc { id: 1 },
AddDoc { id: 3 },
Commit,
Merge,
];
assert!(test_multivalued_no_panic(ops).is_ok());
assert!(test_multivalued_no_panic(&ops[..]).is_ok());
}
#[test]

View File

@@ -1,6 +1,6 @@
use std::ops::Range;
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, FastValue, MultiValueLength};
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader, FastValue, MultiValueLength};
use crate::DocId;
/// Reader for a multivalued `u64` fast field.
@@ -12,14 +12,14 @@ use crate::DocId;
/// The `idx_reader` associates, for each document, the index of its first value.
#[derive(Clone)]
pub struct MultiValuedFastFieldReader<Item: FastValue> {
idx_reader: FastFieldReaderImpl<u64>,
vals_reader: FastFieldReaderImpl<Item>,
idx_reader: DynamicFastFieldReader<u64>,
vals_reader: DynamicFastFieldReader<Item>,
}
impl<Item: FastValue> MultiValuedFastFieldReader<Item> {
pub(crate) fn open(
idx_reader: FastFieldReaderImpl<u64>,
vals_reader: FastFieldReaderImpl<Item>,
idx_reader: DynamicFastFieldReader<u64>,
vals_reader: DynamicFastFieldReader<Item>,
) -> MultiValuedFastFieldReader<Item> {
MultiValuedFastFieldReader {
idx_reader,

View File

@@ -3,7 +3,7 @@ use std::io;
use fnv::FnvHashMap;
use tantivy_bitpacker::minmax;
use crate::fastfield::serializer::BitpackedFastFieldSerializerLegacy;
use crate::fastfield::serializer::BitpackedSerializerLegacy;
use crate::fastfield::{value_to_u64, CompositeFastFieldSerializer, FastFieldType, FastValue};
use crate::indexer::doc_id_mapping::DocIdMapping;
use crate::postings::UnorderedTermId;
@@ -171,7 +171,7 @@ impl MultiValuedFastFieldWriter {
}
{
// writing the values themselves.
let mut value_serializer: BitpackedFastFieldSerializerLegacy<'_, _>;
let mut value_serializer: BitpackedSerializerLegacy<'_, _>;
if let Some(mapping) = mapping_opt {
value_serializer = serializer.new_u64_fast_field_with_idx(
self.field,

View File

@@ -1,8 +1,23 @@
use std::collections::HashMap;
use std::marker::PhantomData;
use std::path::Path;
use common::BinarySerializable;
use fastfield_codecs::bitpacked::{BitpackedCodec, BitpackedReader};
use fastfield_codecs::blockwise_linear::{BlockwiseLinearCodec, BlockwiseLinearReader};
use fastfield_codecs::linear::{LinearCodec, LinearReader};
use fastfield_codecs::{FastFieldCodec, FastFieldCodecType, FastFieldDataAccess};
use super::gcd::open_gcd_from_bytes;
use super::FastValue;
use crate::directory::{CompositeFile, Directory, FileSlice, OwnedBytes, RamDirectory, WritePtr};
use crate::error::DataCorruption;
use crate::fastfield::{CompositeFastFieldSerializer, FastFieldsWriter, GCDReader};
use crate::schema::{Schema, FAST};
use crate::DocId;
/// FastFieldReader is the trait to access fast field data.
pub trait FastFieldReader<Item: FastValue> {
pub trait FastFieldReader<Item: FastValue>: Clone {
/// Return the value associated to the given document.
///
/// This accessor should return as fast as possible.
@@ -41,3 +56,252 @@ pub trait FastFieldReader<Item: FastValue> {
/// of the actual maximum value.
fn max_value(&self) -> Item;
}
#[derive(Clone)]
/// DynamicFastFieldReader wraps different readers to access
/// the various encoded fastfield data
pub enum DynamicFastFieldReader<Item: FastValue> {
/// Bitpacked compressed fastfield data.
Bitpacked(FastFieldReaderCodecWrapper<Item, BitpackedReader>),
/// Linear interpolated values + bitpacked
Linear(FastFieldReaderCodecWrapper<Item, LinearReader>),
/// Blockwise linear interpolated values + bitpacked
BlockwiseLinear(FastFieldReaderCodecWrapper<Item, BlockwiseLinearReader>),
/// GCD and Bitpacked compressed fastfield data.
BitpackedGCD(FastFieldReaderCodecWrapper<Item, GCDReader<BitpackedReader>>),
/// GCD and Linear interpolated values + bitpacked
LinearGCD(FastFieldReaderCodecWrapper<Item, GCDReader<LinearReader>>),
/// GCD and Blockwise linear interpolated values + bitpacked
BlockwiseLinearGCD(FastFieldReaderCodecWrapper<Item, GCDReader<BlockwiseLinearReader>>),
}
impl<Item: FastValue> DynamicFastFieldReader<Item> {
/// Returns the correct reader, wrapped in the `DynamicFastFieldReader` enum, for the data.
pub fn open_from_id(
mut bytes: OwnedBytes,
codec_type: FastFieldCodecType,
) -> crate::Result<DynamicFastFieldReader<Item>> {
let reader = match codec_type {
FastFieldCodecType::Bitpacked => {
DynamicFastFieldReader::Bitpacked(BitpackedCodec::open_from_bytes(bytes)?.into())
}
FastFieldCodecType::Linear => {
DynamicFastFieldReader::Linear(LinearCodec::open_from_bytes(bytes)?.into())
}
FastFieldCodecType::BlockwiseLinear => DynamicFastFieldReader::BlockwiseLinear(
BlockwiseLinearCodec::open_from_bytes(bytes)?.into(),
),
FastFieldCodecType::Gcd => {
let codec_type = FastFieldCodecType::deserialize(&mut bytes)?;
match codec_type {
FastFieldCodecType::Bitpacked => DynamicFastFieldReader::BitpackedGCD(
open_gcd_from_bytes::<BitpackedCodec>(bytes)?.into(),
),
FastFieldCodecType::Linear => DynamicFastFieldReader::LinearGCD(
open_gcd_from_bytes::<LinearCodec>(bytes)?.into(),
),
FastFieldCodecType::BlockwiseLinear => {
DynamicFastFieldReader::BlockwiseLinearGCD(
open_gcd_from_bytes::<BlockwiseLinearCodec>(bytes)?.into(),
)
}
FastFieldCodecType::Gcd => {
return Err(DataCorruption::comment_only(
"Gcd codec wrapped into another gcd codec. This combination is not \
allowed.",
)
.into())
}
}
}
};
Ok(reader)
}
/// Returns the correct reader, wrapped in the `DynamicFastFieldReader` enum, for the data.
pub fn open(file: FileSlice) -> crate::Result<DynamicFastFieldReader<Item>> {
let mut bytes = file.read_bytes()?;
let codec_type = FastFieldCodecType::deserialize(&mut bytes)?;
Self::open_from_id(bytes, codec_type)
}
}
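The open path is driven entirely by a small codec tag at the front of the data, with a second nested tag when the GCD wrapper is involved, and GCD-in-GCD rejected. A hedged standalone sketch of that dispatch shape, independent of the crate types; the numeric codes are placeholders, not the real on-disk values:

#[derive(Debug, PartialEq, Clone, Copy)]
enum CodecTag {
    Bitpacked = 1,
    Linear = 2,
    BlockwiseLinear = 3,
    Gcd = 4,
}

impl CodecTag {
    fn from_code(code: u8) -> Option<Self> {
        match code {
            1 => Some(Self::Bitpacked),
            2 => Some(Self::Linear),
            3 => Some(Self::BlockwiseLinear),
            4 => Some(Self::Gcd),
            _ => None,
        }
    }
}

// Consume the leading tag, then hand the remaining bytes to the matching reader.
fn open(bytes: &[u8]) -> Result<(CodecTag, Option<CodecTag>, &[u8]), String> {
    let (&code, rest) = bytes.split_first().ok_or("empty fast field data")?;
    let tag = CodecTag::from_code(code).ok_or("unknown codec")?;
    if tag == CodecTag::Gcd {
        // The GCD wrapper carries a second tag for the inner codec; GCD-in-GCD is rejected.
        let (&inner_code, body) = rest.split_first().ok_or("missing inner codec")?;
        let inner = CodecTag::from_code(inner_code).ok_or("unknown inner codec")?;
        if inner == CodecTag::Gcd {
            return Err("gcd wrapped in gcd is not allowed".to_string());
        }
        return Ok((tag, Some(inner), body));
    }
    Ok((tag, None, rest))
}

fn main() {
    assert_eq!(open(&[2, 0xFF]), Ok((CodecTag::Linear, None, &[0xFF][..])));
    assert_eq!(
        open(&[4, 1, 0xFF]),
        Ok((CodecTag::Gcd, Some(CodecTag::Bitpacked), &[0xFF][..]))
    );
    assert!(open(&[4, 4]).is_err());
}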
impl<Item: FastValue> FastFieldReader<Item> for DynamicFastFieldReader<Item> {
#[inline]
fn get(&self, doc: DocId) -> Item {
match self {
Self::Bitpacked(reader) => reader.get(doc),
Self::Linear(reader) => reader.get(doc),
Self::BlockwiseLinear(reader) => reader.get(doc),
Self::BitpackedGCD(reader) => reader.get(doc),
Self::LinearGCD(reader) => reader.get(doc),
Self::BlockwiseLinearGCD(reader) => reader.get(doc),
}
}
#[inline]
fn get_range(&self, start: u64, output: &mut [Item]) {
match self {
Self::Bitpacked(reader) => reader.get_range(start, output),
Self::Linear(reader) => reader.get_range(start, output),
Self::BlockwiseLinear(reader) => reader.get_range(start, output),
Self::BitpackedGCD(reader) => reader.get_range(start, output),
Self::LinearGCD(reader) => reader.get_range(start, output),
Self::BlockwiseLinearGCD(reader) => reader.get_range(start, output),
}
}
fn min_value(&self) -> Item {
match self {
Self::Bitpacked(reader) => reader.min_value(),
Self::Linear(reader) => reader.min_value(),
Self::BlockwiseLinear(reader) => reader.min_value(),
Self::BitpackedGCD(reader) => reader.min_value(),
Self::LinearGCD(reader) => reader.min_value(),
Self::BlockwiseLinearGCD(reader) => reader.min_value(),
}
}
fn max_value(&self) -> Item {
match self {
Self::Bitpacked(reader) => reader.max_value(),
Self::Linear(reader) => reader.max_value(),
Self::BlockwiseLinear(reader) => reader.max_value(),
Self::BitpackedGCD(reader) => reader.max_value(),
Self::LinearGCD(reader) => reader.max_value(),
Self::BlockwiseLinearGCD(reader) => reader.max_value(),
}
}
}
/// Wrapper for accessing a fastfield.
///
/// Holds the data and the codec to read the data.
#[derive(Clone)]
pub struct FastFieldReaderCodecWrapper<Item: FastValue, CodecReader> {
reader: CodecReader,
_phantom: PhantomData<Item>,
}
impl<Item: FastValue, CodecReader> From<CodecReader>
for FastFieldReaderCodecWrapper<Item, CodecReader>
{
fn from(reader: CodecReader) -> Self {
FastFieldReaderCodecWrapper {
reader,
_phantom: PhantomData,
}
}
}
impl<Item: FastValue, D: FastFieldDataAccess> FastFieldReaderCodecWrapper<Item, D> {
#[inline]
pub(crate) fn get_u64(&self, doc: u64) -> Item {
let data = self.reader.get_val(doc);
Item::from_u64(data)
}
/// Internally, `multivalued` fast fields also use single-value fast fields.
/// It works as follows: a first column contains the start index
/// for each document, a second column contains the actual values.
///
/// The values associated to a given doc, are then
/// `second_column[first_column.get(doc)..first_column.get(doc+1)]`.
///
/// Which means single value fast field reader can be indexed internally with
/// something different from a `DocId`. For this use case, we want to use `u64`
/// values.
///
/// See `get_range` for an actual documentation about this method.
pub(crate) fn get_range_u64(&self, start: u64, output: &mut [Item]) {
for (i, out) in output.iter_mut().enumerate() {
*out = self.get_u64(start + (i as u64));
}
}
}
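The doc comment above describes the multivalued layout: a first column of start offsets indexed by doc, a second column of values. A hedged standalone sketch of that two-column lookup over plain vectors (illustrative, not the crate's reader types):

// Illustrative two-column layout: `starts` has one entry per doc plus a final sentinel.
struct MultiValued {
    starts: Vec<u64>,
    vals: Vec<u64>,
}

impl MultiValued {
    fn get_vals(&self, doc: usize) -> &[u64] {
        let start = self.starts[doc] as usize;
        let end = self.starts[doc + 1] as usize;
        &self.vals[start..end]
    }
}

fn main() {
    // doc 0 -> [7, 8], doc 1 -> [], doc 2 -> [9]
    let mv = MultiValued {
        starts: vec![0, 2, 2, 3],
        vals: vec![7, 8, 9],
    };
    assert_eq!(mv.get_vals(0), &[7u64, 8]);
    assert!(mv.get_vals(1).is_empty());
    assert_eq!(mv.get_vals(2), &[9u64]);
}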
impl<Item: FastValue, C: FastFieldDataAccess + Clone> FastFieldReader<Item>
for FastFieldReaderCodecWrapper<Item, C>
{
/// Return the value associated to the given document.
///
/// This accessor should return as fast as possible.
///
/// # Panics
///
/// May panic if `doc` is greater than the segment's
/// `maxdoc`.
fn get(&self, doc: DocId) -> Item {
self.get_u64(u64::from(doc))
}
/// Fills an output buffer with the fast field values
/// associated with the `DocId` going from
/// `start` to `start + output.len()`.
///
/// Regardless of the type of `Item`, this method works by
/// - transmuting the output array
/// - extracting the `Item`s as if they were `u64`
/// - possibly converting the `u64` value to the right type.
///
/// # Panics
///
/// May panic if `start + output.len()` is greater than
/// the segment's `maxdoc`.
fn get_range(&self, start: u64, output: &mut [Item]) {
self.get_range_u64(start, output);
}
/// Returns the minimum value for this fast field.
///
/// The min value does not take into account possibly
/// deleted documents, and should be considered a lower bound
/// of the actual minimum value.
fn min_value(&self) -> Item {
Item::from_u64(self.reader.min_value())
}
/// Returns the maximum value for this fast field.
///
/// The max value does not take into account possibly
/// deleted documents, and should be considered an upper bound
/// of the actual maximum value.
fn max_value(&self) -> Item {
Item::from_u64(self.reader.max_value())
}
}
impl<Item: FastValue> From<Vec<Item>> for DynamicFastFieldReader<Item> {
fn from(vals: Vec<Item>) -> DynamicFastFieldReader<Item> {
let mut schema_builder = Schema::builder();
let field = schema_builder.add_u64_field("field", FAST);
let schema = schema_builder.build();
let path = Path::new("__dummy__");
let directory: RamDirectory = RamDirectory::create();
{
let write: WritePtr = directory
.open_write(path)
.expect("With a RamDirectory, this should never fail.");
let mut serializer = CompositeFastFieldSerializer::from_write(write)
.expect("With a RamDirectory, this should never fail.");
let mut fast_field_writers = FastFieldsWriter::from_schema(&schema);
{
let fast_field_writer = fast_field_writers
.get_field_writer_mut(field)
.expect("With a RamDirectory, this should never fail.");
for val in vals {
fast_field_writer.add_val(val.to_u64());
}
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new(), None)
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(path).expect("Failed to open the file");
let composite_file = CompositeFile::open(&file).expect("Failed to read the composite file");
let field_file = composite_file
.open_read(field)
.expect("File component not found");
DynamicFastFieldReader::open(field_file).unwrap()
}
}

View File

@@ -1,7 +1,7 @@
use super::reader::DynamicFastFieldReader;
use crate::directory::{CompositeFile, FileSlice};
use crate::fastfield::{
BytesFastFieldReader, FastFieldNotAvailableError, FastFieldReaderImpl, FastValue,
MultiValuedFastFieldReader,
BytesFastFieldReader, FastFieldNotAvailableError, FastValue, MultiValuedFastFieldReader,
};
use crate::schema::{Cardinality, Field, FieldType, Schema};
use crate::space_usage::PerFieldSpaceUsage;
@@ -109,15 +109,14 @@ impl FastFieldReaders {
&self,
field: Field,
index: usize,
) -> crate::Result<FastFieldReaderImpl<TFastValue>> {
) -> crate::Result<DynamicFastFieldReader<TFastValue>> {
let fast_field_slice = self.fast_field_data(field, index)?;
let fast_field_data = fast_field_slice.read_bytes()?;
FastFieldReaderImpl::open_from_bytes(fast_field_data)
DynamicFastFieldReader::open(fast_field_slice)
}
pub(crate) fn typed_fast_field_reader<TFastValue: FastValue>(
&self,
field: Field,
) -> crate::Result<FastFieldReaderImpl<TFastValue>> {
) -> crate::Result<DynamicFastFieldReader<TFastValue>> {
self.typed_fast_field_reader_with_idx(field, 0)
}
@@ -133,7 +132,7 @@ impl FastFieldReaders {
/// Returns the `u64` fast field reader reader associated to `field`.
///
/// If `field` is not a u64 fast field, this method returns an Error.
pub fn u64(&self, field: Field) -> crate::Result<FastFieldReaderImpl<u64>> {
pub fn u64(&self, field: Field) -> crate::Result<DynamicFastFieldReader<u64>> {
self.check_type(field, FastType::U64, Cardinality::SingleValue)?;
self.typed_fast_field_reader(field)
}
@@ -143,14 +142,14 @@ impl FastFieldReaders {
///
/// If not, the fastfield reader will returns the u64-value associated to the original
/// FastValue.
pub fn u64_lenient(&self, field: Field) -> crate::Result<FastFieldReaderImpl<u64>> {
pub fn u64_lenient(&self, field: Field) -> crate::Result<DynamicFastFieldReader<u64>> {
self.typed_fast_field_reader(field)
}
/// Returns the `i64` fast field reader reader associated to `field`.
///
/// If `field` is not a i64 fast field, this method returns an Error.
pub fn i64(&self, field: Field) -> crate::Result<FastFieldReaderImpl<i64>> {
pub fn i64(&self, field: Field) -> crate::Result<DynamicFastFieldReader<i64>> {
self.check_type(field, FastType::I64, Cardinality::SingleValue)?;
self.typed_fast_field_reader(field)
}
@@ -158,7 +157,7 @@ impl FastFieldReaders {
/// Returns the `date` fast field reader reader associated to `field`.
///
/// If `field` is not a date fast field, this method returns an Error.
pub fn date(&self, field: Field) -> crate::Result<FastFieldReaderImpl<DateTime>> {
pub fn date(&self, field: Field) -> crate::Result<DynamicFastFieldReader<DateTime>> {
self.check_type(field, FastType::Date, Cardinality::SingleValue)?;
self.typed_fast_field_reader(field)
}
@@ -166,7 +165,7 @@ impl FastFieldReaders {
/// Returns the `f64` fast field reader reader associated to `field`.
///
/// If `field` is not a f64 fast field, this method returns an Error.
pub fn f64(&self, field: Field) -> crate::Result<FastFieldReaderImpl<f64>> {
pub fn f64(&self, field: Field) -> crate::Result<DynamicFastFieldReader<f64>> {
self.check_type(field, FastType::F64, Cardinality::SingleValue)?;
self.typed_fast_field_reader(field)
}
@@ -174,7 +173,7 @@ impl FastFieldReaders {
/// Returns the `bool` fast field reader reader associated to `field`.
///
/// If `field` is not a bool fast field, this method returns an Error.
pub fn bool(&self, field: Field) -> crate::Result<FastFieldReaderImpl<bool>> {
pub fn bool(&self, field: Field) -> crate::Result<DynamicFastFieldReader<bool>> {
self.check_type(field, FastType::Bool, Cardinality::SingleValue)?;
self.typed_fast_field_reader(field)
}
@@ -242,8 +241,7 @@ impl FastFieldReaders {
)));
}
let fast_field_idx_file = self.fast_field_data(field, 0)?;
let fast_field_idx_bytes = fast_field_idx_file.read_bytes()?;
let idx_reader = FastFieldReaderImpl::open_from_bytes(fast_field_idx_bytes)?;
let idx_reader = DynamicFastFieldReader::open(fast_field_idx_file)?;
let data = self.fast_field_data(field, 1)?;
BytesFastFieldReader::open(idx_reader, data)
} else {

View File

@@ -1,13 +1,17 @@
use std::io::{self, Write};
use std::num::NonZeroU64;
use common::{BinarySerializable, CountingWriter};
pub use fastfield_codecs::bitpacked::{
BitpackedFastFieldCodec, BitpackedFastFieldSerializerLegacy,
};
use fastfield_codecs::dynamic::{CodecType, DynamicFastFieldCodec};
pub use fastfield_codecs::{FastFieldCodec, FastFieldStats};
use fastdivide::DividerU64;
pub use fastfield_codecs::bitpacked::{BitpackedCodec, BitpackedSerializerLegacy};
use fastfield_codecs::blockwise_linear::BlockwiseLinearCodec;
use fastfield_codecs::linear::LinearCodec;
use fastfield_codecs::FastFieldCodecType;
pub use fastfield_codecs::{FastFieldCodec, FastFieldDataAccess, FastFieldStats};
use super::{find_gcd, ALL_CODECS, GCD_DEFAULT};
use crate::directory::{CompositeWrite, WritePtr};
use crate::fastfield::gcd::write_gcd_header;
use crate::schema::Field;
/// `CompositeFastFieldSerializer` is in charge of serializing
@@ -32,13 +36,62 @@ use crate::schema::Field;
/// * `close()`
pub struct CompositeFastFieldSerializer {
composite_write: CompositeWrite<WritePtr>,
codec_enable_checker: FastFieldCodecEnableCheck,
}
#[derive(Debug, Clone)]
pub struct FastFieldCodecEnableCheck {
enabled_codecs: Vec<FastFieldCodecType>,
}
impl FastFieldCodecEnableCheck {
fn allow_all() -> Self {
FastFieldCodecEnableCheck {
enabled_codecs: ALL_CODECS.to_vec(),
}
}
fn is_enabled(&self, code_type: FastFieldCodecType) -> bool {
self.enabled_codecs.contains(&code_type)
}
}
impl From<FastFieldCodecType> for FastFieldCodecEnableCheck {
fn from(code_type: FastFieldCodecType) -> Self {
FastFieldCodecEnableCheck {
enabled_codecs: vec![code_type],
}
}
}
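A minimal usage sketch of the enable-check (hypothetical call site; `write` is an assumed `WritePtr` already in scope), relying on the `From` impl above and the `from_write_with_codec` constructor defined further down:
// Restrict codec selection to bitpacking only, e.g. in a test.
let serializer = CompositeFastFieldSerializer::from_write_with_codec(
    write,
    FastFieldCodecType::Bitpacked.into(),
)?;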
// Use this once explicit_generic_args_with_impl_trait is merged and stabilized:
// https://github.com/rust-lang/rust/pull/86176
fn codec_estimation<C: FastFieldCodec, A: FastFieldDataAccess>(
fastfield_accessor: &A,
estimations: &mut Vec<(f32, FastFieldCodecType)>,
) {
if !C::is_applicable(fastfield_accessor) {
return;
}
let ratio = C::estimate(fastfield_accessor);
estimations.push((ratio, C::CODEC_TYPE));
}
impl CompositeFastFieldSerializer {
/// Constructor
pub fn from_write(write: WritePtr) -> io::Result<CompositeFastFieldSerializer> {
Self::from_write_with_codec(write, FastFieldCodecEnableCheck::allow_all())
}
/// Constructor
pub fn from_write_with_codec(
write: WritePtr,
codec_enable_checker: FastFieldCodecEnableCheck,
) -> io::Result<CompositeFastFieldSerializer> {
// just making room for the pointer to header.
let composite_write = CompositeWrite::wrap(write);
Ok(CompositeFastFieldSerializer { composite_write })
Ok(CompositeFastFieldSerializer {
composite_write,
codec_enable_checker,
})
}
/// Serialize data into a new u64 fast field. The best compression codec will be chosen
@@ -46,10 +99,19 @@ impl CompositeFastFieldSerializer {
pub fn create_auto_detect_u64_fast_field(
&mut self,
field: Field,
stats: FastFieldStats,
vals: &[u64],
fastfield_accessor: impl FastFieldDataAccess,
) -> io::Result<()> {
self.create_auto_detect_u64_fast_field_with_idx(field, stats, vals, 0)
self.create_auto_detect_u64_fast_field_with_idx(field, fastfield_accessor, 0)
}
/// Serialize data into a new u64 fast field. The best compression codec will be chosen
/// automatically.
pub fn write_header<W: Write>(
field_write: &mut W,
codec_type: FastFieldCodecType,
) -> io::Result<()> {
codec_type.to_code().serialize(field_write)?;
Ok(())
}
/// Serialize data into a new u64 fast field. The best compression codec will be chosen
@@ -57,12 +119,133 @@ impl CompositeFastFieldSerializer {
pub fn create_auto_detect_u64_fast_field_with_idx(
&mut self,
field: Field,
stats: FastFieldStats,
vals: &[u64],
fastfield_accessor: impl FastFieldDataAccess,
idx: usize,
) -> io::Result<()> {
let min_value = fastfield_accessor.min_value();
let field_write = self.composite_write.for_field_with_idx(field, idx);
DynamicFastFieldCodec.serialize(field_write, vals, stats)?;
let gcd = find_gcd(fastfield_accessor.iter().map(|val| val - min_value))
.map(NonZeroU64::get)
.unwrap_or(GCD_DEFAULT);
if gcd == 1 {
return Self::create_auto_detect_u64_fast_field_with_idx_gcd(
self.codec_enable_checker.clone(),
field,
field_write,
fastfield_accessor,
);
}
Self::write_header(field_write, FastFieldCodecType::Gcd)?;
struct GCDWrappedFFAccess<T: FastFieldDataAccess> {
fastfield_accessor: T,
base_value: u64,
max_value: u64,
num_vals: u64,
gcd: DividerU64,
}
impl<T: FastFieldDataAccess> FastFieldDataAccess for GCDWrappedFFAccess<T> {
fn get_val(&self, position: u64) -> u64 {
self.gcd
.divide(self.fastfield_accessor.get_val(position) - self.base_value)
}
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
Box::new(
self.fastfield_accessor
.iter()
.map(|val| self.gcd.divide(val - self.base_value)),
)
}
fn min_value(&self) -> u64 {
0
}
fn max_value(&self) -> u64 {
self.max_value
}
fn num_vals(&self) -> u64 {
self.num_vals
}
}
let num_vals = fastfield_accessor.num_vals();
let base_value = fastfield_accessor.min_value();
let max_value = (fastfield_accessor.max_value() - fastfield_accessor.min_value()) / gcd;
let fastfield_accessor = GCDWrappedFFAccess {
fastfield_accessor,
base_value,
max_value,
num_vals,
gcd: DividerU64::divide_by(gcd),
};
Self::create_auto_detect_u64_fast_field_with_idx_gcd(
self.codec_enable_checker.clone(),
field,
field_write,
fastfield_accessor,
)?;
write_gcd_header(field_write, base_value, gcd, num_vals)?;
Ok(())
}
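A standalone sketch of the normalization that `GCDWrappedFFAccess` applies before handing values to the inner codec, and of how a reader undoes it with the `(base_value, gcd)` pair written by `write_gcd_header` (plain integer arithmetic, no tantivy types):
fn main() {
    let vals = [1000u64, 1300, 1900, 2500];
    let base = 1000u64; // min value
    let gcd = 300u64;   // gcd of (val - base)

    // What the wrapped accessor hands to the inner codec.
    let stored: Vec<u64> = vals.iter().map(|v| (v - base) / gcd).collect();
    assert_eq!(stored, vec![0, 1, 3, 5]);

    // What a reader reconstructs from the GCD header (base, gcd).
    let decoded: Vec<u64> = stored.iter().map(|&s| base + s * gcd).collect();
    assert_eq!(decoded, vals);
}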
/// Serialize data into a new u64 fast field. The best compression codec will be chosen
/// automatically.
pub fn create_auto_detect_u64_fast_field_with_idx_gcd<W: Write>(
codec_enable_checker: FastFieldCodecEnableCheck,
field: Field,
field_write: &mut CountingWriter<W>,
fastfield_accessor: impl FastFieldDataAccess,
) -> io::Result<()> {
let mut estimations = vec![];
if codec_enable_checker.is_enabled(FastFieldCodecType::Bitpacked) {
codec_estimation::<BitpackedCodec, _>(&fastfield_accessor, &mut estimations);
}
if codec_enable_checker.is_enabled(FastFieldCodecType::Linear) {
codec_estimation::<LinearCodec, _>(&fastfield_accessor, &mut estimations);
}
if codec_enable_checker.is_enabled(FastFieldCodecType::BlockwiseLinear) {
codec_estimation::<BlockwiseLinearCodec, _>(&fastfield_accessor, &mut estimations);
}
if let Some(broken_estimation) = estimations.iter().find(|estimation| estimation.0.is_nan())
{
warn!(
"broken estimation for fast field codec {:?}",
broken_estimation.1
);
}
// removing NaN values (broken estimations) and f32::MAX values, which disable a
// codec
estimations.retain(|estimation| !estimation.0.is_nan() && estimation.0 != f32::MAX);
estimations.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
let (_ratio, codec_type) = estimations[0];
debug!("choosing fast field codec {codec_type:?} for field_id {field:?}"); // todo print actual field name
Self::write_header(field_write, codec_type)?;
match codec_type {
FastFieldCodecType::Bitpacked => {
BitpackedCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::Linear => {
LinearCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::BlockwiseLinear => {
BlockwiseLinearCodec::serialize(field_write, &fastfield_accessor)?;
}
FastFieldCodecType::Gcd => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"GCD codec not supported.",
));
}
}
field_write.flush()?;
Ok(())
}
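A standalone sketch of the selection rule above: NaN and `f32::MAX` estimations are dropped, and the codec with the smallest estimated size ratio wins (the enum here is a stand-in for `FastFieldCodecType`):
#[derive(Debug, PartialEq, Clone, Copy)]
enum Codec { Bitpacked, Linear, BlockwiseLinear }

fn main() {
    let mut estimations = vec![
        (0.32_f32, Codec::Bitpacked),
        (f32::NAN, Codec::Linear),          // broken estimation, dropped
        (0.07_f32, Codec::BlockwiseLinear), // smallest ratio wins
    ];
    estimations.retain(|e| !e.0.is_nan() && e.0 != f32::MAX);
    estimations.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
    assert_eq!(estimations[0].1, Codec::BlockwiseLinear);
}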
@@ -72,7 +255,7 @@ impl CompositeFastFieldSerializer {
field: Field,
min_value: u64,
max_value: u64,
) -> io::Result<BitpackedFastFieldSerializerLegacy<'_, CountingWriter<WritePtr>>> {
) -> io::Result<BitpackedSerializerLegacy<'_, CountingWriter<WritePtr>>> {
self.new_u64_fast_field_with_idx(field, min_value, max_value, 0)
}
@@ -82,7 +265,7 @@ impl CompositeFastFieldSerializer {
field: Field,
min_value: u64,
max_value: u64,
) -> io::Result<BitpackedFastFieldSerializerLegacy<'_, CountingWriter<WritePtr>>> {
) -> io::Result<BitpackedSerializerLegacy<'_, CountingWriter<WritePtr>>> {
self.new_u64_fast_field_with_idx(field, min_value, max_value, 0)
}
@@ -93,11 +276,11 @@ impl CompositeFastFieldSerializer {
min_value: u64,
max_value: u64,
idx: usize,
) -> io::Result<BitpackedFastFieldSerializerLegacy<'_, CountingWriter<WritePtr>>> {
) -> io::Result<BitpackedSerializerLegacy<'_, CountingWriter<WritePtr>>> {
let field_write = self.composite_write.for_field_with_idx(field, idx);
// Prepend codec id to field data for compatibility with DynamicFastFieldReader.
CodecType::Bitpacked.serialize(field_write)?;
BitpackedFastFieldSerializerLegacy::open(field_write, min_value, max_value)
FastFieldCodecType::Bitpacked.serialize(field_write)?;
BitpackedSerializerLegacy::open(field_write, min_value, max_value)
}
/// Start serializing a new [u8] fast field

View File

@@ -1,184 +0,0 @@
// Copyright (C) 2022 Quickwit, Inc.
//
// Quickwit is offered under the AGPL v3.0 and as commercial software.
// For commercial licensing, contact us at hello@quickwit.io.
//
// AGPL:
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
use std::marker::PhantomData;
use fastfield_codecs::dynamic::DynamicFastFieldCodec;
use fastfield_codecs::{FastFieldCodec, FastFieldCodecReader, FastFieldStats};
use ownedbytes::OwnedBytes;
use crate::directory::FileSlice;
use crate::fastfield::{FastFieldReader, FastFieldReaderImpl, FastValue};
use crate::DocId;
/// Wrapper for accessing a fastfield.
///
/// Holds the data and the codec to the read the data.
pub struct FastFieldReaderWrapper<Item: FastValue, Codec: FastFieldCodec> {
reader: Codec::Reader,
_phantom: PhantomData<Item>,
_codec: PhantomData<Codec>,
}
impl<Item: FastValue, Codec: FastFieldCodec> FastFieldReaderWrapper<Item, Codec> {
fn new(reader: Codec::Reader) -> Self {
Self {
reader,
_phantom: PhantomData,
_codec: PhantomData,
}
}
}
impl<Item: FastValue, Codec: FastFieldCodec> Clone for FastFieldReaderWrapper<Item, Codec>
where Codec::Reader: Clone
{
fn clone(&self) -> Self {
Self {
reader: self.reader.clone(),
_phantom: PhantomData,
_codec: PhantomData,
}
}
}
impl<Item: FastValue, C: FastFieldCodec> FastFieldReader<Item> for FastFieldReaderWrapper<Item, C> {
/// Return the value associated to the given document.
///
/// This accessor should return as fast as possible.
///
/// # Panics
///
/// May panic if `doc` is greater than the segment
// `maxdoc`.
fn get(&self, doc: DocId) -> Item {
self.get_u64(u64::from(doc))
}
/// Fills an output buffer with the fast field values
/// associated with the `DocId` going from
/// `start` to `start + output.len()`.
///
/// Regardless of the type of `Item`, this method works
/// - transmuting the output array
/// - extracting the `Item`s as if they were `u64`
/// - possibly converting the `u64` value to the right type.
///
/// # Panics
///
/// May panic if `start + output.len()` is greater than
/// the segment's `maxdoc`.
fn get_range(&self, start: u64, output: &mut [Item]) {
self.get_range_u64(start, output);
}
/// Returns the minimum value for this fast field.
///
/// The max value does not take in account of possible
/// deleted document, and should be considered as an upper bound
/// of the actual maximum value.
fn min_value(&self) -> Item {
Item::from_u64(self.reader.min_value())
}
/// Returns the maximum value for this fast field.
///
/// The max value does not take in account of possible
/// deleted document, and should be considered as an upper bound
/// of the actual maximum value.
fn max_value(&self) -> Item {
Item::from_u64(self.reader.max_value())
}
}
impl<Item: FastValue, Codec: FastFieldCodec> FastFieldReaderWrapper<Item, Codec> {
/// Opens a fast field given a file.
pub fn open(file: FileSlice) -> crate::Result<Self> {
let mut bytes = file.read_bytes()?;
// TODO
// let codec_id = bytes.read_u8();
// assert_eq!(
// 0u8, codec_id,
// "Tried to open fast field as bitpacked encoded (id=1), but got serializer with \
// different id"
// );
Self::open_from_bytes(bytes)
}
/// Opens a fast field given the bytes.
pub fn open_from_bytes(bytes: OwnedBytes) -> crate::Result<Self> {
let reader = Codec::open_from_bytes(bytes)?;
Ok(FastFieldReaderWrapper {
reader,
_codec: PhantomData,
_phantom: PhantomData,
})
}
#[inline]
pub(crate) fn get_u64(&self, doc: u64) -> Item {
let data = self.reader.get_u64(doc);
Item::from_u64(data)
}
/// Internally `multivalued` also use SingleValue Fast fields.
/// It works as follows... A first column contains the list of start index
/// for each document, a second column contains the actual values.
///
/// The values associated to a given doc, are then
/// `second_column[first_column.get(doc)..first_column.get(doc+1)]`.
///
/// Which means single value fast field reader can be indexed internally with
/// something different from a `DocId`. For this use case, we want to use `u64`
/// values.
///
/// See `get_range` for an actual documentation about this method.
pub(crate) fn get_range_u64(&self, start: u64, output: &mut [Item]) {
for (i, out) in output.iter_mut().enumerate() {
*out = self.get_u64(start + (i as u64));
}
}
}
use itertools::Itertools;
impl<Item: FastValue, Arr: AsRef<[Item]>> From<Arr> for FastFieldReaderImpl<Item> {
fn from(vals: Arr) -> FastFieldReaderImpl<Item> {
let mut buffer = Vec::new();
let vals_u64: Vec<u64> = vals.as_ref().iter().map(|val| val.to_u64()).collect();
let (min_value, max_value) = vals_u64
.iter()
.copied()
.minmax()
.into_option()
.expect("Expected non empty");
let stats = FastFieldStats {
min_value,
max_value,
num_vals: vals_u64.len() as u64,
};
DynamicFastFieldCodec
.serialize(&mut buffer, &vals_u64, stats)
.unwrap();
let bytes = OwnedBytes::new(buffer);
let fast_field_reader = DynamicFastFieldCodec::open_from_bytes(bytes).unwrap();
FastFieldReaderImpl::new(fast_field_reader)
}
}

View File

@@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::io;
use common;
use fastfield_codecs::FastFieldDataAccess;
use fnv::FnvHashMap;
use tantivy_bitpacker::BlockedBitpacker;
@@ -217,13 +218,12 @@ impl FastFieldsWriter {
) -> io::Result<()> {
for field_writer in &self.term_id_writers {
let field = field_writer.field();
dbg!("multifield", field);
field_writer.serialize(serializer, mapping.get(&field), doc_id_map)?;
}
for field_writer in &self.single_value_writers {
dbg!("singlefield");
field_writer.serialize(serializer, doc_id_map)?;
}
for field_writer in &self.multi_values_writers {
let field = field_writer.field();
field_writer.serialize(serializer, mapping.get(&field), doc_id_map)?;
@@ -360,26 +360,71 @@ impl IntFastFieldWriter {
(self.val_min, self.val_max)
};
let vals = compute_fast_field_vals(&self.vals, doc_id_map);
let stats = FastFieldStats {
min_value: min,
max_value: max,
num_vals: self.val_count as u64,
};
dbg!(&stats);
dbg!(&vals);
serializer.create_auto_detect_u64_fast_field(self.field, stats, &vals)?;
let fastfield_accessor = WriterFastFieldAccessProvider {
doc_id_map,
vals: &self.vals,
stats,
};
serializer.create_auto_detect_u64_fast_field(self.field, fastfield_accessor)?;
Ok(())
}
}
fn compute_fast_field_vals(vals: &BlockedBitpacker, doc_id_map: Option<&DocIdMapping>) -> Vec<u64> {
if let Some(doc_id_mapping) = doc_id_map {
doc_id_mapping
.iter_old_doc_ids()
.map(|old_doc_id| vals.get(old_doc_id as usize))
.collect()
} else {
vals.iter().collect()
#[derive(Clone)]
struct WriterFastFieldAccessProvider<'map, 'bitp> {
doc_id_map: Option<&'map DocIdMapping>,
vals: &'bitp BlockedBitpacker,
stats: FastFieldStats,
}
impl<'map, 'bitp> FastFieldDataAccess for WriterFastFieldAccessProvider<'map, 'bitp> {
/// Return the value associated to the given doc.
///
/// Whenever possible use the Iterator passed to the fastfield creation instead, for performance
/// reasons.
///
/// # Panics
///
/// May panic if `doc` is out of bounds.
fn get_val(&self, doc: u64) -> u64 {
if let Some(doc_id_map) = self.doc_id_map {
self.vals
.get(doc_id_map.get_old_doc_id(doc as u32) as usize) // consider extra
// FastFieldReader wrapper for
// non doc_id_map
} else {
self.vals.get(doc as usize)
}
}
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
if let Some(doc_id_map) = self.doc_id_map {
Box::new(
doc_id_map
.iter_old_doc_ids()
.map(|doc_id| self.vals.get(doc_id as usize)),
)
} else {
Box::new(self.vals.iter())
}
}
fn min_value(&self) -> u64 {
self.stats.min_value
}
fn max_value(&self) -> u64 {
self.stats.max_value
}
fn num_vals(&self) -> u64 {
self.stats.num_vals
}
}
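A standalone sketch of the remapping this provider performs when a `doc_id_map` is present: values are read in the old-doc-id order given by the mapping, so the serialized column comes out sorted by the new doc ids:
fn main() {
    let vals = [10u64, 20, 30];                    // values indexed by old doc id
    let old_doc_ids_in_new_order = [2usize, 0, 1]; // stand-in for DocIdMapping
    let remapped: Vec<u64> = old_doc_ids_in_new_order
        .iter()
        .map(|&old_doc_id| vals[old_doc_id])
        .collect();
    assert_eq!(remapped, vec![30, 10, 20]);
}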

View File

@@ -2,35 +2,42 @@
//! to get mappings from old doc_id to new doc_id and vice versa, after sorting
use std::cmp::Reverse;
use std::ops::Index;
use super::SegmentWriter;
use crate::schema::{Field, Schema};
use crate::{DocId, IndexSortByField, Order, SegmentOrdinal, TantivyError};
use crate::{DocAddress, DocId, IndexSortByField, Order, TantivyError};
/// Struct to provide mapping from new doc_id to old doc_id and segment.
#[derive(Clone)]
pub(crate) struct SegmentDocIdMapping {
new_doc_id_to_old_and_segment: Vec<(DocId, SegmentOrdinal)>,
new_doc_id_to_old_doc_addr: Vec<DocAddress>,
is_trivial: bool,
}
impl SegmentDocIdMapping {
pub(crate) fn new(
new_doc_id_to_old_and_segment: Vec<(DocId, SegmentOrdinal)>,
is_trivial: bool,
) -> Self {
pub(crate) fn new(new_doc_id_to_old_and_segment: Vec<DocAddress>, is_trivial: bool) -> Self {
Self {
new_doc_id_to_old_and_segment,
new_doc_id_to_old_doc_addr: new_doc_id_to_old_and_segment,
is_trivial,
}
}
pub(crate) fn iter(&self) -> impl Iterator<Item = &(DocId, SegmentOrdinal)> {
self.new_doc_id_to_old_and_segment.iter()
/// Returns an iterator over the old document addresses, ordered by the new document ids.
///
/// In the returned `DocAddress`, the `segment_ord` is the ordinal of the targeted segment
/// in the list of merged segments.
pub(crate) fn iter_old_doc_addrs(&self) -> impl Iterator<Item = DocAddress> + '_ {
self.new_doc_id_to_old_doc_addr.iter().copied()
}
pub(crate) fn len(&self) -> usize {
self.new_doc_id_to_old_and_segment.len()
self.new_doc_id_to_old_doc_addr.len()
}
pub(crate) fn get_old_doc_addr(&self, new_doc_id: DocId) -> DocAddress {
self.new_doc_id_to_old_doc_addr[new_doc_id as usize]
}
/// This flag means the segments are simply stacked in the order of their ordinal.
/// e.g. [(0, 1), .. (n, 1), (0, 2)..., (m, 2)]
///
@@ -39,21 +46,6 @@ impl SegmentDocIdMapping {
self.is_trivial
}
}
impl Index<usize> for SegmentDocIdMapping {
type Output = (DocId, SegmentOrdinal);
fn index(&self, idx: usize) -> &Self::Output {
&self.new_doc_id_to_old_and_segment[idx]
}
}
impl IntoIterator for SegmentDocIdMapping {
type Item = (DocId, SegmentOrdinal);
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.new_doc_id_to_old_and_segment.into_iter()
}
}
/// Struct to provide mapping from old doc_id to new doc_id and vice versa within a segment.
pub struct DocIdMapping {

View File

@@ -10,8 +10,8 @@ use crate::core::{Segment, SegmentReader};
use crate::docset::{DocSet, TERMINATED};
use crate::error::DataCorruption;
use crate::fastfield::{
AliveBitSet, CompositeFastFieldSerializer, FastFieldReader, FastFieldReaderImpl,
FastFieldStats, MultiValueLength, MultiValuedFastFieldReader,
AliveBitSet, CompositeFastFieldSerializer, DynamicFastFieldReader, FastFieldDataAccess,
FastFieldReader, FastFieldStats, MultiValueLength, MultiValuedFastFieldReader,
};
use crate::fieldnorm::{FieldNormReader, FieldNormReaders, FieldNormsSerializer, FieldNormsWriter};
use crate::indexer::doc_id_mapping::{expect_field_id_for_sort_field, SegmentDocIdMapping};
@@ -21,8 +21,8 @@ use crate::schema::{Cardinality, Field, FieldType, Schema};
use crate::store::StoreWriter;
use crate::termdict::{TermMerger, TermOrdinal};
use crate::{
DocId, IndexSettings, IndexSortByField, InvertedIndexReader, Order, SegmentComponent,
SegmentOrdinal,
DocAddress, DocId, IndexSettings, IndexSortByField, InvertedIndexReader, Order,
SegmentComponent, SegmentOrdinal,
};
/// Segment's max doc must be `< MAX_DOC_LIMIT`.
@@ -134,7 +134,7 @@ impl TermOrdinalMapping {
fn max_term_ord(&self) -> TermOrdinal {
self.per_segment_new_term_ordinals
.iter()
.flat_map(|term_ordinals| term_ordinals.iter().cloned().max())
.flat_map(|term_ordinals| term_ordinals.iter().max())
.max()
.unwrap_or_default()
}
@@ -164,30 +164,6 @@ impl DeltaComputer {
}
}
fn compute_sorted_multivalued_vals(
doc_id_mapping: &SegmentDocIdMapping,
fast_field_readers: &Vec<MultiValuedFastFieldReader<u64>>,
) -> Vec<u64> {
let mut vals = Vec::new();
let mut buf: Vec<u64> = Vec::new();
for &(doc_id, segment_ord) in doc_id_mapping.iter() {
fast_field_readers[segment_ord as usize].get_vals(doc_id, &mut buf);
vals.extend_from_slice(&buf);
}
vals
}
fn compute_vals_sorted(
doc_id_mapping: &SegmentDocIdMapping,
fast_field_readers: &[FastFieldReaderImpl<u64>],
) -> Vec<u64> {
let mut vals = Vec::with_capacity(doc_id_mapping.len());
for &(doc_id, segment_ord) in doc_id_mapping.iter() {
vals.push(fast_field_readers[segment_ord as usize].get_u64(doc_id as u64));
}
vals
}
impl IndexMerger {
pub fn open(
schema: Schema,
@@ -284,9 +260,9 @@ impl IndexMerger {
.iter()
.map(|reader| reader.get_fieldnorms_reader(field))
.collect::<Result<_, _>>()?;
for (doc_id, reader_ordinal) in doc_id_mapping.iter() {
let fieldnorms_reader = &fieldnorms_readers[*reader_ordinal as usize];
let fieldnorm_id = fieldnorms_reader.fieldnorm_id(*doc_id);
for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
let fieldnorms_reader = &fieldnorms_readers[old_doc_addr.segment_ord as usize];
let fieldnorm_id = fieldnorms_reader.fieldnorm_id(old_doc_addr.doc_id);
fieldnorms_data.push(fieldnorm_id);
}
@@ -366,7 +342,7 @@ impl IndexMerger {
.readers
.iter()
.filter_map(|reader| {
let u64_reader: FastFieldReaderImpl<u64> =
let u64_reader: DynamicFastFieldReader<u64> =
reader.fast_fields().typed_fast_field_reader(field).expect(
"Failed to find a reader for single fast field. This is a tantivy bug and \
it should never happen.",
@@ -380,7 +356,7 @@ impl IndexMerger {
.readers
.iter()
.map(|reader| {
let u64_reader: crate::fastfield::FastFieldReaderImpl<u64> =
let u64_reader: DynamicFastFieldReader<u64> =
reader.fast_fields().typed_fast_field_reader(field).expect(
"Failed to find a reader for single fast field. This is a tantivy bug and \
it should never happen.",
@@ -394,9 +370,50 @@ impl IndexMerger {
max_value,
num_vals: doc_id_mapping.len() as u64,
};
#[derive(Clone)]
struct SortedDocIdFieldAccessProvider<'a> {
doc_id_mapping: &'a SegmentDocIdMapping,
fast_field_readers: &'a Vec<DynamicFastFieldReader<u64>>,
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for SortedDocIdFieldAccessProvider<'a> {
fn get_val(&self, doc: u64) -> u64 {
let DocAddress {
doc_id,
segment_ord,
} = self.doc_id_mapping.get_old_doc_addr(doc as u32);
self.fast_field_readers[segment_ord as usize].get(doc_id)
}
let vals = compute_vals_sorted(doc_id_mapping, &fast_field_readers);
fast_field_serializer.create_auto_detect_u64_fast_field(field, stats, &vals)?;
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
Box::new(
self.doc_id_mapping
.iter_old_doc_addrs()
.map(|old_doc_addr| {
let fast_field_reader =
&self.fast_field_readers[old_doc_addr.segment_ord as usize];
fast_field_reader.get(old_doc_addr.doc_id)
}),
)
}
fn min_value(&self) -> u64 {
self.stats.min_value
}
fn max_value(&self) -> u64 {
self.stats.max_value
}
fn num_vals(&self) -> u64 {
self.stats.num_vals
}
}
let fastfield_accessor = SortedDocIdFieldAccessProvider {
doc_id_mapping,
fast_field_readers: &fast_field_readers,
stats,
};
fast_field_serializer.create_auto_detect_u64_fast_field(field, fastfield_accessor)?;
Ok(())
}
@@ -427,7 +444,7 @@ impl IndexMerger {
pub(crate) fn get_sort_field_accessor(
reader: &SegmentReader,
sort_by_field: &IndexSortByField,
) -> crate::Result<FastFieldReaderImpl<u64>> {
) -> crate::Result<impl FastFieldReader<u64>> {
let field_id = expect_field_id_for_sort_field(reader.schema(), sort_by_field)?; // for now expect fastfield, but not strictly required
let value_accessor = reader.fast_fields().u64_lenient(field_id)?;
Ok(value_accessor)
@@ -436,7 +453,7 @@ impl IndexMerger {
pub(crate) fn get_reader_with_sort_field_accessor(
&self,
sort_by_field: &IndexSortByField,
) -> crate::Result<Vec<(SegmentOrdinal, FastFieldReaderImpl<u64>)>> {
) -> crate::Result<Vec<(SegmentOrdinal, impl FastFieldReader<u64> + Clone)>> {
let reader_ordinal_and_field_accessors = self
.readers
.iter()
@@ -469,15 +486,11 @@ impl IndexMerger {
let doc_id_reader_pair =
reader_ordinal_and_field_accessors
.iter()
.map(|reader_and_field_accessor| {
let reader = &self.readers[reader_and_field_accessor.0 as usize];
reader.doc_ids_alive().map(move |doc_id| {
(
doc_id,
reader_and_field_accessor.0,
&reader_and_field_accessor.1,
)
})
.map(|(reader_ord, ff_reader)| {
let reader = &self.readers[*reader_ord as usize];
reader
.doc_ids_alive()
.map(move |doc_id| (doc_id, reader_ord, ff_reader))
});
let total_num_new_docs = self
@@ -486,7 +499,7 @@ impl IndexMerger {
.map(|reader| reader.num_docs() as usize)
.sum();
let mut sorted_doc_ids = Vec::with_capacity(total_num_new_docs);
let mut sorted_doc_ids: Vec<DocAddress> = Vec::with_capacity(total_num_new_docs);
// create iterator tuple of (old doc_id, reader) in order of the new doc_ids
sorted_doc_ids.extend(
@@ -501,7 +514,10 @@ impl IndexMerger {
val1 > val2
}
})
.map(|(doc_id, reader_with_id, _)| (doc_id, reader_with_id)),
.map(|(doc_id, &segment_ord, _)| DocAddress {
doc_id,
segment_ord,
}),
);
Ok(SegmentDocIdMapping::new(sorted_doc_ids, false))
}
@@ -548,16 +564,46 @@ impl IndexMerger {
// access on the fly or 2. change the codec api to make random access optional, but
// they both also have major drawbacks.
let mut offsets: Vec<u64> = Vec::with_capacity(doc_id_mapping.len());
let mut offsets = Vec::with_capacity(doc_id_mapping.len());
let mut offset = 0;
for (doc_id, reader) in doc_id_mapping.iter() {
let reader = &reader_and_field_accessors[*reader as usize].1;
for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
let reader = &reader_and_field_accessors[old_doc_addr.segment_ord as usize].1;
offsets.push(offset);
offset += reader.get_len(*doc_id) as u64;
offset += reader.get_len(old_doc_addr.doc_id) as u64;
}
offsets.push(offset);
fast_field_serializer.create_auto_detect_u64_fast_field(field, stats, &offsets[..])?;
#[derive(Clone)]
struct FieldIndexAccessProvider<'a> {
offsets: &'a [u64],
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for FieldIndexAccessProvider<'a> {
fn get_val(&self, doc: u64) -> u64 {
self.offsets[doc as usize]
}
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
Box::new(self.offsets.iter().cloned())
}
fn min_value(&self) -> u64 {
self.stats.min_value
}
fn max_value(&self) -> u64 {
self.stats.max_value
}
fn num_vals(&self) -> u64 {
self.stats.num_vals
}
}
let fastfield_accessor = FieldIndexAccessProvider {
offsets: &offsets,
stats,
};
fast_field_serializer.create_auto_detect_u64_fast_field(field, fastfield_accessor)?;
Ok(offsets)
}
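The `offsets` column written above is a prefix sum of per-document value counts with one trailing entry, so that doc `d`'s values occupy `offsets[d]..offsets[d + 1]`; a standalone sketch:
fn main() {
    let num_vals_per_doc = [2u64, 0, 3, 1];
    let mut offsets = Vec::with_capacity(num_vals_per_doc.len() + 1);
    let mut offset = 0u64;
    for n in num_vals_per_doc {
        offsets.push(offset);
        offset += n;
    }
    offsets.push(offset);
    assert_eq!(offsets, vec![0, 2, 2, 5, 6]);
    // Doc 2 owns positions 2..5 in the values column.
    assert_eq!((offsets[2], offsets[3]), (2, 5));
}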
/// Returns the fastfield index (index for the data, not the data).
@@ -566,7 +612,7 @@ impl IndexMerger {
field: Field,
fast_field_serializer: &mut CompositeFastFieldSerializer,
doc_id_mapping: &SegmentDocIdMapping,
) -> crate::Result<()> {
) -> crate::Result<Vec<u64>> {
let reader_ordinal_and_field_accessors = self
.readers
.iter()
@@ -587,8 +633,7 @@ impl IndexMerger {
fast_field_serializer,
doc_id_mapping,
&reader_ordinal_and_field_accessors,
)?;
Ok(())
)
}
fn write_term_id_fast_field(
@@ -626,12 +671,12 @@ impl IndexMerger {
fast_field_serializer.new_u64_fast_field_with_idx(field, 0u64, max_term_ord, 1)?;
let mut vals = Vec::with_capacity(100);
for (old_doc_id, reader_ordinal) in doc_id_mapping.iter() {
for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
let term_ordinal_mapping: &[TermOrdinal] =
term_ordinal_mappings.get_segment(*reader_ordinal as usize);
term_ordinal_mappings.get_segment(old_doc_addr.segment_ord as usize);
let ff_reader = &fast_field_reader[*reader_ordinal as usize];
ff_reader.get_vals(*old_doc_id, &mut vals);
let ff_reader = &fast_field_reader[old_doc_addr.segment_ord as usize];
ff_reader.get_vals(old_doc_addr.doc_id, &mut vals);
for &prev_term_ord in &vals {
let new_term_ord = term_ordinal_mapping[prev_term_ord as usize];
serialize_vals.add_val(new_term_ord)?;
@@ -652,16 +697,17 @@ impl IndexMerger {
.map(|reader| reader.num_docs() as usize)
.sum();
let mut mapping = Vec::with_capacity(total_num_new_docs);
let mut mapping: Vec<DocAddress> = Vec::with_capacity(total_num_new_docs);
mapping.extend(
self.readers
.iter()
.enumerate()
.flat_map(|(reader_ordinal, reader)| {
reader
.doc_ids_alive()
.map(move |doc_id| (doc_id, reader_ordinal as SegmentOrdinal))
.flat_map(|(segment_ord, reader)| {
reader.doc_ids_alive().map(move |doc_id| DocAddress {
segment_ord: segment_ord as u32,
doc_id,
})
}),
);
Ok(SegmentDocIdMapping::new(mapping, true))
@@ -677,7 +723,12 @@ impl IndexMerger {
// The second contains the actual values.
// First we merge the idx fast field.
self.write_multi_value_fast_field_idx(field, fast_field_serializer, doc_id_mapping)?;
let offsets =
self.write_multi_value_fast_field_idx(field, fast_field_serializer, doc_id_mapping)?;
let mut min_value = u64::MAX;
let mut max_value = u64::MIN;
let mut num_vals = 0;
let mut vals = Vec::with_capacity(100);
@@ -699,18 +750,94 @@ impl IndexMerger {
);
for doc in reader.doc_ids_alive() {
ff_reader.get_vals(doc, &mut vals);
for &val in &vals {
min_value = cmp::min(val, min_value);
max_value = cmp::max(val, max_value);
}
num_vals += vals.len();
}
ff_readers.push(ff_reader);
// TODO optimize when no deletes
}
let vals = compute_sorted_multivalued_vals(doc_id_mapping, &ff_readers);
let stats = FastFieldStats::compute(&vals);
if min_value > max_value {
min_value = 0;
max_value = 0;
}
// We can now initialize our serializer, and push it the different values
let stats = FastFieldStats {
max_value,
num_vals: num_vals as u64,
min_value,
};
struct SortedDocIdMultiValueAccessProvider<'a> {
doc_id_mapping: &'a SegmentDocIdMapping,
fast_field_readers: &'a Vec<MultiValuedFastFieldReader<u64>>,
offsets: Vec<u64>,
stats: FastFieldStats,
}
impl<'a> FastFieldDataAccess for SortedDocIdMultiValueAccessProvider<'a> {
fn get_val(&self, pos: u64) -> u64 {
// use the offsets index to find the doc_id which will contain the position.
// the offsets are strictly increasing so we can do a simple search on it.
let new_doc_id: DocId =
self.offsets
.iter()
.position(|offset| offset > pos)
.expect("pos is out of bounds") as DocId
- 1u32;
// now we need to find the position of `pos` in the multivalued bucket
let num_pos_covered_until_now = self.offsets[new_doc_id as usize];
let pos_in_values = pos - num_pos_covered_until_now;
let old_doc_addr = self.doc_id_mapping.get_old_doc_addr(new_doc_id);
let num_vals = self.fast_field_readers[old_doc_addr.segment_ord as usize]
.get_len(old_doc_addr.doc_id);
assert!(num_vals >= pos_in_values);
let mut vals = Vec::new();
self.fast_field_readers[old_doc_addr.segment_ord as usize]
.get_vals(old_doc_addr.doc_id, &mut vals);
vals[pos_in_values as usize]
}
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
Box::new(
self.doc_id_mapping
.iter_old_doc_addrs()
.flat_map(|old_doc_addr| {
let ff_reader =
&self.fast_field_readers[old_doc_addr.segment_ord as usize];
let mut vals = Vec::new();
ff_reader.get_vals(old_doc_addr.doc_id, &mut vals);
vals.into_iter()
}),
)
}
fn min_value(&self) -> u64 {
self.stats.min_value
}
fn max_value(&self) -> u64 {
self.stats.max_value
}
fn num_vals(&self) -> u64 {
self.stats.num_vals
}
}
let fastfield_accessor = SortedDocIdMultiValueAccessProvider {
doc_id_mapping,
fast_field_readers: &ff_readers,
offsets,
stats,
};
fast_field_serializer.create_auto_detect_u64_fast_field_with_idx(
field,
stats,
&vals[..],
fastfield_accessor,
1,
)?;
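The `get_val` implementation above maps a flat position in the merged values column back to a document and a position within that document via those offsets; the same lookup in isolation:
fn main() {
    let offsets = [0u64, 2, 2, 5, 6]; // one entry per doc, plus a trailing total
    let pos = 3u64;
    // The first offset strictly greater than `pos` identifies the next doc.
    let doc = offsets.iter().position(|&off| off > pos).unwrap() - 1;
    let pos_in_doc = pos - offsets[doc];
    assert_eq!((doc, pos_in_doc), (2, 1)); // position 3 is the 2nd value of doc 2
}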
@@ -743,9 +870,9 @@ impl IndexMerger {
)?;
let mut serialize_vals = fast_field_serializer.new_bytes_fast_field_with_idx(field, 1);
for (doc_id, reader_ordinal) in doc_id_mapping.iter() {
let bytes_reader = &reader_and_field_accessors[*reader_ordinal as usize].1;
let val = bytes_reader.get_bytes(*doc_id);
for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
let bytes_reader = &reader_and_field_accessors[old_doc_addr.segment_ord as usize].1;
let val = bytes_reader.get_bytes(old_doc_addr.doc_id);
serialize_vals.write_all(val)?;
}
@@ -801,9 +928,9 @@ impl IndexMerger {
segment_local_map
})
.collect();
for (new_doc_id, (old_doc_id, segment_ord)) in doc_id_mapping.iter().enumerate() {
let segment_map = &mut merged_doc_id_map[*segment_ord as usize];
segment_map[*old_doc_id as usize] = Some(new_doc_id as DocId);
for (new_doc_id, old_doc_addr) in doc_id_mapping.iter_old_doc_addrs().enumerate() {
let segment_map = &mut merged_doc_id_map[old_doc_addr.segment_ord as usize];
segment_map[old_doc_addr.doc_id as usize] = Some(new_doc_id as DocId);
}
// Note that the total number of tokens is not exact.
@@ -978,15 +1105,15 @@ impl IndexMerger {
.map(|(i, store)| store.iter_raw(self.readers[i].alive_bitset()))
.collect();
for (old_doc_id, reader_ordinal) in doc_id_mapping.iter() {
let doc_bytes_it = &mut document_iterators[*reader_ordinal as usize];
for old_doc_addr in doc_id_mapping.iter_old_doc_addrs() {
let doc_bytes_it = &mut document_iterators[old_doc_addr.segment_ord as usize];
if let Some(doc_bytes_res) = doc_bytes_it.next() {
let doc_bytes = doc_bytes_res?;
store_writer.store_bytes(&doc_bytes)?;
} else {
return Err(DataCorruption::comment_only(&format!(
"unexpected missing document in docstore on merge, doc id {:?}",
old_doc_id
"unexpected missing document in docstore on merge, doc address \
{old_doc_addr:?}",
))
.into());
}

View File

@@ -484,7 +484,7 @@ mod bench_sorted_index_merge {
// use cratedoc_id, readerdoc_id_mappinglet vals = reader.fate::schema;
use crate::fastfield::{DynamicFastFieldReader, FastFieldReader};
use crate::indexer::merger::IndexMerger;
use crate::schema::{Cardinality, Document, NumericOptions, Schema};
use crate::schema::{Cardinality, NumericOptions, Schema};
use crate::{IndexSettings, IndexSortByField, IndexWriter, Order};
fn create_index(sort_by_field: Option<IndexSortByField>) -> Index {
let mut schema_builder = Schema::builder();
@@ -503,9 +503,7 @@ mod bench_sorted_index_merge {
{
let mut index_writer = index.writer_for_tests().unwrap();
let index_doc = |index_writer: &mut IndexWriter, val: u64| {
let mut doc = Document::default();
doc.add_u64(int_field, val);
index_writer.add_document(doc).unwrap();
index_writer.add_document(doc!(int_field=>val)).unwrap();
};
// 3 segments with 10_000 values in the fast fields
for _ in 0..3 {
@@ -518,6 +516,7 @@ mod bench_sorted_index_merge {
}
index
}
#[bench]
fn create_sorted_index_walk_overkmerge_on_merge_fastfield(
b: &mut Bencher,
@@ -533,19 +532,19 @@ mod bench_sorted_index_merge {
IndexMerger::open(index.schema(), index.settings().clone(), &segments[..])?;
let doc_id_mapping = merger.generate_doc_id_mapping(&sort_by_field).unwrap();
b.iter(|| {
let sorted_doc_ids = doc_id_mapping.iter().map(|(doc_id, ordinal)| {
let reader = &merger.readers[*ordinal as usize];
let sorted_doc_ids = doc_id_mapping.iter_old_doc_addrs().map(|doc_addr| {
let reader = &merger.readers[doc_addr.segment_ord as usize];
let u64_reader: DynamicFastFieldReader<u64> =
reader.fast_fields().typed_fast_field_reader(field).expect(
"Failed to find a reader for single fast field. This is a tantivy bug and \
it should never happen.",
);
(doc_id, reader, u64_reader)
(doc_addr.doc_id, reader, u64_reader)
});
// add values in order of the new doc_ids
let mut val = 0;
for (doc_id, _reader, field_reader) in sorted_doc_ids {
val = field_reader.get(*doc_id);
val = field_reader.get(doc_id);
}
val

View File

@@ -11,6 +11,7 @@
#![doc(test(attr(allow(unused_variables), deny(warnings))))]
#![warn(missing_docs)]
#![allow(clippy::len_without_is_empty)]
#![allow(clippy::derive_partial_eq_without_eq)]
//! # `tantivy`
//!

View File

@@ -371,7 +371,7 @@ mod tests {
fn compute_checkpoints_manual(term_scorers: Vec<TermScorer>, n: usize) -> Vec<(DocId, Score)> {
let mut heap: BinaryHeap<Float> = BinaryHeap::with_capacity(n);
let mut checkpoints: Vec<(DocId, Score)> = Vec::new();
let mut scorer: Union<TermScorer, SumCombiner> = Union::from(term_scorers);
let mut scorer = Union::build(term_scorers, SumCombiner::default);
let mut limit = Score::MIN;
loop {

View File

@@ -1,7 +1,5 @@
use std::collections::BTreeMap;
use super::boolean_weight::BooleanWeight;
use crate::query::{Occur, Query, TermQuery, Weight};
use crate::query::{Occur, Query, SumWithCoordsCombiner, TermQuery, Weight};
use crate::schema::{IndexRecordOption, Term};
use crate::Searcher;
@@ -153,12 +151,16 @@ impl Query for BooleanQuery {
Ok((*occur, subquery.weight(searcher, scoring_enabled)?))
})
.collect::<crate::Result<_>>()?;
Ok(Box::new(BooleanWeight::new(sub_weights, scoring_enabled)))
Ok(Box::new(BooleanWeight::new(
sub_weights,
scoring_enabled,
Box::new(SumWithCoordsCombiner::default),
)))
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
for (_occur, subquery) in &self.subqueries {
subquery.query_terms(terms);
subquery.query_terms(visitor);
}
}
}

View File

@@ -3,7 +3,7 @@ use std::collections::HashMap;
use crate::core::SegmentReader;
use crate::postings::FreqReadingOption;
use crate::query::explanation::does_not_match;
use crate::query::score_combiner::{DoNothingCombiner, ScoreCombiner, SumWithCoordsCombiner};
use crate::query::score_combiner::{DoNothingCombiner, ScoreCombiner};
use crate::query::term_query::TermScorer;
use crate::query::weight::{for_each_pruning_scorer, for_each_scorer};
use crate::query::{
@@ -17,8 +17,13 @@ enum SpecializedScorer {
Other(Box<dyn Scorer>),
}
fn scorer_union<TScoreCombiner>(scorers: Vec<Box<dyn Scorer>>) -> SpecializedScorer
where TScoreCombiner: ScoreCombiner {
fn scorer_union<TScoreCombiner>(
scorers: Vec<Box<dyn Scorer>>,
score_combiner_fn: impl Fn() -> TScoreCombiner,
) -> SpecializedScorer
where
TScoreCombiner: ScoreCombiner,
{
assert!(!scorers.is_empty());
if scorers.len() == 1 {
return SpecializedScorer::Other(scorers.into_iter().next().unwrap()); //< we checked the size beforehand
@@ -38,35 +43,45 @@ where TScoreCombiner: ScoreCombiner {
// Block wand is only available if we read frequencies.
return SpecializedScorer::TermUnion(scorers);
} else {
return SpecializedScorer::Other(Box::new(Union::<_, TScoreCombiner>::from(
return SpecializedScorer::Other(Box::new(Union::build(
scorers,
score_combiner_fn,
)));
}
}
}
SpecializedScorer::Other(Box::new(Union::<_, TScoreCombiner>::from(scorers)))
SpecializedScorer::Other(Box::new(Union::build(scorers, score_combiner_fn)))
}
fn into_box_scorer<TScoreCombiner: ScoreCombiner>(scorer: SpecializedScorer) -> Box<dyn Scorer> {
fn into_box_scorer<TScoreCombiner: ScoreCombiner>(
scorer: SpecializedScorer,
score_combiner_fn: impl Fn() -> TScoreCombiner,
) -> Box<dyn Scorer> {
match scorer {
SpecializedScorer::TermUnion(term_scorers) => {
let union_scorer = Union::<TermScorer, TScoreCombiner>::from(term_scorers);
let union_scorer = Union::build(term_scorers, score_combiner_fn);
Box::new(union_scorer)
}
SpecializedScorer::Other(scorer) => scorer,
}
}
pub struct BooleanWeight {
pub struct BooleanWeight<TScoreCombiner: ScoreCombiner> {
weights: Vec<(Occur, Box<dyn Weight>)>,
scoring_enabled: bool,
score_combiner_fn: Box<dyn Fn() -> TScoreCombiner + Sync + Send>,
}
impl BooleanWeight {
pub fn new(weights: Vec<(Occur, Box<dyn Weight>)>, scoring_enabled: bool) -> BooleanWeight {
impl<TScoreCombiner: ScoreCombiner> BooleanWeight<TScoreCombiner> {
pub fn new(
weights: Vec<(Occur, Box<dyn Weight>)>,
scoring_enabled: bool,
score_combiner_fn: Box<dyn Fn() -> TScoreCombiner + Sync + Send + 'static>,
) -> BooleanWeight<TScoreCombiner> {
BooleanWeight {
weights,
scoring_enabled,
score_combiner_fn,
}
}
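With this signature the combiner is chosen at construction time through a boxed factory closure; a hedged construction sketch, assuming `sub_weights: Vec<(Occur, Box<dyn Weight>)>` is in scope (it mirrors the `BooleanQuery` and `DisjunctionMaxQuery` call sites elsewhere in this diff):
let weight = BooleanWeight::new(
    sub_weights,
    true, // scoring enabled
    Box::new(SumWithCoordsCombiner::default),
);
// A stateful combiner can be captured in the closure instead, e.g.
// `Box::new(move || DisjunctionMaxCombiner::with_tie_breaker(tie_breaker))`.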
@@ -86,21 +101,23 @@ impl BooleanWeight {
Ok(per_occur_scorers)
}
fn complex_scorer<TScoreCombiner: ScoreCombiner>(
fn complex_scorer<TComplexScoreCombiner: ScoreCombiner>(
&self,
reader: &SegmentReader,
boost: Score,
score_combiner_fn: impl Fn() -> TComplexScoreCombiner,
) -> crate::Result<SpecializedScorer> {
let mut per_occur_scorers = self.per_occur_scorers(reader, boost)?;
let should_scorer_opt: Option<SpecializedScorer> = per_occur_scorers
.remove(&Occur::Should)
.map(scorer_union::<TScoreCombiner>);
.map(|scorers| scorer_union(scorers, &score_combiner_fn));
let exclude_scorer_opt: Option<Box<dyn Scorer>> = per_occur_scorers
.remove(&Occur::MustNot)
.map(scorer_union::<DoNothingCombiner>)
.map(into_box_scorer::<DoNothingCombiner>);
.map(|scorers| scorer_union(scorers, DoNothingCombiner::default))
.map(|specialized_scorer| {
into_box_scorer(specialized_scorer, DoNothingCombiner::default)
});
let must_scorer_opt: Option<Box<dyn Scorer>> = per_occur_scorers
.remove(&Occur::Must)
@@ -112,10 +129,10 @@ impl BooleanWeight {
SpecializedScorer::Other(Box::new(RequiredOptionalScorer::<
Box<dyn Scorer>,
Box<dyn Scorer>,
TScoreCombiner,
TComplexScoreCombiner,
>::new(
must_scorer,
into_box_scorer::<TScoreCombiner>(should_scorer),
into_box_scorer(should_scorer, &score_combiner_fn),
)))
} else {
SpecializedScorer::Other(must_scorer)
@@ -129,8 +146,7 @@ impl BooleanWeight {
};
if let Some(exclude_scorer) = exclude_scorer_opt {
let positive_scorer_boxed: Box<dyn Scorer> =
into_box_scorer::<TScoreCombiner>(positive_scorer);
let positive_scorer_boxed = into_box_scorer(positive_scorer, &score_combiner_fn);
Ok(SpecializedScorer::Other(Box::new(Exclude::new(
positive_scorer_boxed,
exclude_scorer,
@@ -141,7 +157,7 @@ impl BooleanWeight {
}
}
impl Weight for BooleanWeight {
impl<TScoreCombiner: ScoreCombiner + Sync> Weight for BooleanWeight<TScoreCombiner> {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
if self.weights.is_empty() {
Ok(Box::new(EmptyScorer))
@@ -153,13 +169,15 @@ impl Weight for BooleanWeight {
weight.scorer(reader, boost)
}
} else if self.scoring_enabled {
self.complex_scorer::<SumWithCoordsCombiner>(reader, boost)
self.complex_scorer(reader, boost, &self.score_combiner_fn)
.map(|specialized_scorer| {
into_box_scorer::<SumWithCoordsCombiner>(specialized_scorer)
into_box_scorer(specialized_scorer, &self.score_combiner_fn)
})
} else {
self.complex_scorer::<DoNothingCombiner>(reader, boost)
.map(into_box_scorer::<DoNothingCombiner>)
self.complex_scorer(reader, boost, &DoNothingCombiner::default)
.map(|specialized_scorer| {
into_box_scorer(specialized_scorer, &DoNothingCombiner::default)
})
}
}
@@ -188,11 +206,10 @@ impl Weight for BooleanWeight {
reader: &SegmentReader,
callback: &mut dyn FnMut(DocId, Score),
) -> crate::Result<()> {
let scorer = self.complex_scorer::<SumWithCoordsCombiner>(reader, 1.0)?;
let scorer = self.complex_scorer(reader, 1.0, &self.score_combiner_fn)?;
match scorer {
SpecializedScorer::TermUnion(term_scorers) => {
let mut union_scorer =
Union::<TermScorer, SumWithCoordsCombiner>::from(term_scorers);
let mut union_scorer = Union::build(term_scorers, &self.score_combiner_fn);
for_each_scorer(&mut union_scorer, callback);
}
SpecializedScorer::Other(mut scorer) => {
@@ -218,7 +235,7 @@ impl Weight for BooleanWeight {
reader: &SegmentReader,
callback: &mut dyn FnMut(DocId, Score) -> Score,
) -> crate::Result<()> {
let scorer = self.complex_scorer::<SumWithCoordsCombiner>(reader, 1.0)?;
let scorer = self.complex_scorer(reader, 1.0, &self.score_combiner_fn)?;
match scorer {
SpecializedScorer::TermUnion(term_scorers) => {
super::block_wand(term_scorers, threshold, callback);

View File

@@ -4,6 +4,7 @@ mod boolean_weight;
pub(crate) use self::block_wand::{block_wand, block_wand_single_scorer};
pub use self::boolean_query::BooleanQuery;
pub(crate) use self::boolean_weight::BooleanWeight;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,3 @@
use std::collections::BTreeMap;
use std::fmt;
use crate::fastfield::AliveBitSet;
@@ -49,8 +48,8 @@ impl Query for BoostQuery {
Ok(boosted_weight)
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.query.query_terms(terms)
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
self.query.query_terms(visitor)
}
}

View File

@@ -0,0 +1,176 @@
use std::fmt;
use crate::query::{Explanation, Query, Scorer, Weight};
use crate::{DocId, DocSet, Score, Searcher, SegmentReader, TantivyError, Term};
/// `ConstScoreQuery` is a wrapper over a query to provide a constant score.
/// It can avoid unnecessary score computation on the wrapped query.
///
/// The document set matched by the `ConstScoreQuery` is strictly the same as that of the underlying query.
/// The configured score is used for each document.
pub struct ConstScoreQuery {
query: Box<dyn Query>,
score: Score,
}
impl ConstScoreQuery {
/// Builds a const score query.
pub fn new(query: Box<dyn Query>, score: Score) -> ConstScoreQuery {
ConstScoreQuery { query, score }
}
}
impl Clone for ConstScoreQuery {
fn clone(&self) -> Self {
ConstScoreQuery {
query: self.query.box_clone(),
score: self.score,
}
}
}
impl fmt::Debug for ConstScoreQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Const(score={}, query={:?})", self.score, self.query)
}
}
impl Query for ConstScoreQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let inner_weight = self.query.weight(searcher, scoring_enabled)?;
Ok(if scoring_enabled {
Box::new(ConstWeight::new(inner_weight, self.score))
} else {
inner_weight
})
}
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
self.query.query_terms(visitor);
}
}
struct ConstWeight {
weight: Box<dyn Weight>,
score: Score,
}
impl ConstWeight {
pub fn new(weight: Box<dyn Weight>, score: Score) -> Self {
ConstWeight { weight, score }
}
}
impl Weight for ConstWeight {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
let inner_scorer = self.weight.scorer(reader, boost)?;
Ok(Box::new(ConstScorer::new(inner_scorer, boost * self.score)))
}
fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
let mut scorer = self.scorer(reader, 1.0)?;
if scorer.seek(doc) != doc {
return Err(TantivyError::InvalidArgument(format!(
"Document #({}) does not match",
doc
)));
}
let mut explanation = Explanation::new("Const", self.score);
let underlying_explanation = self.weight.explain(reader, doc)?;
explanation.add_detail(underlying_explanation);
Ok(explanation)
}
fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
self.weight.count(reader)
}
}
/// Wraps a `DocSet` and turns it into a `Scorer` that returns a constant score.
/// The `ConstScorer` is useful if you have a `DocSet` but need a `Scorer`.
///
/// The constant score is set at construction time via `ConstScorer::new`.
pub struct ConstScorer<TDocSet: DocSet> {
docset: TDocSet,
score: Score,
}
impl<TDocSet: DocSet> ConstScorer<TDocSet> {
/// Creates a new `ConstScorer`.
pub fn new(docset: TDocSet, score: Score) -> ConstScorer<TDocSet> {
ConstScorer { docset, score }
}
}
impl<TDocSet: DocSet> From<TDocSet> for ConstScorer<TDocSet> {
fn from(docset: TDocSet) -> Self {
ConstScorer::new(docset, 1.0)
}
}
impl<TDocSet: DocSet> DocSet for ConstScorer<TDocSet> {
fn advance(&mut self) -> DocId {
self.docset.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.docset.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.docset.fill_buffer(buffer)
}
fn doc(&self) -> DocId {
self.docset.doc()
}
fn size_hint(&self) -> u32 {
self.docset.size_hint()
}
}
impl<TDocSet: DocSet + 'static> Scorer for ConstScorer<TDocSet> {
fn score(&mut self) -> Score {
self.score
}
}
#[cfg(test)]
mod tests {
use super::ConstScoreQuery;
use crate::query::{AllQuery, Query};
use crate::schema::Schema;
use crate::{DocAddress, Document, Index};
#[test]
fn test_const_score_query_explain() -> crate::Result<()> {
let schema = Schema::builder().build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests()?;
index_writer.add_document(Document::new())?;
index_writer.commit()?;
let reader = index.reader()?;
let searcher = reader.searcher();
let query = ConstScoreQuery::new(Box::new(AllQuery), 0.42);
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
r#"{
"value": 0.42,
"description": "Const",
"details": [
{
"value": 1.0,
"description": "AllQuery",
"context": []
}
],
"context": []
}"#
);
Ok(())
}
}
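A minimal usage sketch beyond the explain test above, assuming a text field `title` already exists in the schema; every matching document gets the constant score 1.5 instead of its BM25 score:
use tantivy::query::{ConstScoreQuery, TermQuery};
use tantivy::schema::{Field, IndexRecordOption};
use tantivy::Term;

// Hypothetical helper wrapping a term query in a constant-score query.
fn title_query(title: Field) -> ConstScoreQuery {
    let term_query = TermQuery::new(
        Term::from_field_text(title, "diary"),
        IndexRecordOption::Basic,
    );
    ConstScoreQuery::new(Box::new(term_query), 1.5)
}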

View File

@@ -0,0 +1,131 @@
use tantivy_query_grammar::Occur;
use crate::query::{BooleanWeight, DisjunctionMaxCombiner, Query, Weight};
use crate::{Score, Searcher, Term};
/// The disjunction max query returns documents matching one or more wrapped queries,
/// called query clauses or clauses.
///
/// If a returned document matches multiple query clauses,
/// the `DisjunctionMaxQuery` assigns the document the highest relevance score from any matching
/// clause, plus a tie breaking increment for any additional matching subqueries.
///
/// ```rust
/// use tantivy::collector::TopDocs;
/// use tantivy::doc;
/// use tantivy::query::{DisjunctionMaxQuery, Query, QueryClone, TermQuery};
/// use tantivy::schema::{IndexRecordOption, Schema, TEXT};
/// use tantivy::Term;
/// use tantivy::Index;
///
/// fn main() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let body = schema_builder.add_text_field("body", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of Girl",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Girl",
/// ))?;
/// index_writer.commit()?;
/// }
///
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
///
/// // Make TermQuery's for "girl" and "diary" in the title
/// let girl_term_query: Box<dyn Query> = Box::new(TermQuery::new(
/// Term::from_field_text(title, "girl"),
/// IndexRecordOption::Basic,
/// ));
/// let diary_term_query: Box<dyn Query> = Box::new(TermQuery::new(
/// Term::from_field_text(title, "diary"),
/// IndexRecordOption::Basic,
/// ));
///
/// // TermQuery "diary" and "girl" should be present and only one should be accounted in score
/// let queries1 = vec![diary_term_query.box_clone(), girl_term_query.box_clone()];
/// let diary_and_girl = DisjunctionMaxQuery::new(queries1);
/// let documents = searcher.search(&diary_and_girl, &TopDocs::with_limit(3))?;
/// assert_eq!(documents[0].0, documents[1].0);
/// assert_eq!(documents[1].0, documents[2].0);
///
/// // TermQuery "diary" and "girl" should be present
/// // and one should be accounted with multiplier 0.7
/// let queries2 = vec![diary_term_query.box_clone(), girl_term_query.box_clone()];
/// let tie_breaker = 0.7;
/// let diary_and_girl_with_tie_breaker = DisjunctionMaxQuery::with_tie_breaker(queries2, tie_breaker);
/// let documents = searcher.search(&diary_and_girl_with_tie_breaker, &TopDocs::with_limit(3))?;
/// assert_eq!(documents[1].0, documents[2].0);
/// // In this test all terms bring the same score, so we can do easy math and assume that
/// // the `DisjunctionMaxQuery` score with a tie breaker should be equal
/// // to term1 score + `tie_breaker` * term2 score, i.e. (1.0 + tie_breaker) * term score
/// assert!(f32::abs(documents[0].0 - documents[1].0 * (1.0 + tie_breaker)) < 0.001);
/// Ok(())
/// }
/// ```
#[derive(Debug)]
pub struct DisjunctionMaxQuery {
disjuncts: Vec<Box<dyn Query>>,
tie_breaker: Score,
}
impl Clone for DisjunctionMaxQuery {
fn clone(&self) -> Self {
DisjunctionMaxQuery::with_tie_breaker(
self.disjuncts
.iter()
.map(|disjunct| disjunct.box_clone())
.collect::<Vec<_>>(),
self.tie_breaker,
)
}
}
impl Query for DisjunctionMaxQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let disjuncts = self
.disjuncts
.iter()
.map(|disjunct| Ok((Occur::Should, disjunct.weight(searcher, scoring_enabled)?)))
.collect::<crate::Result<_>>()?;
let tie_breaker = self.tie_breaker;
Ok(Box::new(BooleanWeight::new(
disjuncts,
scoring_enabled,
Box::new(move || DisjunctionMaxCombiner::with_tie_breaker(tie_breaker)),
)))
}
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
for disjunct in &self.disjuncts {
disjunct.query_terms(visitor);
}
}
}
impl DisjunctionMaxQuery {
/// Creates a new `DisjunctionMaxQuery` with tie breaker.
pub fn with_tie_breaker(
disjuncts: Vec<Box<dyn Query>>,
tie_breaker: Score,
) -> DisjunctionMaxQuery {
DisjunctionMaxQuery {
disjuncts,
tie_breaker,
}
}
/// Creates a new `DisjunctionMaxQuery` with no tie breaker.
pub fn new(disjuncts: Vec<Box<dyn Query>>) -> DisjunctionMaxQuery {
DisjunctionMaxQuery::with_tie_breaker(disjuncts, 0.0)
}
}

View File

@@ -6,6 +6,8 @@ mod bitset;
mod bm25;
mod boolean_query;
mod boost_query;
mod const_score_query;
mod disjunction_max_query;
mod empty_query;
mod exclude;
mod explanation;
@@ -34,7 +36,10 @@ pub use self::automaton_weight::AutomatonWeight;
pub use self::bitset::BitSetDocSet;
pub(crate) use self::bm25::Bm25Weight;
pub use self::boolean_query::BooleanQuery;
pub(crate) use self::boolean_query::BooleanWeight;
pub use self::boost_query::BoostQuery;
pub use self::const_score_query::{ConstScoreQuery, ConstScorer};
pub use self::disjunction_max_query::DisjunctionMaxQuery;
pub use self::empty_query::{EmptyQuery, EmptyScorer, EmptyWeight};
pub use self::exclude::Exclude;
pub use self::explanation::Explanation;
@@ -49,7 +54,10 @@ pub use self::query_parser::{QueryParser, QueryParserError};
pub use self::range_query::RangeQuery;
pub use self::regex_query::RegexQuery;
pub use self::reqopt_scorer::RequiredOptionalScorer;
pub use self::scorer::{ConstScorer, Scorer};
pub use self::score_combiner::{
DisjunctionMaxCombiner, ScoreCombiner, SumCombiner, SumWithCoordsCombiner,
};
pub use self::scorer::Scorer;
pub use self::term_query::TermQuery;
pub use self::union::Union;
#[cfg(test)]
@@ -58,8 +66,6 @@ pub use self::weight::Weight;
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use crate::query::QueryParser;
use crate::schema::{Schema, TEXT};
use crate::{Index, Term};
@@ -74,49 +80,34 @@ mod tests {
let term_a = Term::from_field_text(text_field, "a");
let term_b = Term::from_field_text(text_field, "b");
{
let mut terms: BTreeMap<Term, bool> = Default::default();
query_parser
.parse_query("a")
.unwrap()
.query_terms(&mut terms);
let terms: Vec<(&Term, &bool)> = terms.iter().collect();
assert_eq!(vec![(&term_a, &false)], terms);
let query = query_parser.parse_query("a").unwrap();
let mut terms = Vec::new();
query.query_terms(&mut |term, pos| terms.push((term, pos)));
assert_eq!(vec![(&term_a, false)], terms);
}
{
let mut terms: BTreeMap<Term, bool> = Default::default();
query_parser
.parse_query("a b")
.unwrap()
.query_terms(&mut terms);
let terms: Vec<(&Term, &bool)> = terms.iter().collect();
assert_eq!(vec![(&term_a, &false), (&term_b, &false)], terms);
let query = query_parser.parse_query("a b").unwrap();
let mut terms = Vec::new();
query.query_terms(&mut |term, pos| terms.push((term, pos)));
assert_eq!(vec![(&term_a, false), (&term_b, false)], terms);
}
{
let mut terms: BTreeMap<Term, bool> = Default::default();
query_parser
.parse_query("\"a b\"")
.unwrap()
.query_terms(&mut terms);
let terms: Vec<(&Term, &bool)> = terms.iter().collect();
assert_eq!(vec![(&term_a, &true), (&term_b, &true)], terms);
let query = query_parser.parse_query("\"a b\"").unwrap();
let mut terms = Vec::new();
query.query_terms(&mut |term, pos| terms.push((term, pos)));
assert_eq!(vec![(&term_a, true), (&term_b, true)], terms);
}
{
let mut terms: BTreeMap<Term, bool> = Default::default();
query_parser
.parse_query("a a a a a")
.unwrap()
.query_terms(&mut terms);
let terms: Vec<(&Term, &bool)> = terms.iter().collect();
assert_eq!(vec![(&term_a, &false)], terms);
let query = query_parser.parse_query("a a a a a").unwrap();
let mut terms = Vec::new();
query.query_terms(&mut |term, pos| terms.push((term, pos)));
assert_eq!(vec![(&term_a, false); 5], terms);
}
{
let mut terms: BTreeMap<Term, bool> = Default::default();
query_parser
.parse_query("a -b")
.unwrap()
.query_terms(&mut terms);
let terms: Vec<(&Term, &bool)> = terms.iter().collect();
assert_eq!(vec![(&term_a, &false), (&term_b, &false)], terms);
let query = query_parser.parse_query("a -b").unwrap();
let mut terms = Vec::new();
query.query_terms(&mut |term, pos| terms.push((term, pos)));
assert_eq!(vec![(&term_a, false), (&term_b, false)], terms);
}
}
}


@@ -1,5 +1,3 @@
use std::collections::BTreeMap;
use super::PhraseWeight;
use crate::core::searcher::Searcher;
use crate::query::bm25::Bm25Weight;
@@ -129,9 +127,9 @@ impl Query for PhraseQuery {
Ok(Box::new(phrase_weight))
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
for (_, term) in &self.phrase_terms {
terms.insert(term.clone(), true);
visitor(term, true);
}
}
}


@@ -1,4 +1,3 @@
use std::collections::BTreeMap;
use std::fmt;
use downcast_rs::impl_downcast;
@@ -67,12 +66,15 @@ pub trait Query: QueryClone + Send + Sync + downcast_rs::Downcast + fmt::Debug {
Ok(result)
}
/// Extract all of the terms associated to the query and insert them in the
/// term set given in arguments.
/// Extract all of the terms associated to the query and pass them to the
/// given closure.
///
/// Each term is associated with a boolean indicating whether
/// Positions are required or not.
fn query_terms(&self, _term_set: &mut BTreeMap<Term, bool>) {}
/// positions are required or not.
///
/// Note that there can be multiple instances of any given term
/// in a query and deduplication must be handled by the visitor.
fn query_terms<'a>(&'a self, _visitor: &mut dyn FnMut(&'a Term, bool)) {}
}
/// Implements `box_clone`.
@@ -98,8 +100,8 @@ impl Query for Box<dyn Query> {
self.as_ref().count(searcher)
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.as_ref().query_terms(terms);
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
self.as_ref().query_terms(visitor);
}
}
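Since deduplication is now the visitor's responsibility, here is a sketch of how a caller can rebuild the old set semantics on top of the new API (the helper name unique_terms is illustrative):

use std::collections::BTreeSet;
use tantivy::query::Query;
use tantivy::Term;

// Sketch: the visitor may be called several times with the same term
// (e.g. for the query "a a a"), so collect into a set when only distinct
// terms matter.
fn unique_terms(query: &dyn Query) -> BTreeSet<Term> {
    let mut terms = BTreeSet::new();
    query.query_terms(&mut |term, _positions_needed| {
        terms.insert(term.clone());
    });
    terms
}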


@@ -184,7 +184,7 @@ pub struct QueryParser {
fn all_negative(ast: &LogicalAst) -> bool {
match ast {
LogicalAst::Leaf(_) => false,
LogicalAst::Boost(ref child_ast, _) => all_negative(&*child_ast),
LogicalAst::Boost(ref child_ast, _) => all_negative(child_ast),
LogicalAst::Clause(children) => children
.iter()
.all(|(ref occur, child)| (*occur == Occur::MustNot) || all_negative(child)),


@@ -77,3 +77,40 @@ impl ScoreCombiner for SumWithCoordsCombiner {
self.score
}
}
/// Takes the maximum score of the different scorers
/// and, if `tie_breaker` is non-zero, adds the scores of the other matches multiplied by `tie_breaker`.
#[derive(Default, Clone, Copy)]
pub struct DisjunctionMaxCombiner {
max: Score,
sum: Score,
tie_breaker: Score,
}
impl DisjunctionMaxCombiner {
/// Creates `DisjunctionMaxCombiner` with tie breaker
pub fn with_tie_breaker(tie_breaker: Score) -> DisjunctionMaxCombiner {
DisjunctionMaxCombiner {
max: 0.0,
sum: 0.0,
tie_breaker,
}
}
}
impl ScoreCombiner for DisjunctionMaxCombiner {
fn update<TScorer: Scorer>(&mut self, scorer: &mut TScorer) {
let score = scorer.score();
self.max = Score::max(score, self.max);
self.sum += score;
}
fn clear(&mut self) {
self.max = 0.0;
self.sum = 0.0;
}
fn score(&self) -> Score {
self.max + (self.sum - self.max) * self.tie_breaker
}
}
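A worked check of the combining formula above; the sub-scores 0.8, 0.5, 0.3 and the tie breaker 0.1 are made up for illustration:

fn main() {
    // max = 0.8, sum = 1.6, so the combined score is
    // max + (sum - max) * tie_breaker = 0.8 + 0.8 * 0.1 = 0.88.
    let (max, sum, tie_breaker): (f32, f32, f32) = (0.8, 1.6, 0.1);
    let score = max + (sum - max) * tie_breaker;
    assert!((score - 0.88).abs() < 1e-6);
}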


@@ -3,7 +3,7 @@ use std::ops::DerefMut;
use downcast_rs::impl_downcast;
use crate::docset::DocSet;
use crate::{DocId, Score};
use crate::Score;
/// Scored set of documents matching a query within a specific segment.
///
@@ -22,55 +22,3 @@ impl Scorer for Box<dyn Scorer> {
self.deref_mut().score()
}
}
/// Wraps a `DocSet` and turns it into a `Scorer` with a constant score.
/// The `ConstScorer` is useful if you have a `DocSet` but
/// need a `Scorer` for it.
///
/// The `ConstScorer`'s constant score is set once, at construction
/// time, via `ConstScorer::new(...)`.
pub struct ConstScorer<TDocSet: DocSet> {
docset: TDocSet,
score: Score,
}
impl<TDocSet: DocSet> ConstScorer<TDocSet> {
/// Creates a new `ConstScorer`.
pub fn new(docset: TDocSet, score: Score) -> ConstScorer<TDocSet> {
ConstScorer { docset, score }
}
}
impl<TDocSet: DocSet> From<TDocSet> for ConstScorer<TDocSet> {
fn from(docset: TDocSet) -> Self {
ConstScorer::new(docset, 1.0)
}
}
impl<TDocSet: DocSet> DocSet for ConstScorer<TDocSet> {
fn advance(&mut self) -> DocId {
self.docset.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.docset.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.docset.fill_buffer(buffer)
}
fn doc(&self) -> DocId {
self.docset.doc()
}
fn size_hint(&self) -> u32 {
self.docset.size_hint()
}
}
impl<TDocSet: DocSet + 'static> Scorer for ConstScorer<TDocSet> {
fn score(&mut self) -> Score {
self.score
}
}
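The block above moves into the new const_score_query module next to ConstScoreQuery, and both stay reachable as tantivy::query::* (see the mod.rs hunk earlier). A hedged sketch of the typical constant-score use; the constructor shape ConstScoreQuery::new(Box<dyn Query>, Score) is an assumption, it is not shown in this diff:

use tantivy::query::{ConstScoreQuery, Query, TermQuery};
use tantivy::schema::{Field, IndexRecordOption};
use tantivy::Term;

// Sketch: wrap an inner query so that every matching document gets the same
// score, filter-style. Constructor shape assumed, not taken from this diff.
fn constant_scored(field: Field, text: &str) -> ConstScoreQuery {
    let inner: Box<dyn Query> = Box::new(TermQuery::new(
        Term::from_field_text(field, text),
        IndexRecordOption::Basic,
    ));
    ConstScoreQuery::new(inner, 1.0)
}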


@@ -1,4 +1,3 @@
use std::collections::BTreeMap;
use std::fmt;
use super::term_weight::TermWeight;
@@ -121,7 +120,7 @@ impl Query for TermQuery {
self.specialized_weight(searcher, scoring_enabled)?,
))
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
terms.insert(self.term.clone(), false);
fn query_terms<'a>(&'a self, visitor: &mut dyn FnMut(&'a Term, bool)) {
visitor(&self.term, false);
}
}


@@ -36,34 +36,6 @@ pub struct Union<TScorer, TScoreCombiner = DoNothingCombiner> {
score: Score,
}
impl<TScorer, TScoreCombiner> From<Vec<TScorer>> for Union<TScorer, TScoreCombiner>
where
TScoreCombiner: ScoreCombiner,
TScorer: Scorer,
{
fn from(docsets: Vec<TScorer>) -> Union<TScorer, TScoreCombiner> {
let non_empty_docsets: Vec<TScorer> = docsets
.into_iter()
.filter(|docset| docset.doc() != TERMINATED)
.collect();
let mut union = Union {
docsets: non_empty_docsets,
bitsets: Box::new([TinySet::empty(); HORIZON_NUM_TINYBITSETS]),
scores: Box::new([TScoreCombiner::default(); HORIZON as usize]),
cursor: HORIZON_NUM_TINYBITSETS,
offset: 0,
doc: 0,
score: 0.0,
};
if union.refill() {
union.advance();
} else {
union.doc = TERMINATED;
}
union
}
}
fn refill<TScorer: Scorer, TScoreCombiner: ScoreCombiner>(
scorers: &mut Vec<TScorer>,
bitsets: &mut [TinySet; HORIZON_NUM_TINYBITSETS],
@@ -90,6 +62,31 @@ fn refill<TScorer: Scorer, TScoreCombiner: ScoreCombiner>(
}
impl<TScorer: Scorer, TScoreCombiner: ScoreCombiner> Union<TScorer, TScoreCombiner> {
pub(crate) fn build(
docsets: Vec<TScorer>,
score_combiner_fn: impl FnOnce() -> TScoreCombiner,
) -> Union<TScorer, TScoreCombiner> {
let non_empty_docsets: Vec<TScorer> = docsets
.into_iter()
.filter(|docset| docset.doc() != TERMINATED)
.collect();
let mut union = Union {
docsets: non_empty_docsets,
bitsets: Box::new([TinySet::empty(); HORIZON_NUM_TINYBITSETS]),
scores: Box::new([score_combiner_fn(); HORIZON as usize]),
cursor: HORIZON_NUM_TINYBITSETS,
offset: 0,
doc: 0,
score: 0.0,
};
if union.refill() {
union.advance();
} else {
union.doc = TERMINATED;
}
union
}
fn refill(&mut self) -> bool {
if let Some(min_doc) = self.docsets.iter().map(DocSet::doc).min() {
self.offset = min_doc;
@@ -179,7 +176,6 @@ where
// The target is outside of the buffered horizon.
// advance all docsets to a doc >= to the target.
#[cfg_attr(feature = "cargo-clippy", allow(clippy::clippy::collapsible_if))]
unordered_drain_filter(&mut self.docsets, |docset| {
if docset.doc() < target {
docset.seek(target);
@@ -266,12 +262,13 @@ mod tests {
let union_vals: Vec<u32> = val_set.into_iter().collect();
let mut union_expected = VecDocSet::from(union_vals);
let make_union = || {
Union::from(
Union::build(
vals.iter()
.cloned()
.map(VecDocSet::from)
.map(|docset| ConstScorer::new(docset, 1.0))
.collect::<Vec<ConstScorer<VecDocSet>>>(),
DoNothingCombiner::default,
)
};
let mut union: Union<_, DoNothingCombiner> = make_union();
@@ -312,13 +309,14 @@ mod tests {
btree_set.extend(docs.iter().cloned());
}
let docset_factory = || {
let res: Box<dyn DocSet> = Box::new(Union::<_, DoNothingCombiner>::from(
let res: Box<dyn DocSet> = Box::new(Union::build(
docs_list
.iter()
.cloned()
.map(VecDocSet::from)
.map(|docset| ConstScorer::new(docset, 1.0))
.collect::<Vec<_>>(),
DoNothingCombiner::default,
));
res
};
@@ -346,10 +344,13 @@ mod tests {
#[test]
fn test_union_skip_corner_case3() {
let mut docset = Union::<_, DoNothingCombiner>::from(vec![
ConstScorer::from(VecDocSet::from(vec![0u32, 5u32])),
ConstScorer::from(VecDocSet::from(vec![1u32, 4u32])),
]);
let mut docset = Union::build(
vec![
ConstScorer::from(VecDocSet::from(vec![0u32, 5u32])),
ConstScorer::from(VecDocSet::from(vec![1u32, 4u32])),
],
DoNothingCombiner::default,
);
assert_eq!(docset.doc(), 0u32);
assert_eq!(docset.seek(0u32), 0u32);
assert_eq!(docset.seek(0u32), 0u32);
@@ -405,12 +406,13 @@ mod bench {
tests::sample_with_seed(100_000, 0.2, 1),
];
bench.iter(|| {
let mut v = Union::<_, DoNothingCombiner>::from(
let mut v = Union::build(
union_docset
.iter()
.map(|doc_ids| VecDocSet::from(doc_ids.clone()))
.map(|docset| ConstScorer::new(docset, 1.0))
.collect::<Vec<_>>(),
DoNothingCombiner::default,
);
while v.doc() != TERMINATED {
v.advance();
@@ -425,12 +427,13 @@ mod bench {
tests::sample_with_seed(100_000, 0.001, 2),
];
bench.iter(|| {
let mut v = Union::<_, DoNothingCombiner>::from(
let mut v = Union::build(
union_docset
.iter()
.map(|doc_ids| VecDocSet::from(doc_ids.clone()))
.map(|docset| ConstScorer::new(docset, 1.0))
.collect::<Vec<_>>(),
DoNothingCombiner::default,
);
while v.doc() != TERMINATED {
v.advance();
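The switch from From<Vec<TScorer>> to Union::build exists so that a combiner carrying runtime state (the tie breaker of DisjunctionMaxCombiner) can be injected, where the old constructor could only reach for Default. A stand-alone illustration of that factory-closure pattern; Union itself is crate-private, so stand-in types are used here instead of tantivy's:

// Stand-in types only: combiner_fn plays the role of score_combiner_fn in
// Union::build, replacing the old reliance on Default::default().
#[derive(Clone, Copy, Default)]
struct DoNothing;

#[derive(Clone, Copy)]
struct DisMax {
    tie_breaker: f32,
}

fn build_with<C: Copy>(combiner_fn: impl FnOnce() -> C) -> [C; 4] {
    // Mirrors `[score_combiner_fn(); HORIZON as usize]` above.
    [combiner_fn(); 4]
}

fn main() {
    // The old Default-based call shape still works...
    let plain = build_with(DoNothing::default);
    // ...and a combiner that needs a runtime parameter can now be threaded through.
    let dismax = build_with(|| DisMax { tie_breaker: 0.1 });
    assert_eq!(plain.len(), dismax.len());
    println!("tie_breaker = {}", dismax[0].tie_breaker);
}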


@@ -164,21 +164,18 @@ impl InnerIndexReader {
doc_store_cache_size: usize,
index: Index,
warming_state: WarmingState,
// The searcher_generation_inventory is not used as a source, but as a target for
// tracking the loaded segments.
searcher_generation_inventory: Inventory<SearcherGeneration>,
) -> crate::Result<Self> {
let searcher_generation_counter: Arc<AtomicU64> = Default::default();
let segment_readers = Self::open_segment_readers(&index)?;
let searcher_generation = Self::create_new_searcher_generation(
&segment_readers,
&searcher_generation_counter,
&searcher_generation_inventory,
);
let searcher = Self::create_searcher(
&index,
doc_store_cache_size,
&warming_state,
searcher_generation,
&searcher_generation_counter,
&searcher_generation_inventory,
)?;
Ok(InnerIndexReader {
doc_store_cache_size,
@@ -204,12 +201,12 @@ impl InnerIndexReader {
Ok(segment_readers)
}
fn create_new_searcher_generation(
fn track_segment_readers_in_inventory(
segment_readers: &[SegmentReader],
searcher_generation_counter: &Arc<AtomicU64>,
searcher_generation_inventory: &Inventory<SearcherGeneration>,
) -> TrackedObject<SearcherGeneration> {
let generation_id = searcher_generation_counter.fetch_add(1, atomic::Ordering::Relaxed);
let generation_id = searcher_generation_counter.fetch_add(1, atomic::Ordering::AcqRel);
let searcher_generation =
SearcherGeneration::from_segment_readers(segment_readers, generation_id);
searcher_generation_inventory.track(searcher_generation)
@@ -219,9 +216,16 @@ impl InnerIndexReader {
index: &Index,
doc_store_cache_size: usize,
warming_state: &WarmingState,
searcher_generation: TrackedObject<SearcherGeneration>,
searcher_generation_counter: &Arc<AtomicU64>,
searcher_generation_inventory: &Inventory<SearcherGeneration>,
) -> crate::Result<Arc<SearcherInner>> {
let segment_readers = Self::open_segment_readers(index)?;
let searcher_generation = Self::track_segment_readers_in_inventory(
&segment_readers,
searcher_generation_counter,
searcher_generation_inventory,
);
let schema = index.schema();
let searcher = Arc::new(SearcherInner::new(
schema,
@@ -236,17 +240,12 @@ impl InnerIndexReader {
}
fn reload(&self) -> crate::Result<()> {
let segment_readers = Self::open_segment_readers(&self.index)?;
let searcher_generation = Self::create_new_searcher_generation(
&segment_readers,
&self.searcher_generation_counter,
&self.searcher_generation_inventory,
);
let searcher = Self::create_searcher(
&self.index,
self.doc_store_cache_size,
&self.warming_state,
searcher_generation,
&self.searcher_generation_counter,
&self.searcher_generation_inventory,
)?;
self.searcher.store(searcher);
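A user-level sketch of the path this change hardens (the refresh helper and the choice of ReloadPolicy::Manual are illustrative): create_searcher and reload now derive both the tracked SearcherGeneration and the searcher's segment readers from a single open_segment_readers() call, so the two can no longer disagree when segments change concurrently.

use tantivy::{Index, ReloadPolicy};

// Sketch: manual reload; after this fix the generation registered in the
// inventory always describes exactly the segments the returned searcher holds.
fn refresh(index: &Index) -> tantivy::Result<()> {
    let reader = index
        .reader_builder()
        .reload_policy(ReloadPolicy::Manual)
        .try_into()?;
    reader.reload()?;
    let _searcher = reader.searcher();
    Ok(())
}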


@@ -7,6 +7,7 @@ use std::string::FromUtf8Error;
use common::BinarySerializable;
use once_cell::sync::Lazy;
use regex::Regex;
use serde::de::Error as _;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
const SLASH_BYTE: u8 = b'/';
@@ -230,7 +231,9 @@ impl Serialize for Facet {
impl<'de> Deserialize<'de> for Facet {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de> {
<&'de str as Deserialize<'de>>::deserialize(deserializer).map(Facet::from)
<Cow<'de, str> as Deserialize<'de>>::deserialize(deserializer).and_then(|path| {
Facet::from_text(&*path).map_err(|err| D::Error::custom(err.to_string()))
})
}
}
@@ -327,4 +330,25 @@ mod tests {
assert!(Facet::from("/").is_prefix_of(&Facet::from("/foobar")));
assert!(!Facet::from("/").is_prefix_of(&Facet::from("/")));
}
#[test]
fn deserialize_from_borrowed_string() {
let facet = serde_json::from_str::<Facet>(r#""/foo/bar""#).unwrap();
assert_eq!(facet, Facet::from_path(["foo", "bar"]));
}
#[test]
fn deserialize_from_owned_string() {
let facet = serde_json::from_str::<Facet>(r#""/foo/\u263A""#).unwrap();
assert_eq!(facet, Facet::from_path(["foo", "☺"]));
}
#[test]
fn deserialize_from_invalid_string() {
let error = serde_json::from_str::<Facet>(r#""foo/bar""#).unwrap_err();
assert_eq!(
error.to_string(),
"Failed to parse the facet string: 'foo/bar'"
);
}
}
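A stand-alone sketch of the serde pattern used above, not tantivy code: deserializing through Cow<str> accepts plain JSON strings as well as strings containing escape sequences, which serde must hand over as an owned String and which the previous &'de str based impl rejected.

use std::borrow::Cow;
use serde::{Deserialize, Deserializer};

// Sketch: works for borrowed input ("/foo/bar") and owned input ("/foo/\u263A") alike.
fn deserialize_path<'de, D>(deserializer: D) -> Result<String, D::Error>
where
    D: Deserializer<'de>,
{
    let path = <Cow<'de, str> as Deserialize<'de>>::deserialize(deserializer)?;
    Ok(path.into_owned())
}

Such a helper could be wired in with #[serde(deserialize_with = "deserialize_path")]; the Facet impl above does the same dance inline and then validates the path with Facet::from_text.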


@@ -1,5 +1,5 @@
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::{BTreeMap, BTreeSet};
use std::ops::Range;
use htmlescape::encode_minimal;
@@ -7,7 +7,7 @@ use htmlescape::encode_minimal;
use crate::query::Query;
use crate::schema::{Field, Value};
use crate::tokenizer::{TextAnalyzer, Token};
use crate::{Document, Score, Searcher};
use crate::{Document, Score, Searcher, Term};
const DEFAULT_MAX_NUM_CHARS: usize = 150;
@@ -79,7 +79,7 @@ impl Snippet {
let mut html = String::new();
let mut start_from: usize = 0;
for item in self.highlighted.iter() {
for item in collapse_overlapped_ranges(&self.highlighted) {
html.push_str(&encode_minimal(&self.fragment[start_from..item.start]));
html.push_str(HIGHLIGHTEN_PREFIX);
html.push_str(&encode_minimal(&self.fragment[item.clone()]));
@@ -186,6 +186,53 @@ fn select_best_fragment_combination(fragments: &[FragmentCandidate], text: &str)
}
}
/// Returns ranges that are collapsed into non-overlapped ranges.
///
/// ## Examples
/// - [0..1, 2..3] -> [0..1, 2..3] # no overlap
/// - [0..1, 1..2] -> [0..1, 1..2] # no overlap
/// - [0..2, 1..2] -> [0..2] # collapsed
/// - [0..2, 1..3] -> [0..3] # collapsed
/// - [0..3, 1..2] -> [0..3] # second range's end is also inside of the first range
///
/// Note: This function assumes `ranges` is sorted by `Range.start` in ascending order.
fn collapse_overlapped_ranges(ranges: &[Range<usize>]) -> Vec<Range<usize>> {
debug_assert!(is_sorted(ranges.iter().map(|range| range.start)));
let mut result = Vec::new();
let mut ranges_it = ranges.iter();
let mut current = match ranges_it.next() {
Some(range) => range.clone(),
None => return result,
};
for range in ranges {
if current.end > range.start {
current = current.start..std::cmp::max(current.end, range.end);
} else {
result.push(current);
current = range.clone();
}
}
result.push(current);
result
}
fn is_sorted(mut it: impl Iterator<Item = usize>) -> bool {
if let Some(first) = it.next() {
let mut prev = first;
for item in it {
if item < prev {
return false;
}
prev = item;
}
}
true
}
/// `SnippetGenerator`
///
/// # Example
@@ -255,19 +302,20 @@ impl SnippetGenerator {
query: &dyn Query,
field: Field,
) -> crate::Result<SnippetGenerator> {
let mut terms = BTreeMap::new();
query.query_terms(&mut terms);
let mut terms_text: BTreeMap<String, Score> = Default::default();
for (term, _) in terms {
if term.field() != field {
continue;
let mut terms: BTreeSet<&Term> = BTreeSet::new();
query.query_terms(&mut |term, _| {
if term.field() == field {
terms.insert(term);
}
});
let mut terms_text: BTreeMap<String, Score> = Default::default();
for term in terms {
let term_str = if let Some(term_str) = term.as_str() {
term_str
} else {
continue;
};
let doc_freq = searcher.doc_freq(&term)?;
let doc_freq = searcher.doc_freq(term)?;
if doc_freq > 0 {
let score = 1.0 / (1.0 + doc_freq as Score);
terms_text.insert(term_str.to_string(), score);
@@ -319,10 +367,10 @@ mod tests {
use maplit::btreemap;
use super::{search_fragments, select_best_fragment_combination};
use super::{collapse_overlapped_ranges, search_fragments, select_best_fragment_combination};
use crate::query::QueryParser;
use crate::schema::{IndexRecordOption, Schema, TextFieldIndexing, TextOptions, TEXT};
use crate::tokenizer::SimpleTokenizer;
use crate::tokenizer::{NgramTokenizer, SimpleTokenizer};
use crate::{Index, SnippetGenerator};
const TEST_TEXT: &str = r#"Rust is a systems programming language sponsored by
@@ -587,4 +635,44 @@ Survey in 2016, 2017, and 2018."#;
}
Ok(())
}
#[test]
fn test_collapse_overlapped_ranges() {
assert_eq!(&collapse_overlapped_ranges(&[0..1, 2..3,]), &[0..1, 2..3]);
assert_eq!(
collapse_overlapped_ranges(&vec![0..1, 1..2,]),
vec![0..1, 1..2]
);
assert_eq!(collapse_overlapped_ranges(&[0..2, 1..2,]), vec![0..2]);
assert_eq!(collapse_overlapped_ranges(&[0..2, 1..3,]), vec![0..3]);
assert_eq!(collapse_overlapped_ranges(&[0..3, 1..2,]), vec![0..3]);
}
#[test]
fn test_snippet_with_overlapped_highlighted_ranges() {
let text = "abc";
let mut terms = BTreeMap::new();
terms.insert(String::from("ab"), 0.9);
terms.insert(String::from("bc"), 1.0);
let fragments = search_fragments(
&From::from(NgramTokenizer::all_ngrams(2, 2)),
text,
&terms,
3,
);
assert_eq!(fragments.len(), 1);
{
let first = &fragments[0];
assert_eq!(first.score, 1.9);
assert_eq!(first.start_offset, 0);
assert_eq!(first.stop_offset, 3);
}
let snippet = select_best_fragment_combination(&fragments[..], text);
assert_eq!(snippet.fragment, "abc");
assert_eq!(snippet.to_html(), "<b>abc</b>");
}
}
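An end-to-end sketch of the user-visible effect (schema, memory budget and the hard-coded document and query are illustrative): with overlapping highlight ranges now collapsed, to_html renders one <b>..</b> span per merged region instead of overlapping tags.

use tantivy::query::QueryParser;
use tantivy::schema::{Schema, TEXT};
use tantivy::{doc, Index, SnippetGenerator};

// Sketch: index one document, run a query and render a highlighted snippet.
fn highlighted() -> tantivy::Result<String> {
    let mut schema_builder = Schema::builder();
    let body = schema_builder.add_text_field("body", TEXT);
    let index = Index::create_in_ram(schema_builder.build());
    let mut writer = index.writer(50_000_000)?;
    writer.add_document(doc!(body => "Rust is a systems programming language"))?;
    writer.commit()?;
    let searcher = index.reader()?.searcher();
    let query = QueryParser::for_index(&index, vec![body])
        .parse_query("systems")
        .expect("hard-coded query parses");
    let generator = SnippetGenerator::create(&searcher, &*query, body)?;
    Ok(generator
        .snippet("Rust is a systems programming language")
        .to_html())
}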