Mirror of https://github.com/quickwit-oss/tantivy.git (synced 2026-01-06 09:12:55 +00:00)
Add bench to reproduce performance drop on array of texts.
New file: benches/hdfs_with_array.json (100000 lines) — file diff suppressed because it is too large.
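Since the diff of the JSON fixture is suppressed, here is a minimal, hypothetical sketch of the kind of document the new benchmark case parses: a log line in which one text field holds an array of strings. The field names and values below are illustrative assumptions, not the actual contents of hdfs_with_array.json; the tantivy calls mirror the ones used in the benchmark diff that follows.

// Hypothetical illustration only: field names and the JSON line are assumed,
// not taken from hdfs_with_array.json. The tantivy calls match those used in
// the benchmark below.
use tantivy::schema::{Schema, INDEXED, STORED, STRING, TEXT};
use tantivy::Index;

fn main() {
    let mut schema_builder = Schema::builder();
    schema_builder.add_u64_field("timestamp", INDEXED);
    schema_builder.add_text_field("severity_text", STRING);
    schema_builder.add_text_field("body", TEXT | STORED);
    // Assumed multi-valued field: a JSON array of strings is parsed into
    // several values of this text field on the same document.
    schema_builder.add_text_field("tags", TEXT);
    let schema = schema_builder.build();

    // One made-up log line carrying an array of texts.
    let doc_json = r#"{"timestamp": 1, "severity_text": "INFO",
        "body": "Received block blk_42 of size 67108864",
        "tags": ["hdfs", "datanode", "block"]}"#;

    let index = Index::create_in_ram(schema.clone());
    let index_writer = index.writer_with_num_threads(1, 100_000_000).unwrap();
    let doc = schema.parse_document(doc_json).unwrap();
    index_writer.add_document(doc).unwrap();
}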
@@ -4,6 +4,7 @@ use tantivy::schema::{INDEXED, STORED, STRING, TEXT};
 use tantivy::Index;
 
 const HDFS_LOGS: &str = include_str!("hdfs.json");
+const HDFS_LOGS_WITH_ARRAY: &str = include_str!("hdfs_with_array.json");
 const NUM_REPEATS: usize = 2;
 
 pub fn hdfs_index_benchmark(c: &mut Criterion) {
@@ -41,6 +42,18 @@ pub fn hdfs_index_benchmark(c: &mut Criterion) {
             }
         })
     });
+    group.bench_function("index-hdfs-with-array-no-commit", |b| {
+        b.iter(|| {
+            let index = Index::create_in_ram(schema.clone());
+            let index_writer = index.writer_with_num_threads(1, 100_000_000).unwrap();
+            for _ in 0..NUM_REPEATS {
+                for doc_json in HDFS_LOGS_WITH_ARRAY.trim().split("\n") {
+                    let doc = schema.parse_document(doc_json).unwrap();
+                    index_writer.add_document(doc).unwrap();
+                }
+            }
+        })
+    });
     group.bench_function("index-hdfs-with-commit", |b| {
         b.iter(|| {
             let index = Index::create_in_ram(schema.clone());
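Assuming the file is wired through Criterion's standard bench harness (as the `Criterion` parameter and `bench_function` calls suggest), the new case should run alongside the existing ones via `cargo bench`, and can be narrowed by name with a filter such as `cargo bench -- index-hdfs-with-array-no-commit`.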