Mirror of https://github.com/quickwit-oss/tantivy.git (synced 2025-12-23 02:29:57 +00:00)
use json in agg tests (#1998)

* switch to JSON in tests, add flat aggregation types
* use method
* clippy
* remove commented file
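The gist of the change: test and example aggregation requests are now deserialized from Elasticsearch-compatible JSON instead of being assembled from the Rust request structs by hand. Below is a minimal sketch of the new style, assuming a tantivy version in which `Aggregations` implements `serde::Deserialize` (the module path and JSON keys are taken from the diff further down; the snippet itself is illustrative and not part of this commit):

```rust
use serde_json::json;
use tantivy::aggregation::agg_req::Aggregations;

fn main() -> serde_json::Result<()> {
    // Old style (removed by this commit): build BucketAggregation / RangeAggregation
    // structs by hand. New style: parse the Elasticsearch-compatible JSON directly.
    let agg_req: Aggregations = serde_json::from_value(json!({
        "range": {
            "range": {
                "field": "score",
                "ranges": [
                    { "from": 3.0, "to": 7.0 },
                    { "from": 7.0, "to": 20.0 }
                ]
            }
        }
    }))?;
    // The parsed request can then be handed to an AggregationCollector as before.
    drop(agg_req);
    Ok(())
}
```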
@@ -193,56 +193,9 @@ fn main() -> tantivy::Result<()> {
    let agg_req: Aggregations = serde_json::from_str(agg_req_str)?;
    let collector = AggregationCollector::from_aggs(agg_req, Default::default());

    let agg_res: AggregationResults = searcher.search(&AllQuery, &collector).unwrap();
    let res2: Value = serde_json::to_value(agg_res)?;

    // ### Request Rust API
    //
    // This is exactly the same request as above, but via the rust structures.
    //

    let agg_req: Aggregations = vec![(
        "group_by_stock".to_string(),
        Aggregation::Bucket(Box::new(BucketAggregation {
            bucket_agg: BucketAggregationType::Range(RangeAggregation {
                field: "stock".to_string(),
                ranges: vec![
                    RangeAggregationRange {
                        key: Some("few".into()),
                        from: None,
                        to: Some(1f64),
                    },
                    RangeAggregationRange {
                        key: Some("some".into()),
                        from: Some(1f64),
                        to: Some(10f64),
                    },
                    RangeAggregationRange {
                        key: Some("many".into()),
                        from: Some(10f64),
                        to: None,
                    },
                ],
                ..Default::default()
            }),
            sub_aggregation: vec![(
                "average_price".to_string(),
                Aggregation::Metric(MetricAggregation::Average(
                    AverageAggregation::from_field_name("price".to_string()),
                )),
            )]
            .into_iter()
            .collect(),
        })),
    )]
    .into_iter()
    .collect();

    let collector = AggregationCollector::from_aggs(agg_req, Default::default());
    // We use the `AllQuery` which will pass all documents to the AggregationCollector.
    let agg_res: AggregationResults = searcher.search(&AllQuery, &collector).unwrap();

    let res1: Value = serde_json::to_value(agg_res)?;
    let res: Value = serde_json::to_value(agg_res)?;

    // ### Aggregation Result
    //
@@ -260,8 +213,7 @@ fn main() -> tantivy::Result<()> {
    }
    "#;
    let expected_json: Value = serde_json::from_str(expected_res)?;
    assert_eq!(expected_json, res1);
    assert_eq!(expected_json, res2);
    assert_eq!(expected_json, res);

    // ### Request 2
    //

@@ -9,25 +9,7 @@
//! # Example
//!
//! ```
//! use tantivy::aggregation::bucket::RangeAggregation;
//! use tantivy::aggregation::agg_req::BucketAggregationType;
//! use tantivy::aggregation::agg_req::{Aggregation, Aggregations};
//! use tantivy::aggregation::agg_req::BucketAggregation;
//! let agg_req1: Aggregations = vec![
//!     (
//!         "range".to_string(),
//!         Aggregation::Bucket(Box::new(BucketAggregation {
//!             bucket_agg: BucketAggregationType::Range(RangeAggregation{
//!                 field: "score".to_string(),
//!                 ranges: vec![(3f64..7f64).into(), (7f64..20f64).into()],
//!                 keyed: false,
//!             }),
//!             sub_aggregation: Default::default(),
//!         })),
//!     ),
//! ]
//! .into_iter()
//! .collect();
//! use tantivy::aggregation::agg_req::Aggregations;
//!
//! let elasticsearch_compatible_json_req = r#"
//! {
@@ -41,8 +23,7 @@
//! }
//! }
//! }"#;
//! let agg_req2: Aggregations = serde_json::from_str(elasticsearch_compatible_json_req).unwrap();
//! assert_eq!(agg_req1, agg_req2);
//! let _agg_req: Aggregations = serde_json::from_str(elasticsearch_compatible_json_req).unwrap();
//! ```

use std::collections::{HashMap, HashSet};
@@ -77,13 +58,16 @@ impl From<Aggregations> for AggregationsInternal {
        let mut buckets = vec![];
        for (key, agg) in aggs {
            match agg {
                Aggregation::Bucket(bucket) => buckets.push((
                    key,
                    BucketAggregationInternal {
                        bucket_agg: bucket.bucket_agg,
                        sub_aggregation: bucket.sub_aggregation.into(),
                    },
                )),
                Aggregation::Bucket(bucket) => {
                    let sub_aggregation = bucket.get_sub_aggs().clone().into();
                    buckets.push((
                        key,
                        BucketAggregationInternal {
                            bucket_agg: bucket.bucket_agg,
                            sub_aggregation,
                        },
                    ))
                }
                Aggregation::Metric(metric) => metrics.push((key, metric)),
            }
        }
@@ -101,10 +85,13 @@ pub(crate) struct BucketAggregationInternal {
    pub bucket_agg: BucketAggregationType,
    /// The sub_aggregations in the buckets. Each bucket will aggregate on the document set in the
    /// bucket.
    pub sub_aggregation: AggregationsInternal,
    sub_aggregation: AggregationsInternal,
}

impl BucketAggregationInternal {
    pub(crate) fn sub_aggregation(&self) -> &AggregationsInternal {
        &self.sub_aggregation
    }
    pub(crate) fn as_range(&self) -> Option<&RangeAggregation> {
        match &self.bucket_agg {
            BucketAggregationType::Range(range) => Some(range),
@@ -184,6 +171,9 @@ pub struct BucketAggregation {
}

impl BucketAggregation {
    pub(crate) fn get_sub_aggs(&self) -> &Aggregations {
        &self.sub_aggregation
    }
    fn get_fast_field_names(&self, fast_field_names: &mut HashSet<String>) {
        let fast_field_name = self.bucket_agg.get_fast_field_name();
        fast_field_names.insert(fast_field_name.to_string());
@@ -311,25 +301,6 @@ mod tests {

    #[test]
    fn serialize_to_json_test() {
        let agg_req1: Aggregations = vec![(
            "range".to_string(),
            Aggregation::Bucket(Box::new(BucketAggregation {
                bucket_agg: BucketAggregationType::Range(RangeAggregation {
                    field: "score".to_string(),
                    ranges: vec![
                        (f64::MIN..3f64).into(),
                        (3f64..7f64).into(),
                        (7f64..20f64).into(),
                        (20f64..f64::MAX).into(),
                    ],
                    keyed: true,
                }),
                sub_aggregation: Default::default(),
            })),
        )]
        .into_iter()
        .collect();

        let elasticsearch_compatible_json_req = r#"{
            "range": {
                "range": {
@@ -354,57 +325,56 @@ mod tests {
|
||||
}
|
||||
}
|
||||
}"#;
|
||||
|
||||
let agg_req1: Aggregations =
|
||||
{ serde_json::from_str(elasticsearch_compatible_json_req).unwrap() };
|
||||
|
||||
let agg_req2: String = serde_json::to_string_pretty(&agg_req1).unwrap();
|
||||
assert_eq!(agg_req2, elasticsearch_compatible_json_req);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_fast_field_names() {
|
||||
let agg_req2: Aggregations = vec![
|
||||
(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score2".to_string(),
|
||||
ranges: vec![
|
||||
(f64::MIN..3f64).into(),
|
||||
(3f64..7f64).into(),
|
||||
(7f64..20f64).into(),
|
||||
(20f64..f64::MAX).into(),
|
||||
],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"metric".to_string(),
|
||||
Aggregation::Metric(MetricAggregation::Average(
|
||||
AverageAggregation::from_field_name("field123".to_string()),
|
||||
)),
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let agg_req1: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score".to_string(),
|
||||
ranges: vec![
|
||||
(f64::MIN..3f64).into(),
|
||||
(3f64..7f64).into(),
|
||||
(7f64..20f64).into(),
|
||||
(20f64..f64::MAX).into(),
|
||||
let range_agg: Aggregation = {
|
||||
serde_json::from_value(json!({
|
||||
"range": {
|
||||
"field": "score",
|
||||
"ranges": [
|
||||
{ "to": 3.0 },
|
||||
{ "from": 3.0, "to": 7.0 },
|
||||
{ "from": 7.0, "to": 20.0 },
|
||||
{ "from": 20.0 }
|
||||
],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: agg_req2,
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
|
||||
}))
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
let agg_req1: Aggregations = {
|
||||
serde_json::from_value(json!({
|
||||
"range1": range_agg,
|
||||
"range2":{
|
||||
"range": {
|
||||
"field": "score2",
|
||||
"ranges": [
|
||||
{ "to": 3.0 },
|
||||
{ "from": 3.0, "to": 7.0 },
|
||||
{ "from": 7.0, "to": 20.0 },
|
||||
{ "from": 20.0 }
|
||||
],
|
||||
},
|
||||
"aggs": {
|
||||
"metric": {
|
||||
"avg": {
|
||||
"field": "field123"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
get_fast_field_names(&agg_req1),
|
||||
|
||||
@@ -175,7 +175,7 @@ pub(crate) fn get_aggs_with_accessor_and_validate(
            key.to_string(),
            BucketAggregationWithAccessor::try_from_bucket(
                &bucket.bucket_agg,
                &bucket.sub_aggregation,
                bucket.get_sub_aggs(),
                reader,
                limits.clone(),
            )?,

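Alongside the JSON switch, the commit (per the "use method" bullet) stops reading `sub_aggregation` as a public field and routes callers such as the collector setup above through `get_sub_aggs()`. A minimal, self-contained sketch of that accessor pattern, using simplified hypothetical types rather than the real tantivy structs:

```rust
use std::collections::HashMap;

// Stand-in for the real aggregation request map; the actual type is richer.
type Aggregations = HashMap<String, String>;

struct BucketAggregation {
    // The field is no longer exposed directly to callers.
    sub_aggregation: Aggregations,
}

impl BucketAggregation {
    // Callers read the sub-aggregations through this accessor instead.
    fn get_sub_aggs(&self) -> &Aggregations {
        &self.sub_aggregation
    }
}

fn main() {
    let bucket = BucketAggregation {
        sub_aggregation: Aggregations::new(),
    };
    assert!(bucket.get_sub_aggs().is_empty());
}
```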
@@ -1,10 +1,7 @@
use serde_json::Value;

use crate::aggregation::agg_req::{
    Aggregation, Aggregations, BucketAggregation, BucketAggregationType, MetricAggregation,
};
use crate::aggregation::agg_req::{Aggregation, Aggregations, MetricAggregation};
use crate::aggregation::agg_result::AggregationResults;
use crate::aggregation::bucket::{RangeAggregation, TermsAggregation};
use crate::aggregation::buf_collector::DOC_BLOCK_SIZE;
use crate::aggregation::collector::AggregationCollector;
use crate::aggregation::intermediate_agg_result::IntermediateAggregationResults;
@@ -210,43 +207,23 @@ fn test_aggregation_level1() -> crate::Result<()> {
|
||||
IndexRecordOption::Basic,
|
||||
);
|
||||
|
||||
let range_agg = |field_name: &str| -> Aggregation {
|
||||
serde_json::from_value(json!({
|
||||
"range": {
|
||||
"field": field_name,
|
||||
"ranges": [ { "from": 3.0f64, "to": 7.0f64 }, { "from": 7.0f64, "to": 20.0f64 } ]
|
||||
}
|
||||
}))
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
let agg_req_1: Aggregations = vec![
|
||||
("average_i64".to_string(), get_avg_req("score_i64")),
|
||||
("average_f64".to_string(), get_avg_req("score_f64")),
|
||||
("average".to_string(), get_avg_req("score")),
|
||||
(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score".to_string(),
|
||||
ranges: vec![(3f64..7f64).into(), (7f64..20f64).into()],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"rangef64".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
ranges: vec![(3f64..7f64).into(), (7f64..20f64).into()],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"rangei64".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score_i64".to_string(),
|
||||
ranges: vec![(3f64..7f64).into(), (7f64..20f64).into()],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
),
|
||||
("range".to_string(), range_agg("score")),
|
||||
("rangef64".to_string(), range_agg("score_f64")),
|
||||
("rangei64".to_string(), range_agg("score_i64")),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
@@ -295,7 +272,6 @@ fn test_aggregation_level1() -> crate::Result<()> {
|
||||
fn test_aggregation_level2(
|
||||
merge_segments: bool,
|
||||
use_distributed_collector: bool,
|
||||
use_elastic_json_req: bool,
|
||||
) -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(merge_segments)?;
|
||||
|
||||
@@ -312,23 +288,7 @@ fn test_aggregation_level2(
|
||||
IndexRecordOption::Basic,
|
||||
);
|
||||
|
||||
let sub_agg_req: Aggregations = vec![
|
||||
("average_in_range".to_string(), get_avg_req("score")),
|
||||
(
|
||||
"term_agg".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "text".to_string(),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = if use_elastic_json_req {
|
||||
let elasticsearch_compatible_json_req = r#"
|
||||
let elasticsearch_compatible_json_req = r#"
|
||||
{
|
||||
"rangef64": {
|
||||
"range": {
|
||||
@@ -383,61 +343,7 @@ fn test_aggregation_level2(
|
||||
}
|
||||
}
|
||||
"#;
|
||||
let value: Aggregations = serde_json::from_str(elasticsearch_compatible_json_req).unwrap();
|
||||
value
|
||||
} else {
|
||||
let agg_req: Aggregations = vec![
|
||||
("average".to_string(), get_avg_req("score")),
|
||||
(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score".to_string(),
|
||||
ranges: vec![
|
||||
(3f64..7f64).into(),
|
||||
(7f64..19f64).into(),
|
||||
(19f64..20f64).into(),
|
||||
],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg_req.clone(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"rangef64".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
ranges: vec![
|
||||
(3f64..7f64).into(),
|
||||
(7f64..19f64).into(),
|
||||
(19f64..20f64).into(),
|
||||
],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg_req.clone(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"rangei64".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "score_i64".to_string(),
|
||||
ranges: vec![
|
||||
(3f64..7f64).into(),
|
||||
(7f64..19f64).into(),
|
||||
(19f64..20f64).into(),
|
||||
],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg_req,
|
||||
})),
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
agg_req
|
||||
};
|
||||
let agg_req: Aggregations = serde_json::from_str(elasticsearch_compatible_json_req).unwrap();
|
||||
|
||||
let agg_res: AggregationResults = if use_distributed_collector {
|
||||
let collector =
|
||||
@@ -518,42 +424,22 @@ fn test_aggregation_level2(
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_multi_segments() -> crate::Result<()> {
|
||||
test_aggregation_level2(false, false, false)
|
||||
test_aggregation_level2(false, false)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_single_segment() -> crate::Result<()> {
|
||||
test_aggregation_level2(true, false, false)
|
||||
test_aggregation_level2(true, false)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_multi_segments_distributed_collector() -> crate::Result<()> {
|
||||
test_aggregation_level2(false, true, false)
|
||||
test_aggregation_level2(false, true)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_single_segment_distributed_collector() -> crate::Result<()> {
|
||||
test_aggregation_level2(true, true, false)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_multi_segments_use_json() -> crate::Result<()> {
|
||||
test_aggregation_level2(false, false, true)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_single_segment_use_json() -> crate::Result<()> {
|
||||
test_aggregation_level2(true, false, true)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_multi_segments_distributed_collector_use_json() -> crate::Result<()> {
|
||||
test_aggregation_level2(false, true, true)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_aggregation_level2_single_segment_distributed_collector_use_json() -> crate::Result<()> {
|
||||
test_aggregation_level2(true, true, true)
|
||||
test_aggregation_level2(true, true)
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -618,18 +504,16 @@ fn test_aggregation_on_json_object() {
|
||||
index_writer.commit().unwrap();
|
||||
let reader = index.reader().unwrap();
|
||||
let searcher = reader.searcher();
|
||||
let agg: Aggregations = vec![(
|
||||
"jsonagg".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "json.color".to_string(),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let agg: Aggregations = serde_json::from_value(json!({
|
||||
"jsonagg": {
|
||||
"terms": {
|
||||
"field": "json.color",
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let aggregation_collector = get_collector(agg);
|
||||
let aggregation_results = searcher.search(&AllQuery, &aggregation_collector).unwrap();
|
||||
let aggregation_res_json = serde_json::to_value(aggregation_results).unwrap();
|
||||
@@ -687,18 +571,15 @@ fn test_aggregation_on_json_object_empty_columns() {
|
||||
|
||||
let reader = index.reader().unwrap();
|
||||
let searcher = reader.searcher();
|
||||
let agg: Aggregations = vec![(
|
||||
"jsonagg".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "json.color".to_string(),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let agg: Aggregations = serde_json::from_value(json!({
|
||||
"jsonagg": {
|
||||
"terms": {
|
||||
"field": "json.color",
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let aggregation_collector = get_collector(agg);
|
||||
let aggregation_results = searcher.search(&AllQuery, &aggregation_collector).unwrap();
|
||||
|
||||
@@ -539,10 +539,7 @@ mod tests {
|
||||
use serde_json::Value;
|
||||
|
||||
use super::*;
|
||||
use crate::aggregation::agg_req::{
|
||||
Aggregation, Aggregations, BucketAggregation, BucketAggregationType, MetricAggregation,
|
||||
};
|
||||
use crate::aggregation::metric::{AverageAggregation, StatsAggregation};
|
||||
use crate::aggregation::agg_req::Aggregations;
|
||||
use crate::aggregation::tests::{
|
||||
exec_request, exec_request_with_query, exec_request_with_query_and_memory_limit,
|
||||
get_test_index_2_segments, get_test_index_from_values, get_test_index_with_num_docs,
|
||||
@@ -554,20 +551,16 @@ mod tests {
|
||||
|
||||
let index = get_test_index_from_values(false, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_interval".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 3.5,
|
||||
offset: Some(0.0),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_interval": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 3.5,
|
||||
"offset": 0.0,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -580,20 +573,16 @@ mod tests {
|
||||
assert_eq!(res["my_interval"]["buckets"][9], Value::Null);
|
||||
|
||||
// With offset
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_interval".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 3.5,
|
||||
offset: Some(1.2),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_interval": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 3.5,
|
||||
"offset": 1.2,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -629,19 +618,15 @@ mod tests {
|
||||
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_interval".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_interval": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -664,19 +649,15 @@ mod tests {
|
||||
fn histogram_simple_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -694,19 +675,15 @@ mod tests {
|
||||
fn histogram_memory_limit() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(true, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 0.1,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 0.1,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query_and_memory_limit(
|
||||
agg_req,
|
||||
@@ -731,19 +708,15 @@ mod tests {
|
||||
|
||||
let index = get_test_index_from_values(false, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -771,20 +744,16 @@ mod tests {
|
||||
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
min_doc_count: Some(2),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"min_doc_count": 2,
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -809,23 +778,19 @@ mod tests {
|
||||
let values = vec![5.0];
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -841,20 +806,19 @@ mod tests {
|
||||
let values = vec![5.0, 5.5];
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds { min: 3.0, max: 6.0 }),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 3.0,
|
||||
"max": 6.0,
|
||||
},
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -872,21 +836,23 @@ mod tests {
|
||||
let values = vec![15.0];
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(Box::new(BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds { min: 3.0, max: 6.0 }),
|
||||
hard_bounds: Some(HistogramBounds { min: 3.0, max: 6.0 }),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
})),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 3.0,
|
||||
"max": 6.0,
|
||||
},
|
||||
"hard_bounds": {
|
||||
"min": 3.0,
|
||||
"max": 6.0,
|
||||
},
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -916,26 +882,19 @@ mod tests {
|
||||
|
||||
let index = get_test_index_from_values(merge_segments, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
hard_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"hard_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -950,30 +909,23 @@ mod tests {
|
||||
|
||||
// hard_bounds and extended_bounds will act like a force bounds
|
||||
//
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
hard_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
extended_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
"hard_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -986,31 +938,24 @@ mod tests {
|
||||
|
||||
assert_eq!(res["histogram"]["buckets"][11], Value::Null);
|
||||
|
||||
// Invalid request
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
hard_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
extended_bounds: Some(HistogramBounds {
|
||||
min: 1.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
// invalid request
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 1.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
"hard_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index).unwrap_err();
|
||||
assert_eq!(
|
||||
@@ -1035,22 +980,15 @@ mod tests {
|
||||
fn histogram_empty_result_behaviour_test_with_opt(merge_segments: bool) -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(merge_segments)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req.clone(), &index, Some(("text", "blubberasdf")))?;
|
||||
|
||||
@@ -1086,26 +1024,19 @@ mod tests {
|
||||
// Don't merge empty segments
|
||||
let index = get_test_index_from_values(false, &values)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1117,27 +1048,24 @@ mod tests {
|
||||
assert_eq!(res["histogram"]["buckets"][10]["key"], 12.0);
|
||||
assert_eq!(res["histogram"]["buckets"][10]["doc_count"], 0);
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds { min: 2.0, max: 5.0 }),
|
||||
hard_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 5.0,
|
||||
},
|
||||
"hard_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1149,26 +1077,20 @@ mod tests {
|
||||
assert_eq!(res["histogram"]["buckets"][10], Value::Null);
|
||||
|
||||
// hard_bounds will not extend the result
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
hard_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"hard_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1181,43 +1103,27 @@ mod tests {
|
||||
})
|
||||
);
|
||||
|
||||
let agg_req: Aggregations = vec![
|
||||
(
|
||||
"stats".to_string(),
|
||||
Aggregation::Metric(MetricAggregation::Stats(StatsAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
})),
|
||||
),
|
||||
(
|
||||
"avg".to_string(),
|
||||
Aggregation::Metric(MetricAggregation::Average(AverageAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
})),
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let sub_agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"stats": { "stats": { "field": "score_f64", } },
|
||||
"avg": { "avg": { "field": "score_f64", } }
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 1.0,
|
||||
extended_bounds: Some(HistogramBounds {
|
||||
min: 2.0,
|
||||
max: 12.0,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: agg_req,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 1.0,
|
||||
"extended_bounds": {
|
||||
"min": 2.0,
|
||||
"max": 12.0,
|
||||
},
|
||||
},
|
||||
"aggs": sub_agg_req
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1262,22 +1168,15 @@ mod tests {
|
||||
fn histogram_single_bucket_test_with_opt(merge_segments: bool) -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(merge_segments)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 100000.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 100000.0,
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1303,22 +1202,15 @@ mod tests {
|
||||
fn histogram_date_test_with_opt(merge_segments: bool) -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(merge_segments)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "date".to_string(),
|
||||
interval: 86400000000.0, // one day in microseconds
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "date",
|
||||
"interval": 86400000000.0, // one day in microseconds
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -1354,22 +1246,15 @@ mod tests {
|
||||
fn histogram_invalid_request() -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(true)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 0.0,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 0.0,
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_res = exec_request(agg_req, &index);
|
||||
|
||||
@@ -1381,24 +1266,16 @@ mod tests {
|
||||
#[test]
|
||||
fn histogram_keyed_buckets_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"histogram".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Histogram(HistogramAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
interval: 50.0,
|
||||
keyed: true,
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"histogram": {
|
||||
"histogram": {
|
||||
"field": "score_f64",
|
||||
"interval": 50.0,
|
||||
"keyed": true
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
|
||||
|
||||
@@ -445,10 +445,7 @@ mod tests {
|
||||
use serde_json::Value;
|
||||
|
||||
use super::*;
|
||||
use crate::aggregation::agg_req::{
|
||||
Aggregation, Aggregations, BucketAggregation, BucketAggregationType, MetricAggregation,
|
||||
};
|
||||
use crate::aggregation::metric::AverageAggregation;
|
||||
use crate::aggregation::agg_req::Aggregations;
|
||||
use crate::aggregation::tests::{
|
||||
exec_request, exec_request_with_query, get_test_index_2_segments,
|
||||
get_test_index_with_num_docs,
|
||||
@@ -478,22 +475,18 @@ mod tests {
|
||||
fn range_fraction_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "fraction_f64".to_string(),
|
||||
ranges: vec![(0f64..0.1f64).into(), (0.1f64..0.2f64).into()],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"range": {
|
||||
"range": {
|
||||
"field": "fraction_f64",
|
||||
"ranges": [
|
||||
{"from": 0.0, "to": 0.1},
|
||||
{"from": 0.1, "to": 0.2},
|
||||
]
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req, &index, None)?;
|
||||
|
||||
@@ -513,31 +506,25 @@ mod tests {
|
||||
fn range_fraction_test_with_sub_agg() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let sub_agg_req: Aggregations = vec![(
|
||||
"score_f64".to_string(),
|
||||
Aggregation::Metric(MetricAggregation::Average(
|
||||
AverageAggregation::from_field_name("score_f64".to_string()),
|
||||
)),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let sub_agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"avg": { "avg": { "field": "score_f64", } }
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "fraction_f64".to_string(),
|
||||
ranges: vec![(0f64..0.1f64).into(), (0.1f64..0.2f64).into()],
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg_req,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"range": {
|
||||
"range": {
|
||||
"field": "fraction_f64",
|
||||
"ranges": [
|
||||
{"from": 0.0, "to": 0.1},
|
||||
{"from": 0.1, "to": 0.2},
|
||||
]
|
||||
},
|
||||
"aggs": sub_agg_req
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req, &index, None)?;
|
||||
|
||||
@@ -557,22 +544,19 @@ mod tests {
|
||||
fn range_keyed_buckets_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "fraction_f64".to_string(),
|
||||
ranges: vec![(0f64..0.1f64).into(), (0.1f64..0.2f64).into()],
|
||||
keyed: true,
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"range": {
|
||||
"range": {
|
||||
"field": "fraction_f64",
|
||||
"ranges": [
|
||||
{"from": 0.0, "to": 0.1},
|
||||
{"from": 0.1, "to": 0.2},
|
||||
],
|
||||
"keyed": true
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req, &index, None)?;
|
||||
|
||||
@@ -597,33 +581,19 @@ mod tests {
|
||||
fn range_custom_key_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "fraction_f64".to_string(),
|
||||
ranges: vec![
|
||||
RangeAggregationRange {
|
||||
key: Some("custom-key-0-to-0.1".to_string()),
|
||||
from: Some(0f64),
|
||||
to: Some(0.1f64),
|
||||
},
|
||||
RangeAggregationRange {
|
||||
key: None,
|
||||
from: Some(0.1f64),
|
||||
to: Some(0.2f64),
|
||||
},
|
||||
],
|
||||
keyed: false,
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"range": {
|
||||
"range": {
|
||||
"field": "fraction_f64",
|
||||
"ranges": [
|
||||
{"key": "custom-key-0-to-0.1", "from": 0.0, "to": 0.1},
|
||||
{"from": 0.1, "to": 0.2},
|
||||
],
|
||||
"keyed": false
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req, &index, None)?;
|
||||
|
||||
@@ -657,33 +627,19 @@ mod tests {
|
||||
fn range_date_test_with_opt(merge_segments: bool) -> crate::Result<()> {
|
||||
let index = get_test_index_2_segments(merge_segments)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"date_ranges".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "date".to_string(),
|
||||
ranges: vec![
|
||||
RangeAggregationRange {
|
||||
key: None,
|
||||
from: None,
|
||||
to: Some(1546300800000000.0f64),
|
||||
},
|
||||
RangeAggregationRange {
|
||||
key: None,
|
||||
from: Some(1546300800000000.0f64),
|
||||
to: Some(1546387200000000.0f64),
|
||||
},
|
||||
],
|
||||
keyed: false,
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"date_ranges": {
|
||||
"range": {
|
||||
"field": "date",
|
||||
"ranges": [
|
||||
{"to": 1546300800000000i64},
|
||||
{"from": 1546300800000000i64, "to": 1546387200000000i64},
|
||||
],
|
||||
"keyed": false
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let agg_res = exec_request(agg_req, &index)?;
|
||||
|
||||
@@ -722,26 +678,18 @@ mod tests {
|
||||
fn range_custom_key_keyed_buckets_test() -> crate::Result<()> {
|
||||
let index = get_test_index_with_num_docs(false, 100)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"range".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Range(RangeAggregation {
|
||||
field: "fraction_f64".to_string(),
|
||||
ranges: vec![RangeAggregationRange {
|
||||
key: Some("custom-key-0-to-0.1".to_string()),
|
||||
from: Some(0f64),
|
||||
to: Some(0.1f64),
|
||||
}],
|
||||
keyed: true,
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"range": {
|
||||
"range": {
|
||||
"field": "fraction_f64",
|
||||
"ranges": [
|
||||
{"key": "custom-key-0-to-0.1", "from": 0.0, "to": 0.1},
|
||||
],
|
||||
"keyed": true
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request_with_query(agg_req, &index, None)?;
|
||||
|
||||
|
||||
@@ -522,10 +522,7 @@ pub(crate) fn cut_off_buckets<T: GetDocCount + Debug>(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::aggregation::agg_req::{
|
||||
Aggregation, Aggregations, BucketAggregation, BucketAggregationType, MetricAggregation,
|
||||
};
|
||||
use crate::aggregation::agg_req::{Aggregation, Aggregations, MetricAggregation};
|
||||
use crate::aggregation::metric::{AverageAggregation, StatsAggregation};
|
||||
use crate::aggregation::tests::{
|
||||
exec_request, exec_request_with_query, exec_request_with_query_and_memory_limit,
|
||||
@@ -554,21 +551,14 @@ mod tests {
|
||||
];
|
||||
let index = get_test_index_from_terms(merge_segments, &segment_and_terms)?;
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_texts".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "string_id".to_string(),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_texts": {
|
||||
"terms": {
|
||||
"field": "string_id",
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
|
||||
@@ -579,23 +569,16 @@ mod tests {
|
||||
assert_eq!(res["my_texts"]["buckets"][2]["doc_count"], 1);
|
||||
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);
|
||||
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_texts".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "string_id".to_string(),
|
||||
size: Some(2),
|
||||
split_size: Some(2),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_texts": {
|
||||
"terms": {
|
||||
"field": "string_id",
|
||||
"size": 2,
|
||||
"segment_size": 2
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
|
||||
@@ -609,23 +592,16 @@ mod tests {
|
||||
assert_eq!(res["my_texts"]["sum_other_doc_count"], 1);
|
||||
|
||||
// test min_doc_count
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_texts".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "string_id".to_string(),
|
||||
size: Some(2),
|
||||
min_doc_count: Some(3),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: Default::default(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_texts": {
|
||||
"terms": {
|
||||
"field": "string_id",
|
||||
"size": 2,
|
||||
"min_doc_count": 3,
|
||||
},
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
|
||||
@@ -662,7 +638,7 @@ mod tests {
|
||||
];
|
||||
let index = get_test_index_from_values_and_terms(merge_segments, &segment_and_terms)?;
|
||||
|
||||
let sub_agg: Aggregations = vec![
|
||||
let _sub_agg: Aggregations = vec![
|
||||
(
|
||||
"avg_score".to_string(),
|
||||
Aggregation::Metric(MetricAggregation::Average(
|
||||
@@ -679,26 +655,33 @@ mod tests {
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
// sub agg desc
|
||||
let agg_req: Aggregations = vec![(
|
||||
"my_texts".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "string_id".to_string(),
|
||||
order: Some(CustomOrder {
|
||||
order: Order::Asc,
|
||||
target: OrderTarget::Count,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg.clone(),
|
||||
let sub_agg: Aggregations = serde_json::from_value(json!({
|
||||
"avg_score": {
|
||||
"avg": {
|
||||
"field": "score",
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
},
|
||||
"stats_score": {
|
||||
"stats": {
|
||||
"field": "score",
|
||||
}
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
// sub agg desc
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_texts": {
|
||||
"terms": {
|
||||
"field": "string_id",
|
||||
"order": {
|
||||
"_count": "asc",
|
||||
},
|
||||
},
|
||||
"aggs": sub_agg,
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
assert_eq!(res["my_texts"]["buckets"][0]["key"], "termb");
|
||||
@@ -717,61 +700,37 @@ mod tests {
|
||||
|
||||
// Agg on non string
|
||||
//
|
||||
let agg_req: Aggregations = vec![
|
||||
(
|
||||
"my_scores1".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "score".to_string(),
|
||||
order: Some(CustomOrder {
|
||||
order: Order::Asc,
|
||||
target: OrderTarget::Count,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg.clone(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
),
|
||||
(
|
||||
"my_scores2".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "score_f64".to_string(),
|
||||
order: Some(CustomOrder {
|
||||
order: Order::Asc,
|
||||
target: OrderTarget::Count,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg.clone(),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
),
|
||||
(
|
||||
"my_scores3".to_string(),
|
||||
Aggregation::Bucket(
|
||||
BucketAggregation {
|
||||
bucket_agg: BucketAggregationType::Terms(TermsAggregation {
|
||||
field: "score_i64".to_string(),
|
||||
order: Some(CustomOrder {
|
||||
order: Order::Asc,
|
||||
target: OrderTarget::Count,
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
sub_aggregation: sub_agg,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let agg_req: Aggregations = serde_json::from_value(json!({
|
||||
"my_scores1": {
|
||||
"terms": {
|
||||
"field": "score",
|
||||
"order": {
|
||||
"_count": "asc",
|
||||
},
|
||||
},
|
||||
"aggs": sub_agg,
|
||||
},
|
||||
"my_scores2": {
|
||||
"terms": {
|
||||
"field": "score_f64",
|
||||
"order": {
|
||||
"_count": "asc",
|
||||
},
|
||||
},
|
||||
"aggs": sub_agg,
|
||||
},
|
||||
"my_scores3": {
|
||||
"terms": {
|
||||
"field": "score_i64",
|
||||
"order": {
|
||||
"_count": "asc",
|
||||
},
|
||||
},
|
||||
"aggs": sub_agg,
|
||||
}
|
||||
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let res = exec_request(agg_req, &index)?;
|
||||
assert_eq!(res["my_scores1"]["buckets"][0]["key"], 8.0);
|
||||
@@ -848,43 +807,33 @@ mod tests {
];
let index = get_test_index_from_values_and_terms(merge_segments, &segment_and_terms)?;

let sub_agg: Aggregations = vec![
    (
        "avg_score".to_string(),
        Aggregation::Metric(MetricAggregation::Average(
            AverageAggregation::from_field_name("score".to_string()),
        )),
    ),
    (
        "stats_score".to_string(),
        Aggregation::Metric(MetricAggregation::Stats(StatsAggregation::from_field_name(
            "score".to_string(),
        ))),
    ),
]
.into_iter()
.collect();
let sub_agg: Aggregations = serde_json::from_value(json!({
    "avg_score": {
        "avg": {
            "field": "score",
        }
    },
    "stats_score": {
        "stats": {
            "field": "score",
        }
    }
}))
.unwrap();

// sub agg desc
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Desc,
                    target: OrderTarget::SubAggregation("avg_score".to_string()),
                }),
                ..Default::default()
            }),
            sub_aggregation: sub_agg.clone(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "avg_score": "desc"
            }
        },
        "aggs": sub_agg,
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "termb");
@@ -902,25 +851,19 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);

// sub agg asc
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::SubAggregation("avg_score".to_string()),
                }),
                ..Default::default()
            }),
            sub_aggregation: sub_agg.clone(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
//
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "avg_score": "asc"
            }
        },
        "aggs": sub_agg,
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
@@ -939,25 +882,18 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);

// sub agg multi value asc
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::SubAggregation("stats_score.avg".to_string()),
                }),
                ..Default::default()
            }),
            sub_aggregation: sub_agg.clone(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "stats_score.avg": "asc"
            }
        },
        "aggs": sub_agg,
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;

@@ -976,25 +912,18 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);

// sub agg invalid request
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::SubAggregation("doesnotexist".to_string()),
                }),
                ..Default::default()
            }),
            sub_aggregation: sub_agg,
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "doesnotexist": "asc"
            }
        },
        "aggs": sub_agg,
    }
}))
.unwrap();

let res = exec_request(agg_req, &index);
assert!(res.is_err());
@@ -1026,25 +955,17 @@ mod tests {
let index = get_test_index_from_values_and_terms(merge_segments, &segment_and_terms)?;

// key asc
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::Key,
                }),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "asc"
            }
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
@@ -1056,26 +977,18 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);

// key desc and size cut_off
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::Key,
                }),
                size: Some(2),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "asc"
            },
            "size": 2
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
@@ -1090,27 +1003,19 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 3);

// key asc and segment_size cut_off
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Asc,
                    target: OrderTarget::Key,
                }),
                size: Some(2),
                segment_size: Some(2),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "asc"
            },
            "size": 2,
            "segment_size": 2
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "terma");
@@ -1123,25 +1028,17 @@ mod tests {
);

// key desc
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Desc,
                    target: OrderTarget::Key,
                }),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "desc"
            },
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "termc");
@@ -1153,26 +1050,18 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 0);

// key desc, size cut_off
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Desc,
                    target: OrderTarget::Key,
                }),
                size: Some(2),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "desc"
            },
            "size": 2
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "termc");
@@ -1186,27 +1075,19 @@ mod tests {
assert_eq!(res["my_texts"]["sum_other_doc_count"], 5);

// key desc, segment_size cut_off
let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                order: Some(CustomOrder {
                    order: Order::Desc,
                    target: OrderTarget::Key,
                }),
                size: Some(2),
                segment_size: Some(2),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "order": {
                "_key": "desc"
            },
            "size": 2,
            "segment_size": 2
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
assert_eq!(res["my_texts"]["buckets"][0]["key"], "termc");
@@ -1230,22 +1111,15 @@ mod tests {

let index = get_test_index_from_terms(false, &terms_per_segment)?;

let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                min_doc_count: Some(0),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "min_doc_count": 0,
        },
    }
}))
.unwrap();

// searching for terma, but min_doc_count will return all terms
let res = exec_request_with_query(agg_req, &index, Some(("string_id", "terma")))?;
@@ -1273,23 +1147,16 @@ mod tests {

let index = get_test_index_from_terms(false, &terms_per_segment)?;

let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                size: Some(2),
                segment_size: Some(2),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "size": 2,
            "segment_size": 2
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;
@@ -1306,24 +1173,17 @@ mod tests {

// disable doc_count_error_upper_bound

let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "string_id".to_string(),
                size: Some(2),
                segment_size: Some(2),
                show_term_doc_count_error: Some(false),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "size": 2,
            "segment_size": 2,
            "show_term_doc_count_error": false
        },
    }
}))
.unwrap();

let res = exec_request(agg_req, &index)?;

@@ -1343,19 +1203,15 @@ mod tests {

let index = get_test_index_from_terms(true, &terms_per_segment)?;

let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(Box::new(BucketAggregation {
        bucket_agg: BucketAggregationType::Terms(TermsAggregation {
            field: "string_id".to_string(),
            min_doc_count: Some(0),
            ..Default::default()
        }),
        sub_aggregation: Default::default(),
    })),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "string_id",
            "min_doc_count": 0,
        },
    }
}))
.unwrap();

let res = exec_request_with_query_and_memory_limit(
    agg_req,
@@ -1377,22 +1233,15 @@ mod tests {

let index = get_test_index_from_terms(true, &[terms])?;

let agg_req: Aggregations = vec![(
    "my_texts".to_string(),
    Aggregation::Bucket(
        BucketAggregation {
            bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                field: "text_id".to_string(),
                min_doc_count: Some(0),
                ..Default::default()
            }),
            sub_aggregation: Default::default(),
        }
        .into(),
    ),
)]
.into_iter()
.collect();
let agg_req: Aggregations = serde_json::from_value(json!({
    "my_texts": {
        "terms": {
            "field": "text_id",
            "min_doc_count": 0,
        },
    }
}))
.unwrap();

let res = exec_request_with_query(agg_req, &index, None).unwrap();
println!("{}", serde_json::to_string_pretty(&res).unwrap());
@@ -1408,27 +1257,19 @@ mod tests {

#[test]
fn test_json_format() -> crate::Result<()> {
    let agg_req: Aggregations = vec![(
        "term_agg_test".to_string(),
        Aggregation::Bucket(
            BucketAggregation {
                bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                    field: "string_id".to_string(),
                    size: Some(2),
                    segment_size: Some(2),
                    order: Some(CustomOrder {
                        target: OrderTarget::Key,
                        order: Order::Desc,
                    }),
                    ..Default::default()
                }),
                sub_aggregation: Default::default(),
            }
            .into(),
        ),
    )]
    .into_iter()
    .collect();
    let agg_req: Aggregations = serde_json::from_value(json!({
        "term_agg_test": {
            "terms": {
                "field": "string_id",
                "size": 2,
                "segment_size": 2,
                "order": {
                    "_key": "desc"
                }
            },
        }
    }))
    .unwrap();

    let elasticsearch_compatible_json = json!(
        {
@@ -1458,22 +1299,15 @@ mod tests {
    });

    // test alias shard_size, split_size
    let agg_req: Aggregations = vec![(
        "term_agg_test".to_string(),
        Aggregation::Bucket(
            BucketAggregation {
                bucket_agg: BucketAggregationType::Terms(TermsAggregation {
                    field: "string_id".to_string(),
                    split_size: Some(2),
                    ..Default::default()
                }),
                sub_aggregation: Default::default(),
            }
            .into(),
        ),
    )]
    .into_iter()
    .collect();
    let agg_req: Aggregations = serde_json::from_value(json!({
        "term_agg_test": {
            "terms": {
                "field": "string_id",
                "split_size": 2,
            },
        }
    }))
    .unwrap();

    let agg_req_deser: Aggregations =
        serde_json::from_str(&serde_json::to_string(&elasticsearch_compatible_json).unwrap())
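Beyond `_key` and `_count`, the test hunks above also order buckets by a sub-aggregation, either by its name (e.g. `avg_score`) or by a `sub_agg.metric` path (e.g. `stats_score.avg`). A minimal sketch of the path form, with illustrative field and aggregation names:

use serde_json::json;
use tantivy::aggregation::agg_req::Aggregations;

// Terms buckets over `string_id`, ordered ascending by the `avg` value of
// the `stats_score` sub-aggregation -- the "<sub_agg>.<metric>" order
// target exercised in the tests above.
fn order_by_sub_aggregation_metric() -> serde_json::Result<Aggregations> {
    serde_json::from_value(json!({
        "my_texts": {
            "terms": {
                "field": "string_id",
                "order": { "stats_score.avg": "asc" }
            },
            "aggs": {
                "stats_score": { "stats": { "field": "score" } }
            }
        }
    }))
}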
@@ -365,7 +365,7 @@ impl IntermediateBucketResult {
.into_values()
.map(|bucket| {
    bucket.into_final_bucket_entry(
        &req.sub_aggregation,
        req.sub_aggregation(),
        req.as_range()
            .expect("unexpected aggregation, expected histogram aggregation"),
        range_res.column_type,
@@ -407,7 +407,7 @@ impl IntermediateBucketResult {
    buckets,
    column_type,
    histogram_req,
    &req.sub_aggregation,
    req.sub_aggregation(),
    limits,
)?;

@@ -426,7 +426,7 @@ impl IntermediateBucketResult {
IntermediateBucketResult::Terms(terms) => terms.into_final_result(
    req.as_term()
        .expect("unexpected aggregation, expected term aggregation"),
    &req.sub_aggregation,
    req.sub_aggregation(),
    limits,
),
}
@@ -261,15 +261,10 @@ impl SegmentAggregationCollector for SegmentStatsCollector {
#[cfg(test)]
mod tests {

    use std::iter;

    use serde_json::Value;

    use crate::aggregation::agg_req::{
        Aggregation, Aggregations, BucketAggregation, BucketAggregationType, MetricAggregation,
    };
    use crate::aggregation::agg_req::{Aggregation, Aggregations, MetricAggregation};
    use crate::aggregation::agg_result::AggregationResults;
    use crate::aggregation::bucket::RangeAggregation;
    use crate::aggregation::metric::StatsAggregation;
    use crate::aggregation::tests::{get_test_index_2_segments, get_test_index_from_values};
    use crate::aggregation::AggregationCollector;
@@ -362,6 +357,23 @@ mod tests {
        IndexRecordOption::Basic,
    );

    let range_agg: Aggregation = {
        serde_json::from_value(json!({
            "range": {
                "field": "score",
                "ranges": [ { "from": 3.0f64, "to": 7.0f64 }, { "from": 7.0f64, "to": 19.0f64 }, { "from": 19.0f64, "to": 20.0f64 } ]
            },
            "aggs": {
                "stats": {
                    "stats": {
                        "field": "score"
                    }
                }
            }
        }))
        .unwrap()
    };

    let agg_req_1: Aggregations = vec![
        (
            "stats_i64".to_string(),
@@ -381,30 +393,7 @@ mod tests {
                "score".to_string(),
            ))),
        ),
        (
            "range".to_string(),
            Aggregation::Bucket(
                BucketAggregation {
                    bucket_agg: BucketAggregationType::Range(RangeAggregation {
                        field: "score".to_string(),
                        ranges: vec![
                            (3f64..7f64).into(),
                            (7f64..19f64).into(),
                            (19f64..20f64).into(),
                        ],
                        ..Default::default()
                    }),
                    sub_aggregation: iter::once((
                        "stats".to_string(),
                        Aggregation::Metric(MetricAggregation::Stats(
                            StatsAggregation::from_field_name("score".to_string()),
                        )),
                    ))
                    .collect(),
                }
                .into(),
            ),
        ),
        ("range".to_string(), range_agg),
    ]
    .into_iter()
    .collect();

@@ -463,7 +463,7 @@ mod tests {
    let index = Index::create_in_ram(schema_builder.build());
    {
        // let mut index_writer = index.writer_for_tests()?;
        let mut index_writer = index.writer_with_num_threads(1, 30_000_000)?;
        let mut index_writer = index.writer_with_num_threads(1, 20_000_000)?;
        index_writer.set_merge_policy(Box::new(NoMergePolicy));
        for values in segment_and_values {
            for (i, term) in values {

@@ -30,7 +30,7 @@ impl FastFieldsWriter {
    /// Create all `FastFieldWriter` required by the schema.
    #[cfg(test)]
    pub fn from_schema(schema: &Schema) -> crate::Result<FastFieldsWriter> {
        FastFieldsWriter::from_schema_and_tokenizer_manager(&schema, TokenizerManager::new())
        FastFieldsWriter::from_schema_and_tokenizer_manager(schema, TokenizerManager::new())
    }

    /// Create all `FastFieldWriter` required by the schema.