mirror of https://github.com/quickwit-oss/tantivy.git
synced 2026-01-08 10:02:55 +00:00

Compare commits: 3 Commits, 0.25.0 ... unit-test-

| Author | SHA1 | Date | |
|---|---|---|---|
| | 1b38a9ba62 | | |
| | c37af9c1ff | | |
| | 33794a114c | | |
RELEASE.md (27 changed lines)

@@ -1,4 +1,4 @@
-# Release a new Tantivy Version
+# Releasing a new Tantivy Version

## Steps

@@ -10,12 +10,29 @@
6. Set git tag with new version


-In conjucation with `cargo-release` Steps 1-4 (I'm not sure if the change detection works):
-Set new packages to version 0.0.0
+[`cargo-release`](https://github.com/crate-ci/cargo-release) will help us with steps 1-5:

Replace prev-tag-name
```bash
-cargo release --workspace --no-publish -v --prev-tag-name 0.19 --push-remote origin minor --no-tag --execute
+cargo release --workspace --no-publish -v --prev-tag-name 0.24 --push-remote origin minor --no-tag
```

-no-tag or it will create tags for all the subpackages
+`no-tag` or it will create tags for all the subpackages

+cargo release will _not_ ignore unchanged packages, but it will print warnings for them.
+e.g. "warning: updating ownedbytes to 0.10.0 despite no changes made since tag 0.24"
+
+We need to manually ignore these unchanged packages
+```bash
+cargo release --workspace --no-publish -v --prev-tag-name 0.24 --push-remote origin minor --no-tag --exclude tokenizer-api
+```
+
+Add `--execute` to actually publish the packages, otherwise it will only print the commands that would be run.
+
+### Tag Version
+```bash
+git tag 0.25.0
+git push upstream tag 0.25.0
+```


@@ -155,7 +155,7 @@ fn test_aggregation_flushing(
        searcher.search(&AllQuery, &collector).unwrap()
    };

-    let res: Value = serde_json::from_str(&serde_json::to_string(&agg_res)?)?;
+    let res: Value = serde_json::to_value(&agg_res)?;

    assert_eq!(res["bucketsL1"]["buckets"][0]["doc_count"], 3);
    assert_eq!(
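This change, which recurs in several hunks below, replaces a serialize-to-String-then-parse round trip with `serde_json::to_value`, which serializes a `Serialize` type straight into a `serde_json::Value`. A minimal standalone sketch of the two patterns; the `BucketResult` struct is made up purely for illustration:

```rust
use serde::Serialize;
use serde_json::Value;

// Hypothetical stand-in for tantivy's aggregation result types.
#[derive(Serialize)]
struct BucketResult {
    key: String,
    doc_count: u64,
}

fn main() -> serde_json::Result<()> {
    let agg_res = BucketResult { key: "cool".to_string(), doc_count: 7 };

    // Old pattern: serialize to a String, then parse that String back into a Value.
    let via_string: Value = serde_json::from_str(&serde_json::to_string(&agg_res)?)?;

    // New pattern: serialize directly into a Value, skipping the intermediate String.
    let direct: Value = serde_json::to_value(&agg_res)?;

    assert_eq!(via_string, direct);
    assert_eq!(direct["doc_count"], 7);
    Ok(())
}
```

Both forms produce the same `Value`; the direct form just avoids allocating and re-parsing the JSON text.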
@@ -270,7 +270,7 @@ fn test_aggregation_level1_simple() -> crate::Result<()> {
    let searcher = reader.searcher();
    let agg_res: AggregationResults = searcher.search(&term_query, &collector).unwrap();

-    let res: Value = serde_json::from_str(&serde_json::to_string(&agg_res)?)?;
+    let res: Value = serde_json::to_value(&agg_res)?;
    assert_eq!(res["average"]["value"], 12.142857142857142);
    assert_eq!(
        res["range"]["buckets"],
@@ -304,6 +304,29 @@ fn test_aggregation_level1_simple() -> crate::Result<()> {
    Ok(())
}

+#[test]
+fn test_aggregation_term_truncate_sum_other_doc_count() {
+    let index = get_test_index_2_segments(true).unwrap();
+    let reader = index.reader().unwrap();
+    let count_per_text: Aggregation = serde_json::from_value(json!({ "terms": { "field": "text", "size": 1 } })).unwrap();
+    let aggs: Aggregations = vec![("group_by_term_truncate".to_string(), count_per_text)]
+        .into_iter()
+        .collect();
+
+    let collector = get_collector(aggs);
+    let searcher = reader.searcher();
+    let agg_res: AggregationResults = searcher.search(&AllQuery, &collector).unwrap();
+
+    let res: Value = serde_json::to_value(&agg_res).unwrap();
+    assert_eq!(res, serde_json::json!({
+        "group_by_term_truncate": {
+            "buckets": [{ "doc_count": 7, "key": "cool" }],
+            "doc_count_error_upper_bound": 0,
+            "sum_other_doc_count": 2,
+        },
+    }));
+}
+
#[test]
fn test_aggregation_level1() -> crate::Result<()> {
    let index = get_test_index_2_segments(true)?;
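The new test pins down the truncation behaviour of the terms aggregation: with `"size": 1` only the top bucket is returned, and documents belonging to the dropped buckets are reported in `sum_other_doc_count`. A small sketch of that bookkeeping against the JSON shape asserted above (bucket counts come from the test; the total of 9 is only the arithmetic implied by them):

```rust
use serde_json::json;

fn main() {
    // Response shape from the new test: one kept bucket ("cool", 7 docs) and
    // 2 documents rolled up from buckets that were cut off by `size: 1`.
    let res = json!({
        "group_by_term_truncate": {
            "buckets": [{ "doc_count": 7, "key": "cool" }],
            "doc_count_error_upper_bound": 0,
            "sum_other_doc_count": 2,
        }
    });

    let agg = &res["group_by_term_truncate"];
    let kept: u64 = agg["buckets"]
        .as_array()
        .unwrap()
        .iter()
        .map(|bucket| bucket["doc_count"].as_u64().unwrap())
        .sum();
    let truncated = agg["sum_other_doc_count"].as_u64().unwrap();

    // Kept buckets plus the truncated remainder cover all bucketed documents.
    assert_eq!(kept + truncated, 9);
}
```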
@@ -342,7 +365,7 @@ fn test_aggregation_level1() -> crate::Result<()> {
    let searcher = reader.searcher();
    let agg_res: AggregationResults = searcher.search(&term_query, &collector).unwrap();

-    let res: Value = serde_json::from_str(&serde_json::to_string(&agg_res)?)?;
+    let res: Value = serde_json::to_value(&agg_res)?;
    assert_eq!(res["average"]["value"], 12.142857142857142);
    assert_eq!(res["average_f64"]["value"], 12.214285714285714);
    assert_eq!(res["average_i64"]["value"], 12.142857142857142);
@@ -397,7 +420,7 @@ fn test_aggregation_level2(
        IndexRecordOption::Basic,
    );

-    let elasticsearch_compatible_json_req = r#"
+    let elasticsearch_compatible_json_req = serde_json::json!(
    {
      "rangef64": {
        "range": {
@@ -450,9 +473,8 @@ fn test_aggregation_level2(
          "term_agg": { "terms": { "field": "text" } }
        }
      }
    }
-    "#;
-    let agg_req: Aggregations = serde_json::from_str(elasticsearch_compatible_json_req).unwrap();
+    });
+    let agg_req: Aggregations = serde_json::from_value(elasticsearch_compatible_json_req).unwrap();

    let agg_res: AggregationResults = if use_distributed_collector {
        let collector =
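The two hunks above switch the request from a raw JSON string literal parsed with `serde_json::from_str` to a `serde_json::json!` value handed to `serde_json::from_value`, so malformed JSON is rejected when the test is compiled rather than when it runs. A self-contained sketch of the same switch; `TermsReq` is a hypothetical stand-in for tantivy's `Aggregations` request type:

```rust
use serde::Deserialize;
use serde_json::json;

// Hypothetical request type standing in for tantivy's `Aggregations`.
#[derive(Deserialize, Debug, PartialEq)]
struct TermsReq {
    field: String,
    size: u32,
}

fn main() -> serde_json::Result<()> {
    // Old pattern: a raw string literal, only validated at run time.
    let from_string: TermsReq =
        serde_json::from_str(r#"{ "field": "text", "size": 1 }"#)?;

    // New pattern: the json! macro builds a Value (syntax errors in the JSON
    // fail to compile), and from_value deserializes it without a parsing pass.
    let from_macro: TermsReq =
        serde_json::from_value(json!({ "field": "text", "size": 1 }))?;

    assert_eq!(from_string, from_macro);
    Ok(())
}
```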
@@ -469,7 +491,7 @@ fn test_aggregation_level2(
        searcher.search(&term_query, &collector).unwrap()
    };

-    let res: Value = serde_json::from_str(&serde_json::to_string(&agg_res)?)?;
+    let res: Value = serde_json::to_value(agg_res)?;

    assert_eq!(res["range"]["buckets"][1]["key"], "3-7");
    assert_eq!(res["range"]["buckets"][1]["doc_count"], 2u64);