chore: upgrade some dependencies (#5777)

* chore: upgrade some dependencies

* chore: upgrade some dependencies

* fix: cr

* fix: ci

* fix: test

* fix: cargo fmt
fys authored on 2025-03-27 10:48:44 +08:00, committed by GitHub
parent e107bd5529
commit 2b2ea5bf72
55 changed files with 665 additions and 555 deletions

Cargo.lock (generated), 679 lines changed
File diff suppressed because it is too large.

View File

@@ -88,7 +88,7 @@ rust.unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tokio_unstable)'] }
#
# See for more details: https://github.com/rust-lang/cargo/issues/11329
ahash = { version = "0.8", features = ["compile-time-rng"] }
aquamarine = "0.3"
aquamarine = "0.6"
arrow = { version = "53.0.0", features = ["prettyprint"] }
arrow-array = { version = "53.0.0", default-features = false, features = ["chrono-tz"] }
arrow-flight = "53.0"
@@ -99,9 +99,9 @@ async-trait = "0.1"
# Remember to update axum-extra, axum-macros when updating axum
axum = "0.8"
axum-extra = "0.10"
axum-macros = "0.4"
axum-macros = "0.5"
backon = "1"
base64 = "0.21"
base64 = "0.22"
bigdecimal = "0.4.2"
bitflags = "2.4.1"
bytemuck = "1.12"
@@ -111,7 +111,7 @@ chrono-tz = "0.10.1"
clap = { version = "4.4", features = ["derive"] }
config = "0.13.0"
crossbeam-utils = "0.8"
dashmap = "5.4"
dashmap = "6.1"
datafusion = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
datafusion-common = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
datafusion-expr = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
@@ -121,9 +121,9 @@ datafusion-physical-expr = { git = "https://github.com/apache/datafusion.git", r
datafusion-physical-plan = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
datafusion-sql = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
datafusion-substrait = { git = "https://github.com/apache/datafusion.git", rev = "2464703c84c400a09cc59277018813f0e797bb4e" }
deadpool = "0.10"
deadpool-postgres = "0.12"
derive_builder = "0.12"
deadpool = "0.12"
deadpool-postgres = "0.14"
derive_builder = "0.20"
dotenv = "0.15"
etcd-client = "0.14"
fst = "0.4.7"
@@ -136,16 +136,16 @@ humantime = "2.1"
humantime-serde = "1.1"
hyper = "1.1"
hyper-util = "0.1"
itertools = "0.10"
itertools = "0.14"
jsonb = { git = "https://github.com/databendlabs/jsonb.git", rev = "8c8d2fc294a39f3ff08909d60f718639cfba3875", default-features = false }
lazy_static = "1.4"
local-ip-address = "0.6"
loki-proto = { git = "https://github.com/GreptimeTeam/loki-proto.git", rev = "1434ecf23a2654025d86188fb5205e7a74b225d3" }
meter-core = { git = "https://github.com/GreptimeTeam/greptime-meter.git", rev = "5618e779cf2bb4755b499c630fba4c35e91898cb" }
mockall = "0.11.4"
mockall = "0.13"
moka = "0.12"
nalgebra = "0.33"
notify = "6.1"
notify = "8.0"
num_cpus = "1.16"
once_cell = "1.18"
opentelemetry-proto = { version = "0.27", features = [
@@ -163,8 +163,8 @@ prometheus = { version = "0.13.3", features = ["process"] }
promql-parser = { version = "0.5", features = ["ser"] }
prost = "0.13"
raft-engine = { version = "0.4.1", default-features = false }
rand = "0.8"
ratelimit = "0.9"
rand = "0.9"
ratelimit = "0.10"
regex = "1.8"
regex-automata = "0.4"
reqwest = { version = "0.12", default-features = false, features = [
@@ -176,7 +176,7 @@ reqwest = { version = "0.12", default-features = false, features = [
rskafka = { git = "https://github.com/influxdata/rskafka.git", rev = "75535b5ad9bae4a5dbb582c82e44dfd81ec10105", features = [
"transport-tls",
] }
rstest = "0.21"
rstest = "0.25"
rstest_reuse = "0.7"
rust_decimal = "1.33"
rustc-hash = "2.0"
@@ -184,7 +184,7 @@ rustls = { version = "0.23.20", default-features = false } # override by patch,
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["float_roundtrip"] }
serde_with = "3"
shadow-rs = "0.38"
shadow-rs = "1.1"
similar-asserts = "1.6.0"
smallvec = { version = "1", features = ["serde"] }
snafu = "0.8"
@@ -194,13 +194,13 @@ sqlx = { version = "0.8", features = [
"postgres",
"chrono",
] }
sysinfo = "0.30"
sysinfo = "0.33"
# on branch v0.52.x
sqlparser = { git = "https://github.com/GreptimeTeam/sqlparser-rs.git", rev = "71dd86058d2af97b9925093d40c4e03360403170", features = [
"visitor",
"serde",
] } # on branch v0.44.x
strum = { version = "0.25", features = ["derive"] }
strum = { version = "0.27", features = ["derive"] }
tempfile = "3"
tokio = { version = "1.40", features = ["full"] }
tokio-postgres = "0.7"
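
The rand bump above (0.8 to 0.9) drives most of the call-site churn in the rest of this diff. Below is a minimal sketch of the renamed rand 0.9 API surface as it appears in those hunks; `pick_peer` is a hypothetical helper, not code from this PR.
```
// Sketch of the rand 0.9 call sites used throughout this PR.
// `pick_peer` is a hypothetical helper, not a GreptimeDB function.
use rand::prelude::IndexedRandom; // rand 0.9: `choose` moved here from SliceRandom
use rand::Rng;

fn pick_peer(peers: &[String]) -> Option<&String> {
    let mut rng = rand::rng();              // was rand::thread_rng()
    let _id: u64 = rng.random_range(0..10); // was rng.gen_range(0..10)
    let _flip: bool = rng.random();         // was rng.gen()
    if rng.random_bool(0.5) {               // was rng.gen_bool(0.5)
        peers.first()
    } else {
        peers.choose(&mut rng)              // needs IndexedRandom in rand 0.9
    }
}
```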

View File

@@ -3,7 +3,7 @@
This document introduces how to write fuzz tests in GreptimeDB.
## What is a fuzz test
A fuzz test is a tool that leverages deterministic random generation to assist in finding bugs. The goal of fuzz tests is to identify fuzzer-generated inputs that cause system panics, crashes, or other unexpected behavior. We use [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz) to run our fuzz test targets.
## Why we need them
- Find bugs by leveraging random generation
@@ -13,7 +13,7 @@ Fuzz test is tool that leverage deterministic random generation to assist in fin
All fuzz test-related resources are located in the `/tests-fuzz` directory.
There are two types of resources: (1) fundamental components and (2) test targets.
### Fundamental components
They are located in the `/tests-fuzz/src` directory. The fundamental components define how to generate SQLs (including dialects for different protocols) and validate execution results (e.g., column attribute validation), etc.
### Test targets
@@ -21,25 +21,25 @@ They are located in the `/tests-fuzz/targets` directory, with each file represen
Figure 1 illustrates how the fundamental components of the fuzz test provide the ability to generate random SQL. It utilizes a Random Number Generator (Rng) to generate the Intermediate Representation (IR), then employs a DialectTranslator to produce the dialect for a specific protocol. Finally, the fuzz tests send the generated SQL via that protocol and verify that the execution results meet expectations.
```
            Rng
             |
             v
       ExprGenerator
             |
             v
Intermediate representation (IR)
             |
   +----------------------+----------------------+
   |                      |                      |
   v                      v                      v
MySQLTranslator   PostgreSQLTranslator   OtherDialectTranslator
   |                      |                      |
   v                      v                      v
SQL(MySQL Dialect)      .....                  .....
   |
   v
@@ -133,4 +133,4 @@ fuzz_target!(|input: FuzzInput| {
cargo fuzz run <fuzz-target> --fuzz-dir tests-fuzz
```
For more details, please refer to this [document](/tests-fuzz/README.md).
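
The flow the fuzz-test document above describes (Rng, then an expression generator producing an intermediate representation, then a dialect translator) can be sketched end to end. The types below are illustrative stand-ins under that description, not the actual tests-fuzz generators or translators.
```
// Stand-alone sketch of the Rng -> generator -> IR -> dialect-translator flow.
// `SelectIr` and `MySqlTranslator` are made-up stand-ins for illustration.
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;

struct SelectIr {
    limit: u32,
}

trait DialectTranslator {
    fn translate(&self, ir: &SelectIr) -> String;
}

struct MySqlTranslator;
impl DialectTranslator for MySqlTranslator {
    fn translate(&self, ir: &SelectIr) -> String {
        format!("SELECT * FROM test LIMIT {};", ir.limit)
    }
}

fn main() {
    // A seeded ChaCha rng keeps the generated case reproducible.
    let mut rng = ChaCha8Rng::seed_from_u64(42);
    let ir = SelectIr { limit: rng.random_range(1..100) };
    println!("{}", MySqlTranslator.translate(&ir));
}
```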

View File

@@ -177,7 +177,7 @@ fn create_table_info(table_id: TableId, table_name: TableName) -> RawTableInfo {
fn create_region_routes(regions: Vec<RegionNumber>) -> Vec<RegionRoute> {
let mut region_routes = Vec::with_capacity(100);
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
for region_id in regions.into_iter().map(u64::from) {
region_routes.push(RegionRoute {
@@ -188,7 +188,7 @@ fn create_region_routes(regions: Vec<RegionNumber>) -> Vec<RegionRoute> {
attrs: BTreeMap::new(),
},
leader_peer: Some(Peer {
id: rng.gen_range(0..10),
id: rng.random_range(0..10),
addr: String::new(),
}),
follower_peers: vec![],

View File

@@ -13,7 +13,7 @@
// limitations under the License.
use enum_dispatch::enum_dispatch;
use rand::seq::SliceRandom;
use rand::seq::IndexedRandom;
#[enum_dispatch]
pub trait LoadBalance {
@@ -37,7 +37,7 @@ pub struct Random;
impl LoadBalance for Random {
fn get_peer<'a>(&self, peers: &'a [String]) -> Option<&'a String> {
peers.choose(&mut rand::thread_rng())
peers.choose(&mut rand::rng())
}
}

View File

@@ -25,7 +25,7 @@ async fn do_bench_channel_manager() {
let m_clone = m.clone();
let join = tokio::spawn(async move {
for _ in 0..10000 {
let idx = rand::random::<usize>() % 100;
let idx = rand::random::<u32>() % 100;
let ret = m_clone.get(format!("{idx}"));
let _ = ret.unwrap();
}
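
The switch from `rand::random::<usize>()` to `rand::random::<u32>()` above is presumably because rand 0.9 no longer samples `usize` through its standard distribution (its width is platform-dependent); a fixed-width integer is sampled and converted instead. Not code from this PR, just the same pattern in isolation:
```
fn main() {
    // Sample a fixed-width u32 and convert, rather than asking rand for a usize.
    let idx = rand::random::<u32>() as usize % 100;
    println!("{idx}");
}
```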

View File

@@ -290,13 +290,13 @@ mod tests {
num_per_range: u32,
max_bytes: u32,
) {
let num_cases = rand::thread_rng().gen_range(1..=8);
let num_cases = rand::rng().random_range(1..=8);
common_telemetry::info!("num_cases: {}", num_cases);
let mut cases = Vec::with_capacity(num_cases);
for i in 0..num_cases {
let size = rand::thread_rng().gen_range(size_limit..=max_bytes);
let size = rand::rng().random_range(size_limit..=max_bytes);
let mut large_value = vec![0u8; size as usize];
rand::thread_rng().fill_bytes(&mut large_value);
rand::rng().fill_bytes(&mut large_value);
// Starts from `a`.
let prefix = format!("{}/", std::char::from_u32(97 + i as u32).unwrap());
@@ -354,8 +354,8 @@ mod tests {
#[tokio::test]
async fn test_meta_state_store_split_value() {
let size_limit = rand::thread_rng().gen_range(128..=512);
let page_size = rand::thread_rng().gen_range(1..10);
let size_limit = rand::rng().random_range(128..=512);
let page_size = rand::rng().random_range(1..10);
let kv_backend = Arc::new(MemoryKvBackend::new());
test_meta_state_store_split_value_with_size_limit(kv_backend, size_limit, page_size, 8192)
.await;
@@ -388,7 +388,7 @@ mod tests {
// However, some KvBackends, such as the `ChrootKvBackend`, will add a prefix to `key`;
// we don't know the exact size of the key.
let size_limit = 1536 * 1024 - key_size;
let page_size = rand::thread_rng().gen_range(1..10);
let page_size = rand::rng().random_range(1..10);
test_meta_state_store_split_value_with_size_limit(
kv_backend,
size_limit,

View File

@@ -39,7 +39,7 @@ impl RoundRobinTopicSelector {
// The cursor in the round-robin selector is not persisted, which may break the round-robin strategy across crashes.
// Introducing a shuffling strategy may help mitigate this issue.
pub fn with_shuffle() -> Self {
let offset = rand::thread_rng().gen_range(0..64);
let offset = rand::rng().random_range(0..64);
Self {
cursor: AtomicUsize::new(offset),
}

View File

@@ -207,7 +207,7 @@ impl Runner {
if let Some(d) = retry.next() {
let millis = d.as_millis() as u64;
// Add random noise to the retry delay to avoid retry storms.
let noise = rand::thread_rng().gen_range(0..(millis / 4) + 1);
let noise = rand::rng().random_range(0..(millis / 4) + 1);
let d = d.add(Duration::from_millis(noise));
self.wait_on_err(d, retry_times).await;

View File

@@ -22,6 +22,6 @@ static PORTS: OnceCell<AtomicUsize> = OnceCell::new();
/// Returns a unique port (at runtime) for tests
pub fn get_port() -> usize {
PORTS
.get_or_init(|| AtomicUsize::new(rand::thread_rng().gen_range(13000..13800)))
.get_or_init(|| AtomicUsize::new(rand::rng().random_range(13000..13800)))
.fetch_add(1, Ordering::Relaxed)
}

View File

@@ -715,10 +715,10 @@ mod tests {
TimeUnit::Microsecond,
TimeUnit::Nanosecond,
];
let mut rng = rand::thread_rng();
let unit_idx: usize = rng.gen_range(0..4);
let mut rng = rand::rng();
let unit_idx: usize = rng.random_range(0..4);
let unit = units[unit_idx];
let value: i64 = rng.gen();
let value: i64 = rng.random();
Timestamp::new(value, unit)
}
@@ -745,8 +745,8 @@ mod tests {
/// Generate timestamp less than or equal to `threshold`
fn gen_ts_le(threshold: &Timestamp) -> Timestamp {
let mut rng = rand::thread_rng();
let timestamp = rng.gen_range(i64::MIN..=threshold.value);
let mut rng = rand::rng();
let timestamp = rng.random_range(i64::MIN..=threshold.value);
Timestamp::new(timestamp, threshold.unit)
}

View File

@@ -179,7 +179,7 @@ impl Context<'_, '_> {
) -> CollectionBundle<Batch> {
let (send_port, recv_port) = self.df.make_edge::<_, Toff<Batch>>("constant_batch");
let mut per_time: BTreeMap<repr::Timestamp, Vec<DiffRow>> = Default::default();
for (key, group) in &rows.into_iter().group_by(|(_row, ts, _diff)| *ts) {
for (key, group) in &rows.into_iter().chunk_by(|(_row, ts, _diff)| *ts) {
per_time.entry(key).or_default().extend(group);
}
@@ -233,7 +233,7 @@ impl Context<'_, '_> {
pub fn render_constant(&mut self, rows: Vec<DiffRow>) -> CollectionBundle {
let (send_port, recv_port) = self.df.make_edge::<_, Toff>("constant");
let mut per_time: BTreeMap<repr::Timestamp, Vec<DiffRow>> = Default::default();
for (key, group) in &rows.into_iter().group_by(|(_row, ts, _diff)| *ts) {
for (key, group) in &rows.into_iter().chunk_by(|(_row, ts, _diff)| *ts) {
per_time.entry(key).or_default().extend(group);
}
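
The `group_by` to `chunk_by` renames here (and in the later index and parquet hunks) follow itertools 0.14, where `group_by` was renamed to `chunk_by` with the same consecutive-key grouping behavior. A stand-alone illustration with made-up data, not code from this PR:
```
// chunk_by groups consecutive items that map to the same key.
use itertools::Itertools;
use std::collections::BTreeMap;

fn main() {
    let rows = vec![(1i64, "a"), (1, "b"), (2, "c")];
    let mut per_time: BTreeMap<i64, Vec<&str>> = BTreeMap::new();
    for (ts, group) in &rows.into_iter().chunk_by(|(ts, _)| *ts) {
        per_time.entry(ts).or_default().extend(group.map(|(_, v)| v));
    }
    assert_eq!(per_time[&1], vec!["a", "b"]);
    assert_eq!(per_time[&2], vec!["c"]);
}
```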

View File

@@ -67,7 +67,7 @@ impl BloomFilterApplier {
for ((_, mut group), bloom) in locs
.iter()
.zip(start_seg..end_seg)
.group_by(|(x, _)| **x)
.chunk_by(|(x, _)| **x)
.into_iter()
.zip(bfs.iter())
{

View File

@@ -437,9 +437,9 @@ mod tests {
}
fn random_option_bytes(size: usize) -> Option<Vec<u8>> {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
if rng.gen() {
if rng.random() {
let mut buffer = vec![0u8; size];
rng.fill(&mut buffer[..]);
Some(buffer)
@@ -469,11 +469,11 @@ mod tests {
segment_row_count: usize,
) -> (DictionaryValues, ValueSegIds) {
let mut n = row_count;
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut dic_values = Vec::new();
while n > 0 {
let size = rng.gen_range(1..=n);
let size = rng.random_range(1..=n);
let value = random_option_bytes(100);
dic_values.push((value, size));
n -= size;

View File

@@ -535,7 +535,7 @@ mod tests {
.flatten()
.cloned()
.collect::<Vec<_>>();
all_entries.shuffle(&mut rand::thread_rng());
all_entries.shuffle(&mut rand::rng());
let response = logstore.append_batch(all_entries.clone()).await.unwrap();
// 5 region
@@ -575,7 +575,7 @@ mod tests {
warn!("The endpoints is empty, skipping the test 'test_append_batch_basic_large'");
return;
};
let data_size_kb = rand::thread_rng().gen_range(9..31usize);
let data_size_kb = rand::rng().random_range(9..31usize);
info!("Entry size: {}Ki", data_size_kb);
let broker_endpoints = broker_endpoints
.split(',')
@@ -608,7 +608,7 @@ mod tests {
.cloned()
.collect::<Vec<_>>();
assert_matches!(all_entries[0], Entry::MultiplePart(_));
all_entries.shuffle(&mut rand::thread_rng());
all_entries.shuffle(&mut rand::rng());
let response = logstore.append_batch(all_entries.clone()).await.unwrap();
// 5 region

View File

@@ -1058,11 +1058,11 @@ mod tests {
let tx = new_client("test_cluster_client").await;
let in_memory = tx.in_memory().unwrap();
let cluster_client = tx.client.cluster_client().unwrap();
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
// Generates roughly 10MB of data, which is larger than the default gRPC message size limit.
for i in 0..10 {
let data: Vec<u8> = (0..1024 * 1024).map(|_| rng.gen()).collect();
let data: Vec<u8> = (0..1024 * 1024).map(|_| rng.random()).collect();
in_memory
.put(
PutRequest::new()

View File

@@ -75,7 +75,7 @@ impl AskLeader {
let leadership_group = self.leadership_group.read().unwrap();
leadership_group.peers.clone()
};
peers.shuffle(&mut rand::thread_rng());
peers.shuffle(&mut rand::rng());
let req = AskLeaderRequest {
header: Some(RequestHeader::new(

View File

@@ -22,8 +22,8 @@ where
return None;
}
let mut rng = rand::thread_rng();
let i = rng.gen_range(0..len);
let mut rng = rand::rng();
let i = rng.random_range(0..len);
func(i)
}

View File

@@ -36,7 +36,7 @@ pub enum Error {
#[snafu(implicit)]
location: Location,
#[snafu(source)]
error: rand::distributions::WeightedError,
error: rand::distr::weighted::Error,
},
#[snafu(display("Exceeded deadline, operation: {}", operation))]

View File

@@ -543,11 +543,11 @@ pub(crate) mod tests {
assert!(rx.await.unwrap().is_empty());
fn generate_heartbeats(datanode_id: u64, region_ids: Vec<u32>) -> Vec<DatanodeHeartbeat> {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let start = current_time_millis();
(0..2000)
.map(|i| DatanodeHeartbeat {
timestamp: start + i * 1000 + rng.gen_range(0..100),
timestamp: start + i * 1000 + rng.random_range(0..100),
datanode_id,
regions: region_ids
.iter()

View File

@@ -61,7 +61,7 @@ impl Selector for RandomNodeSelector {
type Output = Vec<Peer>;
async fn select(&self, _ctx: &Self::Context, _opts: SelectorOptions) -> Result<Self::Output> {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut nodes = self.nodes.clone();
nodes.shuffle(&mut rng);
Ok(nodes)

View File

@@ -12,8 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use rand::seq::SliceRandom;
use rand::thread_rng;
use rand::rng;
use rand::seq::IndexedRandom;
use snafu::ResultExt;
use crate::error;
@@ -26,7 +26,10 @@ pub trait WeightedChoose<Item>: Send + Sync {
/// The method will choose multiple items.
///
/// Returns less than `amount` items if the weight_array is not enough.
/// ## Note
///
/// - Returns less than `amount` items if the weight_array is not enough.
/// - The returned items cannot be duplicated.
fn choose_multiple(&mut self, amount: usize) -> Result<Vec<Item>>;
/// Returns the length of the weight_array.
@@ -84,7 +87,7 @@ where
// unwrap safety: whether weighted_index is none has been checked before.
let item = self
.items
.choose_weighted(&mut thread_rng(), |item| item.weight as f64)
.choose_weighted(&mut rng(), |item| item.weight as f64)
.context(error::ChooseItemsSnafu)?
.item
.clone();
@@ -92,9 +95,11 @@ where
}
fn choose_multiple(&mut self, amount: usize) -> Result<Vec<Item>> {
let amount = amount.min(self.items.iter().filter(|item| item.weight > 0).count());
Ok(self
.items
.choose_multiple_weighted(&mut thread_rng(), amount, |item| item.weight as f64)
.choose_multiple_weighted(&mut rng(), amount, |item| item.weight as f64)
.context(error::ChooseItemsSnafu)?
.cloned()
.map(|item| item.item)
@@ -127,7 +132,7 @@ mod tests {
for _ in 0..100 {
let ret = choose.choose_multiple(3).unwrap();
assert_eq!(vec![1, 2], ret);
assert_eq!(vec![1], ret);
}
}
}
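
The weighted selector above now goes through rand 0.9's `choose_weighted` and `choose_multiple_weighted` with a plain `rng()`, and clamps `amount` to the number of items with non-zero weight before sampling. A stand-alone sketch of those two calls with made-up items, not the selector's actual types:
```
// Weighted sampling via rand 0.9's IndexedRandom trait; data is illustrative.
use rand::seq::IndexedRandom;

fn main() {
    let items: &[(&str, u32)] = &[("a", 2), ("b", 1), ("c", 0)];
    let mut rng = rand::rng();
    // Single weighted pick.
    let one = items.choose_weighted(&mut rng, |(_, w)| *w as f64).unwrap();
    // Up to two distinct picks, weighted by the second field.
    let many: Vec<_> = items
        .choose_multiple_weighted(&mut rng, 2, |(_, w)| *w as f64)
        .unwrap()
        .collect();
    println!("{one:?} {many:?}");
}
```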

View File

@@ -28,7 +28,7 @@ use mito2::region::options::MergeMode;
use mito2::row_converter::DensePrimaryKeyCodec;
use mito2::test_util::memtable_util::{self, region_metadata_to_row_schema};
use rand::rngs::ThreadRng;
use rand::seq::SliceRandom;
use rand::seq::IndexedRandom;
use rand::Rng;
use store_api::metadata::{
ColumnMetadata, RegionMetadata, RegionMetadataBuilder, RegionMetadataRef,
@@ -161,8 +161,8 @@ struct Host {
impl Host {
fn random_with_id(id: usize) -> Host {
let mut rng = rand::thread_rng();
let region = format!("ap-southeast-{}", rng.gen_range(0..10));
let mut rng = rand::rng();
let region = format!("ap-southeast-{}", rng.random_range(0..10));
let datacenter = format!(
"{}{}",
region,
@@ -172,12 +172,12 @@ impl Host {
hostname: format!("host_{id}"),
region,
datacenter,
rack: rng.gen_range(0..100).to_string(),
rack: rng.random_range(0..100).to_string(),
os: "Ubuntu16.04LTS".to_string(),
arch: "x86".to_string(),
team: "CHI".to_string(),
service: rng.gen_range(0..100).to_string(),
service_version: rng.gen_range(0..10).to_string(),
service: rng.random_range(0..100).to_string(),
service_version: rng.random_range(0..10).to_string(),
service_environment: "test".to_string(),
}
}
@@ -254,7 +254,7 @@ impl CpuDataGenerator {
.hosts
.iter()
.map(|host| {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut values = Vec::with_capacity(21);
values.push(api::v1::Value {
value_data: Some(ValueData::TimestampMillisecondValue(current_sec * 1000)),
@@ -288,12 +288,12 @@ impl CpuDataGenerator {
}
fn random_hostname(&self) -> String {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
self.hosts.choose(&mut rng).unwrap().hostname.clone()
}
fn random_f64(rng: &mut ThreadRng) -> f64 {
let base: u32 = rng.gen_range(30..95);
let base: u32 = rng.random_range(30..95);
base as f64
}

View File

@@ -146,14 +146,14 @@ mod test {
#[test]
fn fuzz_index_calculation() {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut data = vec![0u8; 1024 * 1024];
rng.fill_bytes(&mut data);
for _ in 0..FUZZ_REPEAT_TIMES {
let offset = rng.gen_range(0..data.len() as u64);
let size = rng.gen_range(0..data.len() as u32 - offset as u32);
let page_size: usize = rng.gen_range(1..1024);
let offset = rng.random_range(0..data.len() as u64);
let size = rng.random_range(0..data.len() as u32 - offset as u32);
let page_size: usize = rng.random_range(1..1024);
let indexes =
PageKey::generate_page_keys(offset, size, page_size as u64).collect::<Vec<_>>();

View File

@@ -146,14 +146,14 @@ mod test {
#[test]
fn fuzz_index_calculation() {
// randomly generate a large u8 array
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut data = vec![0u8; 1024 * 1024];
rng.fill_bytes(&mut data);
for _ in 0..FUZZ_REPEAT_TIMES {
let offset = rng.gen_range(0..data.len() as u64);
let size = rng.gen_range(0..data.len() as u32 - offset as u32);
let page_size: usize = rng.gen_range(1..1024);
let offset = rng.random_range(0..data.len() as u64);
let size = rng.random_range(0..data.len() as u32 - offset as u32);
let page_size: usize = rng.random_range(1..1024);
let indexes =
PageKey::generate_page_keys(offset, size, page_size as u64).collect::<Vec<_>>();
@@ -357,10 +357,10 @@ mod test {
);
// fuzz test
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
for _ in 0..FUZZ_REPEAT_TIMES {
let offset = rng.gen_range(0..file_size);
let size = rng.gen_range(0..file_size as u32 - offset as u32);
let offset = rng.random_range(0..file_size);
let size = rng.random_range(0..file_size as u32 - offset as u32);
let expected = cached_reader.range_read(offset, size).await.unwrap();
let inner = &cached_reader.inner;
let read = cached_reader

View File

@@ -389,10 +389,10 @@ mod tests {
fn prepare_input_keys(num_keys: usize) -> Vec<Vec<u8>> {
let prefix = ["a", "b", "c", "d", "e", "f"];
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let mut keys = Vec::with_capacity(num_keys);
for i in 0..num_keys {
let prefix_idx = rng.gen_range(0..prefix.len());
let prefix_idx = rng.random_range(0..prefix.len());
// We don't need to decode the primary key in index's test so we format the string
// into the key.
let key = format!("{}{}", prefix[prefix_idx], i);

View File

@@ -509,7 +509,7 @@ impl ParquetReaderBuilder {
(row_group_id, rg_begin_row_id..rg_end_row_id)
})
.group_by(|(row_group_id, _)| *row_group_id);
.chunk_by(|(row_group_id, _)| *row_group_id);
let ranges_in_row_groups = grouped_in_row_groups
.into_iter()

View File

@@ -38,7 +38,7 @@ use common_telemetry::{error, info, warn};
use futures::future::try_join_all;
use object_store::manager::ObjectStoreManagerRef;
use prometheus::IntGauge;
use rand::{thread_rng, Rng};
use rand::{rng, Rng};
use snafu::{ensure, ResultExt};
use store_api::logstore::LogStore;
use store_api::region_engine::{SetRegionRoleStateResponse, SettableRegionRoleState};
@@ -390,7 +390,7 @@ async fn write_cache_from_config(
/// Computes an initial check delay for a worker.
pub(crate) fn worker_init_check_delay() -> Duration {
let init_check_delay = thread_rng().gen_range(0..MAX_INITIAL_CHECK_DELAY_SECS);
let init_check_delay = rng().random_range(0..MAX_INITIAL_CHECK_DELAY_SECS);
Duration::from_secs(init_check_delay)
}

View File

@@ -28,7 +28,7 @@ use crate::QueryEngineRef;
pub fn create_query_engine_for_vector10x3() -> QueryEngineRef {
let mut column_schemas = vec![];
let mut columns = vec![];
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let column_name = "vector";
let column_schema = ColumnSchema::new(column_name, ConcreteDataType::binary_datatype(), true);
@@ -37,9 +37,9 @@ pub fn create_query_engine_for_vector10x3() -> QueryEngineRef {
let vectors = (0..10)
.map(|_| {
let veclit = [
rng.gen_range(-100f32..100.0),
rng.gen_range(-100f32..100.0),
rng.gen_range(-100f32..100.0),
rng.random_range(-100f32..100.0),
rng.random_range(-100f32..100.0),
rng.random_range(-100f32..100.0),
];
veclit_to_binlit(&veclit)
})

View File

@@ -92,7 +92,7 @@ impl MysqlInstanceShim {
) -> MysqlInstanceShim {
// init a random salt
let mut bs = vec![0u8; 20];
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
rng.fill_bytes(bs.as_mut());
let mut scramble: [u8; 20] = [0; 20];

View File

@@ -26,7 +26,6 @@ use datatypes::schema::{ColumnSchema, Schema};
use datatypes::value::Value;
use mysql_async::prelude::*;
use mysql_async::{Conn, Row, SslOpts};
use rand::rngs::StdRng;
use rand::Rng;
use servers::error::Result;
use servers::install_ring_crypto_provider;
@@ -426,13 +425,11 @@ async fn test_query_concurrently() -> Result<()> {
let mut join_handles = vec![];
for _ in 0..threads {
join_handles.push(tokio::spawn(async move {
let mut rand: StdRng = rand::SeedableRng::from_entropy();
let mut connection = create_connection_default_db_name(server_port, false)
.await
.unwrap();
for _ in 0..expect_executed_queries_per_worker {
let expected: u32 = rand.gen_range(0..100);
let expected: u32 = rand::rng().random_range(0..100);
let result: u32 = connection
.query_first(format!(
"SELECT uint32s FROM numbers WHERE uint32s = {expected}"

View File

@@ -22,7 +22,6 @@ use common_catalog::consts::{DEFAULT_CATALOG_NAME, DEFAULT_SCHEMA_NAME};
use common_runtime::runtime::BuilderBuild;
use common_runtime::Builder as RuntimeBuilder;
use pgwire::api::Type;
use rand::rngs::StdRng;
use rand::Rng;
use rustls::client::danger::{ServerCertVerified, ServerCertVerifier};
use rustls::{Error, SignatureScheme};
@@ -202,12 +201,10 @@ async fn test_query_pg_concurrently() -> Result<()> {
let mut join_handles = vec![];
for _i in 0..threads {
join_handles.push(tokio::spawn(async move {
let mut rand: StdRng = rand::SeedableRng::from_entropy();
let mut client = create_plain_connection(server_port, false).await.unwrap();
for _k in 0..expect_executed_queries_per_worker {
let expected: u32 = rand.gen_range(0..100);
let expected: u32 = rand::rng().random_range(0..100);
let result: u32 = unwrap_results(
client
.simple_query(&format!(

View File

@@ -44,7 +44,7 @@ nix = { version = "0.28", features = ["process", "signal"], optional = true }
partition = { workspace = true }
paste.workspace = true
rand = { workspace = true }
rand_chacha = "0.3.1"
rand_chacha = "0.9"
reqwest = { workspace = true }
schemars = "0.8"
serde = { workspace = true }

View File

@@ -16,6 +16,7 @@ use std::collections::HashSet;
use std::marker::PhantomData;
use lazy_static::lazy_static;
use rand::prelude::IndexedRandom;
use rand::seq::{IteratorRandom, SliceRandom};
use rand::Rng;
@@ -33,9 +34,9 @@ lazy_static! {
/// Modified from https://github.com/ucarion/faker_rand/blob/ea70c660e1ecd7320156eddb31d2830a511f8842/src/lib.rs
macro_rules! faker_impl_from_values {
($name: ident, $values: expr) => {
impl rand::distributions::Distribution<$name> for rand::distributions::Standard {
impl rand::distr::Distribution<$name> for rand::distr::StandardUniform {
fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> $name {
$name($values[rng.gen_range(0..$values.len())].clone())
$name($values[rng.random_range(0..$values.len())].clone())
}
}
@@ -68,7 +69,7 @@ pub fn random_capitalize_map<R: Rng + 'static>(rng: &mut R, s: Ident) -> Ident {
let mut v = s.value.chars().collect::<Vec<_>>();
let str_len = s.value.len();
let select = rng.gen_range(0..str_len);
let select = rng.random_range(0..str_len);
for idx in (0..str_len).choose_multiple(rng, select) {
v[idx] = v[idx].to_uppercase().next().unwrap();
}
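
rand 0.9 also moves the distributions module to `rand::distr` and renames `Standard` to `StandardUniform`, which is what the macro hunk above picks up. A self-contained sketch of the same pattern with a hypothetical `Noun` type (not one of the tests-fuzz fakers):
```
// Implementing Distribution for a custom type against rand 0.9's StandardUniform.
use rand::distr::{Distribution, StandardUniform};
use rand::Rng;

struct Noun(String);

impl Distribution<Noun> for StandardUniform {
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Noun {
        const WORDS: &[&str] = &["totam", "quasi", "impedit"];
        Noun(WORDS[rng.random_range(0..WORDS.len())].to_string())
    }
}

fn main() {
    // `random()` can now produce Noun because StandardUniform knows how to sample it.
    let n: Noun = rand::rng().random();
    println!("{}", n.0);
}
```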

View File

@@ -40,7 +40,7 @@ fn add_column_options_generator<R: Rng>(
// 0 -> NULL
// 1 -> DEFAULT VALUE
// 2 -> PRIMARY KEY + DEFAULT VALUE
let idx = rng.gen_range(0..3);
let idx = rng.random_range(0..3);
match idx {
0 => vec![ColumnOption::Null],
1 => {
@@ -79,15 +79,15 @@ impl<R: Rng + 'static> Generator<AlterTableExpr, R> for AlterExprAddColumnGenera
type Error = Error;
fn generate(&self, rng: &mut R) -> Result<AlterTableExpr> {
let with_location = self.location && rng.gen::<bool>();
let with_location = self.location && rng.random::<bool>();
let location = if with_location {
let use_first = rng.gen::<bool>();
let use_first = rng.random::<bool>();
let location = if use_first {
AddColumnLocation::First
} else {
AddColumnLocation::After {
column_name: self.table_ctx.columns
[rng.gen_range(0..self.table_ctx.columns.len())]
[rng.random_range(0..self.table_ctx.columns.len())]
.name
.to_string(),
}
@@ -129,7 +129,7 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprDropColumnGenerator<R> {
fn generate(&self, rng: &mut R) -> Result<AlterTableExpr> {
let droppable = droppable_columns(&self.table_ctx.columns);
ensure!(!droppable.is_empty(), error::DroppableColumnsSnafu);
let name = droppable[rng.gen_range(0..droppable.len())].name.clone();
let name = droppable[rng.random_range(0..droppable.len())].name.clone();
Ok(AlterTableExpr {
table_name: self.table_ctx.name.clone(),
alter_kinds: AlterTableOperation::DropColumn { name },
@@ -174,7 +174,7 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprModifyDataTypeGenerator<R
fn generate(&self, rng: &mut R) -> Result<AlterTableExpr> {
let modifiable = modifiable_columns(&self.table_ctx.columns);
let changed = modifiable[rng.gen_range(0..modifiable.len())].clone();
let changed = modifiable[rng.random_range(0..modifiable.len())].clone();
let mut to_type = self.column_type_generator.gen(rng);
while !changed.column_type.can_arrow_type_cast_to(&to_type) {
to_type = self.column_type_generator.gen(rng);
@@ -209,8 +209,8 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprSetTableOptionsGenerator<
let all_options = AlterTableOption::iter().collect::<Vec<_>>();
// Generate random distinct options
let mut option_templates_idx = vec![];
for _ in 1..rng.gen_range(2..=all_options.len()) {
let option = rng.gen_range(0..all_options.len());
for _ in 1..rng.random_range(2..=all_options.len()) {
let option = rng.random_range(0..all_options.len());
if !option_templates_idx.contains(&option) {
option_templates_idx.push(option);
}
@@ -219,10 +219,10 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprSetTableOptionsGenerator<
.iter()
.map(|idx| match all_options[*idx] {
AlterTableOption::Ttl(_) => {
let ttl_type = rng.gen_range(0..3);
let ttl_type = rng.random_range(0..3);
match ttl_type {
0 => {
let duration: u32 = rng.gen();
let duration: u32 = rng.random();
AlterTableOption::Ttl(Ttl::Duration((duration as i64).into()))
}
1 => AlterTableOption::Ttl(Ttl::Instant),
@@ -231,27 +231,27 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprSetTableOptionsGenerator<
}
}
AlterTableOption::TwcsTimeWindow(_) => {
let time_window: u32 = rng.gen();
let time_window: u32 = rng.random();
AlterTableOption::TwcsTimeWindow((time_window as i64).into())
}
AlterTableOption::TwcsMaxOutputFileSize(_) => {
let max_output_file_size: u64 = rng.gen();
let max_output_file_size: u64 = rng.random();
AlterTableOption::TwcsMaxOutputFileSize(ReadableSize(max_output_file_size))
}
AlterTableOption::TwcsMaxInactiveWindowRuns(_) => {
let max_inactive_window_runs: u64 = rng.gen();
let max_inactive_window_runs: u64 = rng.random();
AlterTableOption::TwcsMaxInactiveWindowRuns(max_inactive_window_runs)
}
AlterTableOption::TwcsMaxActiveWindowFiles(_) => {
let max_active_window_files: u64 = rng.gen();
let max_active_window_files: u64 = rng.random();
AlterTableOption::TwcsMaxActiveWindowFiles(max_active_window_files)
}
AlterTableOption::TwcsMaxActiveWindowRuns(_) => {
let max_active_window_runs: u64 = rng.gen();
let max_active_window_runs: u64 = rng.random();
AlterTableOption::TwcsMaxActiveWindowRuns(max_active_window_runs)
}
AlterTableOption::TwcsMaxInactiveWindowFiles(_) => {
let max_inactive_window_files: u64 = rng.gen();
let max_inactive_window_files: u64 = rng.random();
AlterTableOption::TwcsMaxInactiveWindowFiles(max_inactive_window_files)
}
})
@@ -279,8 +279,8 @@ impl<R: Rng> Generator<AlterTableExpr, R> for AlterExprUnsetTableOptionsGenerato
let all_options = AlterTableOption::iter().collect::<Vec<_>>();
// Generate random distinct options
let mut option_templates_idx = vec![];
for _ in 1..rng.gen_range(2..=all_options.len()) {
let option = rng.gen_range(0..all_options.len());
for _ in 1..rng.random_range(2..=all_options.len()) {
let option = rng.random_range(0..all_options.len());
if !option_templates_idx.contains(&option) {
option_templates_idx.push(option);
}
@@ -325,7 +325,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"AddColumn":{"column":{"name":{"value":"velit","quote_style":null},"column_type":{"Int32":{}},"options":[{"DefaultValue":{"Int32":1606462472}}]},"location":null}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"AddColumn":{"column":{"name":{"value":"consequatur","quote_style":null},"column_type":{"Float64":{}},"options":[{"DefaultValue":{"Float64":0.48809950435391647}}]},"location":null}}}"#;
assert_eq!(expected, serialized);
let expr = AlterExprRenameGeneratorBuilder::default()
@@ -335,7 +335,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"RenameTable":{"new_table_name":{"value":"nihil","quote_style":null}}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"RenameTable":{"new_table_name":{"value":"voluptates","quote_style":null}}}}"#;
assert_eq!(expected, serialized);
let expr = AlterExprDropColumnGeneratorBuilder::default()
@@ -345,7 +345,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"DropColumn":{"name":{"value":"cUmquE","quote_style":null}}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"DropColumn":{"name":{"value":"ImPEDiT","quote_style":null}}}}"#;
assert_eq!(expected, serialized);
let expr = AlterExprModifyDataTypeGeneratorBuilder::default()
@@ -355,7 +355,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"ModifyDataType":{"column":{"name":{"value":"toTAm","quote_style":null},"column_type":{"Int64":{}},"options":[]}}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"ModifyDataType":{"column":{"name":{"value":"ADIpisci","quote_style":null},"column_type":{"Int64":{}},"options":[]}}}}"#;
assert_eq!(expected, serialized);
let expr = AlterExprSetTableOptionsGeneratorBuilder::default()
@@ -365,7 +365,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"SetTableOptions":{"options":[{"TwcsMaxActiveWindowRuns":14908016120444947142},{"TwcsMaxActiveWindowFiles":5840340123887173415},{"TwcsMaxOutputFileSize":17740311466571102265}]}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"SetTableOptions":{"options":[{"TwcsMaxOutputFileSize":16770910638250818741}]}}}"#;
assert_eq!(expected, serialized);
let expr = AlterExprUnsetTableOptionsGeneratorBuilder::default()
@@ -375,7 +375,7 @@ mod tests {
.generate(&mut rng)
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"alter_kinds":{"UnsetTableOptions":{"keys":["compaction.twcs.max_active_window_runs"]}}}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"alter_kinds":{"UnsetTableOptions":{"keys":["compaction.twcs.max_active_window_runs","compaction.twcs.max_output_file_size","compaction.twcs.time_window","compaction.twcs.max_inactive_window_files","compaction.twcs.max_active_window_files"]}}}"#;
assert_eq!(expected, serialized);
}
}

View File

@@ -399,7 +399,7 @@ mod tests {
#[test]
fn test_create_table_expr_generator() {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let expr = CreateTableExprGeneratorBuilder::default()
.columns(10)
@@ -440,13 +440,13 @@ mod tests {
.unwrap();
let serialized = serde_json::to_string(&expr).unwrap();
let expected = r#"{"table_name":{"value":"animI","quote_style":null},"columns":[{"name":{"value":"IMpEdIT","quote_style":null},"column_type":{"Float64":{}},"options":["PrimaryKey","NotNull"]},{"name":{"value":"natuS","quote_style":null},"column_type":{"Timestamp":{"Millisecond":null}},"options":["TimeIndex"]},{"name":{"value":"ADIPisCI","quote_style":null},"column_type":{"Float64":{}},"options":["Null"]},{"name":{"value":"EXpEdita","quote_style":null},"column_type":{"Int16":{}},"options":[{"DefaultValue":{"Int16":4864}}]},{"name":{"value":"cUlpA","quote_style":null},"column_type":{"Int64":{}},"options":["PrimaryKey"]},{"name":{"value":"MOLeStIAs","quote_style":null},"column_type":{"Float64":{}},"options":["NotNull"]},{"name":{"value":"cUmquE","quote_style":null},"column_type":{"Boolean":null},"options":["Null"]},{"name":{"value":"toTAm","quote_style":null},"column_type":{"Float32":{}},"options":[{"DefaultValue":{"Float32":0.21569687}}]},{"name":{"value":"deBitIs","quote_style":null},"column_type":{"Float64":{}},"options":["NotNull"]},{"name":{"value":"QUi","quote_style":null},"column_type":{"Float32":{}},"options":["Null"]}],"if_not_exists":true,"partition":{"partition_columns":["IMpEdIT"],"partition_bounds":[{"Expr":{"lhs":{"Column":"IMpEdIT"},"op":"Lt","rhs":{"Value":{"Float64":5.992310449541053e307}}}},{"Expr":{"lhs":{"Expr":{"lhs":{"Column":"IMpEdIT"},"op":"GtEq","rhs":{"Value":{"Float64":5.992310449541053e307}}}},"op":"And","rhs":{"Expr":{"lhs":{"Column":"IMpEdIT"},"op":"Lt","rhs":{"Value":{"Float64":1.1984620899082105e308}}}}}},{"Expr":{"lhs":{"Column":"IMpEdIT"},"op":"GtEq","rhs":{"Value":{"Float64":1.1984620899082105e308}}}}]},"engine":"mito2","options":{},"primary_keys":[0,4]}"#;
let expected = r#"{"table_name":{"value":"quasi","quote_style":null},"columns":[{"name":{"value":"mOLEsTIAs","quote_style":null},"column_type":{"Float64":{}},"options":["PrimaryKey","Null"]},{"name":{"value":"CUMQUe","quote_style":null},"column_type":{"Timestamp":{"Second":null}},"options":["TimeIndex"]},{"name":{"value":"NaTus","quote_style":null},"column_type":{"Int64":{}},"options":[]},{"name":{"value":"EXPeDITA","quote_style":null},"column_type":{"Float64":{}},"options":[]},{"name":{"value":"ImPEDiT","quote_style":null},"column_type":{"Float32":{}},"options":[{"DefaultValue":{"Float32":0.56425774}}]},{"name":{"value":"ADIpisci","quote_style":null},"column_type":{"Float32":{}},"options":["PrimaryKey"]},{"name":{"value":"deBITIs","quote_style":null},"column_type":{"Float32":{}},"options":[{"DefaultValue":{"Float32":0.31315368}}]},{"name":{"value":"toTaM","quote_style":null},"column_type":{"Int32":{}},"options":["NotNull"]},{"name":{"value":"QuI","quote_style":null},"column_type":{"Float32":{}},"options":[{"DefaultValue":{"Float32":0.39941502}}]},{"name":{"value":"INVeNtOre","quote_style":null},"column_type":{"Boolean":null},"options":["PrimaryKey"]}],"if_not_exists":true,"partition":{"partition_columns":["mOLEsTIAs"],"partition_bounds":[{"Expr":{"lhs":{"Column":"mOLEsTIAs"},"op":"Lt","rhs":{"Value":{"Float64":5.992310449541053e307}}}},{"Expr":{"lhs":{"Expr":{"lhs":{"Column":"mOLEsTIAs"},"op":"GtEq","rhs":{"Value":{"Float64":5.992310449541053e307}}}},"op":"And","rhs":{"Expr":{"lhs":{"Column":"mOLEsTIAs"},"op":"Lt","rhs":{"Value":{"Float64":1.1984620899082105e308}}}}}},{"Expr":{"lhs":{"Column":"mOLEsTIAs"},"op":"GtEq","rhs":{"Value":{"Float64":1.1984620899082105e308}}}}]},"engine":"mito2","options":{},"primary_keys":[0,5,9]}"#;
assert_eq!(expected, serialized);
}
#[test]
fn test_create_logical_table_expr_generator() {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
.if_not_exists(false)
@@ -529,13 +529,13 @@ mod tests {
.unwrap();
let logical_table_serialized = serde_json::to_string(&logical_table_expr).unwrap();
let logical_table_expected = r#"{"table_name":{"value":"impedit","quote_style":null},"columns":[{"name":{"value":"ts","quote_style":null},"column_type":{"Timestamp":{"Millisecond":null}},"options":["TimeIndex"]},{"name":{"value":"val","quote_style":null},"column_type":{"Float64":{}},"options":[]},{"name":{"value":"qui","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"totam","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"molestias","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"natus","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"cumque","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]}],"if_not_exists":false,"partition":null,"engine":"metric","options":{"on_physical_table":{"String":"expedita"}},"primary_keys":[2,5,3,6,4]}"#;
let logical_table_expected = r#"{"table_name":{"value":"impedit","quote_style":null},"columns":[{"name":{"value":"ts","quote_style":null},"column_type":{"Timestamp":{"Millisecond":null}},"options":["TimeIndex"]},{"name":{"value":"val","quote_style":null},"column_type":{"Float64":{}},"options":[]},{"name":{"value":"totam","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"cumque","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"natus","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"molestias","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]},{"name":{"value":"qui","quote_style":null},"column_type":{"String":null},"options":["PrimaryKey"]}],"if_not_exists":false,"partition":null,"engine":"metric","options":{"on_physical_table":{"String":"expedita"}},"primary_keys":[4,2,3,6,5]}"#;
assert_eq!(logical_table_expected, logical_table_serialized);
}
#[test]
fn test_create_database_expr_generator() {
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
let expr = CreateDatabaseExprGeneratorBuilder::default()
.if_not_exists(true)
@@ -558,7 +558,7 @@ mod tests {
let serialized = serde_json::to_string(&expr).unwrap();
let expected =
r#"{"database_name":{"value":"eXPedITa","quote_style":null},"if_not_exists":true}"#;
r#"{"database_name":{"value":"EXPediTA","quote_style":null},"if_not_exists":true}"#;
assert_eq!(expected, serialized);
}
}

View File

@@ -16,7 +16,7 @@ use std::marker::PhantomData;
use datatypes::value::Value;
use derive_builder::Builder;
use rand::seq::SliceRandom;
use rand::seq::{IndexedRandom, SliceRandom};
use rand::Rng;
use super::TsValueGenerator;
@@ -60,7 +60,7 @@ impl<R: Rng + 'static> Generator<InsertIntoExpr, R> for InsertExprGenerator<R> {
let can_omit = column.is_nullable() || column.has_default_value();
// 50% chance to omit a column if it's not required
if !can_omit || rng.gen_bool(0.5) {
if !can_omit || rng.random_bool(0.5) {
values_columns.push(column.clone());
}
}
@@ -76,12 +76,12 @@ impl<R: Rng + 'static> Generator<InsertIntoExpr, R> for InsertExprGenerator<R> {
for _ in 0..self.rows {
let mut row = Vec::with_capacity(values_columns.len());
for column in &values_columns {
if column.is_nullable() && rng.gen_bool(0.2) {
if column.is_nullable() && rng.random_bool(0.2) {
row.push(RowValue::Value(Value::Null));
continue;
}
if column.has_default_value() && rng.gen_bool(0.2) {
if column.has_default_value() && rng.random_bool(0.2) {
row.push(RowValue::Default);
continue;
}

View File

@@ -15,7 +15,7 @@
use std::marker::PhantomData;
use derive_builder::Builder;
use rand::seq::SliceRandom;
use rand::seq::{IndexedRandom, SliceRandom};
use rand::Rng;
use crate::context::TableContextRef;
@@ -37,7 +37,7 @@ impl<R: Rng + 'static> Generator<SelectExpr, R> for SelectExprGenerator<R> {
type Error = Error;
fn generate(&self, rng: &mut R) -> Result<SelectExpr> {
let selection = rng.gen_range(1..self.table_ctx.columns.len());
let selection = rng.random_range(1..self.table_ctx.columns.len());
let mut selected_columns = self
.table_ctx
.columns
@@ -46,16 +46,16 @@ impl<R: Rng + 'static> Generator<SelectExpr, R> for SelectExprGenerator<R> {
.collect::<Vec<_>>();
selected_columns.shuffle(rng);
let order_by_selection = rng.gen_range(1..selection);
let order_by_selection = rng.random_range(1..selection);
let order_by = selected_columns
.choose_multiple(rng, order_by_selection)
.map(|c| c.name.to_string())
.collect::<Vec<_>>();
let limit = rng.gen_range(1..self.max_limit);
let limit = rng.random_range(1..self.max_limit);
let direction = if rng.gen_bool(1.0 / 2.0) {
let direction = if rng.random_bool(1.0 / 2.0) {
Direction::Asc
} else {
Direction::Desc

View File

@@ -34,7 +34,7 @@ use datatypes::value::Value;
use derive_builder::Builder;
pub use insert_expr::InsertIntoExpr;
use lazy_static::lazy_static;
use rand::seq::SliceRandom;
use rand::seq::{IndexedRandom, SliceRandom};
use rand::Rng;
use serde::{Deserialize, Serialize};
@@ -146,15 +146,15 @@ pub fn generate_random_value<R: Rng>(
random_str: Option<&dyn Random<Ident, R>>,
) -> Value {
match datatype {
&ConcreteDataType::Boolean(_) => Value::from(rng.gen::<bool>()),
ConcreteDataType::Int16(_) => Value::from(rng.gen::<i16>()),
ConcreteDataType::Int32(_) => Value::from(rng.gen::<i32>()),
ConcreteDataType::Int64(_) => Value::from(rng.gen::<i64>()),
ConcreteDataType::Float32(_) => Value::from(rng.gen::<f32>()),
ConcreteDataType::Float64(_) => Value::from(rng.gen::<f64>()),
&ConcreteDataType::Boolean(_) => Value::from(rng.random::<bool>()),
ConcreteDataType::Int16(_) => Value::from(rng.random::<i16>()),
ConcreteDataType::Int32(_) => Value::from(rng.random::<i32>()),
ConcreteDataType::Int64(_) => Value::from(rng.random::<i64>()),
ConcreteDataType::Float32(_) => Value::from(rng.random::<f32>()),
ConcreteDataType::Float64(_) => Value::from(rng.random::<f64>()),
ConcreteDataType::String(_) => match random_str {
Some(random) => Value::from(random.gen(rng).value),
None => Value::from(rng.gen::<char>().to_string()),
None => Value::from(rng.random::<char>().to_string()),
},
ConcreteDataType::Date(_) => generate_random_date(rng),
@@ -188,25 +188,25 @@ pub fn generate_random_timestamp<R: Rng>(rng: &mut R, ts_type: TimestampType) ->
TimestampType::Second(_) => {
let min = i64::from(Timestamp::MIN_SECOND);
let max = i64::from(Timestamp::MAX_SECOND);
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_second(value)
}
TimestampType::Millisecond(_) => {
let min = i64::from(Timestamp::MIN_MILLISECOND);
let max = i64::from(Timestamp::MAX_MILLISECOND);
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_millisecond(value)
}
TimestampType::Microsecond(_) => {
let min = i64::from(Timestamp::MIN_MICROSECOND);
let max = i64::from(Timestamp::MAX_MICROSECOND);
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_microsecond(value)
}
TimestampType::Nanosecond(_) => {
let min = i64::from(Timestamp::MIN_NANOSECOND);
let max = i64::from(Timestamp::MAX_NANOSECOND);
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_nanosecond(value)
}
};
@@ -219,25 +219,25 @@ pub fn generate_random_timestamp_for_mysql<R: Rng>(rng: &mut R, ts_type: Timesta
TimestampType::Second(_) => {
let min = 1;
let max = 2_147_483_647;
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_second(value)
}
TimestampType::Millisecond(_) => {
let min = 1000;
let max = 2_147_483_647_499;
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_millisecond(value)
}
TimestampType::Microsecond(_) => {
let min = 1_000_000;
let max = 2_147_483_647_499_999;
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_microsecond(value)
}
TimestampType::Nanosecond(_) => {
let min = 1_000_000_000;
let max = 2_147_483_647_499_999_000;
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
Timestamp::new_nanosecond(value)
}
};
@@ -247,7 +247,7 @@ pub fn generate_random_timestamp_for_mysql<R: Rng>(rng: &mut R, ts_type: Timesta
fn generate_random_date<R: Rng>(rng: &mut R) -> Value {
let min = i64::from(Timestamp::MIN_MILLISECOND);
let max = i64::from(Timestamp::MAX_MILLISECOND);
let value = rng.gen_range(min..=max);
let value = rng.random_range(min..=max);
let date = Timestamp::new_millisecond(value).to_chrono_date().unwrap();
Value::from(Date::from(date))
}
@@ -411,7 +411,7 @@ pub fn column_options_generator<R: Rng>(
// 2 -> DEFAULT VALUE
// 3 -> PRIMARY KEY
// 4 -> EMPTY
let option_idx = rng.gen_range(0..5);
let option_idx = rng.random_range(0..5);
match option_idx {
0 => vec![ColumnOption::Null],
1 => vec![ColumnOption::NotNull],
@@ -434,7 +434,7 @@ pub fn partible_column_options_generator<R: Rng + 'static>(
// 1 -> NOT NULL
// 2 -> DEFAULT VALUE
// 3 -> PRIMARY KEY
let option_idx = rng.gen_range(0..4);
let option_idx = rng.random_range(0..4);
match option_idx {
0 => vec![ColumnOption::PrimaryKey, ColumnOption::Null],
1 => vec![ColumnOption::PrimaryKey, ColumnOption::NotNull],

View File

@@ -82,7 +82,7 @@ mod tests {
#[test]
fn test_insert_into_translator() {
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(0);
let omit_column_list = rng.gen_bool(0.2);
let omit_column_list = rng.random_bool(0.2);
let test_ctx = test_utils::new_test_ctx();
let insert_expr_generator = InsertExprGeneratorBuilder::default()
@@ -95,23 +95,23 @@ mod tests {
let insert_expr = insert_expr_generator.generate(&mut rng).unwrap();
let output = InsertIntoExprTranslator.translate(&insert_expr).unwrap();
let expected = r#"INSERT INTO test (ts, host, cpu_util) VALUES
('+199601-11-07 21:32:56.695+0000', 'corrupti', 0.051130243193075464),
('+40822-03-25 02:17:34.328+0000', NULL, 0.6552502332327004);"#;
let expected = r#"INSERT INTO test (cpu_util, ts, host) VALUES
(0.494276426950336, '+210328-02-20 15:44:23.848+0000', 'aut'),
(0.5240550121500691, '-78231-02-16 05:32:41.400+0000', 'in');"#;
assert_eq!(output, expected);
let insert_expr = insert_expr_generator.generate(&mut rng).unwrap();
let output = InsertIntoExprTranslator.translate(&insert_expr).unwrap();
let expected = r#"INSERT INTO test (ts, memory_util) VALUES
('+22606-05-02 04:44:02.976+0000', 0.7074194466620976),
('+33689-06-12 08:42:11.037+0000', 0.40987428386535585);"#;
let expected = r#"INSERT INTO test (ts, host) VALUES
('+137972-11-29 18:23:19.505+0000', 'repellendus'),
('-237884-01-11 09:44:43.491+0000', 'a');"#;
assert_eq!(output, expected);
let insert_expr = insert_expr_generator.generate(&mut rng).unwrap();
let output = InsertIntoExprTranslator.translate(&insert_expr).unwrap();
let expected = r#"INSERT INTO test (ts, disk_util, cpu_util, host) VALUES
('+200107-10-22 01:36:36.924+0000', 0.9082597320638828, 0.020853190804573818, 'voluptates'),
('+241156-12-16 20:52:15.185+0000', 0.6492772846116915, 0.18078027701087784, 'repellat');"#;
let expected = r#"INSERT INTO test (disk_util, ts) VALUES
(0.399415030703252, '+154545-01-21 09:38:13.768+0000'),
(NULL, '-227688-03-19 14:23:24.582+0000');"#;
assert_eq!(output, expected);
}
}

View File

@@ -69,7 +69,8 @@ mod tests {
let select_expr = select_expr_generator.generate(&mut rng).unwrap();
let output = SelectExprTranslator.translate(&select_expr).unwrap();
let expected = r#"SELECT memory_util, ts, cpu_util, disk_util FROM test ORDER BY cpu_util, disk_util DESC;"#;
let expected =
r#"SELECT ts, memory_util, cpu_util, disk_util FROM test ORDER BY disk_util, ts DESC;"#;
assert_eq!(output, expected);
}
}

View File

@@ -191,9 +191,9 @@ impl UnstableProcessController {
self.running.store(true, Ordering::Relaxed);
let mut rng = ChaChaRng::seed_from_u64(self.seed);
while self.running.load(Ordering::Relaxed) {
let min = rng.gen_range(50..100);
let max = rng.gen_range(300..600);
let ms = rng.gen_range(min..max);
let min = rng.random_range(50..100);
let max = rng.random_range(300..600);
let ms = rng.random_range(min..max);
let pid = self
.start_process_with_retry(3)
.await

View File

@@ -69,16 +69,16 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_actions = get_gt_fuzz_input_max_alter_actions();
let actions = rng.gen_range(1..max_actions);
let actions = rng.random_range(1..max_actions);
Ok(FuzzInput { seed, actions })
}
}
fn generate_create_physical_table_expr<R: Rng + 'static>(rng: &mut R) -> Result<CreateTableExpr> {
let physical_table_if_not_exists = rng.gen_bool(0.5);
let physical_table_if_not_exists = rng.random_bool(0.5);
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}
let create_physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
@@ -97,8 +97,8 @@ fn generate_create_logical_table_expr<R: Rng + 'static>(
physical_table_ctx: TableContextRef,
rng: &mut R,
) -> Result<CreateTableExpr> {
let labels = rng.gen_range(1..=5);
let logical_table_if_not_exists = rng.gen_bool(0.5);
let labels = rng.random_range(1..=5);
let logical_table_if_not_exists = rng.random_bool(0.5);
let create_logical_table_expr = CreateLogicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(

View File

@@ -76,9 +76,9 @@ enum AlterTableKind {
fn generate_create_table_expr<R: Rng + 'static>(rng: &mut R) -> Result<CreateTableExpr> {
let max_columns = get_gt_fuzz_input_max_columns();
let columns = rng.gen_range(2..max_columns);
let columns = rng.random_range(2..max_columns);
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}
let create_table_generator = CreateTableExprGeneratorBuilder::default()
@@ -99,7 +99,7 @@ fn generate_alter_table_expr<R: Rng + 'static>(
rng: &mut R,
) -> Result<AlterTableExpr> {
let kinds = AlterTableKind::iter().collect::<Vec<_>>();
match kinds[rng.gen_range(0..kinds.len())] {
match kinds[rng.random_range(0..kinds.len())] {
AlterTableKind::DropColumn if !droppable_columns(&table_ctx.columns).is_empty() => {
AlterExprDropColumnGeneratorBuilder::default()
.table_ctx(table_ctx)
@@ -138,7 +138,7 @@ fn generate_alter_table_expr<R: Rng + 'static>(
expr_generator.generate(rng)
}
_ => {
let location = rng.gen_bool(0.5);
let location = rng.random_bool(0.5);
let expr_generator = AlterExprAddColumnGeneratorBuilder::default()
.table_ctx(table_ctx)
.location(location)
@@ -153,7 +153,7 @@ impl Arbitrary<'_> for FuzzInput {
fn arbitrary(u: &mut Unstructured<'_>) -> arbitrary::Result<Self> {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let actions = rng.gen_range(1..256);
let actions = rng.random_range(1..256);
Ok(FuzzInput { seed, actions })
}

View File

@@ -57,7 +57,7 @@ impl Arbitrary<'_> for FuzzInput {
fn generate_expr(input: FuzzInput) -> Result<CreateDatabaseExpr> {
let mut rng = ChaChaRng::seed_from_u64(input.seed);
let if_not_exists = rng.gen_bool(0.5);
let if_not_exists = rng.random_bool(0.5);
let create_database_generator = CreateDatabaseExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
WordGenerator,

View File

@@ -68,9 +68,9 @@ async fn execute_create_logic_table(ctx: FuzzContext, input: FuzzInput) -> Resul
let mut rng = ChaChaRng::seed_from_u64(input.seed);
// Create physical table
let physical_table_if_not_exists = rng.gen_bool(0.5);
let physical_table_if_not_exists = rng.random_bool(0.5);
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}
let create_physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
@@ -113,8 +113,8 @@ async fn execute_create_logic_table(ctx: FuzzContext, input: FuzzInput) -> Resul
// Create logical table
let physical_table_ctx = Arc::new(TableContext::from(&create_physical_table_expr));
let labels = rng.gen_range(1..=5);
let logical_table_if_not_exists = rng.gen_bool(0.5);
let labels = rng.random_range(1..=5);
let logical_table_if_not_exists = rng.random_bool(0.5);
let create_logical_table_expr = CreateLogicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(

View File

@@ -59,16 +59,16 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_columns = get_gt_fuzz_input_max_columns();
let columns = rng.gen_range(2..max_columns);
let columns = rng.random_range(2..max_columns);
Ok(FuzzInput { columns, seed })
}
}
fn generate_expr(input: FuzzInput) -> Result<CreateTableExpr> {
let mut rng = ChaChaRng::seed_from_u64(input.seed);
let if_not_exists = rng.gen_bool(0.5);
let if_not_exists = rng.random_bool(0.5);
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}


@@ -80,15 +80,15 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_rows = get_gt_fuzz_input_max_rows();
let rows = rng.gen_range(2..max_rows);
let rows = rng.random_range(2..max_rows);
let max_tables = get_gt_fuzz_input_max_tables();
let tables = rng.gen_range(1..max_tables);
let tables = rng.random_range(1..max_tables);
Ok(FuzzInput { rows, seed, tables })
}
}
fn generate_create_physical_table_expr<R: Rng + 'static>(rng: &mut R) -> Result<CreateTableExpr> {
let physical_table_if_not_exists = rng.gen_bool(0.5);
let physical_table_if_not_exists = rng.random_bool(0.5);
let create_physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
WordGenerator,
@@ -121,8 +121,8 @@ fn generate_create_logical_table_expr<R: Rng + 'static>(
physical_table_ctx: TableContextRef,
rng: &mut R,
) -> Result<CreateTableExpr> {
let labels = rng.gen_range(1..=5);
let logical_table_if_not_exists = rng.gen_bool(0.5);
let labels = rng.random_range(1..=5);
let logical_table_if_not_exists = rng.random_bool(0.5);
let create_logical_table_expr = CreateLogicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
@@ -208,10 +208,10 @@ async fn execute_failover(ctx: FuzzContext, input: FuzzInput) -> Result<()> {
let insert_expr =
insert_values(input.rows, &ctx, &mut rng, logical_table_ctx.clone()).await?;
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
flush_memtable(&ctx.greptime, &physical_table_ctx.name).await?;
}
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
compact_table(&ctx.greptime, &physical_table_ctx.name).await?;
}


@@ -24,7 +24,7 @@ use common_telemetry::info;
use common_time::util::current_time_millis;
use futures::future::try_join_all;
use libfuzzer_sys::fuzz_target;
use rand::seq::SliceRandom;
use rand::seq::IndexedRandom;
use rand::{Rng, SeedableRng};
use rand_chacha::{ChaCha20Rng, ChaChaRng};
use snafu::{ensure, ResultExt};
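The import swap above reflects rand 0.9 splitting the old SliceRandom trait: index-based selection such as choose now lives on rand::seq::IndexedRandom, while shuffling stays on SliceRandom. A small sketch under that assumption; the slice contents are illustrative:

use rand::seq::IndexedRandom;
use rand::SeedableRng;
use rand_chacha::ChaChaRng;

fn main() {
    let mut rng = ChaChaRng::seed_from_u64(7);
    let kinds = ["AddColumn", "DropColumn", "ModifyColumnType"];
    // `choose` comes from IndexedRandom in rand 0.9 (it was on SliceRandom in 0.8).
    let kind = kinds.choose(&mut rng).expect("non-empty slice");
    println!("picked {kind}");
}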
@@ -87,13 +87,13 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_columns = get_gt_fuzz_input_max_columns();
let columns = rng.gen_range(2..max_columns);
let columns = rng.random_range(2..max_columns);
let max_rows = get_gt_fuzz_input_max_rows();
let rows = rng.gen_range(2..max_rows);
let rows = rng.random_range(2..max_rows);
let max_tables = get_gt_fuzz_input_max_tables();
let tables = rng.gen_range(2..max_tables);
let tables = rng.random_range(2..max_tables);
let max_inserts = get_gt_fuzz_input_max_insert_actions();
let inserts = rng.gen_range(2..max_inserts);
let inserts = rng.random_range(2..max_inserts);
Ok(FuzzInput {
columns,
rows,
@@ -116,7 +116,7 @@ fn generate_create_exprs<R: Rng + 'static>(
let base_table_name = name_generator.gen(rng);
let min_column = columns / 2;
let columns = rng.gen_range(min_column..columns);
let columns = rng.random_range(min_column..columns);
let mut exprs = Vec::with_capacity(tables);
for i in 0..tables {
let table_name = Ident {
@@ -174,11 +174,11 @@ fn generate_insert_exprs<R: Rng + 'static>(
) -> Result<Vec<Vec<InsertIntoExpr>>> {
let mut exprs = Vec::with_capacity(tables.len());
for table_ctx in tables {
let omit_column_list = rng.gen_bool(0.2);
let omit_column_list = rng.random_bool(0.2);
let min_rows = rows / 2;
let rows = rng.gen_range(min_rows..rows);
let rows = rng.random_range(min_rows..rows);
let min_inserts = inserts / 2;
let inserts = rng.gen_range(min_inserts..inserts);
let inserts = rng.random_range(min_inserts..inserts);
let insert_generator = InsertExprGeneratorBuilder::default()
.table_ctx(table_ctx.clone())
@@ -207,9 +207,9 @@ async fn execute_insert_exprs<R: Rng + 'static>(
let semaphore = Arc::new(Semaphore::new(parallelism));
let tasks = inserts.into_iter().map(|inserts| {
let flush_probability = rng.gen_range(0.0..1.0);
let compact_probability = rng.gen_range(0.0..1.0);
let seed: u64 = rng.gen();
let flush_probability = rng.random_range(0.0..1.0);
let compact_probability = rng.random_range(0.0..1.0);
let seed: u64 = rng.random();
let semaphore = semaphore.clone();
let greptime = ctx.greptime.clone();
@@ -235,10 +235,10 @@ async fn execute_insert_exprs<R: Rng + 'static>(
)
}
);
if rng.gen_bool(flush_probability) {
if rng.random_bool(flush_probability) {
flush_memtable(&greptime, &insert_expr.table_name).await?;
}
if rng.gen_bool(compact_probability) {
if rng.random_bool(compact_probability) {
compact_table(&greptime, &insert_expr.table_name).await?;
}
total_affected += result.rows_affected();
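This file also exercises the remaining renames: Rng::gen becomes Rng::random in 0.9 (partly to steer clear of the gen keyword reserved by the Rust 2024 edition), and float draws feed random_bool as probabilities. A short sketch with illustrative values:

use rand::{Rng, SeedableRng};
use rand_chacha::ChaChaRng;

fn main() {
    let mut rng = ChaChaRng::seed_from_u64(1);
    // rand 0.9: rng.gen() is now rng.random().
    let seed: u64 = rng.random();
    // A probability drawn in [0, 1) and used to gate an action.
    let flush_probability = rng.random_range(0.0..1.0);
    if rng.random_bool(flush_probability) {
        println!("would flush (seed = {seed})");
    }
}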


@@ -69,9 +69,9 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_columns = get_gt_fuzz_input_max_columns();
let columns = rng.gen_range(2..max_columns);
let columns = rng.random_range(2..max_columns);
let max_row = get_gt_fuzz_input_max_rows();
let rows = rng.gen_range(1..max_row);
let rows = rng.random_range(1..max_row);
Ok(FuzzInput {
columns,
rows,
@@ -85,7 +85,7 @@ fn generate_create_expr<R: Rng + 'static>(
rng: &mut R,
) -> Result<CreateTableExpr> {
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}
@@ -108,7 +108,7 @@ fn generate_insert_expr<R: Rng + 'static>(
rng: &mut R,
table_ctx: TableContextRef,
) -> Result<InsertIntoExpr> {
let omit_column_list = rng.gen_bool(0.2);
let omit_column_list = rng.random_bool(0.2);
let insert_generator = InsertExprGeneratorBuilder::default()
.table_ctx(table_ctx)
@@ -155,7 +155,7 @@ async fn execute_insert(ctx: FuzzContext, input: FuzzInput) -> Result<()> {
}
);
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
flush_memtable(&ctx.greptime, &create_expr.table_name).await?;
}


@@ -70,17 +70,17 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_tables = get_gt_fuzz_input_max_tables();
let tables = rng.gen_range(1..max_tables);
let tables = rng.random_range(1..max_tables);
let max_row = get_gt_fuzz_input_max_rows();
let rows = rng.gen_range(1..max_row);
let rows = rng.random_range(1..max_row);
Ok(FuzzInput { tables, seed, rows })
}
}
fn generate_create_physical_table_expr<R: Rng + 'static>(rng: &mut R) -> Result<CreateTableExpr> {
let physical_table_if_not_exists = rng.gen_bool(0.5);
let physical_table_if_not_exists = rng.random_bool(0.5);
let mut with_clause = HashMap::new();
if rng.gen_bool(0.5) {
if rng.random_bool(0.5) {
with_clause.insert("append_mode".to_string(), "true".to_string());
}
let create_physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
@@ -99,8 +99,8 @@ fn generate_create_logical_table_expr<R: Rng + 'static>(
physical_table_ctx: TableContextRef,
rng: &mut R,
) -> Result<CreateTableExpr> {
let labels = rng.gen_range(1..=5);
let logical_table_if_not_exists = rng.gen_bool(0.5);
let labels = rng.random_range(1..=5);
let logical_table_if_not_exists = rng.random_bool(0.5);
let create_logical_table_expr = CreateLogicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
@@ -259,11 +259,11 @@ async fn execute_insert(ctx: FuzzContext, input: FuzzInput) -> Result<()> {
insert_values(input.rows, &ctx, &mut rng, logical_table_ctx.clone()).await?;
validate_values(&ctx, logical_table_ctx.clone(), &insert_expr).await?;
tables.insert(logical_table_ctx.name.clone(), logical_table_ctx.clone());
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
flush_memtable(&ctx.greptime, &physical_table_ctx.name).await?;
validate_values(&ctx, logical_table_ctx.clone(), &insert_expr).await?;
}
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
compact_table(&ctx.greptime, &physical_table_ctx.name).await?;
validate_values(&ctx, logical_table_ctx.clone(), &insert_expr).await?;
}


@@ -78,16 +78,16 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_rows = get_gt_fuzz_input_max_rows();
let rows = rng.gen_range(2..max_rows);
let rows = rng.random_range(2..max_rows);
let max_tables = get_gt_fuzz_input_max_tables();
let tables = rng.gen_range(1..max_tables);
let tables = rng.random_range(1..max_tables);
Ok(FuzzInput { rows, seed, tables })
}
}
fn generate_create_physical_table_expr<R: Rng + 'static>(rng: &mut R) -> Result<CreateTableExpr> {
let physical_table_if_not_exists = rng.gen_bool(0.5);
let physical_table_if_not_exists = rng.random_bool(0.5);
let create_physical_table_expr = CreatePhysicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
WordGenerator,
@@ -120,8 +120,8 @@ fn generate_create_logical_table_expr<R: Rng + 'static>(
physical_table_ctx: TableContextRef,
rng: &mut R,
) -> Result<CreateTableExpr> {
let labels = rng.gen_range(1..=5);
let logical_table_if_not_exists = rng.gen_bool(0.5);
let labels = rng.random_range(1..=5);
let logical_table_if_not_exists = rng.random_bool(0.5);
let create_logical_table_expr = CreateLogicalTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
@@ -214,10 +214,10 @@ async fn create_logical_table_and_insert_values(
let logical_table_ctx = Arc::new(TableContext::from(&create_logical_table_expr));
let insert_expr = insert_values(input.rows, ctx, rng, logical_table_ctx.clone()).await?;
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
flush_memtable(&ctx.greptime, &physical_table_ctx.name).await?;
}
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
compact_table(&ctx.greptime, &physical_table_ctx.name).await?;
}
@@ -338,7 +338,7 @@ async fn execute_migration(ctx: FuzzContext, input: FuzzInput) -> Result<()> {
let mut migrations = Vec::with_capacity(num_partitions);
let mut new_distribution: BTreeMap<u64, HashSet<_>> = BTreeMap::new();
for (datanode_id, regions) in region_distribution {
let step = rng.gen_range(1..datanodes.len());
let step = rng.random_range(1..datanodes.len());
for region in regions {
let to_peer = (datanode_id + step as u64) % datanodes.len() as u64;
new_distribution.entry(to_peer).or_default().insert(region);


@@ -76,10 +76,10 @@ impl Arbitrary<'_> for FuzzInput {
fn arbitrary(u: &mut Unstructured<'_>) -> arbitrary::Result<Self> {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let partitions = rng.gen_range(3..32);
let columns = rng.gen_range(2..30);
let rows = rng.gen_range(128..1024);
let inserts = rng.gen_range(2..8);
let partitions = rng.random_range(3..32);
let columns = rng.random_range(2..30);
let rows = rng.random_range(128..1024);
let inserts = rng.random_range(2..8);
Ok(FuzzInput {
seed,
columns,
@@ -113,7 +113,7 @@ fn generate_insert_exprs<R: Rng + 'static>(
rng: &mut R,
table_ctx: TableContextRef,
) -> Result<Vec<InsertIntoExpr>> {
let omit_column_list = rng.gen_bool(0.2);
let omit_column_list = rng.random_bool(0.2);
let insert_generator = InsertExprGeneratorBuilder::default()
.table_ctx(table_ctx.clone())
.omit_column_list(omit_column_list)
@@ -161,10 +161,10 @@ async fn insert_values<R: Rng + 'static>(
)
}
);
if rng.gen_bool(0.2) {
if rng.random_bool(0.2) {
flush_memtable(&ctx.greptime, &table_ctx.name).await?;
}
if rng.gen_bool(0.1) {
if rng.random_bool(0.1) {
compact_table(&ctx.greptime, &table_ctx.name).await?;
}
}
@@ -309,7 +309,7 @@ async fn execute_region_migration(ctx: FuzzContext, input: FuzzInput) -> Result<
let mut migrations = Vec::with_capacity(num_partitions);
let mut new_distribution: BTreeMap<u64, HashSet<_>> = BTreeMap::new();
for (datanode_id, regions) in region_distribution {
let step = rng.gen_range(1..datanodes.len());
let step = rng.random_range(1..datanodes.len());
for region in regions {
let to_peer = (datanode_id + step as u64) % datanodes.len() as u64;
new_distribution.entry(to_peer).or_default().insert(region);


@@ -69,7 +69,7 @@ impl Arbitrary<'_> for FuzzInput {
let seed = u.int_in_range(u64::MIN..=u64::MAX)?;
let mut rng = ChaChaRng::seed_from_u64(seed);
let max_tables = get_gt_fuzz_input_max_tables();
let tables = rng.gen_range(1..max_tables);
let tables = rng.random_range(1..max_tables);
Ok(FuzzInput { seed, tables })
}
}
@@ -81,7 +81,7 @@ const DEFAULT_MYSQL_URL: &str = "127.0.0.1:4002";
const DEFAULT_HTTP_HEALTH_URL: &str = "http://127.0.0.1:4000/health";
fn generate_create_table_expr<R: Rng + 'static>(rng: &mut R) -> CreateTableExpr {
let columns = rng.gen_range(2..30);
let columns = rng.random_range(2..30);
let create_table_generator = CreateTableExprGeneratorBuilder::default()
.name_generator(Box::new(MappedGenerator::new(
WordGenerator,


@@ -193,8 +193,8 @@ async fn create_tables(test_name: &str, instance: &Arc<Instance>, num_tables: us
async fn insert_data(tables: &[Table], instance: &Arc<Instance>, num_writers: usize) {
// Each writer randomly chooses a table and inserts a sequence of rows into the table.
futures::future::join_all((0..num_writers).map(|_| async {
let mut rng = rand::thread_rng();
let table = &tables[rng.gen_range(0..tables.len())];
let mut rng = rand::rng();
let table = &tables[rng.random_range(0..tables.len())];
for _ in 0..10 {
let ts = table.logical_timer.fetch_add(1000, Ordering::Relaxed);
let row = make_row(ts, &mut rng);
@@ -302,8 +302,8 @@ async fn execute_sql_with(
}
fn make_row(ts: u64, rng: &mut ThreadRng) -> String {
let host = format!("host{}", rng.gen_range(0..5));
let cpu: f64 = rng.gen_range(0.0..99.9);
let memory: f64 = rng.gen_range(0.0..999.9);
let host = format!("host{}", rng.random_range(0..5));
let cpu: f64 = rng.random_range(0.0..99.9);
let memory: f64 = rng.random_range(0.0..999.9);
format!("('{host}', {cpu}, {memory}, {ts})")
}
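The last hunk swaps the thread-local RNG constructor: rand::thread_rng() is rand::rng() in 0.9, still returning a ThreadRng. The sketch below wraps the updated make_row from this diff in a runnable context:

use rand::rngs::ThreadRng;
use rand::Rng;

// Mirrors the updated helper above: one row of (host, cpu, memory, ts) values.
fn make_row(ts: u64, rng: &mut ThreadRng) -> String {
    let host = format!("host{}", rng.random_range(0..5));
    let cpu: f64 = rng.random_range(0.0..99.9);
    let memory: f64 = rng.random_range(0.0..999.9);
    format!("('{host}', {cpu}, {memory}, {ts})")
}

fn main() {
    // rand 0.9: rand::thread_rng() is now rand::rng().
    let mut rng = rand::rng();
    println!("{}", make_row(1_000, &mut rng));
}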