fix: various typos reported by CI (#7047)

* fix: various typos reported by CI

* fix: additional typo

Signed-off-by: WenyXu <wenymedia@gmail.com>
This commit is contained in:
Ning Sun
2025-10-02 23:11:09 +08:00
committed by Weny Xu
parent 4a936d7320
commit f7f52592b4
14 changed files with 25 additions and 25 deletions

View File

@@ -29,7 +29,7 @@ use futures::TryStreamExt;
use crate::error::InvalidArgumentsSnafu;
use crate::metadata::common::StoreConfig;
use crate::metadata::control::utils::{decode_key_value, get_table_id_by_name, json_fromatter};
use crate::metadata::control::utils::{decode_key_value, get_table_id_by_name, json_formatter};
use crate::Tool;
/// Getting metadata from metadata store.
@@ -206,7 +206,7 @@ impl Tool for GetTableTool {
println!(
"{}\n{}",
TableInfoKey::new(table_id),
json_fromatter(self.pretty, &*table_info)
json_formatter(self.pretty, &*table_info)
);
} else {
println!("Table info not found");
@@ -221,7 +221,7 @@ impl Tool for GetTableTool {
println!(
"{}\n{}",
TableRouteKey::new(table_id),
json_fromatter(self.pretty, &table_route)
json_formatter(self.pretty, &table_route)
);
} else {
println!("Table route not found");

View File

@@ -27,7 +27,7 @@ pub fn decode_key_value(kv: KeyValue) -> CommonMetaResult<(String, String)> {
}
/// Formats a value as a JSON string.
pub fn json_fromatter<T>(pretty: bool, value: &T) -> String
pub fn json_formatter<T>(pretty: bool, value: &T) -> String
where
T: Serialize,
{

View File

@@ -332,7 +332,7 @@ impl AggregateUDFImpl for StateWrapper {
self.inner.signature()
}
/// Coerce types also do nothing, as optimzer should be able to already make struct types
/// Coerce types also do nothing, as optimizer should be able to already make struct types
fn coerce_types(&self, arg_types: &[DataType]) -> datafusion_common::Result<Vec<DataType>> {
self.inner.coerce_types(arg_types)
}
@@ -486,7 +486,7 @@ impl AggregateUDFImpl for MergeWrapper {
&self.merge_signature
}
/// Coerce types also do nothing, as optimzer should be able to already make struct types
/// Coerce types also do nothing, as optimizer should be able to already make struct types
fn coerce_types(&self, arg_types: &[DataType]) -> datafusion_common::Result<Vec<DataType>> {
// just check if the arg_types are only one and is struct array
if arg_types.len() != 1 || !matches!(arg_types.first(), Some(DataType::Struct(_))) {

View File

@@ -19,7 +19,7 @@ use opentelemetry::propagation::TextMapPropagator;
use opentelemetry_sdk::propagation::TraceContextPropagator;
use tracing_opentelemetry::OpenTelemetrySpanExt;
// An wapper for `Futures` that provides tracing instrument adapters.
// A wrapper for `Futures` that provides tracing instrument adapters.
pub trait FutureExt: std::future::Future + Sized {
fn trace(self, span: tracing::span::Span) -> tracing::instrument::Instrumented<Self>;
}

View File

@@ -71,6 +71,6 @@ pub struct LinearStagePlan {
/// The key expressions to use for the lookup relation.
pub lookup_key: Vec<ScalarExpr>,
/// The closure to apply to the concatenation of the key columns,
/// the stream value columns, and the lookup value colunms.
/// the stream value columns, and the lookup value columns.
pub closure: JoinFilter,
}

View File

@@ -525,7 +525,7 @@ impl MetaClient {
self.heartbeat_client()?.ask_leader().await
}
/// Returns a heartbeat bidirectional streaming: (sender, recever), the
/// Returns a heartbeat bidirectional streaming: (sender, receiver), the
/// other end is the leader of `metasrv`.
///
/// The `datanode` needs to use the sender to continuously send heartbeat

View File

@@ -1190,7 +1190,7 @@ mod tests {
));
handles.push(handle);
}
// Wait for candidates to registrate themselves and renew their leases at least once.
// Wait for candidates to register themselves and renew their leases at least once.
tokio::time::sleep(candidate_lease_ttl / 2 + Duration::from_secs(1)).await;
let (tx, _) = broadcast::channel(100);

View File

@@ -1012,7 +1012,7 @@ mod tests {
));
handles.push(handle);
}
// Wait for candidates to registrate themselves and renew their leases at least once.
// Wait for candidates to register themselves and renew their leases at least once.
tokio::time::sleep(Duration::from_secs(3)).await;
let (tx, _) = broadcast::channel(100);

View File

@@ -76,7 +76,7 @@ pub struct RegionManifestOptions {
/// -RegionMetadataRef metadata
/// }
/// class RegionEdit {
/// -VersionNumber regoin_version
/// -VersionNumber region_version
/// -Vec~FileMeta~ files_to_add
/// -Vec~FileMeta~ files_to_remove
/// -SequenceNumber flushed_sequence

View File

@@ -70,7 +70,7 @@ impl ScalarCalculate {
interval: Millisecond,
input: LogicalPlan,
time_index: &str,
tag_colunms: &[String],
tag_columns: &[String],
field_column: &str,
table_name: Option<&str>,
) -> Result<Self> {
@@ -97,7 +97,7 @@ impl ScalarCalculate {
end,
interval,
time_index: time_index.to_string(),
tag_columns: tag_colunms.to_vec(),
tag_columns: tag_columns.to_vec(),
field_column: field_column.to_string(),
input,
output_schema: Arc::new(schema),

View File

@@ -82,7 +82,7 @@ impl ExtensionPlanner for MergeSortExtensionPlanner {
// and we only need to do a merge sort, otherwise fallback to quick sort
let can_merge_sort = partition_cnt >= region_cnt;
if can_merge_sort {
// TODO(discord9): use `SortPreversingMergeExec here`
// TODO(discord9): use `SortPreservingMergeExec` here
}
// for now merge sort only exist in logical plan, and have the same effect as `Sort`
// doesn't change the execution plan, this will change in the future

View File

@@ -352,7 +352,7 @@ async fn dryrun_pipeline_inner(
)
.await?;
let colume_type_key = "colume_type";
let column_type_key = "column_type";
let data_type_key = "data_type";
let name_key = "name";
@@ -376,7 +376,7 @@ async fn dryrun_pipeline_inner(
JsonValue::String(cs.datatype().as_str_name().to_string()),
);
map.insert(
colume_type_key.to_string(),
column_type_key.to_string(),
JsonValue::String(cs.semantic_type().as_str_name().to_string()),
);
map.insert(
@@ -409,7 +409,7 @@ async fn dryrun_pipeline_inner(
);
map.insert(
"semantic_type".to_string(),
schema[idx][colume_type_key].clone(),
schema[idx][column_type_key].clone(),
);
map.insert(
"data_type".to_string(),

View File

@@ -105,7 +105,7 @@ mod tests {
use crate::statements::statement::Statement;
#[test]
fn test_display_for_tuncate_table() {
fn test_display_for_truncate_table() {
let sql = r"truncate table t1;";
let stmts: Vec<Statement> =
ParserContext::create_with_dialect(sql, &GreptimeDbDialect {}, ParseOptions::default())

View File

@@ -3212,37 +3212,37 @@ transform:
let dryrun_schema = json!([
{
"colume_type": "FIELD",
"column_type": "FIELD",
"data_type": "INT32",
"fulltext": false,
"name": "id1"
},
{
"colume_type": "FIELD",
"column_type": "FIELD",
"data_type": "INT32",
"fulltext": false,
"name": "id2"
},
{
"colume_type": "FIELD",
"column_type": "FIELD",
"data_type": "STRING",
"fulltext": false,
"name": "type"
},
{
"colume_type": "FIELD",
"column_type": "FIELD",
"data_type": "STRING",
"fulltext": false,
"name": "log"
},
{
"colume_type": "FIELD",
"column_type": "FIELD",
"data_type": "STRING",
"fulltext": false,
"name": "logger"
},
{
"colume_type": "TIMESTAMP",
"column_type": "TIMESTAMP",
"data_type": "TIMESTAMP_NANOSECOND",
"fulltext": false,
"name": "time"