Mirror of https://github.com/GreptimeTeam/greptimedb.git (synced 2025-12-22 22:20:02 +00:00)
Compare commits

9 commits on branch chore/udap (4650935fc0...):
250110415b, a5c00e8591, f7d9c5315e, 5bfde4f074, 594a60194a, 2ac380c85c, e2bdd84cac, 7024079822, 64ca0b435f
8 Cargo.lock generated

@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "Inflector"
@@ -5250,7 +5250,7 @@ dependencies = [
 [[package]]
 name = "influxdb_line_protocol"
 version = "0.1.0"
-source = "git+https://github.com/evenyag/influxdb_iox?branch=feat/line-protocol#10ef0d0b02705ac7518717390939fa3a9bcfcacc"
+source = "git+https://github.com/evenyag/influxdb_iox?branch=feat%2Fline-protocol#10ef0d0b02705ac7518717390939fa3a9bcfcacc"
 dependencies = [
  "bytes",
  "nom",
@@ -10912,9 +10912,9 @@ dependencies = [
 
 [[package]]
 name = "shadow-rs"
-version = "0.31.1"
+version = "0.35.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02c282402d25101f9c893e9cd7e4cae535fe7db18b81291de973026c219ddf1e"
+checksum = "2311e39772c00391875f40e34d43efef247b23930143a70ca5fbec9505937420"
 dependencies = [
  "const_format",
  "git2",
@@ -167,7 +167,7 @@ schemars = "0.8"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = { version = "1.0", features = ["float_roundtrip"] }
 serde_with = "3"
-shadow-rs = "0.31"
+shadow-rs = "0.35"
 similar-asserts = "1.6.0"
 smallvec = { version = "1", features = ["serde"] }
 snafu = "0.8"
2 Makefile

@@ -8,7 +8,7 @@ CARGO_BUILD_OPTS := --locked
 IMAGE_REGISTRY ?= docker.io
 IMAGE_NAMESPACE ?= greptime
 IMAGE_TAG ?= latest
-DEV_BUILDER_IMAGE_TAG ?= 2024-06-06-5674c14f-20240920110415
+DEV_BUILDER_IMAGE_TAG ?= 2024-10-19-a5c00e85-20241024184445
 BUILDX_MULTI_PLATFORM_BUILD ?= false
 BUILDX_BUILDER_NAME ?= gtbuilder
 BASE_IMAGE ?= ubuntu
@@ -1,3 +1,2 @@
 [toolchain]
-channel = "nightly-2024-06-06"
-
+channel = "nightly-2024-10-19"
@@ -33,7 +33,7 @@ impl StaticUserProvider {
             value: value.to_string(),
             msg: "StaticUserProviderOption must be in format `<option>:<value>`",
         })?;
-        return match mode {
+        match mode {
             "file" => {
                 let users = load_credential_from_file(content)?
                     .context(InvalidConfigSnafu {
@@ -58,7 +58,7 @@ impl StaticUserProvider {
             msg: "StaticUserProviderOption must be in format `file:<path>` or `cmd:<values>`",
             }
             .fail(),
-        };
+        }
     }
 }
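Most of the Rust hunks in this compare are mechanical lint cleanups, and this one sets the pattern: clippy's `needless_return` flags `return match mode { ... };` because the last expression of a Rust function body is already its value. A minimal sketch of the before/after shape (the `classify` function is hypothetical, not from the repository):

```rust
fn classify(mode: &str) -> Result<&'static str, String> {
    // Before: `return match mode { ... };`
    // After: the match is the tail expression, so the explicit `return`
    // and the trailing semicolon are redundant.
    match mode {
        "file" => Ok("file-backed user provider"),
        "cmd" => Ok("inline user provider"),
        other => Err(format!("unknown mode: {other}")),
    }
}
```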
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-/// All table names in `information_schema`.
+//! All table names in `information_schema`.
 
 pub const TABLES: &str = "tables";
 pub const COLUMNS: &str = "columns";
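The `///` → `//!` switch is not cosmetic. `///` is an outer doc comment that documents the next item, while `//!` is an inner doc comment that documents the enclosing module; left as `///`, the sentence would have attached itself to `pub const TABLES` instead of the module. A sketch of the two forms side by side (hypothetical file):

```rust
//! Inner doc comment: documents the module that contains it, so it
//! belongs at the top of the file.

/// Outer doc comment: documents the item that follows it,
/// in this case the `TABLES` constant.
pub const TABLES: &str = "tables";
```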
@@ -12,6 +12,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+//! The `pg_catalog.pg_namespace` table implementation.
+//! namespace is a schema in greptime
+
 pub(super) mod oid_map;
 
 use std::sync::{Arc, Weak};
@@ -40,9 +43,6 @@ use crate::system_schema::utils::tables::{string_column, u32_column};
 use crate::system_schema::SystemTable;
 use crate::CatalogManager;
 
-/// The `pg_catalog.pg_namespace` table implementation.
-/// namespace is a schema in greptime
-
 const NSPNAME: &str = "nspname";
 const INIT_CAPACITY: usize = 42;
@@ -84,6 +84,7 @@ pub trait App: Send {
 }
 
 /// Log the versions of the application, and the arguments passed to the cli.
+///
 /// `version` should be the same as the output of cli "--version";
 /// and the `short_version` is the short version of the codes, often consist of git branch and commit.
 pub fn log_versions(version: &str, short_version: &str, app: &str) {
@@ -46,8 +46,9 @@ impl From<String> for SecretString {
     }
 }
 
-/// Wrapper type for values that contains secrets, which attempts to limit
-/// accidental exposure and ensure secrets are wiped from memory when dropped.
+/// Wrapper type for values that contains secrets.
+///
+/// It attempts to limit accidental exposure and ensure secrets are wiped from memory when dropped.
 /// (e.g. passwords, cryptographic keys, access tokens or other credentials)
 ///
 /// Access to the secret inner value occurs through the [`ExposeSecret`]
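The doc-comment splits here and in later hunks all follow clippy's `too_long_first_doc_paragraph` lint: rustdoc takes the first paragraph of a doc comment as the one-line summary shown in item listings, so the fix is a short first sentence, an empty `///` line, then the details. The shape, on a stand-in type:

```rust
/// Wrapper type for values that contain secrets.
///
/// Everything after the blank `///` line above is the long description;
/// only the first paragraph appears in rustdoc's module index.
pub struct SecretString(String);
```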
@@ -103,14 +103,15 @@ pub const INFORMATION_SCHEMA_PROCEDURE_INFO_TABLE_ID: u32 = 34;
 /// id for information_schema.region_statistics
 pub const INFORMATION_SCHEMA_REGION_STATISTICS_TABLE_ID: u32 = 35;
 
-/// ----- End of information_schema tables -----
+// ----- End of information_schema tables -----
+
 /// ----- Begin of pg_catalog tables -----
 pub const PG_CATALOG_PG_CLASS_TABLE_ID: u32 = 256;
 pub const PG_CATALOG_PG_TYPE_TABLE_ID: u32 = 257;
 pub const PG_CATALOG_PG_NAMESPACE_TABLE_ID: u32 = 258;
 
-/// ----- End of pg_catalog tables -----
+// ----- End of pg_catalog tables -----
 
 pub const MITO_ENGINE: &str = "mito";
 pub const MITO2_ENGINE: &str = "mito2";
 pub const METRIC_ENGINE: &str = "metric";
@@ -199,6 +199,7 @@ pub fn default_get_uuid(working_home: &Option<String>) -> Option<String> {
 }
 
 /// Report version info to GreptimeDB.
+///
 /// We do not collect any identity-sensitive information.
 /// This task is scheduled to run every 30 minutes.
 /// The task will be disabled default. It can be enabled by setting the build feature `greptimedb-telemetry`
@@ -35,7 +35,9 @@ pub fn aggr_func_type_store_derive(input: TokenStream) -> TokenStream {
 }
 
 /// A struct can be used as a creator for aggregate function if it has been annotated with this
-/// attribute first. This attribute add a necessary field which is intended to store the input
+/// attribute first.
+///
+/// This attribute add a necessary field which is intended to store the input
 /// data's types to the struct.
 /// This attribute is expected to be used along with derive macro [AggrFuncTypeStore].
 #[proc_macro_attribute]
@@ -44,9 +46,10 @@ pub fn as_aggr_func_creator(args: TokenStream, input: TokenStream) -> TokenStrea
 }
 
 /// Attribute macro to convert an arithimetic function to a range function. The annotated function
-/// should accept servaral arrays as input and return a single value as output. This procedure
-/// macro can works on any number of input parameters. Return type can be either primitive type
-/// or wrapped in `Option`.
+/// should accept servaral arrays as input and return a single value as output.
+///
+/// This procedure macro can works on any number of input parameters. Return type can be either
+/// primitive type or wrapped in `Option`.
 ///
 /// # Example
 /// Take `count_over_time()` in PromQL as an example:
@@ -55,6 +55,7 @@ pub trait ClusterInfo {
 }
 
 /// The key of [NodeInfo] in the storage. The format is `__meta_cluster_node_info-{cluster_id}-{role}-{node_id}`.
+///
 /// This key cannot be used to describe the `Metasrv` because the `Metasrv` does not have
 /// a `cluster_id`, it serves multiple clusters.
 #[derive(Debug, Clone, Eq, Hash, PartialEq, Serialize, Deserialize)]
@@ -35,7 +35,7 @@ pub struct CatalogNameKey<'a> {
     pub catalog: &'a str,
 }
 
-impl<'a> Default for CatalogNameKey<'a> {
+impl Default for CatalogNameKey<'_> {
     fn default() -> Self {
         Self {
             catalog: DEFAULT_CATALOG_NAME,
@@ -77,7 +77,7 @@ impl DatanodeTableKey {
     }
 }
 
-impl<'a> MetadataKey<'a, DatanodeTableKey> for DatanodeTableKey {
+impl MetadataKey<'_, DatanodeTableKey> for DatanodeTableKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.to_string().into_bytes()
     }
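The bulk of the remaining hunks apply a single clippy fix, `needless_lifetimes`: when the lifetime parameter of an `impl` block is never used by its body, it can be replaced with the anonymous lifetime `'_`. A standalone sketch of the before/after (hypothetical `Key` type):

```rust
struct Key<'a> {
    catalog: &'a str,
}

// Before: `impl<'a> Default for Key<'a> { ... }` declares a lifetime the
// body never names. After: the anonymous lifetime says the same thing.
impl Default for Key<'_> {
    fn default() -> Self {
        Key { catalog: "greptime" }
    }
}
```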
@@ -42,6 +42,8 @@ lazy_static! {
 /// The layout: `__flow/info/{flow_id}`.
 pub struct FlowInfoKey(FlowScoped<FlowInfoKeyInner>);
 
+pub type FlowInfoDecodeResult = Result<Option<DeserializedValueWithBytes<FlowInfoValue>>>;
+
 impl<'a> MetadataKey<'a, FlowInfoKey> for FlowInfoKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.0.to_bytes()
@@ -203,9 +205,7 @@ impl FlowInfoManager {
         flow_value: &FlowInfoValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<FlowInfoValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> FlowInfoDecodeResult,
     )> {
         let key = FlowInfoKey::new(flow_id).to_bytes();
         let txn = Txn::put_if_not_exists(key.clone(), flow_value.try_as_raw_value()?);
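The new `*DecodeResult` aliases look like a response to clippy's `type_complexity` lint: the nested `Result<Option<DeserializedValueWithBytes<...>>>` type recurs in every transaction-builder signature, and naming it once keeps the `impl FnOnce(...)` bounds on a single line. A reduced sketch with stand-in types (`Decoded` and `DecodeResult` are illustrative, not the repository's names):

```rust
// Stand-ins for the real `DeserializedValueWithBytes<...>` machinery.
struct Decoded(Vec<u8>);
type DecodeResult = Result<Option<Decoded>, String>;

// The alias collapses a three-level generic return type into one name,
// so the closure bound fits on one line.
fn build_decoder() -> impl FnOnce(&[u8]) -> DecodeResult {
    |bytes: &[u8]| {
        if bytes.is_empty() {
            Ok(None)
        } else {
            Ok(Some(Decoded(bytes.to_vec())))
        }
    }
}
```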
@@ -46,6 +46,8 @@ lazy_static! {
 /// The layout: `__flow/name/{catalog_name}/{flow_name}`.
 pub struct FlowNameKey<'a>(FlowScoped<FlowNameKeyInner<'a>>);
 
+pub type FlowNameDecodeResult = Result<Option<DeserializedValueWithBytes<FlowNameValue>>>;
+
 #[allow(dead_code)]
 impl<'a> FlowNameKey<'a> {
     /// Returns the [FlowNameKey]
@@ -104,7 +106,7 @@ impl<'a> MetadataKey<'a, FlowNameKeyInner<'a>> for FlowNameKeyInner<'_> {
         .into_bytes()
     }
 
-    fn from_bytes(bytes: &'a [u8]) -> Result<FlowNameKeyInner> {
+    fn from_bytes(bytes: &'a [u8]) -> Result<FlowNameKeyInner<'a>> {
         let key = std::str::from_utf8(bytes).map_err(|e| {
             error::InvalidMetadataSnafu {
                 err_msg: format!(
@@ -223,9 +225,7 @@ impl FlowNameManager {
         flow_id: FlowId,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<FlowNameValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> FlowNameDecodeResult,
     )> {
         let key = FlowNameKey::new(catalog_name, flow_name);
         let raw_key = key.to_bytes();
@@ -52,7 +52,7 @@ impl NodeAddressValue {
     }
 }
 
-impl<'a> MetadataKey<'a, NodeAddressKey> for NodeAddressKey {
+impl MetadataKey<'_, NodeAddressKey> for NodeAddressKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.to_string().into_bytes()
     }
@@ -41,7 +41,7 @@ pub struct SchemaNameKey<'a> {
     pub schema: &'a str,
 }
 
-impl<'a> Default for SchemaNameKey<'a> {
+impl Default for SchemaNameKey<'_> {
     fn default() -> Self {
         Self {
             catalog: DEFAULT_CATALOG_NAME,
@@ -51,7 +51,7 @@ impl Display for TableInfoKey {
     }
 }
 
-impl<'a> MetadataKey<'a, TableInfoKey> for TableInfoKey {
+impl MetadataKey<'_, TableInfoKey> for TableInfoKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.to_string().into_bytes()
     }
@@ -132,6 +132,7 @@ pub type TableInfoManagerRef = Arc<TableInfoManager>;
 pub struct TableInfoManager {
     kv_backend: KvBackendRef,
 }
+pub type TableInfoDecodeResult = Result<Option<DeserializedValueWithBytes<TableInfoValue>>>;
 
 impl TableInfoManager {
     pub fn new(kv_backend: KvBackendRef) -> Self {
@@ -145,9 +146,7 @@ impl TableInfoManager {
         table_info_value: &TableInfoValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<TableInfoValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> TableInfoDecodeResult,
     )> {
         let key = TableInfoKey::new(table_id);
         let raw_key = key.to_bytes();
@@ -169,9 +168,7 @@ impl TableInfoManager {
         new_table_info_value: &TableInfoValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<TableInfoValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> TableInfoDecodeResult,
     )> {
         let key = TableInfoKey::new(table_id);
         let raw_key = key.to_bytes();
@@ -245,7 +245,7 @@ impl LogicalTableRouteValue {
     }
 }
 
-impl<'a> MetadataKey<'a, TableRouteKey> for TableRouteKey {
+impl MetadataKey<'_, TableRouteKey> for TableRouteKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.to_string().into_bytes()
     }
@@ -472,6 +472,8 @@ pub struct TableRouteStorage {
     kv_backend: KvBackendRef,
 }
 
+pub type TableRouteValueDecodeResult = Result<Option<DeserializedValueWithBytes<TableRouteValue>>>;
+
 impl TableRouteStorage {
     pub fn new(kv_backend: KvBackendRef) -> Self {
         Self { kv_backend }
@@ -485,9 +487,7 @@ impl TableRouteStorage {
         table_route_value: &TableRouteValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<TableRouteValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> TableRouteValueDecodeResult,
     )> {
         let key = TableRouteKey::new(table_id);
         let raw_key = key.to_bytes();
@@ -510,9 +510,7 @@ impl TableRouteStorage {
         new_table_route_value: &TableRouteValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<TableRouteValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> TableRouteValueDecodeResult,
     )> {
         let key = TableRouteKey::new(table_id);
         let raw_key = key.to_bytes();
@@ -53,7 +53,7 @@ impl Display for ViewInfoKey {
     }
 }
 
-impl<'a> MetadataKey<'a, ViewInfoKey> for ViewInfoKey {
+impl MetadataKey<'_, ViewInfoKey> for ViewInfoKey {
     fn to_bytes(&self) -> Vec<u8> {
         self.to_string().into_bytes()
     }
@@ -139,6 +139,8 @@ pub struct ViewInfoManager {
 
 pub type ViewInfoManagerRef = Arc<ViewInfoManager>;
 
+pub type ViewInfoValueDecodeResult = Result<Option<DeserializedValueWithBytes<ViewInfoValue>>>;
+
 impl ViewInfoManager {
     pub fn new(kv_backend: KvBackendRef) -> Self {
         Self { kv_backend }
@@ -151,9 +153,7 @@ impl ViewInfoManager {
         view_info_value: &ViewInfoValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<ViewInfoValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> ViewInfoValueDecodeResult,
     )> {
         let key = ViewInfoKey::new(view_id);
         let raw_key = key.to_bytes();
@@ -175,9 +175,7 @@ impl ViewInfoManager {
         new_view_info_value: &ViewInfoValue,
     ) -> Result<(
         Txn,
-        impl FnOnce(
-            &mut TxnOpGetResponseSet,
-        ) -> Result<Option<DeserializedValueWithBytes<ViewInfoValue>>>,
+        impl FnOnce(&mut TxnOpGetResponseSet) -> ViewInfoValueDecodeResult,
    )> {
         let key = ViewInfoKey::new(view_id);
         let raw_key = key.to_bytes();
@@ -34,7 +34,7 @@ pub enum CatalogLock<'a> {
     Write(&'a str),
 }
 
-impl<'a> Display for CatalogLock<'a> {
+impl Display for CatalogLock<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let key = match self {
             CatalogLock::Read(s) => s,
@@ -44,7 +44,7 @@ impl<'a> Display for CatalogLock<'a> {
     }
 }
 
-impl<'a> From<CatalogLock<'a>> for StringKey {
+impl From<CatalogLock<'_>> for StringKey {
     fn from(value: CatalogLock) -> Self {
         match value {
             CatalogLock::Write(_) => StringKey::Exclusive(value.to_string()),
@@ -297,7 +297,7 @@ struct ParsedKey<'a> {
     key_type: KeyType,
 }
 
-impl<'a> fmt::Display for ParsedKey<'a> {
+impl fmt::Display for ParsedKey<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
@@ -220,7 +220,7 @@ pub struct RecordBatchRowIterator<'a> {
 }
 
 impl<'a> RecordBatchRowIterator<'a> {
-    fn new(record_batch: &'a RecordBatch) -> RecordBatchRowIterator {
+    fn new(record_batch: &'a RecordBatch) -> RecordBatchRowIterator<'a> {
         RecordBatchRowIterator {
             record_batch,
             rows: record_batch.df_record_batch.num_rows(),
@@ -230,7 +230,7 @@ impl<'a> RecordBatchRowIterator<'a> {
     }
 }
 
-impl<'a> Iterator for RecordBatchRowIterator<'a> {
+impl Iterator for RecordBatchRowIterator<'_> {
     type Item = Vec<Value>;
 
     fn next(&mut self) -> Option<Self::Item> {
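This hunk is the complementary lifetime fix: the constructor returned `RecordBatchRowIterator` with the lifetime elided even though it borrows from `record_batch: &'a RecordBatch`, and newer toolchains warn when an elided lifetime silently resolves to a named one. Spelling out `<'a>` makes the borrow visible in the signature. Sketch (hypothetical `Iter` type):

```rust
struct Iter<'a> {
    data: &'a [u32],
    idx: usize,
}

impl<'a> Iter<'a> {
    // Writing `-> Iter` here would elide `'a`; naming it makes the
    // borrow from `data` explicit in the signature.
    fn new(data: &'a [u32]) -> Iter<'a> {
        Iter { data, idx: 0 }
    }
}
```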
@@ -203,7 +203,7 @@ impl Scalar for bool {
     }
 }
 
-impl<'a> ScalarRef<'a> for bool {
+impl ScalarRef<'_> for bool {
     type ScalarType = bool;
 
     #[inline]
@@ -273,7 +273,7 @@ impl Scalar for Date {
     }
 }
 
-impl<'a> ScalarRef<'a> for Date {
+impl ScalarRef<'_> for Date {
     type ScalarType = Date;
 
     fn to_owned_scalar(&self) -> Self::ScalarType {
@@ -294,7 +294,7 @@ impl Scalar for Decimal128 {
     }
 }
 
-impl<'a> ScalarRef<'a> for Decimal128 {
+impl ScalarRef<'_> for Decimal128 {
     type ScalarType = Decimal128;
 
     fn to_owned_scalar(&self) -> Self::ScalarType {
@@ -315,7 +315,7 @@ impl Scalar for DateTime {
     }
 }
 
-impl<'a> ScalarRef<'a> for DateTime {
+impl ScalarRef<'_> for DateTime {
     type ScalarType = DateTime;
 
     fn to_owned_scalar(&self) -> Self::ScalarType {
@@ -82,8 +82,8 @@ pub fn cast_with_opt(
     }
 }
 
-/// Return true if the src_value can be casted to dest_type,
-/// Otherwise, return false.
+/// Return true if the src_value can be casted to dest_type, Otherwise, return false.
+///
 /// Notice: this function does not promise that the `cast_with_opt` will succeed,
 /// it only checks whether the src_value can be casted to dest_type.
 pub fn can_cast_type(src_value: &Value, dest_type: &ConcreteDataType) -> bool {
@@ -83,9 +83,10 @@ pub trait LogicalPrimitiveType: 'static + Sized {
     fn cast_value_ref(value: ValueRef) -> Result<Option<Self::Wrapper>>;
 }
 
-/// A new type for [WrapperType], complement the `Ord` feature for it. Wrapping non ordered
-/// primitive types like `f32` and `f64` in `OrdPrimitive` can make them be used in places that
-/// require `Ord`. For example, in `Median` UDAFs.
+/// A new type for [WrapperType], complement the `Ord` feature for it.
+///
+/// Wrapping non ordered primitive types like `f32` and `f64` in `OrdPrimitive`
+/// can make them be used in places that require `Ord`. For example, in `Median` UDAFs.
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub struct OrdPrimitive<T: WrapperType>(pub T);
@@ -1087,7 +1087,7 @@ macro_rules! impl_as_for_value_ref {
     };
 }
 
-impl<'a> ValueRef<'a> {
+impl ValueRef<'_> {
     define_data_type_func!(ValueRef);
 
     /// Returns true if this is null.
@@ -1214,13 +1214,13 @@ impl<'a> ValueRef<'a> {
     }
 }
 
-impl<'a> PartialOrd for ValueRef<'a> {
+impl PartialOrd for ValueRef<'_> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }
 
-impl<'a> Ord for ValueRef<'a> {
+impl Ord for ValueRef<'_> {
     fn cmp(&self, other: &Self) -> Ordering {
         impl_ord_for_value_like!(ValueRef, self, other)
     }
@@ -1347,7 +1347,7 @@ pub enum ListValueRef<'a> {
     Ref { val: &'a ListValue },
 }
 
-impl<'a> ListValueRef<'a> {
+impl ListValueRef<'_> {
     /// Convert self to [Value]. This method would clone the underlying data.
     fn to_value(self) -> Value {
         match self {
@@ -1365,7 +1365,7 @@ impl<'a> ListValueRef<'a> {
     }
 }
 
-impl<'a> Serialize for ListValueRef<'a> {
+impl Serialize for ListValueRef<'_> {
     fn serialize<S: Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {
         match self {
             ListValueRef::Indexed { vector, idx } => match vector.get(*idx) {
@@ -1377,28 +1377,28 @@ impl<'a> Serialize for ListValueRef<'a> {
     }
 }
 
-impl<'a> PartialEq for ListValueRef<'a> {
+impl PartialEq for ListValueRef<'_> {
     fn eq(&self, other: &Self) -> bool {
         self.to_value().eq(&other.to_value())
     }
 }
 
-impl<'a> Eq for ListValueRef<'a> {}
+impl Eq for ListValueRef<'_> {}
 
-impl<'a> Ord for ListValueRef<'a> {
+impl Ord for ListValueRef<'_> {
     fn cmp(&self, other: &Self) -> Ordering {
         // Respect the order of `Value` by converting into value before comparison.
         self.to_value().cmp(&other.to_value())
     }
 }
 
-impl<'a> PartialOrd for ListValueRef<'a> {
+impl PartialOrd for ListValueRef<'_> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }
 
-impl<'a> ValueRef<'a> {
+impl ValueRef<'_> {
     /// Returns the size of the underlying data in bytes,
     /// The size is estimated and only considers the data size.
     pub fn data_size(&self) -> usize {
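One detail worth noting in the `ValueRef` and `ListValueRef` hunks: `PartialOrd` is implemented as `Some(self.cmp(other))`, delegating to `Ord`. That is the canonical pattern for totally ordered types, since it guarantees the two traits can never disagree. Sketch on a hypothetical type:

```rust
use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Version(u32);

impl Ord for Version {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}

// Delegate so that PartialOrd and Ord always agree.
impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
```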
@@ -247,7 +247,7 @@ pub struct Decimal128Iter<'a> {
     iter: ArrayIter<&'a Decimal128Array>,
 }
 
-impl<'a> Iterator for Decimal128Iter<'a> {
+impl Iterator for Decimal128Iter<'_> {
     type Item = Option<Decimal128>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -157,7 +157,7 @@ pub struct ListIter<'a> {
 }
 
 impl<'a> ListIter<'a> {
-    fn new(vector: &'a ListVector) -> ListIter {
+    fn new(vector: &'a ListVector) -> ListIter<'a> {
         ListIter { vector, idx: 0 }
     }
 }
@@ -207,7 +207,7 @@ pub struct PrimitiveIter<'a, T: LogicalPrimitiveType> {
     iter: ArrayIter<&'a PrimitiveArray<T::ArrowPrimitive>>,
 }
 
-impl<'a, T: LogicalPrimitiveType> Iterator for PrimitiveIter<'a, T> {
+impl<T: LogicalPrimitiveType> Iterator for PrimitiveIter<'_, T> {
     type Item = Option<T::Wrapper>;
 
     fn next(&mut self) -> Option<Option<T::Wrapper>> {
@@ -61,7 +61,7 @@ pub struct Context<'referred, 'df> {
     pub err_collector: ErrCollector,
 }
 
-impl<'referred, 'df> Drop for Context<'referred, 'df> {
+impl Drop for Context<'_, '_> {
     fn drop(&mut self) {
         for bundle in std::mem::take(&mut self.input_collection)
             .into_values()
@@ -92,7 +92,7 @@ impl<'referred, 'df> Drop for Context<'referred, 'df> {
     }
 }
 
-impl<'referred, 'df> Context<'referred, 'df> {
+impl Context<'_, '_> {
     pub fn insert_global(&mut self, id: GlobalId, collection: CollectionBundle) {
         self.input_collection.insert(id, collection);
     }
@@ -120,7 +120,7 @@ impl<'referred, 'df> Context<'referred, 'df> {
     }
 }
 
-impl<'referred, 'df> Context<'referred, 'df> {
+impl Context<'_, '_> {
     /// Like `render_plan` but in Batch Mode
     pub fn render_plan_batch(&mut self, plan: TypedPlan) -> Result<CollectionBundle<Batch>, Error> {
         match plan.plan {
@@ -28,7 +28,7 @@ use crate::plan::TypedPlan;
 use crate::repr::{self, DiffRow, KeyValDiffRow, Row};
 use crate::utils::ArrangeHandler;
 
-impl<'referred, 'df> Context<'referred, 'df> {
+impl Context<'_, '_> {
     /// Like `render_mfp` but in batch mode
     pub fn render_mfp_batch(
         &mut self,
@@ -34,7 +34,7 @@ use crate::plan::{AccumulablePlan, AggrWithIndex, KeyValPlan, ReducePlan, TypedP
 use crate::repr::{self, DiffRow, KeyValDiffRow, RelationType, Row};
 use crate::utils::{ArrangeHandler, ArrangeReader, ArrangeWriter, KeyExpiryManager};
 
-impl<'referred, 'df> Context<'referred, 'df> {
+impl Context<'_, '_> {
     const REDUCE_BATCH: &'static str = "reduce_batch";
     /// Like `render_reduce`, but for batch mode, and only barebone implementation
     /// no support for distinct aggregation for now
@@ -31,7 +31,7 @@ use crate::expr::{Batch, EvalError};
 use crate::repr::{DiffRow, Row, BROADCAST_CAP};
 
 #[allow(clippy::mutable_key_type)]
-impl<'referred, 'df> Context<'referred, 'df> {
+impl Context<'_, '_> {
     /// simply send the batch to downstream, without fancy features like buffering
     pub fn render_source_batch(
         &mut self,
@@ -273,7 +273,7 @@ impl<'a> ExpandAvgRewriter<'a> {
     }
 }
 
-impl<'a> TreeNodeRewriter for ExpandAvgRewriter<'a> {
+impl TreeNodeRewriter for ExpandAvgRewriter<'_> {
     type Node = Expr;
 
     fn f_up(&mut self, expr: Expr) -> Result<Transformed<Expr>, DataFusionError> {
@@ -19,7 +19,9 @@ use crate::inverted_index::error::Result;
 use crate::inverted_index::format::reader::InvertedIndexReader;
 
 /// `FstValuesMapper` maps FST-encoded u64 values to their corresponding bitmaps
-/// within an inverted index. The higher 32 bits of each u64 value represent the
+/// within an inverted index.
+///
+/// The higher 32 bits of each u64 value represent the
 /// bitmap offset and the lower 32 bits represent its size. This mapper uses these
 /// combined offset-size pairs to fetch and union multiple bitmaps into a single `BitVec`.
 pub struct FstValuesMapper<'a> {
@@ -134,7 +134,7 @@ impl PredicatesIndexApplier {
     fn bitmap_full_range(metadata: &InvertedIndexMetas) -> BitVec {
         let total_count = metadata.total_row_count;
         let segment_count = metadata.segment_row_count;
-        let len = (total_count + segment_count - 1) / segment_count;
+        let len = total_count.div_ceil(segment_count);
         BitVec::repeat(true, len as _)
     }
 }
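`(a + b - 1) / b` is the classic integer ceiling-division idiom; `div_ceil`, stabilized in Rust 1.73 for the unsigned integer types, states the intent directly and also avoids the intermediate `a + b - 1` overflowing near the type's maximum. A quick check of the equivalence:

```rust
fn main() {
    let (total, segment): (u64, u64) = (10, 3);
    // The manual idiom and the stabilized method agree: ceil(10 / 3) = 4.
    assert_eq!((total + segment - 1) / segment, 4);
    assert_eq!(total.div_ceil(segment), 4);
    // Near u64::MAX the manual form would overflow; div_ceil does not.
    assert_eq!(u64::MAX.div_ceil(2), 1u64 << 63);
}
```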
@@ -16,7 +16,6 @@
 #![feature(result_flattening)]
 #![feature(assert_matches)]
 #![feature(extract_if)]
-#![feature(option_take_if)]
 
 pub mod bootstrap;
 mod cache_invalidator;
2 src/mito2/src/cache/cache_size.rs vendored

@@ -137,6 +137,6 @@ mod tests {
     fn test_parquet_meta_size() {
         let metadata = parquet_meta();
 
-        assert_eq!(964, parquet_meta_size(&metadata));
+        assert_eq!(956, parquet_meta_size(&metadata));
     }
 }
@@ -499,7 +499,7 @@ fn divide_num_cpus(divisor: usize) -> usize {
     let cores = common_config::utils::get_cpus();
     debug_assert!(cores > 0);
 
-    (cores + divisor - 1) / divisor
+    cores.div_ceil(divisor)
 }
 
 #[cfg(test)]
@@ -126,7 +126,7 @@ pub struct KeyValue<'a> {
     op_type: OpType,
 }
 
-impl<'a> KeyValue<'a> {
+impl KeyValue<'_> {
     /// Get primary key columns.
     pub fn primary_keys(&self) -> impl Iterator<Item = ValueRef> {
         self.helper.indices[..self.helper.num_primary_key_column]
@@ -18,7 +18,7 @@ use index::inverted_index::search::predicate::{Bound, Predicate, Range, RangePre
 use crate::error::Result;
 use crate::sst::index::inverted_index::applier::builder::InvertedIndexApplierBuilder;
 
-impl<'a> InvertedIndexApplierBuilder<'a> {
+impl InvertedIndexApplierBuilder<'_> {
     /// Collects a `BETWEEN` expression in the form of `column BETWEEN lit AND lit`.
     pub(crate) fn collect_between(&mut self, between: &Between) -> Result<()> {
         if between.negated {
@@ -19,7 +19,7 @@ use index::inverted_index::Bytes;
 use crate::error::Result;
 use crate::sst::index::inverted_index::applier::builder::InvertedIndexApplierBuilder;
 
-impl<'a> InvertedIndexApplierBuilder<'a> {
+impl InvertedIndexApplierBuilder<'_> {
     /// Collects a comparison expression in the form of
     /// `column < lit`, `column > lit`, `column <= lit`, `column >= lit`,
     /// `lit < column`, `lit > column`, `lit <= column`, `lit >= column`.
@@ -22,7 +22,7 @@ use index::inverted_index::Bytes;
 use crate::error::Result;
 use crate::sst::index::inverted_index::applier::builder::InvertedIndexApplierBuilder;
 
-impl<'a> InvertedIndexApplierBuilder<'a> {
+impl InvertedIndexApplierBuilder<'_> {
     /// Collects an eq expression in the form of `column = lit`.
     pub(crate) fn collect_eq(&mut self, left: &DfExpr, right: &DfExpr) -> Result<()> {
         let Some(column_name) = Self::column_name(left).or_else(|| Self::column_name(right)) else {
@@ -20,7 +20,7 @@ use index::inverted_index::search::predicate::{InListPredicate, Predicate};
 use crate::error::Result;
 use crate::sst::index::inverted_index::applier::builder::InvertedIndexApplierBuilder;
 
-impl<'a> InvertedIndexApplierBuilder<'a> {
+impl InvertedIndexApplierBuilder<'_> {
     /// Collects an in list expression in the form of `column IN (lit, lit, ...)`.
     pub(crate) fn collect_inlist(&mut self, inlist: &InList) -> Result<()> {
         if inlist.negated {
@@ -19,7 +19,7 @@ use index::inverted_index::search::predicate::{Predicate, RegexMatchPredicate};
 use crate::error::Result;
 use crate::sst::index::inverted_index::applier::builder::InvertedIndexApplierBuilder;
 
-impl<'a> InvertedIndexApplierBuilder<'a> {
+impl InvertedIndexApplierBuilder<'_> {
     /// Collects a regex match expression in the form of `column ~ pattern`.
     pub(crate) fn collect_regex_match(&mut self, column: &DfExpr, pattern: &DfExpr) -> Result<()> {
         let Some(column_name) = Self::column_name(column) else {
@@ -153,7 +153,7 @@ impl<'a, R> InstrumentedAsyncRead<'a, R> {
     }
 }
 
-impl<'a, R: AsyncRead + Unpin + Send> AsyncRead for InstrumentedAsyncRead<'a, R> {
+impl<R: AsyncRead + Unpin + Send> AsyncRead for InstrumentedAsyncRead<'_, R> {
     fn poll_read(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
@@ -168,7 +168,7 @@ impl<'a, R: AsyncRead + Unpin + Send> AsyncRead for InstrumentedAsyncRead<'a, R>
     }
 }
 
-impl<'a, R: AsyncSeek + Unpin + Send> AsyncSeek for InstrumentedAsyncRead<'a, R> {
+impl<R: AsyncSeek + Unpin + Send> AsyncSeek for InstrumentedAsyncRead<'_, R> {
     fn poll_seek(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
@@ -209,7 +209,7 @@ impl<'a, W> InstrumentedAsyncWrite<'a, W> {
     }
 }
 
-impl<'a, W: AsyncWrite + Unpin + Send> AsyncWrite for InstrumentedAsyncWrite<'a, W> {
+impl<W: AsyncWrite + Unpin + Send> AsyncWrite for InstrumentedAsyncWrite<'_, W> {
     fn poll_write(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
@@ -254,7 +254,7 @@ impl<'a> CounterGuard<'a> {
     }
 }
 
-impl<'a> Drop for CounterGuard<'a> {
+impl Drop for CounterGuard<'_> {
     fn drop(&mut self) {
         if self.count > 0 {
             self.counter.inc_by(self.count as _);
@@ -35,7 +35,11 @@ pub(crate) struct MetadataLoader<'a> {
 
 impl<'a> MetadataLoader<'a> {
     /// Create a new parquet metadata loader.
-    pub fn new(object_store: ObjectStore, file_path: &'a str, file_size: u64) -> MetadataLoader {
+    pub fn new(
+        object_store: ObjectStore,
+        file_path: &'a str,
+        file_size: u64,
+    ) -> MetadataLoader<'a> {
         Self {
             object_store,
             file_path,
@@ -360,7 +360,7 @@ fn cache_uncompressed_pages(column: &ColumnChunkMetaData) -> bool {
     column.uncompressed_size() as usize <= DEFAULT_PAGE_SIZE
 }
 
-impl<'a> RowGroups for InMemoryRowGroup<'a> {
+impl RowGroups for InMemoryRowGroup<'_> {
     fn num_rows(&self) -> usize {
         self.row_count
     }
@@ -64,7 +64,7 @@ impl<'a, T> RowGroupPruningStats<'a, T> {
     }
 }
 
-impl<'a, T: Borrow<RowGroupMetaData>> PruningStatistics for RowGroupPruningStats<'a, T> {
+impl<T: Borrow<RowGroupMetaData>> PruningStatistics for RowGroupPruningStats<'_, T> {
     fn min_values(&self, column: &Column) -> Option<ArrayRef> {
         let column_id = self.column_id_to_prune(&column.name)?;
         self.read_format.min_values(self.row_groups, column_id)
@@ -153,7 +153,7 @@ static LOGGING_TARGET: &str = "opendal::services";
 
 struct LoggingContext<'a>(&'a [(&'a str, &'a str)]);
 
-impl<'a> Display for LoggingContext<'a> {
+impl Display for LoggingContext<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for (i, (k, v)) in self.0.iter().enumerate() {
             if i > 0 {
@@ -62,7 +62,8 @@ const TARGET_FIELDS_NAME: &str = "target_fields";
 // const ON_FAILURE_NAME: &str = "on_failure";
 // const TAG_NAME: &str = "tag";
 
-/// Processor trait defines the interface for all processors
+/// Processor trait defines the interface for all processors.
+///
 /// A processor is a transformation that can be applied to a field in a document
 /// It can be used to extract, transform, or enrich data
 /// Now Processor only have one input field. In the future, we may support multiple input fields.
@@ -254,7 +254,7 @@ impl TimestampProcessor {
 }
 
 fn parse_formats(yaml: &yaml_rust::yaml::Yaml) -> Result<Vec<(Arc<String>, Tz)>> {
-    return match yaml.as_vec() {
+    match yaml.as_vec() {
         Some(formats_yaml) => {
             let mut formats = Vec::with_capacity(formats_yaml.len());
             for v in formats_yaml {
@@ -286,7 +286,7 @@ fn parse_formats(yaml: &yaml_rust::yaml::Yaml) -> Result<Vec<(Arc<String>, Tz)>>
             s: format!("{yaml:?}"),
         }
         .fail(),
-    };
+    }
 }
 
 impl TryFrom<&yaml_rust::yaml::Hash> for TimestampProcessorBuilder {
@@ -446,12 +446,10 @@ fn coerce_string_value(s: &String, transform: &Transform) -> Result<Option<Value
             None => CoerceUnsupportedEpochTypeSnafu { ty: "String" }.fail(),
         },
 
-        Value::Array(_) | Value::Map(_) => {
-            return CoerceJsonTypeToSnafu {
-                ty: transform.type_.to_str_type(),
-            }
-            .fail()
-        }
+        Value::Array(_) | Value::Map(_) => CoerceJsonTypeToSnafu {
+            ty: transform.type_.to_str_type(),
+        }
+        .fail(),
 
         Value::Null => Ok(None),
     }
@@ -335,7 +335,7 @@ impl TryFrom<&yaml_rust::Yaml> for Value {
     }
 }
 
-impl<'a> From<&Value> for JsonbValue<'a> {
+impl From<&Value> for JsonbValue<'_> {
     fn from(value: &Value) -> Self {
         match value {
             Value::Null => JsonbValue::Null,
@@ -373,7 +373,7 @@ impl<'a> From<&Value> for JsonbValue<'a> {
     }
 }
 
-impl<'a> From<Value> for JsonbValue<'a> {
+impl From<Value> for JsonbValue<'_> {
     fn from(value: Value) -> Self {
         match value {
             Value::Null => JsonbValue::Null,
@@ -26,6 +26,7 @@ pub mod table;
 pub mod util;
 
 /// Pipeline version. An optional timestamp with nanosecond precision.
+///
 /// If the version is None, it means the latest version of the pipeline.
 /// User can specify the version by providing a timestamp string formatted as iso8601.
 /// When it used in cache key, it will be converted to i64 meaning the number of nanoseconds since the epoch.
@@ -44,7 +44,9 @@ use datatypes::vectors::MutableVector;
 use futures::{ready, Stream, StreamExt};
 
 /// `HistogramFold` will fold the conventional (non-native) histogram ([1]) for later
-/// computing. Specifically, it will transform the `le` and `field` column into a complex
+/// computing.
+///
+/// Specifically, it will transform the `le` and `field` column into a complex
 /// type, and samples on other tag columns:
 /// - `le` will become a [ListArray] of [f64]. With each bucket bound parsed
 /// - `field` will become a [ListArray] of [f64]
@@ -46,7 +46,8 @@ use crate::error::{ColumnNotFoundSnafu, DataFusionPlanningSnafu, DeserializeSnaf
 /// `ScalarCalculate` is the custom logical plan to calculate
 /// [`scalar`](https://prometheus.io/docs/prometheus/latest/querying/functions/#scalar)
 /// in PromQL, return NaN when have multiple time series.
-/// return the time series as scalar value when only have one time series.
+///
+/// Return the time series as scalar value when only have one time series.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ScalarCalculate {
     start: Millisecond,
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-#![feature(option_get_or_insert_default)]
 #![feature(let_chains)]
 #![feature(map_try_insert)]
@@ -40,6 +40,7 @@ use crate::error::{CatalogSnafu, TableNotFoundSnafu};
 use crate::region_query::RegionQueryHandlerRef;
 
 /// Planner for convert merge sort logical plan to physical plan
+///
 /// it is currently a fallback to sort, and doesn't change the execution plan:
 /// `MergeSort(MergeScan) -> Sort(MergeScan) - to physical plan -> ...`
 /// It should be applied after `DistExtensionPlanner`
@@ -14,7 +14,6 @@
 
 #![feature(let_chains)]
 #![feature(int_roundings)]
-#![feature(option_get_or_insert_default)]
 #![feature(trait_upcasting)]
 
 mod analyze;
@@ -62,7 +62,7 @@ pub struct RangeExprRewriter<'a> {
     query_ctx: &'a QueryContextRef,
 }
 
-impl<'a> RangeExprRewriter<'a> {
+impl RangeExprRewriter<'_> {
     pub fn get_range_expr(&self, args: &[Expr], i: usize) -> DFResult<Expr> {
         match args.get(i) {
             Some(Expr::Column(column)) => {
@@ -263,7 +263,7 @@ macro_rules! inconsistent_check {
     };
 }
 
-impl<'a> TreeNodeRewriter for RangeExprRewriter<'a> {
+impl TreeNodeRewriter for RangeExprRewriter<'_> {
     type Node = Expr;
 
     fn f_down(&mut self, node: Expr) -> DFResult<Transformed<Expr>> {
@@ -12,6 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+//! PyVectors' rustpython specify methods
+
 use arrow::compute::kernels::numeric;
 use common_time::date::Date;
 use common_time::datetime::DateTime;
@@ -40,7 +42,6 @@ use crate::python::ffi_types::vector::{
     arrow_rfloordiv, arrow_rsub, arrow_rtruediv, rspy_is_pyobj_scalar, wrap_result, PyVector,
 };
 use crate::python::rspython::utils::{is_instance, obj_cast_to};
-/// PyVectors' rustpython specify methods
 
 fn to_type_error(vm: &'_ VirtualMachine) -> impl FnOnce(String) -> PyBaseExceptionRef + '_ {
     |msg: String| vm.new_type_error(msg)
@@ -31,7 +31,9 @@ use crate::error::{InvalidExportMetricsConfigSnafu, Result, SendPromRemoteReques
 use crate::prom_store::{snappy_compress, to_grpc_row_insert_requests};
 use crate::query_handler::PromStoreProtocolHandlerRef;
 
-/// Use to export the metrics generated by greptimedb, encoded to Prometheus [RemoteWrite format](https://prometheus.io/docs/concepts/remote_write_spec/),
+/// Use to export the metrics generated by greptimedb.
+///
+/// Encoded to Prometheus [RemoteWrite format](https://prometheus.io/docs/concepts/remote_write_spec/),
 /// and send to Prometheus remote-write compatible receiver (e.g. send to `greptimedb` itself)
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(default)]
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 //! All query handler traits for various request protocols, like SQL or GRPC.
+//!
 //! Instance that wishes to support certain request protocol, just implement the corresponding
 //! trait, the Server will handle codec for you.
 //!
@@ -131,6 +132,7 @@ pub trait OpenTelemetryProtocolHandler: LogHandler {
 }
 
 /// LogHandler is responsible for handling log related requests.
+///
 /// It should be able to insert logs and manage pipelines.
 /// The pipeline is a series of transformations that can be applied to logs.
 /// The pipeline is stored in the database and can be retrieved by name.
@@ -37,9 +37,9 @@ pub struct ParserContext<'a> {
     pub(crate) sql: &'a str,
 }
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Construct a new ParserContext.
-    pub fn new(dialect: &'a dyn Dialect, sql: &'a str) -> Result<ParserContext<'a>> {
+    pub fn new<'a>(dialect: &'a dyn Dialect, sql: &'a str) -> Result<ParserContext<'a>> {
         let parser = Parser::new(dialect)
             .with_options(ParserOptions::new().with_trailing_commas(true))
             .try_with_sql(sql)
@@ -55,7 +55,7 @@ impl<'a> ParserContext<'a> {
 
     /// Parses SQL with given dialect
     pub fn create_with_dialect(
-        sql: &'a str,
+        sql: &str,
         dialect: &dyn Dialect,
         _opts: ParseOptions,
     ) -> Result<Vec<Statement>> {
@@ -87,7 +87,7 @@ impl<'a> ParserContext<'a> {
         Ok(stmts)
     }
 
-    pub fn parse_table_name(sql: &'a str, dialect: &dyn Dialect) -> Result<ObjectName> {
+    pub fn parse_table_name(sql: &str, dialect: &dyn Dialect) -> Result<ObjectName> {
         let parser = Parser::new(dialect)
             .with_options(ParserOptions::new().with_trailing_commas(true))
             .try_with_sql(sql)
@@ -106,7 +106,7 @@ impl<'a> ParserContext<'a> {
         Ok(Self::canonicalize_object_name(raw_table_name))
     }
 
-    pub fn parse_function(sql: &'a str, dialect: &dyn Dialect) -> Result<Expr> {
+    pub fn parse_function(sql: &str, dialect: &dyn Dialect) -> Result<Expr> {
         let mut parser = Parser::new(dialect)
             .with_options(ParserOptions::new().with_trailing_commas(true))
             .try_with_sql(sql)
@@ -191,23 +191,20 @@ impl<'a> ParserContext<'a> {
     }
 
     /// Parses MySQL style 'PREPARE stmt_name FROM stmt' into a (stmt_name, stmt) tuple.
-    pub fn parse_mysql_prepare_stmt(
-        sql: &'a str,
-        dialect: &dyn Dialect,
-    ) -> Result<(String, String)> {
+    pub fn parse_mysql_prepare_stmt(sql: &str, dialect: &dyn Dialect) -> Result<(String, String)> {
         ParserContext::new(dialect, sql)?.parse_mysql_prepare()
     }
 
     /// Parses MySQL style 'EXECUTE stmt_name USING param_list' into a stmt_name string and a list of parameters.
     pub fn parse_mysql_execute_stmt(
-        sql: &'a str,
+        sql: &str,
         dialect: &dyn Dialect,
     ) -> Result<(String, Vec<Expr>)> {
         ParserContext::new(dialect, sql)?.parse_mysql_execute()
     }
 
     /// Parses MySQL style 'DEALLOCATE stmt_name' into a stmt_name string.
-    pub fn parse_mysql_deallocate_stmt(sql: &'a str, dialect: &dyn Dialect) -> Result<String> {
+    pub fn parse_mysql_deallocate_stmt(sql: &str, dialect: &dyn Dialect) -> Result<String> {
         ParserContext::new(dialect, sql)?.parse_deallocate()
     }
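The `ParserContext` changes show the other half of the lifetime cleanup: associated functions taking `sql: &'a str` tied their argument to the impl-block lifetime for no benefit, forcing callers to keep the input alive as long as the type's `'a`. Functions that only read the string now take a plain `&str`, while `new`, which really stores the borrow, declares its own function-level `'a`. A sketch of the distinction on a hypothetical `Ctx`:

```rust
struct Ctx<'a> {
    sql: &'a str,
}

impl Ctx<'_> {
    // The borrow only lives for the call; no named lifetime needed.
    fn table_name(sql: &str) -> String {
        sql.trim().to_string()
    }

    // `new` does store the borrow, so it declares `'a` on the function
    // instead of forcing it onto the whole impl block.
    fn new<'a>(sql: &'a str) -> Ctx<'a> {
        Ctx { sql }
    }
}
```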
@@ -22,7 +22,7 @@ use crate::statements::statement::Statement;
 
 /// `admin` extension parser: `admin function(arg1, arg2, ...)`
 /// or `admin function`
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parse `admin function(arg1, arg2, ...)` or `admin function` statement
     pub(crate) fn parse_admin_command(&mut self) -> Result<Statement> {
         let _token = self.parser.next_token();
@@ -23,7 +23,7 @@ use crate::parser::ParserContext;
 use crate::statements::alter::{AlterTable, AlterTableOperation};
 use crate::statements::statement::Statement;
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_alter(&mut self) -> Result<Statement> {
         let alter_table = self.parse_alter_table().context(error::SyntaxSnafu)?;
         Ok(Statement::Alter(alter_table))
@@ -46,7 +46,7 @@ impl<'a> ParserContext<'a> {
         let location = if self.parser.parse_keyword(Keyword::FIRST) {
             Some(AddColumnLocation::First)
         } else if let Token::Word(word) = self.parser.peek_token().token {
-            if word.value.to_ascii_uppercase() == "AFTER" {
+            if word.value.eq_ignore_ascii_case("AFTER") {
                 let _ = self.parser.next_token();
                 let name = Self::canonicalize_identifier(self.parse_identifier()?);
                 Some(AddColumnLocation::After {
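The `word.value.to_ascii_uppercase() == "AFTER"` → `eq_ignore_ascii_case("AFTER")` change is a small allocation fix: `to_ascii_uppercase` builds a temporary `String` just to compare and drop it, while `eq_ignore_ascii_case` folds case byte-by-byte in place. Quick demonstration:

```rust
fn main() {
    let word = "After";
    // Allocates a temporary String, compares, then drops it.
    assert!(word.to_ascii_uppercase() == "AFTER");
    // Compares with ASCII case folding; no allocation.
    assert!(word.eq_ignore_ascii_case("AFTER"));
}
```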
@@ -28,7 +28,7 @@ pub type With = HashMap<String, String>;
 pub type Connection = HashMap<String, String>;
 
 // COPY tbl TO 'output.parquet';
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_copy(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         let next = self.parser.peek_token();
@@ -630,7 +630,7 @@ impl<'a> ParserContext<'a> {
         })
     }
 
-    fn parse_optional_column_option(parser: &mut Parser<'a>) -> Result<Option<ColumnOption>> {
+    fn parse_optional_column_option(parser: &mut Parser<'_>) -> Result<Option<ColumnOption>> {
         if parser.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
             Ok(Some(ColumnOption::CharacterSet(
                 parser.parse_object_name(false).context(SyntaxSnafu)?,
@@ -681,7 +681,7 @@ impl<'a> ParserContext<'a> {
     }
 
     fn parse_column_extensions(
-        parser: &mut Parser<'a>,
+        parser: &mut Parser<'_>,
         column_name: &Ident,
         column_type: &DataType,
         column_extensions: &mut ColumnExtensions,
@@ -18,7 +18,7 @@ use sqlparser::keywords::Keyword;
 use crate::error::{Result, SyntaxSnafu};
 use crate::parser::ParserContext;
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parses MySQL style 'PREPARE stmt_name' into a stmt_name string.
     pub(crate) fn parse_deallocate(&mut self) -> Result<String> {
         self.parser
@@ -21,7 +21,7 @@ use crate::statements::delete::Delete;
 use crate::statements::statement::Statement;
 
 /// DELETE statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_delete(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         let spstatement = self.parser.parse_delete().context(error::SyntaxSnafu)?;
@@ -21,7 +21,7 @@ use crate::statements::describe::DescribeTable;
 use crate::statements::statement::Statement;
 
 /// DESCRIBE statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_describe(&mut self) -> Result<Statement> {
         if self.matches_keyword(Keyword::TABLE) {
             let _ = self.parser.next_token();
@@ -22,7 +22,7 @@ use crate::statements::drop::{DropDatabase, DropFlow, DropTable, DropView};
 use crate::statements::statement::Statement;
 
 /// DROP statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_drop(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         match self.parser.peek_token().token {
@@ -20,7 +20,7 @@ use sqlparser::parser::Parser;
 use crate::error::{Result, SyntaxSnafu};
 use crate::parser::ParserContext;
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parses MySQL style 'EXECUTE stmt_name USING param_list' into a stmt_name string and a list of parameters.
     /// Only use for MySQL. for PostgreSQL, use `sqlparser::parser::Parser::parse_execute` instead.
     pub(crate) fn parse_mysql_execute(&mut self) -> Result<(String, Vec<Expr>)> {
@@ -21,7 +21,7 @@ use crate::statements::explain::Explain;
 use crate::statements::statement::Statement;
 
 /// EXPLAIN statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_explain(&mut self) -> Result<Statement> {
         let explain_statement = self
             .parser
@@ -21,7 +21,7 @@ use crate::statements::insert::Insert;
 use crate::statements::statement::Statement;
 
 /// INSERT statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_insert(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         let spstatement = self.parser.parse_insert().context(error::SyntaxSnafu)?;
@@ -19,7 +19,7 @@ use sqlparser::tokenizer::Token;
 use crate::error::{Result, SyntaxSnafu};
 use crate::parser::ParserContext;
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parses MySQL style 'PREPARE stmt_name FROM stmt' into a (stmt_name, stmt) tuple.
     /// Only use for MySQL. for PostgreSQL, use `sqlparser::parser::Parser::parse_prepare` instead.
     pub(crate) fn parse_mysql_prepare(&mut self) -> Result<(String, String)> {
@@ -19,7 +19,7 @@ use crate::parser::ParserContext;
 use crate::statements::query::Query;
 use crate::statements::statement::Statement;
 
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parses select and it's variants.
     pub(crate) fn parse_query(&mut self) -> Result<Statement> {
         let spquery = self.parser.parse_query().context(error::SyntaxSnafu)?;
@@ -22,7 +22,7 @@ use crate::statements::set_variables::SetVariables;
 use crate::statements::statement::Statement;
 
 /// SET variables statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_set_variables(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         let spstatement = self.parser.parse_set().context(error::SyntaxSnafu)?;
@@ -28,7 +28,7 @@ use crate::statements::show::{
 use crate::statements::statement::Statement;
 
 /// SHOW statement parser implementation
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     /// Parses SHOW statements
     /// todo(hl) support `show settings`/`show create`/`show users` etc.
     pub(crate) fn parse_show(&mut self) -> Result<Statement> {
@@ -39,7 +39,7 @@ use crate::parsers::error::{EvaluationSnafu, ParserSnafu, TQLError};
 /// - `TQL EVAL <query>`
 /// - `TQL EXPLAIN [VERBOSE] <query>`
 /// - `TQL ANALYZE [VERBOSE] <query>`
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_tql(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
@@ -21,7 +21,7 @@ use crate::statements::statement::Statement;
 use crate::statements::truncate::TruncateTable;
 
 /// `TRUNCATE [TABLE] table_name;`
-impl<'a> ParserContext<'a> {
+impl ParserContext<'_> {
     pub(crate) fn parse_truncate(&mut self) -> Result<Statement> {
         let _ = self.parser.next_token();
         let _ = self.parser.parse_keyword(Keyword::TABLE);
@@ -131,6 +131,7 @@ impl Display for TqlAnalyze {
 }
 
 /// Intermediate structure used to unify parameter mappings for various TQL operations.
+///
 /// This struct serves as a common parameter container for parsing TQL queries
 /// and constructing corresponding TQL operations: `TqlEval`, `TqlAnalyze` or `TqlExplain`.
 #[derive(Debug)]
@@ -25,7 +25,7 @@ pub fn format_raw_object_name(name: &ObjectName) -> String {
     name: &'a ObjectName,
 }
 
-impl<'a> Display for Inner<'a> {
+impl Display for Inner<'_> {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         let mut delim = "";
         for ident in self.name.0.iter() {
@@ -31,8 +31,9 @@ pub fn supported_protocol_version() -> (ProtocolVersion, ProtocolVersion) {
 }
 
 /// Protocol action that used to block older clients from reading or writing the log when backwards
-/// incompatible changes are made to the protocol. clients should be tolerant of messages and
-/// fields that they do not understand.
+/// incompatible changes are made to the protocol.
+///
+/// clients should be tolerant of messages and fields that they do not understand.
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct ProtocolAction {
     pub min_reader_version: ProtocolVersion,
@@ -13,9 +13,9 @@
 // limitations under the License.
 
 //! Path constants for table engines, cluster states and WAL
+//! All paths relative to data_home(file storage) or root path(S3, OSS etc).
 
 use crate::storage::{RegionId, RegionNumber, TableId};
-/// All paths relative to data_home(file storage) or root path(S3, OSS etc).
 
 /// WAL dir for local file storage
 pub const WAL_DIR: &str = "wal/";
@@ -45,7 +45,9 @@ pub fn region_dir(path: &str, region_id: RegionId) -> String {
     )
 }
 
-/// get_storage_path returns the storage path from the region_dir. It will always return the storage path if the region_dir is valid, otherwise None.
+/// get_storage_path returns the storage path from the region_dir.
+///
+/// It will always return the storage path if the region_dir is valid, otherwise None.
 /// The storage path is constructed from the catalog and schema, which are generated by `common_meta::ddl::utils::region_storage_path`.
 /// We can extract the catalog and schema from the region_dir by following example:
 /// ```
@@ -247,6 +247,7 @@ impl ScannerProperties {
 }
 
 /// A scanner that provides a way to scan the region concurrently.
+///
 /// The scanner splits the region into partitions so that each partition can be scanned concurrently.
 /// You can use this trait to implement an [`ExecutionPlan`](datafusion_physical_plan::ExecutionPlan).
 pub trait RegionScanner: Debug + DisplayAs + Send {
@@ -100,7 +100,8 @@ pub struct TableIdent {
     pub version: TableVersion,
 }
 
-/// The table metadata
+/// The table metadata.
+///
 /// Note: if you add new fields to this struct, please ensure 'new_meta_builder' function works.
 /// TODO(dennis): find a better way to ensure 'new_meta_builder' works when adding new fields.
 #[derive(Clone, Debug, Builder, PartialEq, Eq)]
@@ -92,7 +92,7 @@ macro_rules! impl_min_max_values {
     }};
 }
 
-impl<'a> PruningStatistics for RowGroupPruningStatistics<'a> {
+impl PruningStatistics for RowGroupPruningStatistics<'_> {
     fn min_values(&self, column: &Column) -> Option<ArrayRef> {
         impl_min_max_values!(self, column, min)
     }
@@ -14,7 +14,8 @@
 
 use datatypes::value::Value;
 
-/// Statistics for a relation
+/// Statistics for a relation.
+///
 /// Fields are optional and can be inexact because the sources
 /// sometimes provide approximate estimates for performance reasons
 /// and the transformations output are not always predictable.
@@ -44,7 +44,7 @@ impl<'a> TableReference<'a> {
     }
 }
 
-impl<'a> Display for TableReference<'a> {
+impl Display for TableReference<'_> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}.{}.{}", self.catalog, self.schema, self.table)
     }