Compare commits

..

3 Commits

Author SHA1 Message Date
shuiyisong
acbaa4c599 fix: load with latest
Signed-off-by: shuiyisong <xixing.sys@gmail.com>
2025-12-26 19:12:08 +08:00
shuiyisong
8cc43fd848 fix: load with latest
Signed-off-by: shuiyisong <xixing.sys@gmail.com>
2025-12-23 20:18:36 +08:00
shuiyisong
4b3e60c1d3 fix: load with latest
Signed-off-by: shuiyisong <xixing.sys@gmail.com>
2025-12-23 20:06:23 +08:00
6 changed files with 93 additions and 40 deletions

View File

@@ -115,6 +115,7 @@ impl PipelineHandler for Instance {
             .get_pipeline_str(name, version, query_ctx)
             .await
             .context(PipelineSnafu)
+            .map(|(p, _)| p)
}
}

View File

@@ -646,10 +646,14 @@ pub enum Error {
},
#[snafu(display(
-        "Multiple pipelines with different schemas found, but none under current schema. Please replicate one of them or delete until only one schema left. schemas: {}",
-        schemas
+        "Multiple pipelines with different schemas found, but none under current schema. Please replicate one of them or delete until only one schema left. name: {}, current_schema: {}, schemas: {}",
+        name,
+        current_schema,
+        schemas,
))]
MultiPipelineWithDiffSchema {
name: String,
current_schema: String,
schemas: String,
#[snafu(implicit)]
location: Location,

View File

@@ -15,6 +15,7 @@
 use std::sync::Arc;
 use std::time::Duration;
+use common_telemetry::debug;
 use datatypes::timestamp::TimestampNanosecond;
 use moka::sync::Cache;
@@ -45,12 +46,17 @@ impl PipelineCache {
             pipelines: Cache::builder()
                 .max_capacity(PIPELINES_CACHE_SIZE)
                 .time_to_live(PIPELINES_CACHE_TTL)
+                .name("pipelines")
                 .build(),
             original_pipelines: Cache::builder()
                 .max_capacity(PIPELINES_CACHE_SIZE)
                 .time_to_live(PIPELINES_CACHE_TTL)
+                .name("original_pipelines")
                 .build(),
-            failover_cache: Cache::builder().max_capacity(PIPELINES_CACHE_SIZE).build(),
+            failover_cache: Cache::builder()
+                .max_capacity(PIPELINES_CACHE_SIZE)
+                .name("failover_cache")
+                .build(),
}
}
@@ -103,7 +109,7 @@ impl PipelineCache {
schema: &str,
name: &str,
version: PipelineVersion,
-    ) -> Result<Option<Arc<Pipeline>>> {
+    ) -> Result<Option<(Arc<Pipeline>, String)>> {
get_cache_generic(&self.pipelines, schema, name, version)
}
@@ -112,7 +118,7 @@ impl PipelineCache {
schema: &str,
name: &str,
version: PipelineVersion,
-    ) -> Result<Option<(String, TimestampNanosecond)>> {
+    ) -> Result<Option<((String, TimestampNanosecond), String)>> {
get_cache_generic(&self.failover_cache, schema, name, version)
}
@@ -121,7 +127,7 @@ impl PipelineCache {
schema: &str,
name: &str,
version: PipelineVersion,
-    ) -> Result<Option<(String, TimestampNanosecond)>> {
+    ) -> Result<Option<((String, TimestampNanosecond), String)>> {
get_cache_generic(&self.original_pipelines, schema, name, version)
}
@@ -172,16 +178,16 @@ fn get_cache_generic<T: Clone + Send + Sync + 'static>(
schema: &str,
name: &str,
version: PipelineVersion,
-) -> Result<Option<T>> {
+) -> Result<Option<(T, String)>> {
// lets try empty schema first
-    let k = generate_pipeline_cache_key(EMPTY_SCHEMA_NAME, name, version);
-    if let Some(value) = cache.get(&k) {
-        return Ok(Some(value));
+    let emp_key = generate_pipeline_cache_key(EMPTY_SCHEMA_NAME, name, version);
+    if let Some(value) = cache.get(&emp_key) {
+        return Ok(Some((value, EMPTY_SCHEMA_NAME.to_string())));
}
// use input schema
-    let k = generate_pipeline_cache_key(schema, name, version);
-    if let Some(value) = cache.get(&k) {
-        return Ok(Some(value));
+    let schema_k = generate_pipeline_cache_key(schema, name, version);
+    if let Some(value) = cache.get(&schema_k) {
+        return Ok(Some((value, schema.to_string())));
}
// try all schemas
@@ -193,14 +199,32 @@ fn get_cache_generic<T: Clone + Send + Sync + 'static>(
     match ks.len() {
         0 => Ok(None),
-        1 => Ok(Some(ks.remove(0).1)),
-        _ => MultiPipelineWithDiffSchemaSnafu {
-            schemas: ks
-                .iter()
-                .filter_map(|(k, _)| k.split_once('/').map(|k| k.0))
-                .collect::<Vec<_>>()
-                .join(","),
-        }
-        .fail()?,
+        1 => {
+            let (key, value) = ks.remove(0);
+            if let Some((key_schema, _)) = key.split_once("/") {
+                Ok(Some((value, key_schema.to_string())))
+            } else {
+                Ok(Some((value, EMPTY_SCHEMA_NAME.to_string())))
+            }
+        }
+        _ => {
+            debug!(
+                "caches keys: {:?}, emp key: {:?}, schema key: {:?}, suffix key: {:?}",
+                cache.iter().map(|e| e.0).collect::<Vec<_>>(),
+                emp_key,
+                schema_k,
+                suffix_key
+            );
+            MultiPipelineWithDiffSchemaSnafu {
+                name: name.to_string(),
+                current_schema: schema.to_string(),
+                schemas: ks
+                    .iter()
+                    .filter_map(|(k, _)| k.split_once('/').map(|k| k.0))
+                    .collect::<Vec<_>>()
+                    .join(","),
+            }
+            .fail()?
+        }
     }
}

View File

@@ -205,7 +205,7 @@ impl PipelineOperator {
name: &str,
version: PipelineVersion,
query_ctx: QueryContextRef,
-    ) -> Result<(String, TimestampNanosecond)> {
+    ) -> Result<((String, TimestampNanosecond), String)> {
let schema = query_ctx.current_schema();
self.create_pipeline_table_if_not_exists(query_ctx.clone())
.await?;

View File

@@ -261,14 +261,19 @@ impl PipelineTable {
version: PipelineVersion,
) -> Result<Arc<Pipeline>> {
if let Some(pipeline) = self.cache.get_pipeline_cache(schema, name, version)? {
-            return Ok(pipeline);
+            return Ok(pipeline.0);
}
-        let pipeline = self.get_pipeline_str(schema, name, version).await?;
+        let (pipeline, found_schema) = self.get_pipeline_str(schema, name, version).await?;
         let compiled_pipeline = Arc::new(Self::compile_pipeline(&pipeline.0)?);
-        self.cache
-            .insert_pipeline_cache(schema, name, version, compiled_pipeline.clone(), false);
+        self.cache.insert_pipeline_cache(
+            &found_schema,
+            name,
+            version,
+            compiled_pipeline.clone(),
+            version.is_none(),
+        );
Ok(compiled_pipeline)
}
@@ -278,14 +283,17 @@ impl PipelineTable {
&self,
schema: &str,
name: &str,
-        version: PipelineVersion,
-    ) -> Result<(String, TimestampNanosecond)> {
-        if let Some(pipeline) = self.cache.get_pipeline_str_cache(schema, name, version)? {
+        input_version: PipelineVersion,
+    ) -> Result<((String, TimestampNanosecond), String)> {
+        if let Some(pipeline) = self
+            .cache
+            .get_pipeline_str_cache(schema, name, input_version)?
+        {
return Ok(pipeline);
}
let mut pipeline_vec;
-        match self.find_pipeline(name, version).await {
+        match self.find_pipeline(name, input_version).await {
Ok(p) => {
METRIC_PIPELINE_TABLE_FIND_COUNT
.with_label_values(&["true"])
@@ -302,8 +310,14 @@ impl PipelineTable {
.inc();
return self
.cache
-                    .get_failover_cache(schema, name, version)?
-                    .ok_or(PipelineNotFoundSnafu { name, version }.build());
+                    .get_failover_cache(schema, name, input_version)?
+                    .ok_or(
+                        PipelineNotFoundSnafu {
+                            name,
+                            version: input_version,
+                        }
+                        .build(),
+                    );
}
_ => {
// if other error, we should return it
@@ -314,7 +328,10 @@ impl PipelineTable {
};
ensure!(
!pipeline_vec.is_empty(),
-            PipelineNotFoundSnafu { name, version }
+            PipelineNotFoundSnafu {
+                name,
+                version: input_version
+            }
);
// if the result is exact one, use it
@@ -326,9 +343,9 @@ impl PipelineTable {
name,
Some(version),
p.clone(),
-                false,
+                input_version.is_none(),
             );
-            return Ok(p);
+            return Ok((p, found_schema));
}
// check if there's empty schema pipeline
@@ -342,14 +359,22 @@ impl PipelineTable {
// throw an error
         let (pipeline_content, found_schema, version) =
             pipeline.context(MultiPipelineWithDiffSchemaSnafu {
+                name: name.to_string(),
+                current_schema: schema.to_string(),
                 schemas: pipeline_vec.iter().map(|v| v.1.clone()).join(","),
             })?;
         let v = *version;
         let p = (pipeline_content.clone(), v);
-        self.cache
-            .insert_pipeline_str_cache(found_schema, name, Some(v), p.clone(), false);
-        Ok(p)
+        self.cache.insert_pipeline_str_cache(
+            found_schema,
+            name,
+            Some(v),
+            p.clone(),
+            input_version.is_none(),
+        );
+        Ok((p, found_schema.clone()))
}
/// Insert a pipeline into the pipeline table and compile it.

View File

@@ -1,2 +1 @@
v0.11.9
v0.10.1