feat: extract MemTable to ease testing (#133)

* feat: memtable backed by DataFusion to ease testing

* move test utility code out of the src folder

* Implement our own MemTable, because DataFusion's MemTable does not support `limit` (see the sketch below the commit metadata), and replace the original testing numbers table.

* fix: address PR comments

* fix: "testutil" -> "test-util"

* roll back "NumbersTable"

Co-authored-by: luofucong <luofucong@greptime.com>
Author: LFC
Date: 2022-08-05 13:58:05 +08:00 (committed by GitHub)
Parent: 97be052b33
Commit: e833167ad6
14 changed files with 354 additions and 149 deletions
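The second commit bullet carries the core rationale: at the time, DataFusion's MemTable ignored the scan's row limit, so every in-memory batch was materialized even when a query needed only a few rows. Below is a minimal, self-contained sketch of limit-aware scanning; the types and names are stand-ins for illustration, not this repository's actual API.

```rust
// Hypothetical, simplified memtable: batches of rows held in memory, with a
// scan that honors an optional row limit. Real code would hold Arrow
// RecordBatches behind a TableProvider; plain Vec<i32> stands in here.
struct MemTable {
    batches: Vec<Vec<i32>>,
}

impl MemTable {
    /// Scan the table, truncating output once `limit` rows have been produced.
    fn scan(&self, limit: Option<usize>) -> Vec<Vec<i32>> {
        let mut remaining = limit.unwrap_or(usize::MAX);
        let mut out = Vec::new();
        for batch in &self.batches {
            if remaining == 0 {
                break;
            }
            let take = remaining.min(batch.len());
            out.push(batch[..take].to_vec());
            remaining -= take;
        }
        out
    }
}

fn main() {
    let table = MemTable {
        batches: vec![vec![1, 2, 3], vec![4, 5, 6]],
    };
    // Only four rows leave the scan; the remaining rows are never produced.
    assert_eq!(table.scan(Some(4)), vec![vec![1, 2, 3], vec![4]]);
}
```

Honoring the limit inside the scan means downstream operators never see the discarded rows, which keeps test fixtures small and deterministic.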


@@ -36,14 +36,27 @@ pub enum InnerError {
         source: ArrowError,
         backtrace: Backtrace,
     },
+
+    #[snafu(display("Failed to convert Arrow schema, source: {}", source))]
+    SchemaConversion {
+        source: datatypes::error::Error,
+        backtrace: Backtrace,
+    },
+
+    #[snafu(display("Table projection error, source: {}", source))]
+    TableProjection {
+        source: ArrowError,
+        backtrace: Backtrace,
+    },
 }
 
 impl ErrorExt for InnerError {
     fn status_code(&self) -> StatusCode {
         match self {
-            InnerError::Datafusion { .. } | InnerError::PollStream { .. } => {
-                StatusCode::EngineExecuteQuery
-            }
+            InnerError::Datafusion { .. }
+            | InnerError::PollStream { .. }
+            | InnerError::SchemaConversion { .. }
+            | InnerError::TableProjection { .. } => StatusCode::EngineExecuteQuery,
             InnerError::MissingColumn { .. } => StatusCode::InvalidArguments,
             InnerError::ExecuteRepeatedly { .. } => StatusCode::Unexpected,
         }
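For readers unfamiliar with snafu, the new variants follow its context-selector pattern: a fallible call is wrapped with `.context(...Snafu)`, which builds the matching variant and captures the backtrace implicitly. A self-contained sketch of that shape, using a stand-in source error type rather than the project's real `datatypes::error::Error`:

```rust
use snafu::{prelude::*, Backtrace};

// Stand-in error enum mirroring the shape of the new variants above.
#[derive(Debug, Snafu)]
enum SketchError {
    #[snafu(display("Failed to convert Arrow schema, source: {}", source))]
    SchemaConversion {
        source: std::num::ParseIntError, // stand-in for datatypes::error::Error
        backtrace: Backtrace,
    },
}

// `.context(SchemaConversionSnafu)` converts the underlying error into the
// SchemaConversion variant, capturing a backtrace along the way.
fn convert(s: &str) -> Result<i32, SketchError> {
    s.parse::<i32>().context(SchemaConversionSnafu)
}

fn main() {
    assert!(convert("not-a-number").is_err());
}
```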


@@ -25,8 +25,8 @@ use datafusion::physical_plan::{
 };
 use datafusion_common::record_batch::RecordBatch as DfRecordBatch;
 use datatypes::arrow::error::{ArrowError, Result as ArrowResult};
-use datatypes::schema::SchemaRef as TableSchemaRef;
 use datatypes::schema::SchemaRef;
+use datatypes::schema::{Schema, SchemaRef as TableSchemaRef};
 use futures::Stream;
 use snafu::prelude::*;
@@ -215,10 +215,7 @@ impl Table for TableAdapter {
             .await
             .context(error::DatafusionSnafu)?;
-        Ok(Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
-            df_stream,
-        )))
+        Ok(Box::pin(RecordBatchStreamAdapter::try_new(df_stream)?))
     }
 
     fn supports_filter_pushdown(&self, filter: &Expr) -> Result<FilterPushDownType> {
@@ -278,8 +275,10 @@ pub struct RecordBatchStreamAdapter {
 }
 
 impl RecordBatchStreamAdapter {
-    pub fn new(schema: SchemaRef, stream: DfSendableRecordBatchStream) -> Self {
-        Self { schema, stream }
+    pub fn try_new(stream: DfSendableRecordBatchStream) -> Result<Self> {
+        let schema =
+            Arc::new(Schema::try_from(stream.schema()).context(error::SchemaConversionSnafu)?);
+        Ok(Self { schema, stream })
     }
 }
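The switch from `new` to `try_new` removes a mismatch hazard: the adapter's schema is now derived from the stream itself rather than supplied separately by the caller, so the two can never disagree. A minimal sketch of the same fallible-constructor shape, with stand-in types in place of `SchemaRef` and `DfSendableRecordBatchStream`:

```rust
use std::sync::Arc;

// Stand-ins: the real code converts an Arrow schema into the table schema.
#[derive(Debug, PartialEq)]
struct Schema(Vec<String>);

struct BatchStream {
    column_names: Vec<String>,
}

struct Adapter {
    schema: Arc<Schema>,
    stream: BatchStream,
}

impl Adapter {
    // Mirrors try_new above: derive the schema from the stream and fail
    // loudly if the conversion is impossible, instead of trusting a
    // caller-provided schema that might not match the stream.
    fn try_new(stream: BatchStream) -> Result<Self, String> {
        if stream.column_names.is_empty() {
            return Err("cannot convert a schema with no columns".to_string());
        }
        let schema = Arc::new(Schema(stream.column_names.clone()));
        Ok(Self { schema, stream })
    }
}

fn main() {
    let stream = BatchStream { column_names: vec!["n".to_string()] };
    let adapter = Adapter::try_new(stream).expect("conversion should succeed");
    assert_eq!(*adapter.schema, Schema(vec!["n".to_string()]));
}
```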