chore: bump lance to 0.8.5 (#561)

Bump lance to 0.8.5
Lei Xu
2023-10-14 12:38:43 -07:00
committed by Weston Pace
parent 1c42894918
commit 7b5bfadab2
10 changed files with 29 additions and 26 deletions


@@ -5,23 +5,23 @@ exclude = ["python"]
resolver = "2"
[workspace.dependencies]
lance = { "version" = "=0.8.3", "features" = ["dynamodb"] }
lance-linalg = { "version" = "=0.8.3" }
lance-testing = { "version" = "=0.8.3" }
lance = { "version" = "=0.8.5", "features" = ["dynamodb"] }
lance-linalg = { "version" = "=0.8.5" }
lance-testing = { "version" = "=0.8.5" }
# Note that this one does not include pyarrow
-arrow = { version = "43.0.0", optional = false }
-arrow-array = "43.0"
-arrow-data = "43.0"
-arrow-ipc = "43.0"
-arrow-ord = "43.0"
-arrow-schema = "43.0"
-arrow-arith = "43.0"
-arrow-cast = "43.0"
+arrow = { version = "47.0.0", optional = false }
+arrow-array = "47.0"
+arrow-data = "47.0"
+arrow-ipc = "47.0"
+arrow-ord = "47.0"
+arrow-schema = "47.0"
+arrow-arith = "47.0"
+arrow-cast = "47.0"
chrono = "0.4.23"
half = { "version" = "=2.2.1", default-features = false, features = [
"num-traits"
] }
log = "0.4"
object_store = "0.6.1"
object_store = "0.7.1"
snafu = "0.7.4"
url = "2"

python/LICENSE Symbolic link

@@ -0,0 +1 @@
+../LICENSE


@@ -3,7 +3,7 @@ name = "lancedb"
version = "0.3.1"
dependencies = [
"deprecation",
"pylance==0.8.3",
"pylance==0.8.5",
"ratelimiter~=1.0",
"retry>=0.9.2",
"tqdm>=4.1.0",


@@ -458,7 +458,8 @@ def test_compact_cleanup(db):
stats = table.compact_files()
assert len(table) == 3
-assert table.version == 4
+# compact_files bumps 2 versions.
+assert table.version == 5
assert stats.fragments_removed > 0
assert stats.fragments_added == 1
@@ -467,7 +468,7 @@ def test_compact_cleanup(db):
stats = table.cleanup_old_versions(older_than=timedelta(0), delete_unverified=True)
assert stats.bytes_removed > 0
-assert table.version == 4
+assert table.version == 5
with pytest.raises(Exception, match="Version 3 no longer exists"):
table.checkout(3)


@@ -74,7 +74,7 @@ fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
static LOG: OnceCell<()> = OnceCell::new();
-LOG.get_or_init(|| env_logger::init());
+LOG.get_or_init(env_logger::init);
RUNTIME.get_or_try_init(|| Runtime::new().or_throw(cx))
}
@@ -148,7 +148,7 @@ fn get_aws_creds(
match (secret_key_id, secret_key, temp_token) {
(Some(key_id), Some(key), optional_token) => Ok(Some(Arc::new(
StaticCredentialProvider::new(AwsCredential {
-key_id: key_id,
+key_id,
secret_key: key,
token: optional_token,
}),
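Both hunks in this file are clippy-style cleanups with no behavior change: a closure that only forwards to `env_logger::init` is replaced by the function item itself, and `key_id: key_id` collapses under struct field-init shorthand. Below is a minimal, dependency-free sketch of the same two patterns; `Credential`, `init_logging`, and std's `OnceLock` are stand-ins for the real `AwsCredential`, `env_logger::init`, and `once_cell::sync::OnceCell`.

use std::sync::OnceLock;

struct Credential {
    key_id: String,
    secret_key: String,
}

static LOG: OnceLock<()> = OnceLock::new();

// Stand-in for env_logger::init().
fn init_logging() {}

fn build(key_id: String, secret_key: String) -> Credential {
    // Field-init shorthand: `key_id: key_id` collapses to `key_id`.
    Credential { key_id, secret_key }
}

fn main() {
    // `|| init_logging()` only forwards the call, so the function item can be passed directly.
    LOG.get_or_init(init_logging);
    let cred = build("id".into(), "secret".into());
    assert_eq!(cred.key_id, "id");
    assert_eq!(cred.secret_key, "secret");
}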


@@ -70,7 +70,7 @@ impl JsTable {
store_params: Some(ObjectStoreParams::with_aws_credentials(
aws_creds, aws_region,
)),
-mode: mode,
+mode,
..WriteParams::default()
};
@@ -121,7 +121,7 @@ impl JsTable {
let add_result = table.add(batch_reader, Some(params)).await;
deferred.settle_with(&channel, move |mut cx| {
-let _added = add_result.or_throw(&mut cx)?;
+add_result.or_throw(&mut cx)?;
Ok(cx.boxed(JsTable::from(table)))
});
});


@@ -18,9 +18,9 @@ use arrow::compute::kernels::{aggregate::bool_and, length::length};
use arrow_array::{
cast::AsArray,
types::{ArrowPrimitiveType, Int32Type, Int64Type},
-Array, GenericListArray, OffsetSizeTrait, RecordBatchReader,
+Array, GenericListArray, OffsetSizeTrait, PrimitiveArray, RecordBatchReader,
};
-use arrow_ord::comparison::eq_dyn_scalar;
+use arrow_ord::cmp::eq;
use arrow_schema::DataType;
use num_traits::{ToPrimitive, Zero};
@@ -38,7 +38,8 @@ where
}
let dim = len_arr.as_primitive::<T>().value(0);
-if bool_and(&eq_dyn_scalar(len_arr.as_primitive::<T>(), dim)?) != Some(true) {
+let datum = PrimitiveArray::<T>::new_scalar(dim);
+if bool_and(&eq(len_arr.as_primitive::<T>(), &datum)?) != Some(true) {
Ok(None)
} else {
Ok(Some(dim))
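This file carries the only substantive change forced by the arrow 43 → 47 bump: `arrow_ord::comparison::eq_dyn_scalar` is gone, and the replacement is the `Datum`-based `arrow_ord::cmp::eq`, with the scalar side wrapped via `PrimitiveArray::new_scalar`. A standalone sketch of the same migration, assuming the arrow 47 crates and monomorphized to `Int32Type` for brevity:

use arrow::compute::kernels::aggregate::bool_and;
use arrow_array::{types::Int32Type, Int32Array, PrimitiveArray};
use arrow_ord::cmp::eq;
use arrow_schema::ArrowError;

// Returns true only if every value in `lengths` equals `dim`.
fn all_equal_to(lengths: &Int32Array, dim: i32) -> Result<bool, ArrowError> {
    // The scalar operand is now a Datum built with `new_scalar`, not a bare value.
    let datum = PrimitiveArray::<Int32Type>::new_scalar(dim);
    Ok(bool_and(&eq(lengths, &datum)?) == Some(true))
}

fn main() -> Result<(), ArrowError> {
    assert!(all_equal_to(&Int32Array::from(vec![3, 3, 3]), 3)?);
    assert!(!all_equal_to(&Int32Array::from(vec![3, 4]), 3)?);
    Ok(())
}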


@@ -135,7 +135,7 @@ impl Database {
async fn open_path(path: &str) -> Result<Database> {
let (object_store, base_path) = ObjectStore::from_uri(path).await?;
if object_store.is_local() {
-Self::try_create_dir(path).context(CreateDirSnafu { path: path })?;
+Self::try_create_dir(path).context(CreateDirSnafu { path })?;
}
Ok(Self {
uri: path.to_string(),


@@ -95,8 +95,8 @@ impl VectorIndexBuilder for IvfPQIndexBuilder {
}
fn build(&self) -> VectorIndexParams {
-let ivf_params = self.ivf_params.clone().unwrap_or(IvfBuildParams::default());
-let pq_params = self.pq_params.clone().unwrap_or(PQBuildParams::default());
+let ivf_params = self.ivf_params.clone().unwrap_or_default();
+let pq_params = self.pq_params.clone().unwrap_or_default();
VectorIndexParams::with_ivf_pq_params(pq_params.metric_type, ivf_params, pq_params)
}
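This is another tidy-up rather than a functional change: `unwrap_or(IvfBuildParams::default())` builds the default eagerly even when the option is `Some`, whereas `unwrap_or_default()` only builds it when needed. A tiny sketch, with `BuildParams` as a hypothetical stand-in for lance's `IvfBuildParams`/`PQBuildParams`:

// `BuildParams` is a hypothetical stand-in for the real build-parameter structs.
#[derive(Clone, Debug, Default, PartialEq)]
struct BuildParams {
    num_partitions: usize,
}

fn main() {
    let explicit: Option<BuildParams> = None;
    // `unwrap_or(BuildParams::default())` would construct the default even for Some(_);
    // `unwrap_or_default()` defers that work and reads more cleanly.
    let params = explicit.unwrap_or_default();
    assert_eq!(params, BuildParams::default());
}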


@@ -339,7 +339,7 @@ impl Table {
/// This calls into [lance::dataset::optimize::compact_files].
pub async fn compact_files(&mut self, options: CompactionOptions) -> Result<CompactionMetrics> {
let mut dataset = self.dataset.as_ref().clone();
-let metrics = compact_files(&mut dataset, options).await?;
+let metrics = compact_files(&mut dataset, options, None).await?;
self.dataset = Arc::new(dataset);
Ok(metrics)
}
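The call site gains a trailing `None` because `compact_files` accepts an additional optional argument in the newer lance API; passing `None` simply leaves it unset. The surrounding clone-mutate-swap on the `Arc`-held dataset is unchanged; the sketch below shows that pattern in isolation, with `Dataset` and `Table` reduced to toy stand-ins.

use std::sync::Arc;

// Toy stand-ins for the real lance Dataset and lancedb Table.
#[derive(Clone)]
struct Dataset {
    version: u64,
}

struct Table {
    dataset: Arc<Dataset>,
}

impl Table {
    fn compact(&mut self) {
        // Clone the value out of the shared Arc, mutate the private copy...
        let mut dataset = self.dataset.as_ref().clone();
        dataset.version += 1;
        // ...then publish it by swapping a fresh Arc back in.
        self.dataset = Arc::new(dataset);
    }
}

fn main() {
    let mut table = Table { dataset: Arc::new(Dataset { version: 1 }) };
    table.compact();
    assert_eq!(table.dataset.version, 2);
}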