mirror of https://github.com/lancedb/lancedb.git, synced 2026-01-16 08:42:57 +00:00
Cargo.toml (24 lines changed)
@@ -5,23 +5,23 @@ exclude = ["python"]
 resolver = "2"
 
 [workspace.dependencies]
-lance = { "version" = "=0.8.3", "features" = ["dynamodb"] }
-lance-linalg = { "version" = "=0.8.3" }
-lance-testing = { "version" = "=0.8.3" }
+lance = { "version" = "=0.8.5", "features" = ["dynamodb"] }
+lance-linalg = { "version" = "=0.8.5" }
+lance-testing = { "version" = "=0.8.5" }
 # Note that this one does not include pyarrow
-arrow = { version = "43.0.0", optional = false }
-arrow-array = "43.0"
-arrow-data = "43.0"
-arrow-ipc = "43.0"
-arrow-ord = "43.0"
-arrow-schema = "43.0"
-arrow-arith = "43.0"
-arrow-cast = "43.0"
+arrow = { version = "47.0.0", optional = false }
+arrow-array = "47.0"
+arrow-data = "47.0"
+arrow-ipc = "47.0"
+arrow-ord = "47.0"
+arrow-schema = "47.0"
+arrow-arith = "47.0"
+arrow-cast = "47.0"
 chrono = "0.4.23"
 half = { "version" = "=2.2.1", default-features = false, features = [
     "num-traits"
 ] }
 log = "0.4"
-object_store = "0.6.1"
+object_store = "0.7.1"
 snafu = "0.7.4"
 url = "2"
python/LICENSE (new symbolic link, 1 line)
@@ -0,0 +1 @@
+../LICENSE
@@ -3,7 +3,7 @@ name = "lancedb"
 version = "0.3.1"
 dependencies = [
     "deprecation",
-    "pylance==0.8.3",
+    "pylance==0.8.5",
     "ratelimiter~=1.0",
     "retry>=0.9.2",
     "tqdm>=4.1.0",
@@ -458,7 +458,8 @@ def test_compact_cleanup(db):
 
     stats = table.compact_files()
     assert len(table) == 3
-    assert table.version == 4
+    # Compact_files bump 2 versions.
+    assert table.version == 5
     assert stats.fragments_removed > 0
     assert stats.fragments_added == 1
 
@@ -467,7 +468,7 @@ def test_compact_cleanup(db):
 
     stats = table.cleanup_old_versions(older_than=timedelta(0), delete_unverified=True)
     assert stats.bytes_removed > 0
-    assert table.version == 4
+    assert table.version == 5
 
     with pytest.raises(Exception, match="Version 3 no longer exists"):
         table.checkout(3)
@@ -74,7 +74,7 @@ fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
     static RUNTIME: OnceCell<Runtime> = OnceCell::new();
     static LOG: OnceCell<()> = OnceCell::new();
 
-    LOG.get_or_init(|| env_logger::init());
+    LOG.get_or_init(env_logger::init);
 
     RUNTIME.get_or_try_init(|| Runtime::new().or_throw(cx))
 }
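The change above is purely stylistic: env_logger::init already has the fn() signature that get_or_init expects, so the closure wrapper is redundant. Below is a minimal standalone sketch of the same "initialize the logger exactly once" idiom, assuming the once_cell, env_logger, and log crates; the crate choices mirror the hunk, but the surrounding function is illustrative only.

    use once_cell::sync::OnceCell;

    // OnceCell<()> works as a run-once guard: the function passed to
    // get_or_init executes only on the first call.
    static LOG: OnceCell<()> = OnceCell::new();

    fn ensure_logging() {
        // env_logger::init returns (), so the fn item can be passed directly
        // instead of wrapping it in `|| env_logger::init()`.
        LOG.get_or_init(env_logger::init);
    }

    fn main() {
        ensure_logging();
        ensure_logging(); // no-op: env_logger::init would panic if it ran twice
        log::info!("logger installed exactly once");
    }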
@@ -148,7 +148,7 @@ fn get_aws_creds(
     match (secret_key_id, secret_key, temp_token) {
         (Some(key_id), Some(key), optional_token) => Ok(Some(Arc::new(
             StaticCredentialProvider::new(AwsCredential {
-                key_id: key_id,
+                key_id,
                 secret_key: key,
                 token: optional_token,
             }),
@@ -70,7 +70,7 @@ impl JsTable {
             store_params: Some(ObjectStoreParams::with_aws_credentials(
                 aws_creds, aws_region,
             )),
-            mode: mode,
+            mode,
             ..WriteParams::default()
         };
 
@@ -121,7 +121,7 @@ impl JsTable {
             let add_result = table.add(batch_reader, Some(params)).await;
 
             deferred.settle_with(&channel, move |mut cx| {
-                let _added = add_result.or_throw(&mut cx)?;
+                add_result.or_throw(&mut cx)?;
                 Ok(cx.boxed(JsTable::from(table)))
             });
         });
@@ -18,9 +18,9 @@ use arrow::compute::kernels::{aggregate::bool_and, length::length};
 use arrow_array::{
     cast::AsArray,
     types::{ArrowPrimitiveType, Int32Type, Int64Type},
-    Array, GenericListArray, OffsetSizeTrait, RecordBatchReader,
+    Array, GenericListArray, OffsetSizeTrait, PrimitiveArray, RecordBatchReader,
 };
-use arrow_ord::comparison::eq_dyn_scalar;
+use arrow_ord::cmp::eq;
 use arrow_schema::DataType;
 use num_traits::{ToPrimitive, Zero};
 
@@ -38,7 +38,8 @@ where
     }
 
     let dim = len_arr.as_primitive::<T>().value(0);
-    if bool_and(&eq_dyn_scalar(len_arr.as_primitive::<T>(), dim)?) != Some(true) {
+    let datum = PrimitiveArray::<T>::new_scalar(dim);
+    if bool_and(&eq(len_arr.as_primitive::<T>(), &datum)?) != Some(true) {
         Ok(None)
     } else {
         Ok(Some(dim))
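These two hunks track the arrow 47 comparison API: the typed eq_dyn_scalar kernels were removed in favor of arrow_ord::cmp::eq, which compares two Datum values, so the scalar dimension is first wrapped with PrimitiveArray::<T>::new_scalar. A small self-contained sketch of the same migration on an Int32Array follows; the values are made up for illustration, and only the kernel calls come from the hunk.

    use arrow::compute::kernels::aggregate::bool_and;
    use arrow_array::Int32Array;
    use arrow_ord::cmp::eq;

    fn main() {
        // Per-row list lengths; the question is whether every row has length 3.
        let lengths = Int32Array::from(vec![3, 3, 3]);

        // arrow 47 style: wrap the scalar in a Datum and use the generic eq kernel,
        // replacing the removed eq_dyn_scalar(&lengths, 3).
        let datum = Int32Array::new_scalar(3);
        let mask = eq(&lengths, &datum).expect("types are comparable");

        assert_eq!(bool_and(&mask), Some(true));
    }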
@@ -135,7 +135,7 @@ impl Database {
     async fn open_path(path: &str) -> Result<Database> {
         let (object_store, base_path) = ObjectStore::from_uri(path).await?;
         if object_store.is_local() {
-            Self::try_create_dir(path).context(CreateDirSnafu { path: path })?;
+            Self::try_create_dir(path).context(CreateDirSnafu { path })?;
         }
         Ok(Self {
             uri: path.to_string(),
@@ -95,8 +95,8 @@ impl VectorIndexBuilder for IvfPQIndexBuilder {
     }
 
     fn build(&self) -> VectorIndexParams {
-        let ivf_params = self.ivf_params.clone().unwrap_or(IvfBuildParams::default());
-        let pq_params = self.pq_params.clone().unwrap_or(PQBuildParams::default());
+        let ivf_params = self.ivf_params.clone().unwrap_or_default();
+        let pq_params = self.pq_params.clone().unwrap_or_default();
 
         VectorIndexParams::with_ivf_pq_params(pq_params.metric_type, ivf_params, pq_params)
     }
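Besides being shorter, Option::unwrap_or_default only constructs the default when the value is None, whereas unwrap_or(IvfBuildParams::default()) builds a default eagerly on every call even when it is discarded. A tiny illustration with a made-up Params struct (not a lancedb type):

    #[derive(Clone, Debug, Default, PartialEq)]
    struct Params {
        nprobes: usize,
    }

    fn main() {
        let explicit: Option<Params> = Some(Params { nprobes: 20 });
        let missing: Option<Params> = None;

        // The default is produced lazily, only for the None case.
        assert_eq!(explicit.unwrap_or_default(), Params { nprobes: 20 });
        assert_eq!(missing.unwrap_or_default(), Params { nprobes: 0 });
    }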
@@ -339,7 +339,7 @@ impl Table {
     /// This calls into [lance::dataset::optimize::compact_files].
     pub async fn compact_files(&mut self, options: CompactionOptions) -> Result<CompactionMetrics> {
         let mut dataset = self.dataset.as_ref().clone();
-        let metrics = compact_files(&mut dataset, options).await?;
+        let metrics = compact_files(&mut dataset, options, None).await?;
         self.dataset = Arc::new(dataset);
         Ok(metrics)
     }
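The extra None tracks a new optional third parameter that lance's compact_files gained between 0.8.3 and 0.8.5; passing None keeps the call's previous behavior. The wrapper itself keeps its clone-mutate-swap shape: the Arc'd dataset is cloned, compacted, and then published back behind a fresh Arc. The sketch below reproduces that pattern with stand-in types, since the real lance API is not needed to show it; Dataset, Metrics, and compact here are placeholders, not lance or lancedb items.

    use std::sync::Arc;

    // Stand-ins for lance's Dataset / CompactionMetrics, only to show the pattern.
    #[derive(Clone)]
    struct Dataset {
        version: u64,
    }

    struct Metrics {
        fragments_removed: usize,
    }

    // Pretend compaction: mutates the dataset copy and reports what it did.
    fn compact(dataset: &mut Dataset) -> Metrics {
        dataset.version += 1;
        Metrics { fragments_removed: 2 }
    }

    struct Table {
        dataset: Arc<Dataset>,
    }

    impl Table {
        fn compact_files(&mut self) -> Metrics {
            // Clone the shared dataset, mutate the copy, then swap it back in
            // behind a new Arc, mirroring the wrapper in the hunk above.
            let mut dataset = self.dataset.as_ref().clone();
            let metrics = compact(&mut dataset);
            self.dataset = Arc::new(dataset);
            metrics
        }
    }

    fn main() {
        let mut table = Table {
            dataset: Arc::new(Dataset { version: 1 }),
        };
        let metrics = table.compact_files();
        assert_eq!(table.dataset.version, 2);
        assert!(metrics.fragments_removed > 0);
    }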