Compare commits


1 Commit

Author: Lance Release
SHA1: 6fd61579de
Message: Bump version: 0.23.0-beta.2 → 0.23.0
Date: 2025-12-16 16:58:00 +00:00
46 changed files with 351 additions and 1810 deletions

View File

@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.23.1"
+current_version = "0.23.0"
 parse = """(?x)
     (?P<major>0|[1-9]\\d*)\\.
     (?P<minor>0|[1-9]\\d*)\\.

Cargo.lock (generated; 199 changed lines)
View File

@@ -1041,61 +1041,6 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "axum"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
dependencies = [
"async-trait",
"axum-core",
"bytes",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http-body-util",
"hyper 1.7.0",
"hyper-util",
"itoa",
"matchit",
"memchr",
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tower",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "axum-core"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
dependencies = [
"async-trait",
"bytes",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http-body-util",
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper",
"tower-layer",
"tower-service",
"tracing",
]
[[package]] [[package]]
name = "backoff" name = "backoff"
version = "0.4.0" version = "0.4.0"
@@ -3141,9 +3086,8 @@ checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
 [[package]]
 name = "fsst"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ffdff7a2d68d22afc0657eddde3e946371ce7cfe730a3f78a5ed44ea5b1cb2e"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-array",
  "rand 0.9.2",
@@ -3986,7 +3930,6 @@ dependencies = [
"http 1.3.1", "http 1.3.1",
"http-body 1.0.1", "http-body 1.0.1",
"httparse", "httparse",
"httpdate",
"itoa", "itoa",
"pin-project-lite", "pin-project-lite",
"pin-utils", "pin-utils",
@@ -4262,7 +4205,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown 0.16.0", "hashbrown 0.15.5",
"serde", "serde",
"serde_core", "serde_core",
] ]
@@ -4479,9 +4422,8 @@ dependencies = [
 [[package]]
 name = "lance"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8c439decbc304e180748e34bb6d3df729069a222e83e74e2185c38f107136e9"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-arith",
@@ -4546,9 +4488,8 @@ dependencies = [
 [[package]]
 name = "lance-arrow"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4ee5508b225456d3d56998eaeef0d8fbce5ea93856df47b12a94d2e74153210"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -4566,9 +4507,8 @@ dependencies = [
 [[package]]
 name = "lance-bitpacking"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1c065fb3bd4a8cc4f78428443e990d4921aa08f707b676753db740e0b402a21"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrayref",
  "paste",
@@ -4577,9 +4517,8 @@ dependencies = [
 [[package]]
 name = "lance-core"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8856abad92e624b75cd57a04703f6441948a239463bdf973f2ac1924b0bcdbe"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -4615,9 +4554,8 @@ dependencies = [
 [[package]]
 name = "lance-datafusion"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c8835308044cef5467d7751be87fcbefc2db01c22370726a8704bd62991693f"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4647,9 +4585,8 @@ dependencies = [
 [[package]]
 name = "lance-datagen"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "612de1e888bb36f6bf51196a6eb9574587fdf256b1759a4c50e643e00d5f96d0"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4666,9 +4603,8 @@ dependencies = [
 [[package]]
 name = "lance-encoding"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b456b29b135d3c7192602e516ccade38b5483986e121895fa43cf1fdb38bf60"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-arith",
  "arrow-array",
@@ -4705,9 +4641,8 @@ dependencies = [
 [[package]]
 name = "lance-file"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab1538d14d5bb3735b4222b3f5aff83cfa59cc6ef7cdd3dd9139e4c77193c80b"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-arith",
  "arrow-array",
@@ -4739,9 +4674,8 @@ dependencies = [
 [[package]]
 name = "lance-geo"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5a69a2f3b55703d9c240ad7c5ffa2c755db69e9cf8aa05efe274a212910472d"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "datafusion",
  "geo-types",
@@ -4752,9 +4686,8 @@ dependencies = [
 [[package]]
 name = "lance-index"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ea84613df6fa6b9168a1f056ba4f9cb73b90a1b452814c6fd4b3529bcdbfc78"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-arith",
@@ -4815,9 +4748,8 @@ dependencies = [
 [[package]]
 name = "lance-io"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b3fc4c1d941fceef40a0edbd664dbef108acfc5d559bb9e7f588d0c733cbc35"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-arith",
@@ -4857,9 +4789,8 @@ dependencies = [
 [[package]]
 name = "lance-linalg"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b62ffbc5ce367fbf700a69de3fe0612ee1a11191a64a632888610b6bacfa0f63"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -4875,9 +4806,8 @@ dependencies = [
 [[package]]
 name = "lance-namespace"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "791bbcd868ee758123a34e07d320a1fb99379432b5ecc0e78d6b4686e999b629"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "async-trait",
@@ -4889,15 +4819,13 @@ dependencies = [
 [[package]]
 name = "lance-namespace-impls"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee713505576f6b1988a491f77c7ca8b0cf7090a393598e63c85079fa70a53ebf"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-ipc",
  "arrow-schema",
  "async-trait",
- "axum",
  "bytes",
  "futures",
  "lance",
@@ -4909,12 +4837,9 @@ dependencies = [
"object_store", "object_store",
"rand 0.9.2", "rand 0.9.2",
"reqwest", "reqwest",
"serde",
"serde_json", "serde_json",
"snafu", "snafu",
"tokio", "tokio",
"tower",
"tower-http 0.5.2",
"url", "url",
] ]
@@ -4933,9 +4858,8 @@ dependencies = [
 [[package]]
 name = "lance-table"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6fdb2d56bfa4d1511c765fa0cc00fdaa37e5d2d1cd2f57b3c6355d9072177052"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4974,9 +4898,8 @@ dependencies = [
 [[package]]
 name = "lance-testing"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8ccb1a4a9284435c6a8c02c8c06e7e041bece0d7f722152159353cf55dc51e3"
+version = "1.0.0"
+source = "git+https://github.com/lance-format/lance.git?tag=v1.0.0#a0979691926f57afd5d3ac90bf6e5bb11188c0c3"
 dependencies = [
  "arrow-array",
  "arrow-schema",
@@ -4987,7 +4910,7 @@ dependencies = [
 [[package]]
 name = "lancedb"
-version = "0.23.1"
+version = "0.23.0-beta.1"
 dependencies = [
  "ahash",
  "anyhow",
@@ -5066,7 +4989,7 @@ dependencies = [
 [[package]]
 name = "lancedb-nodejs"
-version = "0.23.1"
+version = "0.23.0-beta.1"
 dependencies = [
  "arrow-array",
  "arrow-ipc",
@@ -5086,7 +5009,7 @@ dependencies = [
 [[package]]
 name = "lancedb-python"
-version = "0.26.1"
+version = "0.26.0-beta.1"
 dependencies = [
  "arrow",
  "async-trait",
@@ -5354,12 +5277,6 @@ dependencies = [
"regex-automata", "regex-automata",
] ]
[[package]]
name = "matchit"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]] [[package]]
name = "matrixmultiply" name = "matrixmultiply"
version = "0.3.10" version = "0.3.10"
@@ -6742,8 +6659,8 @@ version = "0.13.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.4.1",
"itertools 0.14.0", "itertools 0.12.1",
"log", "log",
"multimap", "multimap",
"once_cell", "once_cell",
@@ -6763,7 +6680,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"itertools 0.14.0", "itertools 0.12.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.106", "syn 2.0.106",
@@ -7348,7 +7265,7 @@ dependencies = [
"tokio-rustls 0.26.4", "tokio-rustls 0.26.4",
"tokio-util", "tokio-util",
"tower", "tower",
"tower-http 0.6.6", "tower-http",
"tower-service", "tower-service",
"url", "url",
"wasm-bindgen", "wasm-bindgen",
@@ -7867,17 +7784,6 @@ dependencies = [
"serde_core", "serde_core",
] ]
[[package]]
name = "serde_path_to_error"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
dependencies = [
"itoa",
"serde",
"serde_core",
]
[[package]] [[package]]
name = "serde_plain" name = "serde_plain"
version = "1.0.2" version = "1.0.2"
@@ -8093,7 +7999,7 @@ version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451"
dependencies = [ dependencies = [
"heck 0.5.0", "heck 0.4.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.106", "syn 2.0.106",
@@ -8913,24 +8819,6 @@ dependencies = [
"tokio", "tokio",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
"tracing",
]
[[package]]
name = "tower-http"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
dependencies = [
"bitflags 2.9.4",
"bytes",
"http 1.3.1",
"http-body 1.0.1",
"http-body-util",
"pin-project-lite",
"tower-layer",
"tower-service",
"tracing",
] ]
[[package]] [[package]]
@@ -8969,7 +8857,6 @@ version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [ dependencies = [
"log",
"pin-project-lite", "pin-project-lite",
"tracing-attributes", "tracing-attributes",
"tracing-core", "tracing-core",

View File

@@ -15,20 +15,20 @@ categories = ["database-implementations"]
 rust-version = "1.78.0"
 
 [workspace.dependencies]
-lance = { "version" = "=1.0.1", default-features = false }
-lance-core = "=1.0.1"
-lance-datagen = "=1.0.1"
-lance-file = "=1.0.1"
-lance-io = { "version" = "=1.0.1", default-features = false }
-lance-index = "=1.0.1"
-lance-linalg = "=1.0.1"
-lance-namespace = "=1.0.1"
-lance-namespace-impls = { "version" = "=1.0.1", default-features = false }
-lance-table = "=1.0.1"
-lance-testing = "=1.0.1"
-lance-datafusion = "=1.0.1"
-lance-encoding = "=1.0.1"
-lance-arrow = "=1.0.1"
+lance = { "version" = "=1.0.0", default-features = false, "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-core = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-datagen = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-file = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-io = { "version" = "=1.0.0", default-features = false, "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-index = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-linalg = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-namespace = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-namespace-impls = { "version" = "=1.0.0", default-features = false, "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-table = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-testing = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-datafusion = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-encoding = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
+lance-arrow = { "version" = "=1.0.0", "tag" = "v1.0.0", "git" = "https://github.com/lance-format/lance.git" }
 ahash = "0.8"
 # Note that this one does not include pyarrow
 arrow = { version = "56.2", optional = false }

View File

@@ -16,7 +16,7 @@ check_command_exists() {
 }
 
 if [[ ! -e ./lancedb ]]; then
-  if [[ x${SOPHON_READ_TOKEN} != "x" ]]; then
+  if [[ -v SOPHON_READ_TOKEN ]]; then
     INPUT="lancedb-linux-x64"
     gh release \
       --repo lancedb/lancedb \

View File

@@ -11,7 +11,7 @@ watch:
 theme:
   name: "material"
   logo: assets/logo.png
-  favicon: assets/favicon.ico
+  favicon: assets/logo.png
   palette:
     # Palette toggle for light mode
     - scheme: lancedb
@@ -32,6 +32,8 @@ theme:
     - content.tooltips
     - toc.follow
     - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
     - navigation.footer
     - navigation.tracking
     - navigation.instant
@@ -113,13 +115,12 @@ markdown_extensions:
       emoji_index: !!python/name:material.extensions.emoji.twemoji
       emoji_generator: !!python/name:material.extensions.emoji.to_svg
   - markdown.extensions.toc:
-      toc_depth: 3
-      permalink: true
-      permalink_title: Anchor link to this section
+      baselevel: 1
+      permalink: ""
 
 nav:
-  - Documentation:
-      - SDK Reference: index.md
+  - API reference:
+      - Overview: index.md
       - Python: python/python.md
       - Javascript/TypeScript: js/globals.md
       - Java: java/java.md

Binary file not shown (before: 15 KiB image).

View File

@@ -1,111 +0,0 @@
# VoyageAI Embeddings: Multimodal
VoyageAI embeddings can also embed both text and image data. Only some of the models support image input; the supported list is available at [https://docs.voyageai.com/docs/multimodal-embeddings](https://docs.voyageai.com/docs/multimodal-embeddings).
Supported multimodal models:
- `voyage-multimodal-3` - 1024 dimensions (text + images)
- `voyage-multimodal-3.5` - Flexible dimensions (256, 512, 1024 default, 2048). Supports text, images, and video.
### Video Support (voyage-multimodal-3.5)
The `voyage-multimodal-3.5` model supports video input through:
- Video URLs (`.mp4`, `.webm`, `.mov`, `.avi`, `.mkv`, `.m4v`, `.gif`)
- Video file paths
Constraints: Max 20MB video size.
Supported parameters (to be passed in `create` method) are:
| Parameter | Type | Default Value | Description |
|---|---|-------------------------|-------------------------------------------|
| `name` | `str` | `"voyage-multimodal-3"` | The model ID of the VoyageAI model to use |
| `output_dimension` | `int` | `None` | Output dimension for voyage-multimodal-3.5. Valid: 256, 512, 1024, 2048 |
Usage Example:
```python
import base64
import os
from io import BytesIO

import requests

import lancedb
from lancedb.pydantic import LanceModel, Vector
from lancedb.embeddings import get_registry
import pandas as pd

os.environ['VOYAGE_API_KEY'] = 'YOUR_VOYAGE_API_KEY'

db = lancedb.connect(".lancedb")
func = get_registry().get("voyageai").create(name="voyage-multimodal-3")


def image_to_base64(image_bytes: bytes):
    buffered = BytesIO(image_bytes)
    img_str = base64.b64encode(buffered.getvalue())
    return img_str.decode("utf-8")


class Images(LanceModel):
    label: str
    image_uri: str = func.SourceField()  # image uri as the source
    image_bytes: str = func.SourceField()  # image bytes base64 encoded as the source
    vector: Vector(func.ndims()) = func.VectorField()  # vector column
    vec_from_bytes: Vector(func.ndims()) = func.VectorField()  # Another vector column


if "images" in db.table_names():
    db.drop_table("images")
table = db.create_table("images", schema=Images)

labels = ["cat", "cat", "dog", "dog", "horse", "horse"]
uris = [
    "http://farm1.staticflickr.com/53/167798175_7c7845bbbd_z.jpg",
    "http://farm1.staticflickr.com/134/332220238_da527d8140_z.jpg",
    "http://farm9.staticflickr.com/8387/8602747737_2e5c2a45d4_z.jpg",
    "http://farm5.staticflickr.com/4092/5017326486_1f46057f5f_z.jpg",
    "http://farm9.staticflickr.com/8216/8434969557_d37882c42d_z.jpg",
    "http://farm6.staticflickr.com/5142/5835678453_4f3a4edb45_z.jpg",
]

# get each uri as bytes
images_bytes = [image_to_base64(requests.get(uri).content) for uri in uris]

table.add(
    pd.DataFrame({"label": labels, "image_uri": uris, "image_bytes": images_bytes})
)
```
Now we can run text searches against both the default vector column and the custom vector column:
```python
# text search against the default vector column
actual = (
    table.search("man's best friend", vector_column_name="vector")
    .limit(1)
    .to_pydantic(Images)[0]
)
print(actual.label)  # prints "dog"

# text search against the custom vector column
frombytes = (
    table.search("man's best friend", vector_column_name="vec_from_bytes")
    .limit(1)
    .to_pydantic(Images)[0]
)
print(frombytes.label)
```
Because we're using a multimodal embedding function, we can also search using images:
```python
from PIL import Image

# image search against the default vector column
query_image_uri = "http://farm1.staticflickr.com/200/467715466_ed4a31801f_z.jpg"
image_bytes = requests.get(query_image_uri).content
query_image = Image.open(BytesIO(image_bytes))
actual = (
    table.search(query_image, vector_column_name="vector")
    .limit(1)
    .to_pydantic(Images)[0]
)
print(actual.label == "dog")

# image search using a custom vector column
other = (
    table.search(query_image, vector_column_name="vec_from_bytes")
    .limit(1)
    .to_pydantic(Images)[0]
)
print(other.label)
```

View File

@@ -1,12 +1,8 @@
-# SDK Reference
+# API Reference
 
-This site contains the API reference for the client SDKs supported by [LanceDB](https://lancedb.com).
+This page contains the API reference for the SDKs supported by the LanceDB team.
 
 - [Python](python/python.md)
 - [JavaScript/TypeScript](js/globals.md)
 - [Java](java/java.md)
 - [Rust](https://docs.rs/lancedb/latest/lancedb/index.html)
-
-!!! info "LanceDB Documentation"
-
-    If you're looking for the full documentation of LanceDB, visit [docs.lancedb.com](https://docs.lancedb.com).

View File

@@ -14,7 +14,7 @@ Add the following dependency to your `pom.xml`:
 <dependency>
     <groupId>com.lancedb</groupId>
     <artifactId>lancedb-core</artifactId>
-    <version>0.23.1</version>
+    <version>0.23.0</version>
 </dependency>
 ```

View File

@@ -85,26 +85,17 @@
 /* Header gradient (only header area) */
 .md-header {
-  background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%);
+  background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
   box-shadow: inset 0 1px 0 rgba(255,255,255,0.08), 0 1px 0 rgba(0,0,0,0.08);
 }
 
-/* Improve brand title contrast on the lavender side */
-.md-header__title,
-.md-header__topic,
-.md-header__title .md-ellipsis,
-.md-header__topic .md-ellipsis {
-  color: #2b1b3a;
-  text-shadow: 0 1px 0 rgba(255, 255, 255, 0.25);
-}
-
 /* Same colors as header for tabs (that hold the text) */
 .md-tabs {
-  background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%);
+  background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
 }
 
 /* Dark scheme variant */
 [data-md-color-scheme="slate"] .md-header,
 [data-md-color-scheme="slate"] .md-tabs {
-  background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%);
+  background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
 }

View File

@@ -8,7 +8,7 @@
 <parent>
     <groupId>com.lancedb</groupId>
     <artifactId>lancedb-parent</artifactId>
-    <version>0.23.1-final.0</version>
+    <version>0.23.0-final.0</version>
     <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <groupId>com.lancedb</groupId>
 <artifactId>lancedb-parent</artifactId>
-<version>0.23.1-final.0</version>
+<version>0.23.0-final.0</version>
 <packaging>pom</packaging>
 <name>${project.artifactId}</name>
 <description>LanceDB Java SDK Parent POM</description>

View File

@@ -1,7 +1,7 @@
 [package]
 name = "lancedb-nodejs"
 edition.workspace = true
-version = "0.23.1"
+version = "0.23.0"
 license.workspace = true
 description.workspace = true
 repository.workspace = true

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-darwin-arm64",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["darwin"],
   "cpu": ["arm64"],
   "main": "lancedb.darwin-arm64.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-darwin-x64",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["darwin"],
   "cpu": ["x64"],
   "main": "lancedb.darwin-x64.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-arm64-gnu",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["linux"],
   "cpu": ["arm64"],
   "main": "lancedb.linux-arm64-gnu.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-arm64-musl",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["linux"],
   "cpu": ["arm64"],
   "main": "lancedb.linux-arm64-musl.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-x64-gnu",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["linux"],
   "cpu": ["x64"],
   "main": "lancedb.linux-x64-gnu.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-x64-musl",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["linux"],
   "cpu": ["x64"],
   "main": "lancedb.linux-x64-musl.node",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-win32-arm64-msvc",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": [
     "win32"
   ],

View File

@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-win32-x64-msvc",
-  "version": "0.23.1",
+  "version": "0.23.0",
   "os": ["win32"],
   "cpu": ["x64"],
   "main": "lancedb.win32-x64-msvc.node",

View File

@@ -1,12 +1,12 @@
 {
   "name": "@lancedb/lancedb",
-  "version": "0.23.1",
+  "version": "0.23.0-beta.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@lancedb/lancedb",
-      "version": "0.23.1",
+      "version": "0.23.0-beta.1",
       "cpu": [
         "x64",
         "arm64"

View File

@@ -11,7 +11,7 @@
"ann" "ann"
], ],
"private": false, "private": false,
"version": "0.23.1", "version": "0.23.0",
"main": "dist/index.js", "main": "dist/index.js",
"exports": { "exports": {
".": "./dist/index.js", ".": "./dist/index.js",

View File

@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.26.1"
+current_version = "0.26.0"
 parse = """(?x)
     (?P<major>0|[1-9]\\d*)\\.
     (?P<minor>0|[1-9]\\d*)\\.

View File

@@ -1,6 +1,6 @@
 [package]
 name = "lancedb-python"
-version = "0.26.1"
+version = "0.26.0"
 edition.workspace = true
 description = "Python bindings for LanceDB"
 license.workspace = true

View File

@@ -13,7 +13,6 @@ __version__ = importlib.metadata.version("lancedb")
 from ._lancedb import connect as lancedb_connect
 from .common import URI, sanitize_uri
-from urllib.parse import urlparse
 from .db import AsyncConnection, DBConnection, LanceDBConnection
 from .io import StorageOptionsProvider
 from .remote import ClientConfig
@@ -29,39 +28,6 @@ from .namespace import (
 )
 
 
-def _check_s3_bucket_with_dots(
-    uri: str, storage_options: Optional[Dict[str, str]]
-) -> None:
-    """
-    Check if an S3 URI has a bucket name containing dots and raise if no region
-    is specified. S3 buckets with dots cannot use virtual-hosted-style URLs,
-    which breaks automatic region detection.
-
-    See: https://github.com/lancedb/lancedb/issues/1898
-    """
-    if not isinstance(uri, str) or not uri.startswith("s3://"):
-        return
-
-    parsed = urlparse(uri)
-    bucket = parsed.netloc
-    if "." not in bucket:
-        return
-
-    # Check if region is provided in storage_options
-    region_keys = {"region", "aws_region"}
-    has_region = storage_options and any(k in storage_options for k in region_keys)
-    if not has_region:
-        raise ValueError(
-            f"S3 bucket name '{bucket}' contains dots, which prevents automatic "
-            f"region detection. Please specify the region explicitly via "
-            f"storage_options={{'region': '<your-region>'}} or "
-            f"storage_options={{'aws_region': '<your-region>'}}. "
-            f"See https://github.com/lancedb/lancedb/issues/1898 for details."
-        )
-
-
 def connect(
     uri: URI,
     *,
@@ -155,11 +121,9 @@ def connect(
             storage_options=storage_options,
             **kwargs,
         )
-
-    _check_s3_bucket_with_dots(str(uri), storage_options)
 
     if kwargs:
         raise ValueError(f"Unknown keyword arguments: {kwargs}")
 
     return LanceDBConnection(
         uri,
         read_consistency_interval=read_consistency_interval,
@@ -247,8 +211,6 @@ async def connect_async(
     if isinstance(client_config, dict):
         client_config = ClientConfig(**client_config)
 
-    _check_s3_bucket_with_dots(str(uri), storage_options)
-
     return AsyncConnection(
         await lancedb_connect(
             sanitize_uri(uri),
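
This hunk removes the early validation for lancedb/lancedb#1898, but the workaround the deleted error message described still applies when connecting to such buckets. A minimal sketch (bucket name and region are illustrative):

```python
import lancedb

# S3 bucket names containing dots cannot use virtual-hosted-style URLs,
# which breaks automatic region detection; pass the region explicitly.
db = lancedb.connect(
    "s3://my.bucket.name/path",
    storage_options={"region": "us-east-1"},  # the removed check also accepted "aws_region"
)
```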

View File

@@ -210,8 +210,10 @@ class DBConnection(EnforceOverrides):
         page_token: str, optional
             The token to use for pagination. If not present, start from the beginning.
             Typically, this token is the last table name from the previous page.
+            Only supported by LanceDB Cloud.
         limit: int, default 10
             The size of the page to return.
+            Only supported by LanceDB Cloud.
 
         Returns
         -------
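
Taken together, the two annotated parameters describe a cursor-style API. A sketch of the paging loop this docstring implies, assuming an open connection `db` backed by LanceDB Cloud:

```python
# Page through all table names, 10 per request (Cloud-only parameters).
names, token = [], None
while True:
    page = db.table_names(page_token=token, limit=10)
    if not page:
        break
    names.extend(page)
    token = page[-1]  # per the docstring: typically the last name of the previous page
```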

View File

@@ -2,7 +2,7 @@
 # SPDX-FileCopyrightText: Copyright The LanceDB Authors
 
 import base64
 import os
-from typing import ClassVar, TYPE_CHECKING, List, Union, Any, Generator, Optional
+from typing import ClassVar, TYPE_CHECKING, List, Union, Any, Generator
 from pathlib import Path
 from urllib.parse import urlparse
@@ -45,28 +45,10 @@ def is_valid_url(text):
         return False
 
 
-VIDEO_EXTENSIONS = {".mp4", ".webm", ".mov", ".avi", ".mkv", ".m4v", ".gif"}
-
-
-def is_video_url(url: str) -> bool:
-    """Check if URL points to a video file based on extension."""
-    parsed = urlparse(url)
-    path = parsed.path.lower()
-    return any(path.endswith(ext) for ext in VIDEO_EXTENSIONS)
-
-
-def is_video_path(path: Path) -> bool:
-    """Check if file path is a video file based on extension."""
-    return path.suffix.lower() in VIDEO_EXTENSIONS
-
-
 def transform_input(input_data: Union[str, bytes, Path]):
     PIL = attempt_import_or_raise("PIL", "pillow")
     if isinstance(input_data, str):
         if is_valid_url(input_data):
-            if is_video_url(input_data):
-                content = {"type": "video_url", "video_url": input_data}
-            else:
-                content = {"type": "image_url", "image_url": input_data}
+            content = {"type": "image_url", "image_url": input_data}
         else:
             content = {"type": "text", "text": input_data}
@@ -88,16 +70,6 @@ def transform_input(input_data: Union[str, bytes, Path]):
"image_base64": "data:image/jpeg;base64," + img_str, "image_base64": "data:image/jpeg;base64," + img_str,
} }
elif isinstance(input_data, Path): elif isinstance(input_data, Path):
if is_video_path(input_data):
# Read video file and encode as base64
with open(input_data, "rb") as f:
video_bytes = f.read()
video_str = base64.b64encode(video_bytes).decode("utf-8")
content = {
"type": "video_base64",
"video_base64": video_str,
}
else:
img = PIL.Image.open(input_data) img = PIL.Image.open(input_data)
buffered = BytesIO() buffered = BytesIO()
img.save(buffered, format="JPEG") img.save(buffered, format="JPEG")
@@ -119,8 +91,6 @@ def sanitize_multimodal_input(inputs: Union[TEXT, IMAGES]) -> List[Any]:
     PIL = attempt_import_or_raise("PIL", "pillow")
     if isinstance(inputs, (str, bytes, Path, PIL.Image.Image)):
         inputs = [inputs]
-    elif isinstance(inputs, list):
-        pass  # Already a list, use as-is
     elif isinstance(inputs, pa.Array):
         inputs = inputs.to_pylist()
     elif isinstance(inputs, pa.ChunkedArray):
@@ -173,16 +143,11 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
         * voyage-3
         * voyage-3-lite
         * voyage-multimodal-3
-        * voyage-multimodal-3.5
         * voyage-finance-2
         * voyage-multilingual-2
         * voyage-law-2
         * voyage-code-2
-    output_dimension: int, optional
-        The output dimension for models that support flexible dimensions.
-        Currently only voyage-multimodal-3.5 supports this feature.
-        Valid options: 256, 512, 1024 (default), 2048.
 
     Examples
     --------
@@ -210,10 +175,7 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
""" """
name: str name: str
output_dimension: Optional[int] = None
client: ClassVar = None client: ClassVar = None
_FLEXIBLE_DIM_MODELS: ClassVar[list] = ["voyage-multimodal-3.5"]
_VALID_DIMENSIONS: ClassVar[list] = [256, 512, 1024, 2048]
text_embedding_models: list = [ text_embedding_models: list = [
"voyage-3.5", "voyage-3.5",
"voyage-3.5-lite", "voyage-3.5-lite",
@@ -224,7 +186,7 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
"voyage-law-2", "voyage-law-2",
"voyage-code-2", "voyage-code-2",
] ]
multimodal_embedding_models: list = ["voyage-multimodal-3", "voyage-multimodal-3.5"] multimodal_embedding_models: list = ["voyage-multimodal-3"]
contextual_embedding_models: list = ["voyage-context-3"] contextual_embedding_models: list = ["voyage-context-3"]
def _is_multimodal_model(self, model_name: str): def _is_multimodal_model(self, model_name: str):
@@ -236,17 +198,6 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
         return model_name in self.contextual_embedding_models or "context" in model_name
 
     def ndims(self):
-        # Handle flexible dimension models
-        if self.name in self._FLEXIBLE_DIM_MODELS:
-            if self.output_dimension is not None:
-                if self.output_dimension not in self._VALID_DIMENSIONS:
-                    raise ValueError(
-                        f"Invalid output_dimension {self.output_dimension} "
-                        f"for {self.name}. Valid options: {self._VALID_DIMENSIONS}"
-                    )
-                return self.output_dimension
-            return 1024  # default dimension
-
         if self.name == "voyage-3-lite":
             return 512
         elif self.name == "voyage-code-2":
@@ -260,17 +211,12 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
"voyage-finance-2", "voyage-finance-2",
"voyage-multilingual-2", "voyage-multilingual-2",
"voyage-law-2", "voyage-law-2",
"voyage-multimodal-3",
]: ]:
return 1024 return 1024
else: else:
raise ValueError(f"Model {self.name} not supported") raise ValueError(f"Model {self.name} not supported")
def _get_multimodal_kwargs(self, **kwargs):
"""Get kwargs for multimodal embed call, including output_dimension if set."""
if self.name in self._FLEXIBLE_DIM_MODELS and self.output_dimension is not None:
kwargs["output_dimension"] = self.output_dimension
return kwargs
def compute_query_embeddings( def compute_query_embeddings(
self, query: Union[str, "PIL.Image.Image"], *args, **kwargs self, query: Union[str, "PIL.Image.Image"], *args, **kwargs
) -> List[np.ndarray]: ) -> List[np.ndarray]:
@@ -288,7 +234,6 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
""" """
client = VoyageAIEmbeddingFunction._get_client() client = VoyageAIEmbeddingFunction._get_client()
if self._is_multimodal_model(self.name): if self._is_multimodal_model(self.name):
kwargs = self._get_multimodal_kwargs(**kwargs)
result = client.multimodal_embed( result = client.multimodal_embed(
inputs=[[query]], model=self.name, input_type="query", **kwargs inputs=[[query]], model=self.name, input_type="query", **kwargs
) )
@@ -330,7 +275,6 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
             )
 
         if has_images:
             # Use non-batched API for images
-            kwargs = self._get_multimodal_kwargs(**kwargs)
             result = client.multimodal_embed(
                 inputs=sanitized, model=self.name, input_type="document", **kwargs
             )
@@ -413,7 +357,6 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
             callable: A function that takes a batch of texts and returns embeddings.
         """
         if self._is_multimodal_model(self.name):
-            multimodal_kwargs = self._get_multimodal_kwargs(**kwargs)
 
             def embed_batch(batch: List[str]) -> List[np.array]:
                 batch_inputs = sanitize_multimodal_input(batch)
@@ -421,7 +364,7 @@ class VoyageAIEmbeddingFunction(EmbeddingFunction):
                     inputs=batch_inputs,
                     model=self.name,
                     input_type=input_type,
-                    **multimodal_kwargs,
+                    **kwargs,
                 )
                 return result.embeddings
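
After this revert, `voyage-multimodal-3` is again the only multimodal VoyageAI model, and `ndims()` resolves it to a fixed 1024 through the generic list above. A minimal sketch of the registry usage that remains supported (it mirrors the example from the deleted docs page):

```python
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector

func = get_registry().get("voyageai").create(name="voyage-multimodal-3")

class Images(LanceModel):
    image_uri: str = func.SourceField()  # text or image URI as the source
    vector: Vector(func.ndims()) = func.VectorField()  # fixed 1024 dims after the revert
```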

View File

@@ -384,7 +384,6 @@ class RemoteDBConnection(DBConnection):
         on_bad_vectors: str = "error",
         fill_value: float = 0.0,
         mode: Optional[str] = None,
-        exist_ok: bool = False,
         embedding_functions: Optional[List[EmbeddingFunctionConfig]] = None,
         *,
         namespace: Optional[List[str]] = None,
@@ -413,12 +412,6 @@ class RemoteDBConnection(DBConnection):
             - pyarrow.Schema
             - [LanceModel][lancedb.pydantic.LanceModel]
-        mode: str, default "create"
-            The mode to use when creating the table.
-            Can be either "create", "overwrite", or "exist_ok".
-        exist_ok: bool, default False
-            If exist_ok is True, and mode is None or "create", mode will be changed
-            to "exist_ok".
         on_bad_vectors: str, default "error"
             What to do if any of the vectors are not the same size or contains NaNs.
             One of "error", "drop", "fill".
@@ -490,11 +483,6 @@ class RemoteDBConnection(DBConnection):
         LanceTable(table4)
         """
-        if exist_ok:
-            if mode == "create":
-                mode = "exist_ok"
-            elif not mode:
-                mode = "exist_ok"
-
         if namespace is None:
             namespace = []
         validate_table_name(name)

View File

@@ -18,17 +18,7 @@ from lancedb._lancedb import (
     UpdateResult,
 )
 from lancedb.embeddings.base import EmbeddingFunctionConfig
-from lancedb.index import (
-    FTS,
-    BTree,
-    Bitmap,
-    HnswSq,
-    IvfFlat,
-    IvfPq,
-    IvfRq,
-    IvfSq,
-    LabelList,
-)
+from lancedb.index import FTS, BTree, Bitmap, HnswSq, IvfFlat, IvfPq, IvfSq, LabelList
 from lancedb.remote.db import LOOP
 
 import pyarrow as pa
@@ -275,12 +265,6 @@ class RemoteTable(Table):
                 num_sub_vectors=num_sub_vectors,
                 num_bits=num_bits,
             )
-        elif index_type == "IVF_RQ":
-            config = IvfRq(
-                distance_type=metric,
-                num_partitions=num_partitions,
-                num_bits=num_bits,
-            )
         elif index_type == "IVF_SQ":
             config = IvfSq(distance_type=metric, num_partitions=num_partitions)
         elif index_type == "IVF_HNSW_PQ":
@@ -295,8 +279,7 @@ class RemoteTable(Table):
         else:
             raise ValueError(
                 f"Unknown vector index type: {index_type}. Valid options are"
-                " 'IVF_FLAT', 'IVF_PQ', 'IVF_RQ', 'IVF_SQ',"
-                " 'IVF_HNSW_PQ', 'IVF_HNSW_SQ'"
+                " 'IVF_FLAT', 'IVF_SQ', 'IVF_PQ', 'IVF_HNSW_PQ', 'IVF_HNSW_SQ'"
             )
         LOOP.run(
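
With the `IVF_RQ` branch gone, remote `create_index` dispatches only on the five names in the error string. A hedged sketch of a call that still reaches a valid branch; the `vector_column_name` parameter and the tuning values are assumptions, not shown in this hunk:

```python
# IVF_PQ remains a supported remote vector index type after the revert.
table.create_index(
    metric="cosine",
    vector_column_name="vector",
    index_type="IVF_PQ",
    num_partitions=256,
    num_sub_vectors=16,
)
```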

View File

@@ -684,24 +684,6 @@ class Table(ABC):
""" """
raise NotImplementedError raise NotImplementedError
def to_lance(self, **kwargs) -> lance.LanceDataset:
"""Return the table as a lance.LanceDataset.
Returns
-------
lance.LanceDataset
"""
raise NotImplementedError
def to_polars(self, **kwargs) -> "pl.DataFrame":
"""Return the table as a polars.DataFrame.
Returns
-------
polars.DataFrame
"""
raise NotImplementedError
def create_index( def create_index(
self, self,
metric="l2", metric="l2",
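
The removed stubs were the base-class hooks for `to_lance` and `to_polars`; the generic export path that remains goes through Arrow. A sketch assuming a table handle `tbl` whose `to_arrow()` method is untouched by this diff, with polars installed:

```python
import polars as pl

# Arrow is the interchange format; polars converts from it directly.
df = pl.from_arrow(tbl.to_arrow())
```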

View File

@@ -613,133 +613,6 @@ def test_voyageai_multimodal_embedding_text_function():
     assert len(tbl.to_pandas()["vector"][0]) == voyageai.ndims()
 
 
-@pytest.mark.slow
-@pytest.mark.skipif(
-    os.environ.get("VOYAGE_API_KEY") is None, reason="VOYAGE_API_KEY not set"
-)
-def test_voyageai_multimodal_35_embedding_function():
-    """Test voyage-multimodal-3.5 model with text input."""
-    voyageai = (
-        get_registry()
-        .get("voyageai")
-        .create(name="voyage-multimodal-3.5", max_retries=0)
-    )
-
-    class TextModel(LanceModel):
-        text: str = voyageai.SourceField()
-        vector: Vector(voyageai.ndims()) = voyageai.VectorField()
-
-    df = pd.DataFrame({"text": ["hello world", "goodbye world"]})
-    db = lancedb.connect("~/lancedb")
-    tbl = db.create_table("test_multimodal_35", schema=TextModel, mode="overwrite")
-    tbl.add(df)
-    assert len(tbl.to_pandas()["vector"][0]) == voyageai.ndims()
-    assert voyageai.ndims() == 1024
-
-
-@pytest.mark.slow
-@pytest.mark.skipif(
-    os.environ.get("VOYAGE_API_KEY") is None, reason="VOYAGE_API_KEY not set"
-)
-def test_voyageai_multimodal_35_flexible_dimensions():
-    """Test voyage-multimodal-3.5 model with custom output dimension."""
-    voyageai = (
-        get_registry()
-        .get("voyageai")
-        .create(name="voyage-multimodal-3.5", output_dimension=512, max_retries=0)
-    )
-
-    class TextModel(LanceModel):
-        text: str = voyageai.SourceField()
-        vector: Vector(voyageai.ndims()) = voyageai.VectorField()
-
-    assert voyageai.ndims() == 512
-
-    df = pd.DataFrame({"text": ["hello world", "goodbye world"]})
-    db = lancedb.connect("~/lancedb")
-    tbl = db.create_table("test_multimodal_35_dim", schema=TextModel, mode="overwrite")
-    tbl.add(df)
-    assert len(tbl.to_pandas()["vector"][0]) == 512
-
-
-@pytest.mark.slow
-@pytest.mark.skipif(
-    os.environ.get("VOYAGE_API_KEY") is None, reason="VOYAGE_API_KEY not set"
-)
-def test_voyageai_multimodal_35_image_embedding():
-    """Test voyage-multimodal-3.5 model with image input."""
-    voyageai = (
-        get_registry()
-        .get("voyageai")
-        .create(name="voyage-multimodal-3.5", max_retries=0)
-    )
-
-    class Images(LanceModel):
-        label: str
-        image_uri: str = voyageai.SourceField()
-        vector: Vector(voyageai.ndims()) = voyageai.VectorField()
-
-    db = lancedb.connect("~/lancedb")
-    table = db.create_table(
-        "test_multimodal_35_images", schema=Images, mode="overwrite"
-    )
-    labels = ["cat", "dog"]
-    uris = [
-        "http://farm1.staticflickr.com/53/167798175_7c7845bbbd_z.jpg",
-        "http://farm9.staticflickr.com/8387/8602747737_2e5c2a45d4_z.jpg",
-    ]
-    table.add(pd.DataFrame({"label": labels, "image_uri": uris}))
-    assert len(table.to_pandas()["vector"][0]) == voyageai.ndims()
-    assert voyageai.ndims() == 1024
-
-
-@pytest.mark.slow
-@pytest.mark.skipif(
-    os.environ.get("VOYAGE_API_KEY") is None, reason="VOYAGE_API_KEY not set"
-)
-@pytest.mark.parametrize("dimension", [256, 512, 1024, 2048])
-def test_voyageai_multimodal_35_all_dimensions(dimension):
-    """Test voyage-multimodal-3.5 model with all valid output dimensions."""
-    voyageai = (
-        get_registry()
-        .get("voyageai")
-        .create(name="voyage-multimodal-3.5", output_dimension=dimension, max_retries=0)
-    )
-    assert voyageai.ndims() == dimension
-
-    class TextModel(LanceModel):
-        text: str = voyageai.SourceField()
-        vector: Vector(voyageai.ndims()) = voyageai.VectorField()
-
-    df = pd.DataFrame({"text": ["hello world"]})
-    db = lancedb.connect("~/lancedb")
-    tbl = db.create_table(
-        f"test_multimodal_35_dim_{dimension}", schema=TextModel, mode="overwrite"
-    )
-    tbl.add(df)
-    assert len(tbl.to_pandas()["vector"][0]) == dimension
-
-
-@pytest.mark.slow
-@pytest.mark.skipif(
-    os.environ.get("VOYAGE_API_KEY") is None, reason="VOYAGE_API_KEY not set"
-)
-def test_voyageai_multimodal_35_invalid_dimension():
-    """Test voyage-multimodal-3.5 model raises error for invalid output dimension."""
-    with pytest.raises(ValueError, match="Invalid output_dimension"):
-        voyageai = (
-            get_registry()
-            .get("voyageai")
-            .create(name="voyage-multimodal-3.5", output_dimension=999, max_retries=0)
-        )
-        # ndims() is where the validation happens
-        voyageai.ndims()
-
-
 @pytest.mark.slow
 @pytest.mark.skipif(
     importlib.util.find_spec("colpali_engine") is None,

View File

@@ -168,42 +168,6 @@ def test_table_len_sync():
     assert len(table) == 1
 
 
-def test_create_table_exist_ok():
-    def handler(request):
-        if request.path == "/v1/table/test/create/?mode=exist_ok":
-            request.send_response(200)
-            request.send_header("Content-Type", "application/json")
-            request.end_headers()
-            request.wfile.write(b"{}")
-        else:
-            request.send_response(404)
-            request.end_headers()
-
-    with mock_lancedb_connection(handler) as db:
-        table = db.create_table("test", [{"id": 1}], exist_ok=True)
-        assert table is not None
-
-    with mock_lancedb_connection(handler) as db:
-        table = db.create_table("test", [{"id": 1}], mode="create", exist_ok=True)
-        assert table is not None
-
-
-def test_create_table_exist_ok_with_mode_overwrite():
-    def handler(request):
-        if request.path == "/v1/table/test/create/?mode=overwrite":
-            request.send_response(200)
-            request.send_header("Content-Type", "application/json")
-            request.end_headers()
-            request.wfile.write(b"{}")
-        else:
-            request.send_response(404)
-            request.end_headers()
-
-    with mock_lancedb_connection(handler) as db:
-        table = db.create_table("test", [{"id": 1}], mode="overwrite", exist_ok=True)
-        assert table is not None
-
-
 @pytest.mark.asyncio
 async def test_http_error():
     request_id_holder = {"request_id": None}

View File

@@ -1,68 +0,0 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright The LanceDB Authors
"""
Tests for S3 bucket names containing dots.

Related issue: https://github.com/lancedb/lancedb/issues/1898

These tests validate the early error checking for S3 bucket names with dots.
No actual S3 connection is made - validation happens before connection.
"""

import pytest

import lancedb

# Test URIs
BUCKET_WITH_DOTS = "s3://my.bucket.name/path"
BUCKET_WITH_DOTS_AND_REGION = ("s3://my.bucket.name", {"region": "us-east-1"})
BUCKET_WITH_DOTS_AND_AWS_REGION = ("s3://my.bucket.name", {"aws_region": "us-east-1"})
BUCKET_WITHOUT_DOTS = "s3://my-bucket/path"


class TestS3BucketWithDotsSync:
    """Tests for connect()."""

    def test_bucket_with_dots_requires_region(self):
        with pytest.raises(ValueError, match="contains dots"):
            lancedb.connect(BUCKET_WITH_DOTS)

    def test_bucket_with_dots_and_region_passes(self):
        uri, opts = BUCKET_WITH_DOTS_AND_REGION
        db = lancedb.connect(uri, storage_options=opts)
        assert db is not None

    def test_bucket_with_dots_and_aws_region_passes(self):
        uri, opts = BUCKET_WITH_DOTS_AND_AWS_REGION
        db = lancedb.connect(uri, storage_options=opts)
        assert db is not None

    def test_bucket_without_dots_passes(self):
        db = lancedb.connect(BUCKET_WITHOUT_DOTS)
        assert db is not None


class TestS3BucketWithDotsAsync:
    """Tests for connect_async()."""

    @pytest.mark.asyncio
    async def test_bucket_with_dots_requires_region(self):
        with pytest.raises(ValueError, match="contains dots"):
            await lancedb.connect_async(BUCKET_WITH_DOTS)

    @pytest.mark.asyncio
    async def test_bucket_with_dots_and_region_passes(self):
        uri, opts = BUCKET_WITH_DOTS_AND_REGION
        db = await lancedb.connect_async(uri, storage_options=opts)
        assert db is not None

    @pytest.mark.asyncio
    async def test_bucket_with_dots_and_aws_region_passes(self):
        uri, opts = BUCKET_WITH_DOTS_AND_AWS_REGION
        db = await lancedb.connect_async(uri, storage_options=opts)
        assert db is not None

    @pytest.mark.asyncio
    async def test_bucket_without_dots_passes(self):
        db = await lancedb.connect_async(BUCKET_WITHOUT_DOTS)
        assert db is not None

View File

@@ -1,6 +1,6 @@
 [package]
 name = "lancedb"
-version = "0.23.1"
+version = "0.23.0"
 edition.workspace = true
 description = "LanceDB: A serverless, low-latency vector database for AI applications"
 license.workspace = true
@@ -110,7 +110,7 @@ oss = ["lance/oss", "lance-io/oss", "lance-namespace-impls/dir-oss"]
 gcs = ["lance/gcp", "lance-io/gcp", "lance-namespace-impls/dir-gcp"]
 azure = ["lance/azure", "lance-io/azure", "lance-namespace-impls/dir-azure"]
 dynamodb = ["lance/dynamodb", "aws"]
-remote = ["dep:reqwest", "dep:http", "lance-namespace-impls/rest", "lance-namespace-impls/rest-adapter"]
+remote = ["dep:reqwest", "dep:http", "lance-namespace-impls/rest"]
 fp16kernels = ["lance-linalg/fp16kernels"]
 s3-test = []
 bedrock = ["dep:aws-sdk-bedrockruntime"]

View File

@@ -804,14 +804,6 @@ impl Connection {
         self.internal.describe_namespace(request).await
     }
 
-    /// Get the equivalent namespace client in the database of this connection.
-    /// For LanceNamespaceDatabase, it is the underlying LanceNamespace.
-    /// For ListingDatabase, it is the equivalent DirectoryNamespace.
-    /// For RemoteDatabase, it is the equivalent RestNamespace.
-    pub async fn namespace_client(&self) -> Result<Arc<dyn lance_namespace::LanceNamespace>> {
-        self.internal.namespace_client().await
-    }
-
     /// List tables with pagination support
     pub async fn list_tables(&self, request: ListTablesRequest) -> Result<ListTablesResponse> {
         self.internal.list_tables(request).await
@@ -1325,27 +1317,25 @@ mod tests {
     #[tokio::test]
     async fn test_table_names() {
-        let tc = new_test_connection().await.unwrap();
-        let db = tc.connection;
-        let schema = Arc::new(Schema::new(vec![Field::new("x", DataType::Int32, false)]));
+        let tmp_dir = tempdir().unwrap();
         let mut names = Vec::with_capacity(100);
         for _ in 0..100 {
-            let name = uuid::Uuid::new_v4().to_string();
+            let mut name = uuid::Uuid::new_v4().to_string();
             names.push(name.clone());
-            db.create_empty_table(name, schema.clone())
-                .execute()
-                .await
-                .unwrap();
+            name.push_str(".lance");
+            create_dir_all(tmp_dir.path().join(&name)).unwrap();
         }
         names.sort();
-        let tables = db.table_names().limit(100).execute().await.unwrap();
+
+        let uri = tmp_dir.path().to_str().unwrap();
+        let db = connect(uri).execute().await.unwrap();
+        let tables = db.table_names().execute().await.unwrap();
         assert_eq!(tables, names);
 
         let tables = db
             .table_names()
             .start_after(&names[30])
-            .limit(100)
             .execute()
             .await
             .unwrap();

View File

@@ -296,10 +296,4 @@ pub trait Database:
     /// Drop all tables in the database
     async fn drop_all_tables(&self, namespace: &[String]) -> Result<()>;
 
     fn as_any(&self) -> &dyn std::any::Any;
-
-    /// Get the equivalent namespace client of this database
-    /// For LanceNamespaceDatabase, it is the underlying LanceNamespace.
-    /// For ListingDatabase, it is the equivalent DirectoryNamespace.
-    /// For RemoteDatabase, it is the equivalent RestNamespace.
-    async fn namespace_client(&self) -> Result<Arc<dyn LanceNamespace>>;
 }

View File

@@ -1043,24 +1043,6 @@ impl Database for ListingDatabase {
     fn as_any(&self) -> &dyn std::any::Any {
         self
     }
-
-    async fn namespace_client(&self) -> Result<Arc<dyn lance_namespace::LanceNamespace>> {
-        // Create a DirectoryNamespace pointing to the same root with the same storage options
-        let mut builder = lance_namespace_impls::DirectoryNamespaceBuilder::new(&self.uri);
-
-        // Add storage options
-        if !self.storage_options.is_empty() {
-            builder = builder.storage_options(self.storage_options.clone());
-        }
-
-        // Use the same session
-        builder = builder.session(self.session.clone());
-
-        let namespace = builder.build().await.map_err(|e| Error::Runtime {
-            message: format!("Failed to create namespace client: {}", e),
-        })?;
-
-        Ok(Arc::new(namespace) as Arc<dyn lance_namespace::LanceNamespace>)
-    }
 }
 
 #[cfg(test)]
@@ -2045,63 +2027,4 @@ mod tests {
let db_options = ListingDatabaseOptions::parse_from_map(&options).unwrap();
assert_eq!(db_options.new_table_config.enable_stable_row_ids, None);
}
#[tokio::test]
async fn test_namespace_client() {
let (_tempdir, db) = setup_database().await;
// Create some tables first
let schema = Arc::new(Schema::new(vec![
Field::new("id", DataType::Int32, false),
Field::new("name", DataType::Utf8, false),
]));
db.create_table(CreateTableRequest {
name: "table1".to_string(),
namespace: vec![],
data: CreateTableData::Empty(TableDefinition::new_from_schema(schema.clone())),
mode: CreateTableMode::Create,
write_options: Default::default(),
location: None,
namespace_client: None,
})
.await
.unwrap();
db.create_table(CreateTableRequest {
name: "table2".to_string(),
namespace: vec![],
data: CreateTableData::Empty(TableDefinition::new_from_schema(schema)),
mode: CreateTableMode::Create,
write_options: Default::default(),
location: None,
namespace_client: None,
})
.await
.unwrap();
// Get the namespace client
let namespace_client = db.namespace_client().await;
assert!(namespace_client.is_ok());
let namespace_client = namespace_client.unwrap();
// Verify the namespace client can list the tables we created
// Use empty vec for root namespace
let list_result = namespace_client
.list_tables(lance_namespace::models::ListTablesRequest {
id: Some(vec![]),
..Default::default()
})
.await;
assert!(
list_result.is_ok(),
"list_tables failed: {:?}",
list_result.err()
);
let tables = list_result.unwrap().tables;
assert_eq!(tables.len(), 2);
assert!(tables.contains(&"table1".to_string()));
assert!(tables.contains(&"table2".to_string()));
}
}


@@ -7,6 +7,7 @@ use std::collections::HashMap;
use std::sync::Arc;
use async_trait::async_trait;
use lance_io::object_store::{LanceNamespaceStorageOptionsProvider, StorageOptionsProvider};
use lance_namespace::{
models::{
CreateEmptyTableRequest, CreateNamespaceRequest, CreateNamespaceResponse,
@@ -18,13 +19,13 @@ use lance_namespace::{
};
use lance_namespace_impls::ConnectBuilder;
use crate::connection::ConnectRequest;
use crate::database::ReadConsistency;
use crate::error::{Error, Result};
use crate::table::NativeTable;
use super::{
- BaseTable, CloneTableRequest, CreateTableMode, CreateTableRequest as DbCreateTableRequest,
- Database, OpenTableRequest, TableNamesRequest,
+ listing::ListingDatabase, BaseTable, CloneTableRequest, CreateTableMode,
+ CreateTableRequest as DbCreateTableRequest, Database, OpenTableRequest, TableNamesRequest,
};
/// A database implementation that uses lance-namespace for table management
@@ -89,6 +90,51 @@ impl std::fmt::Display for LanceNamespaceDatabase {
}
}
impl LanceNamespaceDatabase {
/// Create a temporary listing database for the given location
///
/// Merges storage options with priority: connection < user < namespace
async fn create_listing_database(
&self,
location: &str,
table_id: Vec<String>,
user_storage_options: Option<&HashMap<String, String>>,
response_storage_options: Option<&HashMap<String, String>>,
) -> Result<ListingDatabase> {
// Merge storage options: connection < user < namespace
let mut merged_storage_options = self.storage_options.clone();
if let Some(opts) = user_storage_options {
merged_storage_options.extend(opts.clone());
}
if let Some(opts) = response_storage_options {
merged_storage_options.extend(opts.clone());
}
let request = ConnectRequest {
uri: location.to_string(),
#[cfg(feature = "remote")]
client_config: Default::default(),
options: merged_storage_options,
read_consistency_interval: self.read_consistency_interval,
session: self.session.clone(),
};
let mut listing_db = ListingDatabase::connect_with_options(&request).await?;
// Create storage options provider only if namespace returned storage options
// (not just user-provided options)
if response_storage_options.is_some() {
let provider = Arc::new(LanceNamespaceStorageOptionsProvider::new(
self.namespace.clone(),
table_id,
)) as Arc<dyn StorageOptionsProvider>;
listing_db.storage_options_provider = Some(provider);
}
Ok(listing_db)
}
}
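The precedence comment above ("connection < user < namespace") falls out of HashMap::extend, which overwrites keys that are already present; a self-contained sketch with illustrative keys:

use std::collections::HashMap;

// Later extends win: user options override connection options,
// and namespace-vended options override both.
fn merged_example() -> HashMap<String, String> {
    let mut merged: HashMap<String, String> =
        [("region".to_string(), "us-east-1".to_string())].into();
    let user: HashMap<String, String> =
        [("region".to_string(), "us-west-2".to_string())].into();
    let namespace: HashMap<String, String> =
        [("region".to_string(), "eu-west-1".to_string())].into();
    merged.extend(user);
    merged.extend(namespace);
    merged // "region" resolves to "eu-west-1"
}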
#[async_trait]
impl Database for LanceNamespaceDatabase {
fn uri(&self) -> &str {
@@ -149,6 +195,14 @@ impl Database for LanceNamespaceDatabase {
}
async fn create_table(&self, request: DbCreateTableRequest) -> Result<Arc<dyn BaseTable>> {
// Extract user-provided storage options from request
let user_storage_options = request
.write_options
.lance_write_params
.as_ref()
.and_then(|lwp| lwp.store_params.as_ref())
.and_then(|sp| sp.storage_options.as_ref());
let mut table_id = request.namespace.clone();
table_id.push(request.name.clone());
let describe_request = DescribeTableRequest {
@@ -181,20 +235,34 @@ impl Database for LanceNamespaceDatabase {
}
}
CreateTableMode::ExistOk(_) => {
- if describe_result.is_ok() {
- let native_table = NativeTable::open_from_namespace(
- self.namespace.clone(),
- &request.name,
- request.namespace.clone(),
- None,
- None,
- self.read_consistency_interval,
- self.server_side_query_enabled,
- self.session.clone(),
- )
- .await?;
- return Ok(Arc::new(native_table));
+ if let Ok(response) = describe_result {
+ let location = response.location.ok_or_else(|| Error::Runtime {
+ message: "Table location is missing from namespace response".to_string(),
+ })?;
+ let listing_db = self
+ .create_listing_database(
+ &location,
+ table_id.clone(),
+ user_storage_options,
+ response.storage_options.as_ref(),
+ )
+ .await?;
+ let namespace_client = self
+ .server_side_query_enabled
+ .then(|| self.namespace.clone());
+ return listing_db
+ .open_table(OpenTableRequest {
+ name: request.name.clone(),
+ namespace: request.namespace.clone(),
+ index_cache_size: None,
+ lance_read_params: None,
+ location: Some(location),
+ namespace_client,
+ })
+ .await;
}
}
}
@@ -226,37 +294,82 @@ impl Database for LanceNamespaceDatabase {
message: "Table location is missing from create_empty_table response".to_string(), message: "Table location is missing from create_empty_table response".to_string(),
})?; })?;
let native_table = NativeTable::create_from_namespace( let listing_db = self
self.namespace.clone(), .create_listing_database(
&location, &location,
&request.name, table_id.clone(),
request.namespace.clone(), user_storage_options,
request.data, create_empty_response.storage_options.as_ref(),
None, // write_store_wrapper not used for namespace connections
request.write_options.lance_write_params,
self.read_consistency_interval,
self.server_side_query_enabled,
self.session.clone(),
) )
.await?; .await?;
Ok(Arc::new(native_table)) let namespace_client = self
.server_side_query_enabled
.then(|| self.namespace.clone());
let create_request = DbCreateTableRequest {
name: request.name,
namespace: request.namespace,
data: request.data,
mode: request.mode,
write_options: request.write_options,
location: Some(location),
namespace_client,
};
listing_db.create_table(create_request).await
} }
async fn open_table(&self, request: OpenTableRequest) -> Result<Arc<dyn BaseTable>> {
- let native_table = NativeTable::open_from_namespace(
- self.namespace.clone(),
- &request.name,
- request.namespace.clone(),
- None, // write_store_wrapper not used for namespace connections
- request.lance_read_params,
- self.read_consistency_interval,
- self.server_side_query_enabled,
- self.session.clone(),
- )
- .await?;
- Ok(Arc::new(native_table))
+ // Extract user-provided storage options from request
+ let user_storage_options = request
+ .lance_read_params
+ .as_ref()
+ .and_then(|lrp| lrp.store_options.as_ref())
+ .and_then(|so| so.storage_options.as_ref());
+ let mut table_id = request.namespace.clone();
+ table_id.push(request.name.clone());
+ let describe_request = DescribeTableRequest {
+ id: Some(table_id.clone()),
+ version: None,
+ };
+ let response = self
+ .namespace
+ .describe_table(describe_request)
+ .await
+ .map_err(|e| Error::Runtime {
+ message: format!("Failed to describe table: {}", e),
+ })?;
+ let location = response.location.ok_or_else(|| Error::Runtime {
+ message: "Table location is missing from namespace response".to_string(),
+ })?;
+ let listing_db = self
+ .create_listing_database(
+ &location,
+ table_id.clone(),
+ user_storage_options,
+ response.storage_options.as_ref(),
+ )
+ .await?;
+ let namespace_client = self
+ .server_side_query_enabled
+ .then(|| self.namespace.clone());
+ let open_request = OpenTableRequest {
+ name: request.name.clone(),
+ namespace: request.namespace.clone(),
+ index_cache_size: request.index_cache_size,
+ lance_read_params: request.lance_read_params,
+ location: Some(location),
+ namespace_client,
+ };
+ listing_db.open_table(open_request).await
}
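Both branches above gate the namespace client on server_side_query_enabled via bool::then, which maps true to Some(f()) and false to None; for reference:

// bool::then: true.then(f) == Some(f()), false.then(f) == None.
let enabled = true;
assert_eq!(enabled.then(|| "client"), Some("client"));
assert_eq!(false.then(|| "client"), None);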
async fn clone_table(&self, _request: CloneTableRequest) -> Result<Arc<dyn BaseTable>> {
@@ -312,10 +425,6 @@ impl Database for LanceNamespaceDatabase {
fn as_any(&self) -> &dyn std::any::Any {
self
}
async fn namespace_client(&self) -> Result<Arc<dyn LanceNamespace>> {
Ok(self.namespace.clone())
}
}
#[cfg(test)]


@@ -120,13 +120,8 @@ impl MemoryRegistry {
}
/// A record batch reader that has embeddings applied to it
- ///
- /// This is a wrapper around another record batch reader that applies embedding functions
- /// when reading from the record batch.
- ///
- /// When multiple embedding functions are defined, they are computed in parallel using
- /// scoped threads to improve performance. For a single embedding function, computation
- /// is done inline without threading overhead.
+ /// This is a wrapper around another record batch reader that applies an embedding function
+ /// when reading from the record batch
pub struct WithEmbeddings<R: RecordBatchReader> {
inner: R,
embeddings: Vec<(EmbeddingDefinition, Arc<dyn EmbeddingFunction>)>,
@@ -240,48 +235,6 @@ impl<R: RecordBatchReader> WithEmbeddings<R> {
column_definitions,
})
}
fn compute_embeddings_parallel(&self, batch: &RecordBatch) -> Result<Vec<Arc<dyn Array>>> {
if self.embeddings.len() == 1 {
let (fld, func) = &self.embeddings[0];
let src_column =
batch
.column_by_name(&fld.source_column)
.ok_or_else(|| Error::InvalidInput {
message: format!("Source column '{}' not found", fld.source_column),
})?;
return Ok(vec![func.compute_source_embeddings(src_column.clone())?]);
}
// Parallel path: multiple embeddings
std::thread::scope(|s| {
let handles: Vec<_> = self
.embeddings
.iter()
.map(|(fld, func)| {
let src_column = batch.column_by_name(&fld.source_column).ok_or_else(|| {
Error::InvalidInput {
message: format!("Source column '{}' not found", fld.source_column),
}
})?;
let handle =
s.spawn(move || func.compute_source_embeddings(src_column.clone()));
Ok(handle)
})
.collect::<Result<_>>()?;
handles
.into_iter()
.map(|h| {
h.join().map_err(|e| Error::Runtime {
message: format!("Thread panicked during embedding computation: {:?}", e),
})?
})
.collect()
})
}
}
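The removed parallel path relies on std::thread::scope, which lets worker closures borrow the batch without 'static bounds and joins every worker before returning; the same pattern in isolation, as a sketch independent of the embedding types:

// Run f over each item on its own scoped thread and collect results in order.
fn parallel_map<T: Sync, R: Send>(items: &[T], f: impl Fn(&T) -> R + Sync) -> Vec<R> {
    let f = &f;
    std::thread::scope(|s| {
        let handles: Vec<_> = items.iter().map(|it| s.spawn(move || f(it))).collect();
        handles.into_iter().map(|h| h.join().expect("worker panicked")).collect()
    })
}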
impl<R: RecordBatchReader> Iterator for MaybeEmbedded<R> {
@@ -309,9 +262,12 @@ impl<R: RecordBatchReader> Iterator for WithEmbeddings<R> {
fn next(&mut self) -> Option<Self::Item> {
let batch = self.inner.next()?;
match batch {
- Ok(batch) => {
- let embeddings = match self.compute_embeddings_parallel(&batch) {
- Ok(emb) => emb,
+ Ok(mut batch) => {
+ // todo: parallelize this
+ for (fld, func) in self.embeddings.iter() {
+ let src_column = batch.column_by_name(&fld.source_column).unwrap();
+ let embedding = match func.compute_source_embeddings(src_column.clone()) {
+ Ok(embedding) => embedding,
Err(e) => {
return Some(Err(arrow_schema::ArrowError::ComputeError(format!(
"Error computing embedding: {}",
@@ -319,9 +275,6 @@ impl<R: RecordBatchReader> Iterator for WithEmbeddings<R> {
))))
}
};
- let mut batch = batch;
- for ((fld, _), embedding) in self.embeddings.iter().zip(embeddings.iter()) {
let dst_field_name = fld
.dest_column
.clone()
@@ -333,7 +286,7 @@ impl<R: RecordBatchReader> Iterator for WithEmbeddings<R> {
embedding.nulls().is_some(),
);
- match batch.try_with_column(dst_field.clone(), embedding.clone()) {
+ match batch.try_with_column(dst_field.clone(), embedding) {
Ok(b) => batch = b,
Err(e) => return Some(Err(e)),
};


@@ -232,38 +232,6 @@ impl HttpSend for Sender {
}
}
/// Parsed components from a database URL (db://...)
pub struct ParsedDbUrl {
pub db_name: String,
pub db_prefix: Option<String>,
}
/// Parse a database URL and extract the database name and optional prefix.
///
/// Expected format: `db://db_name` or `db://db_name/prefix`
pub fn parse_db_url(db_url: &str) -> Result<ParsedDbUrl> {
let parsed_url = url::Url::parse(db_url).map_err(|err| Error::InvalidInput {
message: format!("db_url is not a valid URL. '{db_url}'. Error: {err}"),
})?;
debug_assert_eq!(parsed_url.scheme(), "db");
if !parsed_url.has_host() {
return Err(Error::InvalidInput {
message: format!("Invalid database URL (missing host) '{}'", db_url),
});
}
let db_name = parsed_url.host_str().unwrap().to_string();
let db_prefix = {
let prefix = parsed_url.path().trim_start_matches('/');
if prefix.is_empty() {
None
} else {
Some(prefix.to_string())
}
};
Ok(ParsedDbUrl { db_name, db_prefix })
}
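A hedged illustration of the helper above (input values chosen for the example): the URL host becomes db_name and any remaining path becomes the prefix.

let p = parse_db_url("db://my_database/team_prefix").unwrap();
assert_eq!(p.db_name, "my_database");
assert_eq!(p.db_prefix.as_deref(), Some("team_prefix"));
assert!(parse_db_url("db://my_database").unwrap().db_prefix.is_none());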
impl RestfulLanceDbClient<Sender> {
fn get_timeout(passed: Option<Duration>, env_var: &str) -> Result<Option<Duration>> {
if let Some(passed) = passed {
@@ -282,12 +250,32 @@ impl RestfulLanceDbClient<Sender> {
}
pub fn try_new(
- parsed_url: &ParsedDbUrl,
+ db_url: &str,
+ api_key: &str,
region: &str,
host_override: Option<String>,
- default_headers: HeaderMap,
client_config: ClientConfig,
+ options: &RemoteOptions,
) -> Result<Self> {
+ let parsed_url = url::Url::parse(db_url).map_err(|err| Error::InvalidInput {
+ message: format!("db_url is not a valid URL. '{db_url}'. Error: {err}"),
+ })?;
+ debug_assert_eq!(parsed_url.scheme(), "db");
+ if !parsed_url.has_host() {
+ return Err(Error::InvalidInput {
+ message: format!("Invalid database URL (missing host) '{}'", db_url),
+ });
+ }
+ let db_name = parsed_url.host_str().unwrap();
+ let db_prefix = {
+ let prefix = parsed_url.path().trim_start_matches('/');
+ if prefix.is_empty() {
+ None
+ } else {
+ Some(prefix)
+ }
+ };
// Get the timeouts
let timeout =
Self::get_timeout(client_config.timeout_config.timeout, "LANCE_CLIENT_TIMEOUT")?;
@@ -360,7 +348,15 @@ impl RestfulLanceDbClient<Sender> {
}
let client = client_builder
- .default_headers(default_headers)
+ .default_headers(Self::default_headers(
+ api_key,
+ region,
+ db_name,
+ host_override.is_some(),
+ options,
+ db_prefix,
+ &client_config,
+ )?)
.user_agent(client_config.user_agent)
.build()
.map_err(|err| Error::Other {
@@ -370,7 +366,7 @@ impl RestfulLanceDbClient<Sender> {
let host = match host_override {
Some(host_override) => host_override,
- None => format!("https://{}.{}.api.lancedb.com", parsed_url.db_name, region),
+ None => format!("https://{}.{}.api.lancedb.com", db_name, region),
};
debug!("Created client for host: {}", host);
let retry_config = client_config.retry_config.clone().try_into()?;
@@ -393,7 +389,7 @@ impl<S: HttpSend> RestfulLanceDbClient<S> {
&self.host
}
- pub fn default_headers(
+ fn default_headers(
api_key: &str,
region: &str,
db_name: &str,


@@ -189,10 +189,6 @@ pub struct RemoteDatabase<S: HttpSend = Sender> {
client: RestfulLanceDbClient<S>,
table_cache: Cache<String, Arc<RemoteTable<S>>>,
uri: String,
/// Headers to pass to the namespace client for authentication
namespace_headers: HashMap<String, String>,
/// TLS configuration for mTLS support
tls_config: Option<super::client::TlsConfig>,
}
impl RemoteDatabase {
@@ -204,32 +200,13 @@ impl RemoteDatabase {
client_config: ClientConfig,
options: RemoteOptions,
) -> Result<Self> {
- let parsed = super::client::parse_db_url(uri)?;
- let header_map = RestfulLanceDbClient::<Sender>::default_headers(
- api_key,
- region,
- &parsed.db_name,
- host_override.is_some(),
- &options,
- parsed.db_prefix.as_deref(),
- &client_config,
- )?;
- let namespace_headers: HashMap<String, String> = header_map
- .iter()
- .filter_map(|(k, v)| {
- v.to_str()
- .ok()
- .map(|val| (k.as_str().to_string(), val.to_string()))
- })
- .collect();
- let client = RestfulLanceDbClient::try_new(
- &parsed,
- region,
- host_override,
- header_map,
- client_config.clone(),
- )?;
+ let client = RestfulLanceDbClient::try_new(
+ uri,
+ api_key,
+ region,
+ host_override,
+ client_config,
+ &options,
+ )?;
let table_cache = Cache::builder()
@@ -241,8 +218,6 @@ impl RemoteDatabase {
client,
table_cache,
uri: uri.to_owned(),
namespace_headers,
tls_config: client_config.tls_config,
})
}
}
@@ -265,8 +240,6 @@ mod test_utils {
client,
table_cache: Cache::new(0),
uri: "http://localhost".to_string(),
namespace_headers: HashMap::new(),
tls_config: None,
}
}
@@ -275,13 +248,11 @@ mod test_utils {
F: Fn(reqwest::Request) -> http::Response<T> + Send + Sync + 'static,
T: Into<reqwest::Body>,
{
- let client = client_with_handler_and_config(handler, config.clone());
+ let client = client_with_handler_and_config(handler, config);
Self {
client,
table_cache: Cache::new(0),
uri: "http://localhost".to_string(),
- namespace_headers: config.extra_headers.clone(),
- tls_config: config.tls_config.clone(),
}
}
}
@@ -745,8 +716,7 @@ impl<S: HttpSend> Database for RemoteDatabase<S> {
let namespace_id = build_namespace_identifier(namespace_parts, &self.client.id_delimiter);
let req = self
.client
- .post(&format!("/v1/namespace/{}/describe", namespace_id))
- .json(&DescribeNamespaceRequest::default());
+ .get(&format!("/v1/namespace/{}/describe", namespace_id));
let (request_id, resp) = self.client.send(req).await?;
let resp = self.client.check_response(&request_id, resp).await?;
@@ -757,31 +727,6 @@ impl<S: HttpSend> Database for RemoteDatabase<S> {
fn as_any(&self) -> &dyn std::any::Any {
self
}
async fn namespace_client(&self) -> Result<Arc<dyn lance_namespace::LanceNamespace>> {
// Create a RestNamespace pointing to the same remote host with the same authentication headers
let mut builder = lance_namespace_impls::RestNamespaceBuilder::new(self.client.host())
.delimiter(&self.client.id_delimiter)
// TODO: support header provider
.headers(self.namespace_headers.clone());
// Apply mTLS configuration if present
if let Some(tls_config) = &self.tls_config {
if let Some(cert_file) = &tls_config.cert_file {
builder = builder.cert_file(cert_file);
}
if let Some(key_file) = &tls_config.key_file {
builder = builder.key_file(key_file);
}
if let Some(ssl_ca_cert) = &tls_config.ssl_ca_cert {
builder = builder.ssl_ca_cert(ssl_ca_cert);
}
builder = builder.assert_hostname(tls_config.assert_hostname);
}
let namespace = builder.build();
Ok(Arc::new(namespace) as Arc<dyn lance_namespace::LanceNamespace>)
}
}
/// RemoteOptions contains a subset of StorageOptions that are compatible with Remote LanceDB connections
@@ -1573,265 +1518,4 @@ mod tests {
panic!("Expected HTTP error"); panic!("Expected HTTP error");
} }
} }
#[tokio::test]
async fn test_namespace_client() {
let conn = Connection::new_with_handler(|_| {
http::Response::builder()
.status(200)
.body(r#"{"tables": []}"#)
.unwrap()
});
// Get the namespace client from the connection's internal database
let namespace_client = conn.namespace_client().await;
assert!(namespace_client.is_ok());
}
#[tokio::test]
async fn test_namespace_client_with_tls_config() {
use crate::remote::client::TlsConfig;
let tls_config = TlsConfig {
cert_file: Some("/path/to/cert.pem".to_string()),
key_file: Some("/path/to/key.pem".to_string()),
ssl_ca_cert: Some("/path/to/ca.pem".to_string()),
assert_hostname: true,
};
let client_config = ClientConfig {
tls_config: Some(tls_config),
..Default::default()
};
let conn = Connection::new_with_handler_and_config(
|_| {
http::Response::builder()
.status(200)
.body(r#"{"tables": []}"#)
.unwrap()
},
client_config,
);
// Get the namespace client - it should be created with the TLS config
let namespace_client = conn.namespace_client().await;
assert!(namespace_client.is_ok());
}
#[tokio::test]
async fn test_namespace_client_with_headers() {
let mut extra_headers = HashMap::new();
extra_headers.insert("X-Custom-Header".to_string(), "custom-value".to_string());
let client_config = ClientConfig {
extra_headers,
..Default::default()
};
let conn = Connection::new_with_handler_and_config(
|_| {
http::Response::builder()
.status(200)
.body(r#"{"tables": []}"#)
.unwrap()
},
client_config,
);
// Get the namespace client - it should be created with the extra headers
let namespace_client = conn.namespace_client().await;
assert!(namespace_client.is_ok());
}
/// Integration tests using RestAdapter to run RemoteDatabase against a real namespace server
mod rest_adapter_integration {
use super::*;
use lance_namespace::models::ListTablesRequest;
use lance_namespace_impls::{DirectoryNamespaceBuilder, RestAdapter, RestAdapterConfig};
use std::sync::Arc;
use tempfile::TempDir;
/// Test fixture that manages a REST server backed by DirectoryNamespace
struct RestServerFixture {
_temp_dir: TempDir,
server_handle: lance_namespace_impls::RestAdapterHandle,
server_url: String,
}
impl RestServerFixture {
async fn new() -> Self {
let temp_dir = TempDir::new().unwrap();
let temp_path = temp_dir.path().to_str().unwrap().to_string();
// Create DirectoryNamespace backend
let backend = DirectoryNamespaceBuilder::new(&temp_path)
.build()
.await
.unwrap();
let backend = Arc::new(backend);
// Start REST server with port 0 (OS assigns available port)
let config = RestAdapterConfig {
port: 0,
..Default::default()
};
let server = RestAdapter::new(backend, config);
let server_handle = server.start().await.unwrap();
// Get the actual port assigned by OS
let actual_port = server_handle.port();
let server_url = format!("http://127.0.0.1:{}", actual_port);
Self {
_temp_dir: temp_dir,
server_handle,
server_url,
}
}
}
impl Drop for RestServerFixture {
fn drop(&mut self) {
self.server_handle.shutdown();
}
}
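Port 0 asks the OS for any free port, which the fixture then reads back from the handle; the same idea with plain std networking, as a standalone sketch:

use std::net::TcpListener;

// Bind to port 0, then ask the socket which port was actually assigned.
let listener = TcpListener::bind("127.0.0.1:0").expect("bind failed");
let port = listener.local_addr().expect("no local addr").port();
assert!(port > 0);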
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_remote_database_with_rest_adapter() {
use lance_namespace::models::CreateNamespaceRequest;
let fixture = RestServerFixture::new().await;
// Connect to the REST server using lancedb Connection
// Use db://dummy as URI and set actual server URL via host_override
let conn = ConnectBuilder::new("db://dummy")
.api_key("test-api-key")
.region("us-east-1")
.host_override(&fixture.server_url)
.execute()
.await
.unwrap();
// Create a child namespace first
let namespace = vec!["test_ns".to_string()];
conn.create_namespace(CreateNamespaceRequest {
id: Some(namespace.clone()),
mode: None,
properties: None,
})
.await
.expect("Failed to create namespace");
// Create a table in the child namespace
let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
let data = RecordBatch::try_new(
schema.clone(),
vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],
)
.unwrap();
let reader = RecordBatchIterator::new([Ok(data.clone())], schema.clone());
let table = conn
.create_table("test_table", reader)
.namespace(namespace.clone())
.execute()
.await;
assert!(table.is_ok(), "Failed to create table: {:?}", table.err());
// List tables in the child namespace
let list_response = conn
.list_tables(ListTablesRequest {
id: Some(namespace.clone()),
page_token: None,
limit: None,
})
.await
.expect("Failed to list tables");
assert_eq!(list_response.tables, vec!["test_table"]);
// Get namespace client and verify it can also list tables
let namespace_client = conn.namespace_client().await.unwrap();
let list_response = namespace_client
.list_tables(ListTablesRequest {
id: Some(namespace.clone()),
page_token: None,
limit: None,
})
.await
.unwrap();
assert_eq!(list_response.tables, vec!["test_table"]);
// Open the table from the child namespace
let opened_table = conn
.open_table("test_table")
.namespace(namespace.clone())
.execute()
.await;
assert!(
opened_table.is_ok(),
"Failed to open table: {:?}",
opened_table.err()
);
assert_eq!(opened_table.unwrap().name(), "test_table");
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_remote_database_with_multiple_tables() {
use lance_namespace::models::CreateNamespaceRequest;
let fixture = RestServerFixture::new().await;
// Connect to the REST server
// Use db://dummy as URI and set actual server URL via host_override
let conn = ConnectBuilder::new("db://dummy")
.api_key("test-api-key")
.region("us-east-1")
.host_override(&fixture.server_url)
.execute()
.await
.unwrap();
// Create a child namespace first
let namespace = vec!["multi_table_ns".to_string()];
conn.create_namespace(CreateNamespaceRequest {
id: Some(namespace.clone()),
mode: None,
properties: None,
})
.await
.expect("Failed to create namespace");
// Create multiple tables in the child namespace
let schema = Arc::new(Schema::new(vec![Field::new("id", DataType::Int32, false)]));
for i in 1..=3 {
let data =
RecordBatch::try_new(schema.clone(), vec![Arc::new(Int32Array::from(vec![i]))])
.unwrap();
let reader = RecordBatchIterator::new([Ok(data.clone())], schema.clone());
conn.create_table(format!("table{}", i), reader)
.namespace(namespace.clone())
.execute()
.await
.unwrap_or_else(|e| panic!("Failed to create table{}: {:?}", i, e));
}
// List tables in the child namespace
let list_response = conn
.list_tables(ListTablesRequest {
id: Some(namespace.clone()),
page_token: None,
limit: None,
})
.await
.unwrap();
assert_eq!(list_response.tables.len(), 3);
assert!(list_response.tables.contains(&"table1".to_string()));
assert!(list_response.tables.contains(&"table2".to_string()));
assert!(list_response.tables.contains(&"table3".to_string()));
}
}
}


@@ -1088,17 +1088,6 @@ impl<S: HttpSend> BaseTable for RemoteTable<S> {
body["num_partitions"] = serde_json::Value::Number(num_partitions.into()); body["num_partitions"] = serde_json::Value::Number(num_partitions.into());
} }
} }
Index::IvfRq(index) => {
body[INDEX_TYPE_KEY] = serde_json::Value::String("IVF_RQ".to_string());
body[METRIC_TYPE_KEY] =
serde_json::Value::String(index.distance_type.to_string().to_lowercase());
if let Some(num_partitions) = index.num_partitions {
body["num_partitions"] = serde_json::Value::Number(num_partitions.into());
}
if let Some(num_bits) = index.num_bits {
body["num_bits"] = serde_json::Value::Number(num_bits.into());
}
}
Index::BTree(_) => {
body[INDEX_TYPE_KEY] = serde_json::Value::String("BTREE".to_string());
}
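For reference, the removed IVF_RQ arm assembled a request body along these lines; the literal strings behind INDEX_TYPE_KEY and METRIC_TYPE_KEY are not shown in this diff, so "index_type" and "metric_type" below are assumptions:

use serde_json::json;

// Hypothetical body for Index::IvfRq with L2 distance,
// num_partitions = Some(256) and num_bits = Some(8).
let body = json!({
    "index_type": "IVF_RQ",   // assumed value of INDEX_TYPE_KEY
    "metric_type": "l2",      // assumed value of METRIC_TYPE_KEY
    "num_partitions": 256,
    "num_bits": 8,
});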


@@ -29,7 +29,7 @@ use lance::dataset::{
use lance::dataset::{MergeInsertBuilder as LanceMergeInsertBuilder, WhenNotMatchedBySource};
use lance::index::vector::utils::infer_vector_dim;
use lance::index::vector::VectorIndexParams;
- use lance::io::{ObjectStoreParams, WrappingObjectStore};
+ use lance::io::WrappingObjectStore;
use lance_datafusion::exec::{analyze_plan as lance_analyze_plan, execute_plan};
use lance_datafusion::utils::StreamingWriteSource;
use lance_index::scalar::{BuiltinIndexType, ScalarIndexParams};
@@ -40,7 +40,6 @@ use lance_index::vector::pq::PQBuildParams;
use lance_index::vector::sq::builder::SQBuildParams;
use lance_index::DatasetIndexExt;
use lance_index::IndexType;
use lance_io::object_store::LanceNamespaceStorageOptionsProvider;
use lance_namespace::models::{
QueryTableRequest as NsQueryTableRequest, QueryTableRequestFullTextQuery,
QueryTableRequestVector, StringFtsQuery,
@@ -1612,105 +1611,6 @@ impl NativeTable {
self
}
/// Opens an existing Table using a namespace client.
///
/// This method uses `DatasetBuilder::from_namespace` to open the table, which
/// automatically fetches the table location and storage options from the namespace.
/// This eliminates the need to pre-fetch and merge storage options before opening.
///
/// # Arguments
///
/// * `namespace_client` - The namespace client to use for fetching table metadata
/// * `name` - The table name
/// * `namespace` - The namespace path (e.g., vec!["parent", "child"])
/// * `write_store_wrapper` - Optional wrapper for the object store on write path
/// * `params` - Optional read parameters
/// * `read_consistency_interval` - Optional interval for read consistency
/// * `server_side_query_enabled` - Whether to enable server-side query execution.
/// When true, the namespace_client will be stored and queries will be executed
/// on the namespace server. When false, the namespace is only used for opening
/// the table, and queries are executed locally.
/// * `session` - Optional session for object stores and caching
///
/// # Returns
///
/// * A [NativeTable] object.
#[allow(clippy::too_many_arguments)]
pub async fn open_from_namespace(
namespace_client: Arc<dyn LanceNamespace>,
name: &str,
namespace: Vec<String>,
write_store_wrapper: Option<Arc<dyn WrappingObjectStore>>,
params: Option<ReadParams>,
read_consistency_interval: Option<std::time::Duration>,
server_side_query_enabled: bool,
session: Option<Arc<lance::session::Session>>,
) -> Result<Self> {
let mut params = params.unwrap_or_default();
// Set the session in read params
if let Some(sess) = session {
params.session(sess);
}
// patch the params if we have a write store wrapper
let params = match write_store_wrapper.clone() {
Some(wrapper) => params.patch_with_store_wrapper(wrapper)?,
None => params,
};
// Build table_id from namespace + name
let mut table_id = namespace.clone();
table_id.push(name.to_string());
// Use DatasetBuilder::from_namespace which automatically fetches location
// and storage options from the namespace
let builder = DatasetBuilder::from_namespace(
namespace_client.clone(),
table_id,
false, // Don't ignore namespace storage options
)
.await
.map_err(|e| match e {
lance::Error::Namespace { source, .. } => Error::Runtime {
message: format!("Failed to get table info from namespace: {:?}", source),
},
source => Error::Lance { source },
})?;
let dataset = builder
.with_read_params(params)
.load()
.await
.map_err(|e| match e {
lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
name: name.to_string(),
source: Box::new(e),
},
source => Error::Lance { source },
})?;
let uri = dataset.uri().to_string();
let dataset = DatasetConsistencyWrapper::new_latest(dataset, read_consistency_interval);
let id = Self::build_id(&namespace, name);
let stored_namespace_client = if server_side_query_enabled {
Some(namespace_client)
} else {
None
};
Ok(Self {
name: name.to_string(),
namespace,
id,
uri,
dataset,
read_consistency_interval,
namespace_client: stored_namespace_client,
})
}
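A hedged call sketch for the removed helper above, assuming an Arc'd namespace client named ns is already in hand and the call sits in an async fn returning lancedb::Result; argument order follows the signature shown:

let table = NativeTable::open_from_namespace(
    ns.clone(),                        // Arc<dyn LanceNamespace>
    "my_table",
    vec!["parent".to_string()],        // namespace path
    None,                              // no write store wrapper
    None,                              // default read params
    Some(std::time::Duration::from_secs(5)),
    false,                             // run queries locally
    None,                              // no shared session
)
.await?;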
fn get_table_name(uri: &str) -> Result<String> {
let path = Path::new(uri);
let name = path
@@ -1822,102 +1722,6 @@ impl NativeTable {
.await
}
/// Creates a new Table using a namespace client for storage options.
///
/// This method sets up a `StorageOptionsProvider` from the namespace client,
/// enabling automatic credential refresh for cloud storage. The namespace
/// is used for:
/// 1. Setting up storage options provider for credential vending
/// 2. Optionally enabling server-side query execution
///
/// # Arguments
///
/// * `namespace_client` - The namespace client to use for storage options
/// * `uri` - The URI to the table (obtained from create_empty_table response)
/// * `name` - The table name
/// * `namespace` - The namespace path (e.g., vec!["parent", "child"])
/// * `batches` - RecordBatch to be saved in the database
/// * `write_store_wrapper` - Optional wrapper for the object store on write path
/// * `params` - Optional write parameters
/// * `read_consistency_interval` - Optional interval for read consistency
/// * `server_side_query_enabled` - Whether to enable server-side query execution
///
/// # Returns
///
/// * A [NativeTable] object.
#[allow(clippy::too_many_arguments)]
pub async fn create_from_namespace(
namespace_client: Arc<dyn LanceNamespace>,
uri: &str,
name: &str,
namespace: Vec<String>,
batches: impl StreamingWriteSource,
write_store_wrapper: Option<Arc<dyn WrappingObjectStore>>,
params: Option<WriteParams>,
read_consistency_interval: Option<std::time::Duration>,
server_side_query_enabled: bool,
session: Option<Arc<lance::session::Session>>,
) -> Result<Self> {
// Build table_id from namespace + name for the storage options provider
let mut table_id = namespace.clone();
table_id.push(name.to_string());
// Set up storage options provider from namespace
let storage_options_provider = Arc::new(LanceNamespaceStorageOptionsProvider::new(
namespace_client.clone(),
table_id,
));
// Start with provided params or defaults
let mut params = params.unwrap_or_default();
// Set the session in write params
if let Some(sess) = session {
params.session = Some(sess);
}
// Ensure store_params exists and set the storage options provider
let store_params = params
.store_params
.get_or_insert_with(ObjectStoreParams::default);
store_params.storage_options_provider = Some(storage_options_provider);
// Patch the params if we have a write store wrapper
let params = match write_store_wrapper.clone() {
Some(wrapper) => params.patch_with_store_wrapper(wrapper)?,
None => params,
};
let insert_builder = InsertBuilder::new(uri).with_params(&params);
let dataset = insert_builder
.execute_stream(batches)
.await
.map_err(|e| match e {
lance::Error::DatasetAlreadyExists { .. } => Error::TableAlreadyExists {
name: name.to_string(),
},
source => Error::Lance { source },
})?;
let id = Self::build_id(&namespace, name);
let stored_namespace_client = if server_side_query_enabled {
Some(namespace_client)
} else {
None
};
Ok(Self {
name: name.to_string(),
namespace,
id,
uri: uri.to_string(),
dataset: DatasetConsistencyWrapper::new_latest(dataset, read_consistency_interval),
read_consistency_interval,
namespace_client: stored_namespace_client,
})
}
async fn optimize_indices(&self, options: &OptimizeOptions) -> Result<()> {
info!("LanceDB: optimizing indices: {:?}", options);
self.dataset


@@ -5,19 +5,16 @@
use regex::Regex;
use std::env;
- use std::process::Stdio;
- use tokio::io::{AsyncBufReadExt, BufReader};
- use tokio::process::{Child, ChildStdout, Command};
- use tokio::sync::mpsc;
+ use std::io::{BufRead, BufReader};
+ use std::process::{Child, ChildStdout, Command, Stdio};
use crate::{connect, Connection};
- use anyhow::{anyhow, bail, Result};
+ use anyhow::{bail, Result};
use tempfile::{tempdir, TempDir};
pub struct TestConnection {
pub uri: String,
pub connection: Connection,
- pub is_remote: bool,
_temp_dir: Option<TempDir>,
_process: Option<TestProcess>,
}
@@ -40,56 +37,6 @@ pub async fn new_test_connection() -> Result<TestConnection> {
}
}
async fn spawn_stdout_reader(
mut stdout: BufReader<ChildStdout>,
port_sender: mpsc::Sender<anyhow::Result<String>>,
) -> tokio::task::JoinHandle<()> {
let print_stdout = env::var("PRINT_LANCEDB_TEST_CONNECTION_SCRIPT_OUTPUT").is_ok();
tokio::spawn(async move {
let mut line = String::new();
let re = Regex::new(r"Query node now listening on 0.0.0.0:(.*)").unwrap();
loop {
line.clear();
let result = stdout.read_line(&mut line).await;
if let Err(err) = result {
port_sender
.send(Err(anyhow!(
"error while reading from process output: {}",
err
)))
.await
.unwrap();
return;
} else if result.unwrap() == 0 {
port_sender
.send(Err(anyhow!(
" hit EOF before reading port from process output."
)))
.await
.unwrap();
return;
}
if re.is_match(&line) {
let caps = re.captures(&line).unwrap();
port_sender.send(Ok(caps[1].to_string())).await.unwrap();
break;
}
}
loop {
line.clear();
match stdout.read_line(&mut line).await {
Err(_) => return,
Ok(0) => return,
Ok(_size) => {
if print_stdout {
print!("{}", line);
}
}
}
}
})
}
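The removed reader hands the first matching line back through a tokio mpsc channel; stripped to its core, the pattern looks like this (a sketch, with the port string hard-coded in place of the parsed output):

use tokio::sync::mpsc;

// Spawn a producer task and await its first message.
async fn first_message() -> Option<String> {
    let (tx, mut rx) = mpsc::channel::<String>(5);
    tokio::spawn(async move {
        let _ = tx.send("8080".to_string()).await; // stands in for the parsed port
    });
    rx.recv().await
}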
async fn new_remote_connection(script_path: &str) -> Result<TestConnection> {
let temp_dir = tempdir()?;
let data_path = temp_dir.path().to_str().unwrap().to_string();
@@ -110,25 +57,38 @@ async fn new_remote_connection(script_path: &str) -> Result<TestConnection> {
child: child_result.unwrap(),
};
let stdout = BufReader::new(process.child.stdout.take().unwrap());
- let (port_sender, mut port_receiver) = mpsc::channel(5);
- let _reader = spawn_stdout_reader(stdout, port_sender).await;
- let port = match port_receiver.recv().await {
- None => bail!("Unable to determine the port number used by the phalanx process we spawned, because the reader thread was closed too soon."),
- Some(Err(err)) => bail!("Unable to determine the port number used by the phalanx process we spawned, because of an error, {}", err),
- Some(Ok(port)) => port,
- };
+ let port = read_process_port(stdout)?;
let uri = "db://test";
let host_override = format!("http://localhost:{}", port);
let connection = create_new_connection(uri, &host_override).await?;
Ok(TestConnection {
uri: uri.to_string(),
connection,
- is_remote: true,
_temp_dir: Some(temp_dir),
_process: Some(process),
})
}
fn read_process_port(mut stdout: BufReader<ChildStdout>) -> Result<String> {
let mut line = String::new();
let re = Regex::new(r"Query node now listening on 0.0.0.0:(.*)").unwrap();
loop {
let result = stdout.read_line(&mut line);
if let Err(err) = result {
bail!(format!(
"read_process_port: error while reading from process output: {}",
err
));
} else if result.unwrap() == 0 {
bail!("read_process_port: hit EOF before reading port from process output.");
}
if re.is_match(&line) {
let caps = re.captures(&line).unwrap();
return Ok(caps[1].to_string());
}
}
}
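The capture group above grabs everything after the listen prefix; a standalone sketch of the extraction:

use regex::Regex;

// Capture group 1 holds the port text after "0.0.0.0:".
let re = Regex::new(r"Query node now listening on 0.0.0.0:(.*)").unwrap();
let caps = re.captures("Query node now listening on 0.0.0.0:8080");
assert_eq!(caps.map(|c| c[1].to_string()).as_deref(), Some("8080"));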
#[cfg(feature = "remote")] #[cfg(feature = "remote")]
async fn create_new_connection(uri: &str, host_override: &str) -> crate::error::Result<Connection> { async fn create_new_connection(uri: &str, host_override: &str) -> crate::error::Result<Connection> {
connect(uri) connect(uri)
@@ -154,7 +114,6 @@ async fn new_local_connection() -> Result<TestConnection> {
Ok(TestConnection {
uri: uri.to_string(),
connection,
is_remote: false,
_temp_dir: Some(temp_dir),
_process: None,
})


@@ -1,253 +0,0 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use std::{
borrow::Cow,
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
},
time::Duration,
};
use arrow::buffer::NullBuffer;
use arrow_array::{
Array, FixedSizeListArray, Float32Array, RecordBatch, RecordBatchIterator, StringArray,
};
use arrow_schema::{DataType, Field, Schema};
use lancedb::{
embeddings::{EmbeddingDefinition, EmbeddingFunction, MaybeEmbedded, WithEmbeddings},
Error, Result,
};
#[derive(Debug)]
struct SlowMockEmbed {
name: String,
dim: usize,
delay_ms: u64,
call_count: Arc<AtomicUsize>,
}
impl SlowMockEmbed {
pub fn new(name: String, dim: usize, delay_ms: u64) -> Self {
Self {
name,
dim,
delay_ms,
call_count: Arc::new(AtomicUsize::new(0)),
}
}
pub fn get_call_count(&self) -> usize {
self.call_count.load(Ordering::SeqCst)
}
}
impl EmbeddingFunction for SlowMockEmbed {
fn name(&self) -> &str {
&self.name
}
fn source_type(&self) -> Result<Cow<'_, DataType>> {
Ok(Cow::Owned(DataType::Utf8))
}
fn dest_type(&self) -> Result<Cow<'_, DataType>> {
Ok(Cow::Owned(DataType::new_fixed_size_list(
DataType::Float32,
self.dim as _,
true,
)))
}
fn compute_source_embeddings(&self, source: Arc<dyn Array>) -> Result<Arc<dyn Array>> {
// Simulate slow embedding computation
std::thread::sleep(Duration::from_millis(self.delay_ms));
self.call_count.fetch_add(1, Ordering::SeqCst);
let len = source.len();
let inner = Arc::new(Float32Array::from(vec![Some(1.0); len * self.dim]));
let field = Field::new("item", inner.data_type().clone(), false);
let arr = FixedSizeListArray::new(
Arc::new(field),
self.dim as _,
inner,
Some(NullBuffer::new_valid(len)),
);
Ok(Arc::new(arr))
}
fn compute_query_embeddings(&self, _input: Arc<dyn Array>) -> Result<Arc<dyn Array>> {
unimplemented!()
}
}
fn create_test_batch() -> Result<RecordBatch> {
let schema = Arc::new(Schema::new(vec![Field::new("text", DataType::Utf8, false)]));
let text = StringArray::from(vec!["hello", "world"]);
RecordBatch::try_new(schema, vec![Arc::new(text)]).map_err(|e| Error::Runtime {
message: format!("Failed to create test batch: {}", e),
})
}
#[test]
fn test_single_embedding_fast_path() {
// Single embedding should execute without spawning threads
let batch = create_test_batch().unwrap();
let schema = batch.schema();
let embed = Arc::new(SlowMockEmbed::new("test".to_string(), 2, 10));
let embedding_def = EmbeddingDefinition::new("text", "test", Some("embedding"));
let reader = RecordBatchIterator::new(vec![Ok(batch)], schema);
let embeddings = vec![(embedding_def, embed.clone() as Arc<dyn EmbeddingFunction>)];
let mut with_embeddings = WithEmbeddings::new(reader, embeddings);
let result = with_embeddings.next().unwrap().unwrap();
assert!(result.column_by_name("embedding").is_some());
assert_eq!(embed.get_call_count(), 1);
}
#[test]
fn test_multiple_embeddings_parallel() {
// Multiple embeddings should execute in parallel
let batch = create_test_batch().unwrap();
let schema = batch.schema();
let embed1 = Arc::new(SlowMockEmbed::new("embed1".to_string(), 2, 100));
let embed2 = Arc::new(SlowMockEmbed::new("embed2".to_string(), 3, 100));
let embed3 = Arc::new(SlowMockEmbed::new("embed3".to_string(), 4, 100));
let def1 = EmbeddingDefinition::new("text", "embed1", Some("emb1"));
let def2 = EmbeddingDefinition::new("text", "embed2", Some("emb2"));
let def3 = EmbeddingDefinition::new("text", "embed3", Some("emb3"));
let reader = RecordBatchIterator::new(vec![Ok(batch)], schema);
let embeddings = vec![
(def1, embed1.clone() as Arc<dyn EmbeddingFunction>),
(def2, embed2.clone() as Arc<dyn EmbeddingFunction>),
(def3, embed3.clone() as Arc<dyn EmbeddingFunction>),
];
let mut with_embeddings = WithEmbeddings::new(reader, embeddings);
let result = with_embeddings.next().unwrap().unwrap();
// Verify all embedding columns are present
assert!(result.column_by_name("emb1").is_some());
assert!(result.column_by_name("emb2").is_some());
assert!(result.column_by_name("emb3").is_some());
// Verify all embeddings were computed
assert_eq!(embed1.get_call_count(), 1);
assert_eq!(embed2.get_call_count(), 1);
assert_eq!(embed3.get_call_count(), 1);
}
#[test]
fn test_embedding_column_order_preserved() {
// Verify that embedding columns are added in the same order as definitions
let batch = create_test_batch().unwrap();
let schema = batch.schema();
let embed1 = Arc::new(SlowMockEmbed::new("embed1".to_string(), 2, 10));
let embed2 = Arc::new(SlowMockEmbed::new("embed2".to_string(), 3, 10));
let embed3 = Arc::new(SlowMockEmbed::new("embed3".to_string(), 4, 10));
let def1 = EmbeddingDefinition::new("text", "embed1", Some("first"));
let def2 = EmbeddingDefinition::new("text", "embed2", Some("second"));
let def3 = EmbeddingDefinition::new("text", "embed3", Some("third"));
let reader = RecordBatchIterator::new(vec![Ok(batch)], schema);
let embeddings = vec![
(def1, embed1 as Arc<dyn EmbeddingFunction>),
(def2, embed2 as Arc<dyn EmbeddingFunction>),
(def3, embed3 as Arc<dyn EmbeddingFunction>),
];
let mut with_embeddings = WithEmbeddings::new(reader, embeddings);
let result = with_embeddings.next().unwrap().unwrap();
let result_schema = result.schema();
// Original column is first
assert_eq!(result_schema.field(0).name(), "text");
// Embedding columns follow in order
assert_eq!(result_schema.field(1).name(), "first");
assert_eq!(result_schema.field(2).name(), "second");
assert_eq!(result_schema.field(3).name(), "third");
}
#[test]
fn test_embedding_error_propagation() {
// Test that errors from embedding computation are properly propagated
#[derive(Debug)]
struct FailingEmbed {
name: String,
}
impl EmbeddingFunction for FailingEmbed {
fn name(&self) -> &str {
&self.name
}
fn source_type(&self) -> Result<Cow<'_, DataType>> {
Ok(Cow::Owned(DataType::Utf8))
}
fn dest_type(&self) -> Result<Cow<'_, DataType>> {
Ok(Cow::Owned(DataType::new_fixed_size_list(
DataType::Float32,
2,
true,
)))
}
fn compute_source_embeddings(&self, _source: Arc<dyn Array>) -> Result<Arc<dyn Array>> {
Err(Error::Runtime {
message: "Intentional failure".to_string(),
})
}
fn compute_query_embeddings(&self, _input: Arc<dyn Array>) -> Result<Arc<dyn Array>> {
unimplemented!()
}
}
let batch = create_test_batch().unwrap();
let schema = batch.schema();
let embed = Arc::new(FailingEmbed {
name: "failing".to_string(),
});
let def = EmbeddingDefinition::new("text", "failing", Some("emb"));
let reader = RecordBatchIterator::new(vec![Ok(batch)], schema);
let embeddings = vec![(def, embed as Arc<dyn EmbeddingFunction>)];
let mut with_embeddings = WithEmbeddings::new(reader, embeddings);
let result = with_embeddings.next().unwrap();
assert!(result.is_err());
let err_msg = format!("{}", result.err().unwrap());
assert!(err_msg.contains("Intentional failure"));
}
#[test]
fn test_maybe_embedded_with_no_embeddings() {
// Test that MaybeEmbedded::No variant works correctly
let batch = create_test_batch().unwrap();
let schema = batch.schema();
let reader = RecordBatchIterator::new(vec![Ok(batch.clone())], schema.clone());
let table_def = lancedb::table::TableDefinition {
schema: schema.clone(),
column_definitions: vec![lancedb::table::ColumnDefinition {
kind: lancedb::table::ColumnKind::Physical,
}],
};
let mut maybe_embedded = MaybeEmbedded::try_new(reader, table_def, None).unwrap();
let result = maybe_embedded.next().unwrap().unwrap();
assert_eq!(result.num_columns(), 1);
assert_eq!(result.column(0).as_ref(), batch.column(0).as_ref());
}