Mirror of https://github.com/lancedb/lancedb.git
Synced 2026-01-05 03:12:57 +00:00

Compare commits: python-v0.… -> lance-main (11 commits)

Commits (author and date columns were not captured in this mirror view):
5a732bbf39
4beb2d2877
a00b8595d1
9c8314b4fd
c625b6f2b2
bec8fe6547
dc1150c011
afaefc6264
cb70ff8cee
cbb5a841b1
c72f6770fd
@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.20.1-beta.1"
+current_version = "0.21.0"
 parse = """(?x)
 (?P<major>0|[1-9]\\d*)\\.
 (?P<minor>0|[1-9]\\d*)\\.
.github/workflows/npm-publish.yml (vendored, 3 changed lines)
@@ -550,6 +550,9 @@ jobs:
       bash ci/update_lockfiles.sh
   - name: Push new commit
     uses: ad-m/github-push-action@master
+    with:
+      github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
+      branch: main
   - name: Notify Slack Action
     uses: ravsamhq/notify-slack-action@2.3.0
    if: ${{ always() }}
Cargo.lock (generated, 128 changed lines)
@@ -1852,7 +1852,7 @@ dependencies = [
  "futures",
  "itertools 0.14.0",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "parking_lot",
  "rand 0.8.5",
  "regex",
@@ -1884,7 +1884,7 @@ dependencies = [
  "futures",
  "itertools 0.14.0",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "parking_lot",
  "tokio",
 ]
@@ -1908,7 +1908,7 @@ dependencies = [
  "datafusion-session",
  "futures",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "tokio",
 ]

@@ -1927,7 +1927,7 @@ dependencies = [
  "indexmap 2.9.0",
  "libc",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "paste",
  "sqlparser 0.55.0",
  "tokio",
@@ -1967,7 +1967,7 @@ dependencies = [
  "glob",
  "itertools 0.14.0",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "rand 0.8.5",
  "tokio",
  "url",
@@ -1993,7 +1993,7 @@ dependencies = [
  "datafusion-physical-plan",
  "datafusion-session",
  "futures",
- "object_store 0.12.2",
+ "object_store",
  "regex",
  "tokio",
 ]
@@ -2018,7 +2018,7 @@ dependencies = [
  "datafusion-physical-plan",
  "datafusion-session",
  "futures",
- "object_store 0.12.2",
+ "object_store",
  "serde_json",
  "tokio",
 ]
@@ -2041,7 +2041,7 @@ dependencies = [
  "datafusion-expr",
  "futures",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "parking_lot",
  "rand 0.8.5",
  "tempfile",
@@ -2340,7 +2340,7 @@ dependencies = [
  "futures",
  "itertools 0.14.0",
  "log",
- "object_store 0.12.2",
+ "object_store",
  "parking_lot",
  "tokio",
 ]
@@ -2813,8 +2813,7 @@ checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"

 [[package]]
 name = "fsst"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "rand 0.8.5",
 ]
@@ -3906,8 +3905,7 @@ dependencies = [

 [[package]]
 name = "lance"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-arith",
@@ -3950,7 +3948,7 @@ dependencies = [
  "lazy_static",
  "log",
  "moka",
- "object_store 0.11.2",
+ "object_store",
  "permutation",
  "pin-project",
  "prost",
@@ -3970,8 +3968,7 @@ dependencies = [

 [[package]]
 name = "lance-arrow"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -3988,8 +3985,7 @@ dependencies = [

 [[package]]
 name = "lance-core"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -4009,7 +4005,7 @@ dependencies = [
  "mock_instant",
  "moka",
  "num_cpus",
- "object_store 0.11.2",
+ "object_store",
  "pin-project",
  "prost",
  "rand 0.8.5",
@@ -4025,8 +4021,7 @@ dependencies = [

 [[package]]
 name = "lance-datafusion"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4055,8 +4050,7 @@ dependencies = [

 [[package]]
 name = "lance-datagen"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4071,8 +4065,7 @@ dependencies = [

 [[package]]
 name = "lance-encoding"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrayref",
  "arrow",
@@ -4106,13 +4099,13 @@ dependencies = [
  "snafu",
  "tokio",
  "tracing",
+ "xxhash-rust",
  "zstd",
 ]

 [[package]]
 name = "lance-file"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow-arith",
  "arrow-array",
@@ -4133,7 +4126,7 @@ dependencies = [
  "lance-io",
  "log",
  "num-traits",
- "object_store 0.11.2",
+ "object_store",
  "prost",
  "prost-build",
  "prost-types",
@@ -4146,8 +4139,7 @@ dependencies = [

 [[package]]
 name = "lance-index"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4184,7 +4176,7 @@ dependencies = [
  "log",
  "moka",
  "num-traits",
- "object_store 0.11.2",
+ "object_store",
  "prost",
  "prost-build",
  "rand 0.8.5",
@@ -4202,8 +4194,7 @@ dependencies = [

 [[package]]
 name = "lance-io"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-arith",
@@ -4227,7 +4218,7 @@ dependencies = [
  "lance-core",
  "lazy_static",
  "log",
- "object_store 0.11.2",
+ "object_store",
  "path_abs",
  "pin-project",
  "prost",
@@ -4242,8 +4233,7 @@ dependencies = [

 [[package]]
 name = "lance-linalg"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow-array",
  "arrow-ord",
@@ -4266,8 +4256,7 @@ dependencies = [

 [[package]]
 name = "lance-table"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4288,7 +4277,7 @@ dependencies = [
  "lance-io",
  "lazy_static",
  "log",
- "object_store 0.11.2",
+ "object_store",
  "prost",
  "prost-build",
  "prost-types",
@@ -4306,8 +4295,7 @@ dependencies = [

 [[package]]
 name = "lance-testing"
-version = "0.30.0"
-source = "git+https://github.com/lancedb/lance.git?tag=v0.30.0-beta.1#a499cfa06b7221b895bc13908cfc2ee7aadba46e"
+version = "0.31.0"
 dependencies = [
  "arrow-array",
  "arrow-schema",
@@ -4318,7 +4306,7 @@ dependencies = [

 [[package]]
 name = "lancedb"
-version = "0.20.1-beta.1"
+version = "0.21.0"
 dependencies = [
  "arrow",
  "arrow-array",
@@ -4365,7 +4353,7 @@ dependencies = [
  "log",
  "moka",
  "num-traits",
- "object_store 0.11.2",
+ "object_store",
  "pin-project",
  "polars",
  "polars-arrow",
@@ -4405,7 +4393,7 @@ dependencies = [

 [[package]]
 name = "lancedb-node"
-version = "0.20.1-beta.1"
+version = "0.21.0"
 dependencies = [
  "arrow-array",
  "arrow-ipc",
@@ -4422,7 +4410,7 @@ dependencies = [
  "lancedb",
  "lzma-sys",
  "neon",
- "object_store 0.11.2",
+ "object_store",
  "once_cell",
  "snafu",
  "tokio",
@@ -4430,7 +4418,7 @@ dependencies = [

 [[package]]
 name = "lancedb-nodejs"
-version = "0.20.1-beta.1"
+version = "0.21.0"
 dependencies = [
  "arrow-array",
  "arrow-ipc",
@@ -4450,7 +4438,7 @@ dependencies = [

 [[package]]
 name = "lancedb-python"
-version = "0.23.1-beta.1"
+version = "0.24.0"
 dependencies = [
  "arrow",
  "env_logger",
@@ -5159,38 +5147,6 @@ dependencies = [
  "memchr",
 ]

-[[package]]
-name = "object_store"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cfccb68961a56facde1163f9319e0d15743352344e7808a11795fb99698dcaf"
-dependencies = [
- "async-trait",
- "base64 0.22.1",
- "bytes",
- "chrono",
- "futures",
- "httparse",
- "humantime",
- "hyper 1.6.0",
- "itertools 0.13.0",
- "md-5",
- "parking_lot",
- "percent-encoding",
- "quick-xml",
- "rand 0.8.5",
- "reqwest",
- "ring",
- "rustls-pemfile 2.2.0",
- "serde",
- "serde_json",
- "snafu",
- "tokio",
- "tracing",
- "url",
- "walkdir",
-]
-
 [[package]]
 name = "object_store"
 version = "0.12.2"
@@ -5198,14 +5154,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7781f96d79ed0f961a7021424ab01840efbda64ae7a505aaea195efc91eaaec4"
 dependencies = [
  "async-trait",
+ "base64 0.22.1",
  "bytes",
  "chrono",
+ "form_urlencoded",
  "futures",
  "http 1.3.1",
+ "http-body-util",
+ "httparse",
  "humantime",
+ "hyper 1.6.0",
  "itertools 0.14.0",
+ "md-5",
  "parking_lot",
  "percent-encoding",
+ "quick-xml",
+ "rand 0.9.1",
+ "reqwest",
+ "ring",
+ "rustls-pemfile 2.2.0",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
  "thiserror 2.0.12",
  "tokio",
  "tracing",
Cargo.toml (18 changed lines)
@@ -21,14 +21,14 @@ categories = ["database-implementations"]
 rust-version = "1.78.0"

 [workspace.dependencies]
-lance = { "version" = "=0.30.0", "features" = ["dynamodb"], tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-io = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-index = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-linalg = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-table = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-testing = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-datafusion = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
-lance-encoding = { version = "=0.30.0", tag = "v0.30.0-beta.1", git="https://github.com/lancedb/lance.git" }
+lance = { path = "../lance/rust/lance", "features" = ["dynamodb"] }
+lance-io = { path = "../lance/rust/lance-io" }
+lance-index = { path = "../lance/rust/lance-index" }
+lance-linalg = { path = "../lance/rust/lance-linalg" }
+lance-table = { path = "../lance/rust/lance-table" }
+lance-testing = { path = "../lance/rust/lance-testing" }
+lance-datafusion = { path = "../lance/rust/lance-datafusion" }
+lance-encoding = { path = "../lance/rust/lance-encoding" }
 # Note that this one does not include pyarrow
 arrow = { version = "55.1", optional = false }
 arrow-array = "55.1"
@@ -52,7 +52,7 @@ half = { "version" = "=2.5.0", default-features = false, features = [
 futures = "0"
 log = "0.4"
 moka = { version = "0.12", features = ["future"] }
-object_store = "0.11.0"
+object_store = "0.12.0"
 pin-project = "1.0.7"
 snafu = "0.8"
 url = "2"
@@ -8,7 +8,7 @@
   <parent>
     <groupId>com.lancedb</groupId>
     <artifactId>lancedb-parent</artifactId>
-    <version>0.20.1-beta.1</version>
+    <version>0.21.0-final.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -6,7 +6,7 @@

   <groupId>com.lancedb</groupId>
   <artifactId>lancedb-parent</artifactId>
-  <version>0.20.1-beta.1</version>
+  <version>0.21.0-final.0</version>
   <packaging>pom</packaging>

   <name>LanceDB Parent</name>
node/package-lock.json (generated, 74 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "vectordb",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "vectordb",
-      "version": "0.20.1-beta.1",
+      "version": "0.21.0",
       "cpu": [
         "x64",
         "arm64"
@@ -52,11 +52,11 @@
         "uuid": "^9.0.0"
       },
       "optionalDependencies": {
-        "@lancedb/vectordb-darwin-arm64": "0.20.1-beta.1",
-        "@lancedb/vectordb-darwin-x64": "0.20.1-beta.1",
-        "@lancedb/vectordb-linux-arm64-gnu": "0.20.1-beta.1",
-        "@lancedb/vectordb-linux-x64-gnu": "0.20.1-beta.1",
-        "@lancedb/vectordb-win32-x64-msvc": "0.20.1-beta.1"
+        "@lancedb/vectordb-darwin-arm64": "0.21.0",
+        "@lancedb/vectordb-darwin-x64": "0.21.0",
+        "@lancedb/vectordb-linux-arm64-gnu": "0.21.0",
+        "@lancedb/vectordb-linux-x64-gnu": "0.21.0",
+        "@lancedb/vectordb-win32-x64-msvc": "0.21.0"
       },
       "peerDependencies": {
         "@apache-arrow/ts": "^14.0.2",
@@ -326,66 +326,6 @@
         "@jridgewell/sourcemap-codec": "^1.4.10"
       }
     },
-    "node_modules/@lancedb/vectordb-darwin-arm64": {
-      "version": "0.20.1-beta.1",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.20.1-beta.1.tgz",
-      "integrity": "sha512-DPD8gwFQz5aENYYbTFS/l3YX/rqzS6Kj2B4IZERccVFULQsdR5YwtaAfFwTMp7NSnsjWKwJAknohiMZlJr4njQ==",
-      "cpu": [
-        "arm64"
-      ],
-      "optional": true,
-      "os": [
-        "darwin"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-darwin-x64": {
-      "version": "0.20.1-beta.1",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.20.1-beta.1.tgz",
-      "integrity": "sha512-lTPtlRSTC08UgQW5Bv8WYhdbogAgUJ+9ejg+UE+fwP9gEsgEKXL/SHBm+9gmAlTo7LbrxJjg0CtCde/mW68UTw==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "darwin"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-linux-arm64-gnu": {
-      "version": "0.20.1-beta.1",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.20.1-beta.1.tgz",
-      "integrity": "sha512-w/3O9FvwQiGegYsM21yZ0FezfOFVsW7HttYwwPzZMZaCpK3/i+LvZVSqwO4qXHHJBtHgKevonINyvVlg5487aQ==",
-      "cpu": [
-        "arm64"
-      ],
-      "optional": true,
-      "os": [
-        "linux"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-linux-x64-gnu": {
-      "version": "0.20.1-beta.1",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.20.1-beta.1.tgz",
-      "integrity": "sha512-rq7Q6Lq9kJmBcgwplYQVJmRbyeP+xPVmXyyQfAO3IjekqeSsyjj1HoCZYqZIfBZyN5ELiSvIJB0731aKf9pr1A==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "linux"
-      ]
-    },
-    "node_modules/@lancedb/vectordb-win32-x64-msvc": {
-      "version": "0.20.1-beta.1",
-      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.20.1-beta.1.tgz",
-      "integrity": "sha512-kHra0SEXeMKdgqi5h0igsqHcBr73hKBhEVJBa8VTv1DUv6Jvazwl4B4ueqllcyD4k3vvOTb2XzZomm7dhQ9QnA==",
-      "cpu": [
-        "x64"
-      ],
-      "optional": true,
-      "os": [
-        "win32"
-      ]
-    },
     "node_modules/@neon-rs/cli": {
       "version": "0.0.160",
       "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
@@ -1,6 +1,6 @@
 {
   "name": "vectordb",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "description": " Serverless, low-latency vector database for AI applications",
   "private": false,
   "main": "dist/index.js",
@@ -89,10 +89,10 @@
     }
   },
   "optionalDependencies": {
-    "@lancedb/vectordb-darwin-x64": "0.20.1-beta.1",
-    "@lancedb/vectordb-darwin-arm64": "0.20.1-beta.1",
-    "@lancedb/vectordb-linux-x64-gnu": "0.20.1-beta.1",
-    "@lancedb/vectordb-linux-arm64-gnu": "0.20.1-beta.1",
-    "@lancedb/vectordb-win32-x64-msvc": "0.20.1-beta.1"
+    "@lancedb/vectordb-darwin-x64": "0.21.0",
+    "@lancedb/vectordb-darwin-arm64": "0.21.0",
+    "@lancedb/vectordb-linux-x64-gnu": "0.21.0",
+    "@lancedb/vectordb-linux-arm64-gnu": "0.21.0",
+    "@lancedb/vectordb-win32-x64-msvc": "0.21.0"
   }
 }
@@ -1,7 +1,7 @@
 [package]
 name = "lancedb-nodejs"
 edition.workspace = true
-version = "0.20.1-beta.1"
+version = "0.21.0"
 license.workspace = true
 description.workspace = true
 repository.workspace = true
@@ -1650,13 +1650,25 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
       expect(resultSet.has("fob")).toBe(true);
       expect(resultSet.has("fo")).toBe(true);
       expect(resultSet.has("food")).toBe(true);
+
+      const prefixResults = await table
+        .search(
+          new MatchQuery("foo", "text", { fuzziness: 3, prefixLength: 3 }),
+        )
+        .toArray();
+      expect(prefixResults.length).toBe(2);
+      const resultSet2 = new Set(prefixResults.map((r) => r.text));
+      expect(resultSet2.has("foo")).toBe(true);
+      expect(resultSet2.has("food")).toBe(true);
     });

     test("full text search boolean query", async () => {
       const db = await connect(tmpDir.name);
       const data = [
-        { text: "hello world", vector: [0.1, 0.2, 0.3] },
-        { text: "goodbye world", vector: [0.4, 0.5, 0.6] },
+        { text: "The cat and dog are playing" },
+        { text: "The cat is sleeping" },
+        { text: "The dog is barking" },
+        { text: "The dog chases the cat" },
       ];
       const table = await db.createTable("test", data);
       await table.createIndex("text", {
@@ -1666,22 +1678,32 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
       const shouldResults = await table
         .search(
           new BooleanQuery([
-            [Occur.Should, new MatchQuery("hello", "text")],
-            [Occur.Should, new MatchQuery("goodbye", "text")],
+            [Occur.Should, new MatchQuery("cat", "text")],
+            [Occur.Should, new MatchQuery("dog", "text")],
          ]),
        )
        .toArray();
-      expect(shouldResults.length).toBe(2);
+      expect(shouldResults.length).toBe(4);

       const mustResults = await table
         .search(
           new BooleanQuery([
-            [Occur.Must, new MatchQuery("hello", "text")],
-            [Occur.Must, new MatchQuery("world", "text")],
+            [Occur.Must, new MatchQuery("cat", "text")],
+            [Occur.Must, new MatchQuery("dog", "text")],
          ]),
        )
        .toArray();
-      expect(mustResults.length).toBe(1);
+      expect(mustResults.length).toBe(2);
+
+      const mustNotResults = await table
+        .search(
+          new BooleanQuery([
+            [Occur.Must, new MatchQuery("cat", "text")],
+            [Occur.MustNot, new MatchQuery("dog", "text")],
+          ]),
+        )
+        .toArray();
+      expect(mustNotResults.length).toBe(1);
     });

     test.each([
@@ -812,10 +812,12 @@ export enum Operator {
  *
  * - `Must`: The term must be present in the document.
  * - `Should`: The term should contribute to the document score, but is not required.
+ * - `MustNot`: The term must not be present in the document.
  */
 export enum Occur {
-  Must = "MUST",
   Should = "SHOULD",
+  Must = "MUST",
+  MustNot = "MUST_NOT",
 }

 /**
@@ -856,6 +858,7 @@ export class MatchQuery implements FullTextQuery {
   * - `fuzziness`: The fuzziness level for the query (default is 0).
   * - `maxExpansions`: The maximum number of terms to consider for fuzzy matching (default is 50).
   * - `operator`: The logical operator to use for combining terms in the query (default is "OR").
+  * - `prefixLength`: The number of beginning characters being unchanged for fuzzy matching.
   */
  constructor(
    query: string,
@@ -865,6 +868,7 @@ export class MatchQuery implements FullTextQuery {
      fuzziness?: number;
      maxExpansions?: number;
      operator?: Operator;
+     prefixLength?: number;
    },
  ) {
    let fuzziness = options?.fuzziness;
@@ -878,6 +882,7 @@ export class MatchQuery implements FullTextQuery {
      fuzziness,
      options?.maxExpansions ?? 50,
      options?.operator ?? Operator.Or,
+     options?.prefixLength ?? 0,
    );
  }

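The `Occur` variants documented above (`Must`, `Should`, and the new `MustNot`) carry the same semantics in every binding; the Python API changed later in this diff exposes them as `Occur.MUST`, `Occur.SHOULD`, and `Occur.MUST_NOT`. A minimal hedged sketch of how the three modes combine in a `BooleanQuery`, mirroring the tests in this diff (the database path, table name, and row contents are illustrative assumptions, not part of the change):

    import lancedb
    from lancedb.query import BooleanQuery, MatchQuery, Occur

    db = lancedb.connect("/tmp/fts_demo")  # illustrative path
    table = db.create_table(
        "fts_demo",  # illustrative table name
        data=[
            {"text": "The cat and dog are playing"},
            {"text": "The cat is sleeping"},
            {"text": "The dog is barking"},
        ],
        mode="overwrite",
    )
    table.create_fts_index("text", use_tantivy=False, replace=True)

    # SHOULD: documents containing either term contribute to the result.
    either = table.search(
        MatchQuery("cat", "text") | MatchQuery("dog", "text")
    ).to_pandas()

    # MUST + MUST_NOT: documents mentioning "cat" but not "dog".
    cat_only = table.search(
        BooleanQuery(
            [
                (Occur.MUST, MatchQuery("cat", "text")),
                (Occur.MUST_NOT, MatchQuery("dog", "text")),
            ]
        )
    ).to_pandas()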
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-darwin-arm64",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["darwin"],
   "cpu": ["arm64"],
   "main": "lancedb.darwin-arm64.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-darwin-x64",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["darwin"],
   "cpu": ["x64"],
   "main": "lancedb.darwin-x64.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-arm64-gnu",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["linux"],
   "cpu": ["arm64"],
   "main": "lancedb.linux-arm64-gnu.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-arm64-musl",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["linux"],
   "cpu": ["arm64"],
   "main": "lancedb.linux-arm64-musl.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-x64-gnu",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["linux"],
   "cpu": ["x64"],
   "main": "lancedb.linux-x64-gnu.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-linux-x64-musl",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["linux"],
   "cpu": ["x64"],
   "main": "lancedb.linux-x64-musl.node",
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-win32-arm64-msvc",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": [
     "win32"
   ],
@@ -1,6 +1,6 @@
 {
   "name": "@lancedb/lancedb-win32-x64-msvc",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "os": ["win32"],
   "cpu": ["x64"],
   "main": "lancedb.win32-x64-msvc.node",
nodejs/package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "@lancedb/lancedb",
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@lancedb/lancedb",
-      "version": "0.20.1-beta.1",
+      "version": "0.21.0",
       "cpu": [
         "x64",
         "arm64"
@@ -11,7 +11,7 @@
     "ann"
   ],
   "private": false,
-  "version": "0.20.1-beta.1",
+  "version": "0.21.0",
   "main": "dist/index.js",
   "exports": {
     ".": "./dist/index.js",
@@ -335,6 +335,7 @@ impl JsFullTextQuery {
|
|||||||
fuzziness: Option<u32>,
|
fuzziness: Option<u32>,
|
||||||
max_expansions: u32,
|
max_expansions: u32,
|
||||||
operator: String,
|
operator: String,
|
||||||
|
prefix_length: u32,
|
||||||
) -> napi::Result<Self> {
|
) -> napi::Result<Self> {
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
inner: MatchQuery::new(query)
|
inner: MatchQuery::new(query)
|
||||||
@@ -347,6 +348,7 @@ impl JsFullTextQuery {
|
|||||||
napi::Error::from_reason(format!("Invalid operator: {}", e))
|
napi::Error::from_reason(format!("Invalid operator: {}", e))
|
||||||
})?,
|
})?,
|
||||||
)
|
)
|
||||||
|
.with_prefix_length(prefix_length)
|
||||||
.into(),
|
.into(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[tool.bumpversion]
|
[tool.bumpversion]
|
||||||
current_version = "0.23.1-beta.2"
|
current_version = "0.24.0"
|
||||||
parse = """(?x)
|
parse = """(?x)
|
||||||
(?P<major>0|[1-9]\\d*)\\.
|
(?P<major>0|[1-9]\\d*)\\.
|
||||||
(?P<minor>0|[1-9]\\d*)\\.
|
(?P<minor>0|[1-9]\\d*)\\.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lancedb-python"
|
name = "lancedb-python"
|
||||||
version = "0.23.1-beta.2"
|
version = "0.24.0"
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
description = "Python bindings for LanceDB"
|
description = "Python bindings for LanceDB"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|||||||
@@ -101,8 +101,9 @@ class FullTextOperator(str, Enum):
|
|||||||
|
|
||||||
|
|
||||||
class Occur(str, Enum):
|
class Occur(str, Enum):
|
||||||
MUST = "MUST"
|
|
||||||
SHOULD = "SHOULD"
|
SHOULD = "SHOULD"
|
||||||
|
MUST = "MUST"
|
||||||
|
MUST_NOT = "MUST_NOT"
|
||||||
|
|
||||||
|
|
||||||
@pydantic.dataclasses.dataclass
|
@pydantic.dataclasses.dataclass
|
||||||
@@ -181,6 +182,9 @@ class MatchQuery(FullTextQuery):
|
|||||||
Can be either `AND` or `OR`.
|
Can be either `AND` or `OR`.
|
||||||
If `AND`, all terms in the query must match.
|
If `AND`, all terms in the query must match.
|
||||||
If `OR`, at least one term in the query must match.
|
If `OR`, at least one term in the query must match.
|
||||||
|
prefix_length : int, optional
|
||||||
|
The number of beginning characters being unchanged for fuzzy matching.
|
||||||
|
This is useful to achieve prefix matching.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
query: str
|
query: str
|
||||||
@@ -189,6 +193,7 @@ class MatchQuery(FullTextQuery):
|
|||||||
fuzziness: int = pydantic.Field(0, kw_only=True)
|
fuzziness: int = pydantic.Field(0, kw_only=True)
|
||||||
max_expansions: int = pydantic.Field(50, kw_only=True)
|
max_expansions: int = pydantic.Field(50, kw_only=True)
|
||||||
operator: FullTextOperator = pydantic.Field(FullTextOperator.OR, kw_only=True)
|
operator: FullTextOperator = pydantic.Field(FullTextOperator.OR, kw_only=True)
|
||||||
|
prefix_length: int = pydantic.Field(0, kw_only=True)
|
||||||
|
|
||||||
def query_type(self) -> FullTextQueryType:
|
def query_type(self) -> FullTextQueryType:
|
||||||
return FullTextQueryType.MATCH
|
return FullTextQueryType.MATCH
|
||||||
@@ -1446,10 +1451,13 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
|
|||||||
|
|
||||||
query = self._query
|
query = self._query
|
||||||
if self._phrase_query:
|
if self._phrase_query:
|
||||||
raise NotImplementedError(
|
if isinstance(query, str):
|
||||||
"Phrase query is not yet supported in Lance FTS. "
|
if not query.startswith('"') or not query.endswith('"'):
|
||||||
"Use tantivy-based index instead for now."
|
query = f'"{query}"'
|
||||||
)
|
elif isinstance(query, FullTextQuery) and not isinstance(
|
||||||
|
query, PhraseQuery
|
||||||
|
):
|
||||||
|
raise TypeError("Please use PhraseQuery for phrase queries.")
|
||||||
query = self.to_query_object()
|
query = self.to_query_object()
|
||||||
results = self._table._execute_query(query, timeout=timeout)
|
results = self._table._execute_query(query, timeout=timeout)
|
||||||
results = results.read_all()
|
results = results.read_all()
|
||||||
@@ -3034,15 +3042,21 @@ class AsyncHybridQuery(AsyncQueryBase, AsyncVectorQueryBase):
|
|||||||
>>> asyncio.run(doctest_example()) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
|
>>> asyncio.run(doctest_example()) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
|
||||||
Vector Search Plan:
|
Vector Search Plan:
|
||||||
ProjectionExec: expr=[vector@0 as vector, text@3 as text, _distance@2 as _distance]
|
ProjectionExec: expr=[vector@0 as vector, text@3 as text, _distance@2 as _distance]
|
||||||
Take: columns="vector, _rowid, _distance, (text)"
|
Take: columns="vector, _rowid, _distance, (text)"
|
||||||
CoalesceBatchesExec: target_batch_size=1024
|
CoalesceBatchesExec: target_batch_size=1024
|
||||||
GlobalLimitExec: skip=0, fetch=10
|
GlobalLimitExec: skip=0, fetch=10
|
||||||
FilterExec: _distance@2 IS NOT NULL
|
FilterExec: _distance@2 IS NOT NULL
|
||||||
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST], preserve_partitioning=[false]
|
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST], preserve_partitioning=[false]
|
||||||
KNNVectorDistance: metric=l2
|
KNNVectorDistance: metric=l2
|
||||||
LanceScan: uri=..., projection=[vector], row_id=true, row_addr=false, ordered=false
|
LanceScan: uri=..., projection=[vector], row_id=true, row_addr=false, ordered=false
|
||||||
|
<BLANKLINE>
|
||||||
FTS Search Plan:
|
FTS Search Plan:
|
||||||
LanceScan: uri=..., projection=[vector, text], row_id=false, row_addr=false, ordered=true
|
ProjectionExec: expr=[vector@2 as vector, text@3 as text, _score@1 as _score]
|
||||||
|
Take: columns="_rowid, _score, (vector), (text)"
|
||||||
|
CoalesceBatchesExec: target_batch_size=1024
|
||||||
|
GlobalLimitExec: skip=0, fetch=10
|
||||||
|
MatchQuery: query=hello
|
||||||
|
<BLANKLINE>
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
|
|||||||
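The new `prefix_length` option and the reworked phrase-query handling above can be exercised together. A minimal hedged sketch, assuming an existing table `tbl` with an FTS index on a `text` column, and assuming `PhraseQuery` takes `(query, column)` like `MatchQuery` (the table, its data, and that signature are assumptions for illustration, not part of the diff):

    from lancedb.query import MatchQuery, PhraseQuery

    # Fuzzy match, but keep the first three characters fixed so that
    # "foo" still matches "food" while more distant edits are filtered out.
    fuzzy = tbl.search(
        MatchQuery("foo", "text", fuzziness=1, prefix_length=3)
    ).to_pandas()

    # Phrase queries on the lance-based index now go through PhraseQuery;
    # per the change above, passing another FullTextQuery object together
    # with phrase_query() raises TypeError.
    phrase = tbl.search(PhraseQuery("cat and dog", "text")).to_pandas()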
@@ -827,7 +827,7 @@ class Table(ABC):
|
|||||||
ordering_field_names: Optional[Union[str, List[str]]] = None,
|
ordering_field_names: Optional[Union[str, List[str]]] = None,
|
||||||
replace: bool = False,
|
replace: bool = False,
|
||||||
writer_heap_size: Optional[int] = 1024 * 1024 * 1024,
|
writer_heap_size: Optional[int] = 1024 * 1024 * 1024,
|
||||||
use_tantivy: bool = True,
|
use_tantivy: bool = False,
|
||||||
tokenizer_name: Optional[str] = None,
|
tokenizer_name: Optional[str] = None,
|
||||||
with_position: bool = False,
|
with_position: bool = False,
|
||||||
# tokenizer configs:
|
# tokenizer configs:
|
||||||
@@ -864,7 +864,7 @@ class Table(ABC):
|
|||||||
The tokenizer to use for the index. Can be "raw", "default" or the 2 letter
|
The tokenizer to use for the index. Can be "raw", "default" or the 2 letter
|
||||||
language code followed by "_stem". So for english it would be "en_stem".
|
language code followed by "_stem". So for english it would be "en_stem".
|
||||||
For available languages see: https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html
|
For available languages see: https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html
|
||||||
use_tantivy: bool, default True
|
use_tantivy: bool, default False
|
||||||
If True, use the legacy full-text search implementation based on tantivy.
|
If True, use the legacy full-text search implementation based on tantivy.
|
||||||
If False, use the new full-text search implementation based on lance-index.
|
If False, use the new full-text search implementation based on lance-index.
|
||||||
with_position: bool, default False
|
with_position: bool, default False
|
||||||
@@ -1970,7 +1970,7 @@ class LanceTable(Table):
|
|||||||
ordering_field_names: Optional[Union[str, List[str]]] = None,
|
ordering_field_names: Optional[Union[str, List[str]]] = None,
|
||||||
replace: bool = False,
|
replace: bool = False,
|
||||||
writer_heap_size: Optional[int] = 1024 * 1024 * 1024,
|
writer_heap_size: Optional[int] = 1024 * 1024 * 1024,
|
||||||
use_tantivy: bool = True,
|
use_tantivy: bool = False,
|
||||||
tokenizer_name: Optional[str] = None,
|
tokenizer_name: Optional[str] = None,
|
||||||
with_position: bool = False,
|
with_position: bool = False,
|
||||||
# tokenizer configs:
|
# tokenizer configs:
|
||||||
|
|||||||
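Because `use_tantivy` now defaults to `False`, calling `create_fts_index` without that argument builds the native lance-index FTS index, and the legacy tantivy path must be requested explicitly. A hedged sketch under the assumption of an existing `table` with a `text` column (the table itself is illustrative):

    # New default: native lance-index full-text search.
    table.create_fts_index("text")  # equivalent to use_tantivy=False
    table.create_fts_index("text", use_tantivy=False, with_position=True, replace=True)

    # The legacy tantivy-based index must now be opted into explicitly,
    # as the updated S3/DynamoDB test in this diff does.
    table.create_fts_index("text", use_tantivy=True)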
@@ -6,7 +6,7 @@ import lancedb
|
|||||||
|
|
||||||
# --8<-- [end:import-lancedb]
|
# --8<-- [end:import-lancedb]
|
||||||
# --8<-- [start:import-numpy]
|
# --8<-- [start:import-numpy]
|
||||||
from lancedb.query import BoostQuery, MatchQuery
|
from lancedb.query import BooleanQuery, BoostQuery, MatchQuery, Occur
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
@@ -191,6 +191,15 @@ def test_fts_fuzzy_query():
|
|||||||
"food", # 1 insertion
|
"food", # 1 insertion
|
||||||
}
|
}
|
||||||
|
|
||||||
|
results = table.search(
|
||||||
|
MatchQuery("foo", "text", fuzziness=1, prefix_length=3)
|
||||||
|
).to_pandas()
|
||||||
|
assert len(results) == 2
|
||||||
|
assert set(results["text"].to_list()) == {
|
||||||
|
"foo",
|
||||||
|
"food",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
@pytest.mark.skipif(
|
||||||
os.name == "nt", reason="Need to fix https://github.com/lancedb/lance/issues/3905"
|
os.name == "nt", reason="Need to fix https://github.com/lancedb/lance/issues/3905"
|
||||||
@@ -240,6 +249,60 @@ def test_fts_boost_query():
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(
|
||||||
|
os.name == "nt", reason="Need to fix https://github.com/lancedb/lance/issues/3905"
|
||||||
|
)
|
||||||
|
def test_fts_boolean_query(tmp_path):
|
||||||
|
uri = tmp_path / "boolean-example"
|
||||||
|
db = lancedb.connect(uri)
|
||||||
|
table = db.create_table(
|
||||||
|
"my_table_fts_boolean",
|
||||||
|
data=[
|
||||||
|
{"text": "The cat and dog are playing"},
|
||||||
|
{"text": "The cat is sleeping"},
|
||||||
|
{"text": "The dog is barking"},
|
||||||
|
{"text": "The dog chases the cat"},
|
||||||
|
],
|
||||||
|
mode="overwrite",
|
||||||
|
)
|
||||||
|
table.create_fts_index("text", use_tantivy=False, replace=True)
|
||||||
|
|
||||||
|
# SHOULD
|
||||||
|
results = table.search(
|
||||||
|
MatchQuery("cat", "text") | MatchQuery("dog", "text")
|
||||||
|
).to_pandas()
|
||||||
|
assert len(results) == 4
|
||||||
|
assert set(results["text"].to_list()) == {
|
||||||
|
"The cat and dog are playing",
|
||||||
|
"The cat is sleeping",
|
||||||
|
"The dog is barking",
|
||||||
|
"The dog chases the cat",
|
||||||
|
}
|
||||||
|
# MUST
|
||||||
|
results = table.search(
|
||||||
|
MatchQuery("cat", "text") & MatchQuery("dog", "text")
|
||||||
|
).to_pandas()
|
||||||
|
assert len(results) == 2
|
||||||
|
assert set(results["text"].to_list()) == {
|
||||||
|
"The cat and dog are playing",
|
||||||
|
"The dog chases the cat",
|
||||||
|
}
|
||||||
|
|
||||||
|
# MUST NOT
|
||||||
|
results = table.search(
|
||||||
|
BooleanQuery(
|
||||||
|
[
|
||||||
|
(Occur.MUST, MatchQuery("cat", "text")),
|
||||||
|
(Occur.MUST_NOT, MatchQuery("dog", "text")),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
).to_pandas()
|
||||||
|
assert len(results) == 1
|
||||||
|
assert set(results["text"].to_list()) == {
|
||||||
|
"The cat is sleeping",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
@pytest.mark.skipif(
|
||||||
os.name == "nt", reason="Need to fix https://github.com/lancedb/lance/issues/3905"
|
os.name == "nt", reason="Need to fix https://github.com/lancedb/lance/issues/3905"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -775,6 +775,82 @@ async def test_explain_plan_async(table_async: AsyncTable):
     assert "KNN" in plan


+@pytest.mark.asyncio
+async def test_explain_plan_fts(table_async: AsyncTable):
+    """Test explain plan for FTS queries"""
+    # Create FTS index
+    from lancedb.index import FTS
+
+    await table_async.create_index("text", config=FTS())
+
+    # Test pure FTS query
+    query = await table_async.search("dog", query_type="fts", fts_columns="text")
+    plan = await query.explain_plan()
+    # Should show FTS details (issue #2465 is now fixed)
+    assert "MatchQuery: query=dog" in plan
+    assert "GlobalLimitExec" in plan  # Default limit
+
+    # Test FTS query with limit
+    query_with_limit = await table_async.search(
+        "dog", query_type="fts", fts_columns="text"
+    )
+    plan_with_limit = await query_with_limit.limit(1).explain_plan()
+    assert "MatchQuery: query=dog" in plan_with_limit
+    assert "GlobalLimitExec: skip=0, fetch=1" in plan_with_limit
+
+    # Test FTS query with offset and limit
+    query_with_offset = await table_async.search(
+        "dog", query_type="fts", fts_columns="text"
+    )
+    plan_with_offset = await query_with_offset.offset(1).limit(1).explain_plan()
+    assert "MatchQuery: query=dog" in plan_with_offset
+    assert "GlobalLimitExec: skip=1, fetch=1" in plan_with_offset
+
+
+@pytest.mark.asyncio
+async def test_explain_plan_vector_with_limit_offset(table_async: AsyncTable):
+    """Test explain plan for vector queries with limit and offset"""
+    # Test vector query with limit
+    plan_with_limit = await (
+        table_async.query().nearest_to(pa.array([1, 2])).limit(1).explain_plan()
+    )
+    assert "KNN" in plan_with_limit
+    assert "GlobalLimitExec: skip=0, fetch=1" in plan_with_limit
+
+    # Test vector query with offset and limit
+    plan_with_offset = await (
+        table_async.query()
+        .nearest_to(pa.array([1, 2]))
+        .offset(1)
+        .limit(1)
+        .explain_plan()
+    )
+    assert "KNN" in plan_with_offset
+    assert "GlobalLimitExec: skip=1, fetch=1" in plan_with_offset
+
+
+@pytest.mark.asyncio
+async def test_explain_plan_with_filters(table_async: AsyncTable):
+    """Test explain plan for queries with filters"""
+    # Test vector query with filter
+    plan_with_filter = await (
+        table_async.query().nearest_to(pa.array([1, 2])).where("id = 1").explain_plan()
+    )
+    assert "KNN" in plan_with_filter
+    assert "FilterExec" in plan_with_filter
+
+    # Test FTS query with filter
+    from lancedb.index import FTS
+
+    await table_async.create_index("text", config=FTS())
+    query_fts_filter = await table_async.search(
+        "dog", query_type="fts", fts_columns="text"
+    )
+    plan_fts_filter = await query_fts_filter.where("id = 1").explain_plan()
+    assert "MatchQuery: query=dog" in plan_fts_filter
+    assert "FilterExec: id@" in plan_fts_filter  # Should show filter details
+
+
 @pytest.mark.asyncio
 async def test_query_camelcase_async(tmp_path):
     db = await lancedb.connect_async(tmp_path)
@@ -245,7 +245,7 @@ def test_s3_dynamodb_sync(s3_bucket: str, commit_table: str, monkeypatch):
         NotImplementedError,
         match="Full-text search is only supported on the local filesystem",
     ):
-        table.create_fts_index("x")
+        table.create_fts_index("x", use_tantivy=True)

     # make sure list tables still works
     assert db.table_names() == ["test_ddb_sync"]
@@ -50,6 +50,7 @@ impl FromPyObject<'_> for PyLanceDB<FtsQuery> {
         let fuzziness = ob.getattr("fuzziness")?.extract()?;
         let max_expansions = ob.getattr("max_expansions")?.extract()?;
         let operator = ob.getattr("operator")?.extract::<String>()?;
+        let prefix_length = ob.getattr("prefix_length")?.extract()?;

         Ok(PyLanceDB(
             MatchQuery::new(query)
@@ -60,6 +61,7 @@ impl FromPyObject<'_> for PyLanceDB<FtsQuery> {
                 .with_operator(Operator::try_from(operator.as_str()).map_err(|e| {
                     PyValueError::new_err(format!("Invalid operator: {}", e))
                 })?)
+                .with_prefix_length(prefix_length)
                 .into(),
         ))
     }
@@ -139,7 +141,8 @@ impl<'py> IntoPyObject<'py> for PyLanceDB<FtsQuery> {
                 kwargs.set_item("boost", query.boost)?;
                 kwargs.set_item("fuzziness", query.fuzziness)?;
                 kwargs.set_item("max_expansions", query.max_expansions)?;
-                kwargs.set_item("operator", operator_to_str(query.operator))?;
+                kwargs.set_item::<_, &str>("operator", query.operator.into())?;
+                kwargs.set_item("prefix_length", query.prefix_length)?;
                 namespace
                     .getattr(intern!(py, "MatchQuery"))?
                     .call((query.terms, query.column.unwrap()), Some(&kwargs))
@@ -169,19 +172,25 @@ impl<'py> IntoPyObject<'py> for PyLanceDB<FtsQuery> {
                     .unzip();
                 let kwargs = PyDict::new(py);
                 kwargs.set_item("boosts", boosts)?;
-                kwargs.set_item("operator", operator_to_str(first.operator))?;
+                kwargs.set_item::<_, &str>("operator", first.operator.into())?;
                 namespace
                     .getattr(intern!(py, "MultiMatchQuery"))?
                     .call((first.terms.clone(), columns), Some(&kwargs))
             }
             FtsQuery::Boolean(query) => {
-                let mut queries = Vec::with_capacity(query.must.len() + query.should.len());
-                for q in query.must {
-                    queries.push((occur_to_str(Occur::Must), PyLanceDB(q).into_pyobject(py)?));
-                }
+                let mut queries: Vec<(&str, Bound<'py, PyAny>)> = Vec::with_capacity(
+                    query.should.len() + query.must.len() + query.must_not.len(),
+                );
                 for q in query.should {
-                    queries.push((occur_to_str(Occur::Should), PyLanceDB(q).into_pyobject(py)?));
+                    queries.push((Occur::Should.into(), PyLanceDB(q).into_pyobject(py)?));
                 }
+                for q in query.must {
+                    queries.push((Occur::Must.into(), PyLanceDB(q).into_pyobject(py)?));
+                }
+                for q in query.must_not {
+                    queries.push((Occur::MustNot.into(), PyLanceDB(q).into_pyobject(py)?));
+                }
+
                 namespace
                     .getattr(intern!(py, "BooleanQuery"))?
                     .call1((queries,))
@@ -190,21 +199,6 @@ impl<'py> IntoPyObject<'py> for PyLanceDB<FtsQuery> {
     }
 }

-fn operator_to_str(op: Operator) -> &'static str {
-    match op {
-        Operator::And => "AND",
-        Operator::Or => "OR",
-    }
-}
-
-fn occur_to_str(occur: Occur) -> &'static str {
-    match occur {
-        Occur::Must => "MUST",
-        Occur::Should => "SHOULD",
-        Occur::MustNot => "MUST NOT",
-    }
-}
-
 // Python representation of query vector(s)
 #[derive(Clone)]
 pub struct PyQueryVectors(Vec<Arc<dyn Array>>);
@@ -569,7 +563,10 @@ impl FTSQuery {
     }

     pub fn explain_plan(self_: PyRef<'_, Self>, verbose: bool) -> PyResult<Bound<'_, PyAny>> {
-        let inner = self_.inner.clone();
+        let inner = self_
+            .inner
+            .clone()
+            .full_text_search(self_.fts_query.clone());
         future_into_py(self_.py(), async move {
             inner
                 .explain_plan(verbose)
@@ -579,7 +576,10 @@ impl FTSQuery {
     }

     pub fn analyze_plan(self_: PyRef<'_, Self>) -> PyResult<Bound<'_, PyAny>> {
-        let inner = self_.inner.clone();
+        let inner = self_
+            .inner
+            .clone()
+            .full_text_search(self_.fts_query.clone());
         future_into_py(self_.py(), async move {
             inner
                 .analyze_plan()
@@ -1,6 +1,6 @@
 [package]
 name = "lancedb-node"
-version = "0.20.1-beta.1"
+version = "0.21.0"
 description = "Serverless, low-latency vector database for AI applications"
 license.workspace = true
 edition.workspace = true
@@ -1,6 +1,6 @@
 [package]
 name = "lancedb"
-version = "0.20.1-beta.1"
+version = "0.21.0"
 edition.workspace = true
 description = "LanceDB: A serverless, low-latency vector database for AI applications"
 license.workspace = true
@@ -107,7 +107,7 @@ impl ObjectStore for MirroringObjectStore {
         self.primary.delete(location).await
     }

-    fn list(&self, prefix: Option<&Path>) -> BoxStream<'_, Result<ObjectMeta>> {
+    fn list(&self, prefix: Option<&Path>) -> BoxStream<'static, Result<ObjectMeta>> {
         self.primary.list(prefix)
     }

@@ -133,7 +133,7 @@ impl ObjectStore for IoTrackingStore {
         result
     }

-    async fn get_range(&self, location: &Path, range: std::ops::Range<usize>) -> OSResult<Bytes> {
+    async fn get_range(&self, location: &Path, range: std::ops::Range<u64>) -> OSResult<Bytes> {
         let result = self.target.get_range(location, range).await;
         if let Ok(result) = &result {
             self.record_read(result.len() as u64);
@@ -144,7 +144,7 @@ impl ObjectStore for IoTrackingStore {
     async fn get_ranges(
         &self,
         location: &Path,
-        ranges: &[std::ops::Range<usize>],
+        ranges: &[std::ops::Range<u64>],
     ) -> OSResult<Vec<Bytes>> {
         let result = self.target.get_ranges(location, ranges).await;
         if let Ok(result) = &result {
@@ -170,7 +170,7 @@ impl ObjectStore for IoTrackingStore {
         self.target.delete_stream(locations)
     }

-    fn list(&self, prefix: Option<&Path>) -> BoxStream<'_, OSResult<ObjectMeta>> {
+    fn list(&self, prefix: Option<&Path>) -> BoxStream<'static, OSResult<ObjectMeta>> {
         self.record_read(0);
         self.target.list(prefix)
     }
@@ -179,7 +179,7 @@ impl ObjectStore for IoTrackingStore {
         &self,
         prefix: Option<&Path>,
         offset: &Path,
-    ) -> BoxStream<'_, OSResult<ObjectMeta>> {
+    ) -> BoxStream<'static, OSResult<ObjectMeta>> {
         self.record_read(0);
         self.target.list_with_offset(prefix, offset)
     }