mirror of
https://github.com/lancedb/lancedb.git
synced 2025-12-23 05:19:58 +00:00
Compare commits
94 Commits
python-v0.
...
python-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a33a0670f6 | ||
|
|
14c9ff46d1 | ||
|
|
1865f7decf | ||
|
|
a608621476 | ||
|
|
00514999ff | ||
|
|
b3b597fef6 | ||
|
|
bf17144591 | ||
|
|
09e110525f | ||
|
|
40f0dbb64d | ||
|
|
3b19e96ae7 | ||
|
|
78a17ad54c | ||
|
|
a8e6b491e2 | ||
|
|
cea541ca46 | ||
|
|
873ffc1042 | ||
|
|
83273ad997 | ||
|
|
d18d63c69d | ||
|
|
c3e865e8d0 | ||
|
|
a7755cb313 | ||
|
|
3490f3456f | ||
|
|
0a1d0693e1 | ||
|
|
fd330b4b4b | ||
|
|
d4e9fc08e0 | ||
|
|
3626f2f5e1 | ||
|
|
e64712cfa5 | ||
|
|
3e3118f85c | ||
|
|
592598a333 | ||
|
|
5ad21341c9 | ||
|
|
6e08caa091 | ||
|
|
7e259d8b0f | ||
|
|
e84f747464 | ||
|
|
998cd43fe6 | ||
|
|
4bc7eebe61 | ||
|
|
2e3b34e79b | ||
|
|
e7574698eb | ||
|
|
801a9e5f6f | ||
|
|
4e5fbe6c99 | ||
|
|
1a449fa49e | ||
|
|
6bf742c759 | ||
|
|
ef3093bc23 | ||
|
|
16851389ea | ||
|
|
c269524b2f | ||
|
|
f6eef14313 | ||
|
|
32716adaa3 | ||
|
|
5e98b7f4c0 | ||
|
|
3f2589c11f | ||
|
|
e3b99694d6 | ||
|
|
9d42dc349c | ||
|
|
482f1ee1d3 | ||
|
|
2f39274a66 | ||
|
|
2fc174f532 | ||
|
|
dba85f4d6f | ||
|
|
555fa26147 | ||
|
|
e05c0cd87e | ||
|
|
25c17ebf4e | ||
|
|
87b12b57dc | ||
|
|
3dc9b71914 | ||
|
|
2622f34d1a | ||
|
|
a677a4b651 | ||
|
|
e6b4f14c1f | ||
|
|
15f8f4d627 | ||
|
|
6526d6c3b1 | ||
|
|
da4d7e3ca7 | ||
|
|
8fbadca9aa | ||
|
|
29120219cf | ||
|
|
a9897d9d85 | ||
|
|
acda7a4589 | ||
|
|
dac0857745 | ||
|
|
0a9e1eab75 | ||
|
|
d999d72c8d | ||
|
|
de4720993e | ||
|
|
6c14a307e2 | ||
|
|
43747278c8 | ||
|
|
e5f42a850e | ||
|
|
7920ecf66e | ||
|
|
28e1b70e4b | ||
|
|
52b79d2b1e | ||
|
|
c05d45150d | ||
|
|
48ed3bb544 | ||
|
|
bcfc93cc88 | ||
|
|
214d0debf5 | ||
|
|
f059372137 | ||
|
|
3dc1803c07 | ||
|
|
d0501f65f1 | ||
|
|
4703cc6894 | ||
|
|
493f9ce467 | ||
|
|
5c759505b8 | ||
|
|
bb6a39727e | ||
|
|
d57bed90e5 | ||
|
|
648327e90c | ||
|
|
6c7e81ee57 | ||
|
|
905e9d4738 | ||
|
|
38642e349c | ||
|
|
6879861ea8 | ||
|
|
88325e488e |
@@ -1,5 +1,5 @@
|
||||
[tool.bumpversion]
|
||||
current_version = "0.15.0-beta.0"
|
||||
current_version = "0.16.1-beta.2"
|
||||
parse = """(?x)
|
||||
(?P<major>0|[1-9]\\d*)\\.
|
||||
(?P<minor>0|[1-9]\\d*)\\.
|
||||
|
||||
@@ -52,12 +52,7 @@ runs:
|
||||
args: ${{ inputs.args }}
|
||||
before-script-linux: |
|
||||
set -e
|
||||
apt install -y unzip
|
||||
if [ $(uname -m) = "x86_64" ]; then
|
||||
PROTOC_ARCH="x86_64"
|
||||
else
|
||||
PROTOC_ARCH="aarch_64"
|
||||
fi
|
||||
curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-$PROTOC_ARCH.zip > /tmp/protoc.zip \
|
||||
yum install -y openssl-devel clang \
|
||||
&& curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-aarch_64.zip > /tmp/protoc.zip \
|
||||
&& unzip /tmp/protoc.zip -d /usr/local \
|
||||
&& rm /tmp/protoc.zip
|
||||
|
||||
31
.github/workflows/license-header-check.yml
vendored
Normal file
31
.github/workflows/license-header-check.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Check license headers
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
paths:
|
||||
- rust/**
|
||||
- python/**
|
||||
- nodejs/**
|
||||
- java/**
|
||||
- .github/workflows/license-header-check.yml
|
||||
jobs:
|
||||
check-licenses:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
- name: Install license-header-checker
|
||||
working-directory: /tmp
|
||||
run: |
|
||||
curl -s https://raw.githubusercontent.com/lluissm/license-header-checker/master/install.sh | bash
|
||||
mv /tmp/bin/license-header-checker /usr/local/bin/
|
||||
- name: Check license headers (rust)
|
||||
run: license-header-checker -a -v ./rust/license_header.txt ./ rs && [[ -z `git status -s` ]]
|
||||
- name: Check license headers (python)
|
||||
run: license-header-checker -a -v ./python/license_header.txt python py && [[ -z `git status -s` ]]
|
||||
- name: Check license headers (typescript)
|
||||
run: license-header-checker -a -v ./nodejs/license_header.txt nodejs ts && [[ -z `git status -s` ]]
|
||||
- name: Check license headers (java)
|
||||
run: license-header-checker -a -v ./nodejs/license_header.txt java java && [[ -z `git status -s` ]]
|
||||
12
.github/workflows/nodejs.yml
vendored
12
.github/workflows/nodejs.yml
vendored
@@ -106,6 +106,18 @@ jobs:
|
||||
python ci/mock_openai.py &
|
||||
cd nodejs/examples
|
||||
npm test
|
||||
- name: Check docs
|
||||
run: |
|
||||
# We run this as part of the job because the binary needs to be built
|
||||
# first to export the types of the native code.
|
||||
set -e
|
||||
npm ci
|
||||
npm run docs
|
||||
if ! git diff --exit-code; then
|
||||
echo "Docs need to be updated"
|
||||
echo "Run 'npm run docs', fix any warnings, and commit the changes."
|
||||
exit 1
|
||||
fi
|
||||
macos:
|
||||
timeout-minutes: 30
|
||||
runs-on: "macos-14"
|
||||
|
||||
14
.github/workflows/pypi-publish.yml
vendored
14
.github/workflows/pypi-publish.yml
vendored
@@ -15,15 +15,21 @@ jobs:
|
||||
- platform: x86_64
|
||||
manylinux: "2_17"
|
||||
extra_args: ""
|
||||
runner: ubuntu-22.04
|
||||
- platform: x86_64
|
||||
manylinux: "2_28"
|
||||
extra_args: "--features fp16kernels"
|
||||
runner: ubuntu-22.04
|
||||
- platform: aarch64
|
||||
manylinux: "2_24"
|
||||
manylinux: "2_17"
|
||||
extra_args: ""
|
||||
# We don't build fp16 kernels for aarch64, because it uses
|
||||
# cross compilation image, which doesn't have a new enough compiler.
|
||||
runs-on: "ubuntu-22.04"
|
||||
# For successful fat LTO builds, we need a large runner to avoid OOM errors.
|
||||
runner: ubuntu-2404-8x-arm64
|
||||
- platform: aarch64
|
||||
manylinux: "2_28"
|
||||
extra_args: "--features fp16kernels"
|
||||
runner: ubuntu-2404-8x-arm64
|
||||
runs-on: ${{ matrix.config.runner }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
|
||||
82
.github/workflows/rust.yml
vendored
82
.github/workflows/rust.yml
vendored
@@ -22,6 +22,7 @@ env:
|
||||
# "1" means line tables only, which is useful for panic tracebacks.
|
||||
RUSTFLAGS: "-C debuginfo=1"
|
||||
RUST_BACKTRACE: "1"
|
||||
CARGO_INCREMENTAL: 0
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
@@ -51,6 +52,28 @@ jobs:
|
||||
- name: Run clippy
|
||||
run: cargo clippy --workspace --tests --all-features -- -D warnings
|
||||
|
||||
build-no-lock:
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
# Need up-to-date compilers for kernels
|
||||
CC: clang
|
||||
CXX: clang++
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# Remote cargo.lock to force a fresh build
|
||||
- name: Remove Cargo.lock
|
||||
run: rm -f Cargo.lock
|
||||
- uses: rui314/setup-mold@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y protobuf-compiler libssl-dev
|
||||
- name: Build all
|
||||
run: |
|
||||
cargo build --benches --all-features --tests
|
||||
|
||||
linux:
|
||||
timeout-minutes: 30
|
||||
# To build all features, we need more disk space than is available
|
||||
@@ -75,8 +98,11 @@ jobs:
|
||||
workspaces: rust
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt update
|
||||
# This shaves 2 minutes off this step in CI. This doesn't seem to be
|
||||
# necessary in standard runners, but it is in the 4x runners.
|
||||
sudo rm /var/lib/man-db/auto-update
|
||||
sudo apt install -y protobuf-compiler libssl-dev
|
||||
- uses: rui314/setup-mold@v1
|
||||
- name: Make Swap
|
||||
run: |
|
||||
sudo fallocate -l 16G /swapfile
|
||||
@@ -87,11 +113,11 @@ jobs:
|
||||
working-directory: .
|
||||
run: docker compose up --detach --wait
|
||||
- name: Build
|
||||
run: cargo build --all-features
|
||||
run: cargo build --all-features --tests --locked --examples
|
||||
- name: Run tests
|
||||
run: cargo test --all-features
|
||||
run: cargo test --all-features --locked
|
||||
- name: Run examples
|
||||
run: cargo run --example simple
|
||||
run: cargo run --example simple --locked
|
||||
|
||||
macos:
|
||||
timeout-minutes: 30
|
||||
@@ -115,11 +141,14 @@ jobs:
|
||||
workspaces: rust
|
||||
- name: Install dependencies
|
||||
run: brew install protobuf
|
||||
- name: Build
|
||||
run: cargo build --all-features
|
||||
- name: Run tests
|
||||
# Run with everything except the integration tests.
|
||||
run: cargo test --features remote,fp16kernels
|
||||
run: |
|
||||
# Don't run the s3 integration tests since docker isn't available
|
||||
# on this image.
|
||||
ALL_FEATURES=`cargo metadata --format-version=1 --no-deps \
|
||||
| jq -r '.packages[] | .features | keys | .[]' \
|
||||
| grep -v s3-test | sort | uniq | paste -s -d "," -`
|
||||
cargo test --features $ALL_FEATURES --locked
|
||||
|
||||
windows:
|
||||
runs-on: windows-2022
|
||||
@@ -140,8 +169,38 @@ jobs:
|
||||
- name: Run tests
|
||||
run: |
|
||||
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
|
||||
cargo build
|
||||
cargo test
|
||||
cargo test --features remote --locked
|
||||
|
||||
windows-arm64-cross:
|
||||
# We cross compile in Node releases, so we want to make sure
|
||||
# this can run successfully.
|
||||
runs-on: ubuntu-latest
|
||||
container: alpine:edge
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
set -e
|
||||
apk add protobuf-dev curl clang lld llvm19 grep npm bash msitools sed
|
||||
|
||||
curl --proto '=https' --tlsv1.3 -sSf https://raw.githubusercontent.com/rust-lang/rustup/refs/heads/master/rustup-init.sh | sh -s -- -y
|
||||
source $HOME/.cargo/env
|
||||
rustup target add aarch64-pc-windows-msvc
|
||||
|
||||
mkdir -p sysroot
|
||||
cd sysroot
|
||||
sh ../ci/sysroot-aarch64-pc-windows-msvc.sh
|
||||
- name: Check
|
||||
env:
|
||||
CC: clang
|
||||
AR: llvm-ar
|
||||
C_INCLUDE_PATH: /usr/aarch64-pc-windows-msvc/usr/include
|
||||
CARGO_BUILD_TARGET: aarch64-pc-windows-msvc
|
||||
RUSTFLAGS: -Ctarget-feature=+crt-static,+neon,+fp16,+fhm,+dotprod -Clinker=lld -Clink-arg=/LIBPATH:/usr/aarch64-pc-windows-msvc/usr/lib -Clink-arg=arm64rt.lib
|
||||
run: |
|
||||
source $HOME/.cargo/env
|
||||
cargo check --features remote --locked
|
||||
|
||||
windows-arm64:
|
||||
runs-on: windows-4x-arm
|
||||
@@ -236,8 +295,7 @@ jobs:
|
||||
- name: Run tests
|
||||
run: |
|
||||
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
|
||||
cargo build --target aarch64-pc-windows-msvc
|
||||
cargo test --target aarch64-pc-windows-msvc
|
||||
cargo test --target aarch64-pc-windows-msvc --features remote --locked
|
||||
|
||||
msrv:
|
||||
# Check the minimum supported Rust version
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -9,7 +9,6 @@ venv
|
||||
.vscode
|
||||
.zed
|
||||
rust/target
|
||||
rust/Cargo.lock
|
||||
|
||||
site
|
||||
|
||||
@@ -42,5 +41,3 @@ dist
|
||||
target
|
||||
|
||||
**/sccache.log
|
||||
|
||||
Cargo.lock
|
||||
|
||||
@@ -7,7 +7,7 @@ repos:
|
||||
- id: trailing-whitespace
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.2.2
|
||||
rev: v0.8.4
|
||||
hooks:
|
||||
- id: ruff
|
||||
- repo: local
|
||||
|
||||
8168
Cargo.lock
generated
Normal file
8168
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
35
Cargo.toml
35
Cargo.toml
@@ -21,14 +21,16 @@ categories = ["database-implementations"]
|
||||
rust-version = "1.78.0"
|
||||
|
||||
[workspace.dependencies]
|
||||
lance = { "version" = "=0.22.0", "features" = ["dynamodb"] }
|
||||
lance-io = "=0.22.0"
|
||||
lance-index = "=0.22.0"
|
||||
lance-linalg = "=0.22.0"
|
||||
lance-table = "=0.22.0"
|
||||
lance-testing = "=0.22.0"
|
||||
lance-datafusion = "=0.22.0"
|
||||
lance-encoding = "=0.22.0"
|
||||
lance = { "version" = "=0.23.1", "features" = [
|
||||
"dynamodb",
|
||||
], git = "https://github.com/lancedb/lance.git", tag = "v0.23.1-beta.4"}
|
||||
lance-io = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-index = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-linalg = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-table = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-testing = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-datafusion = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
lance-encoding = {version = "=0.23.1", tag="v0.23.1-beta.4", git = "https://github.com/lancedb/lance.git"}
|
||||
# Note that this one does not include pyarrow
|
||||
arrow = { version = "53.2", optional = false }
|
||||
arrow-array = "53.2"
|
||||
@@ -40,20 +42,27 @@ arrow-arith = "53.2"
|
||||
arrow-cast = "53.2"
|
||||
async-trait = "0"
|
||||
chrono = "0.4.35"
|
||||
datafusion-common = "44.0"
|
||||
datafusion = { version = "44.0", default-features = false }
|
||||
datafusion-catalog = "44.0"
|
||||
datafusion-common = { version = "44.0", default-features = false }
|
||||
datafusion-execution = "44.0"
|
||||
datafusion-expr = "44.0"
|
||||
datafusion-physical-plan = "44.0"
|
||||
env_logger = "0.10"
|
||||
env_logger = "0.11"
|
||||
half = { "version" = "=2.4.1", default-features = false, features = [
|
||||
"num-traits",
|
||||
] }
|
||||
futures = "0"
|
||||
log = "0.4"
|
||||
moka = { version = "0.11", features = ["future"] }
|
||||
object_store = "0.10.2"
|
||||
moka = { version = "0.12", features = ["future"] }
|
||||
object_store = "0.11.0"
|
||||
pin-project = "1.0.7"
|
||||
snafu = "0.7.4"
|
||||
snafu = "0.8"
|
||||
url = "2"
|
||||
num-traits = "0.2"
|
||||
rand = "0.8"
|
||||
regex = "1.10"
|
||||
lazy_static = "1"
|
||||
|
||||
# Workaround for: https://github.com/eira-fransham/crunchy/issues/13
|
||||
crunchy = "=0.2.2"
|
||||
|
||||
@@ -38,6 +38,13 @@ components:
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
index_name:
|
||||
name: index_name
|
||||
in: path
|
||||
description: name of the index
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
invalid_request:
|
||||
description: Invalid request
|
||||
@@ -485,3 +492,22 @@ paths:
|
||||
$ref: "#/components/responses/unauthorized"
|
||||
"404":
|
||||
$ref: "#/components/responses/not_found"
|
||||
/v1/table/{name}/index/{index_name}/drop/:
|
||||
post:
|
||||
description: Drop an index from the table
|
||||
tags:
|
||||
- Tables
|
||||
summary: Drop an index from the table
|
||||
operationId: dropIndex
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/table_name"
|
||||
- $ref: "#/components/parameters/index_name"
|
||||
responses:
|
||||
"200":
|
||||
description: Index successfully dropped
|
||||
"400":
|
||||
$ref: "#/components/responses/invalid_request"
|
||||
"401":
|
||||
$ref: "#/components/responses/unauthorized"
|
||||
"404":
|
||||
$ref: "#/components/responses/not_found"
|
||||
@@ -3,6 +3,7 @@ import * as vectordb from "vectordb";
|
||||
// --8<-- [end:import]
|
||||
|
||||
(async () => {
|
||||
console.log("ann_indexes.ts: start");
|
||||
// --8<-- [start:ingest]
|
||||
const db = await vectordb.connect("data/sample-lancedb");
|
||||
|
||||
@@ -49,5 +50,5 @@ import * as vectordb from "vectordb";
|
||||
.execute();
|
||||
// --8<-- [end:search3]
|
||||
|
||||
console.log("Ann indexes: done");
|
||||
console.log("ann_indexes.ts: done");
|
||||
})();
|
||||
|
||||
@@ -133,11 +133,20 @@ recommend switching to stable releases.
|
||||
## Connect to a database
|
||||
|
||||
=== "Python"
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:imports"
|
||||
--8<-- "python/python/tests/docs/test_basic.py:connect"
|
||||
|
||||
--8<-- "python/python/tests/docs/test_basic.py:set_uri"
|
||||
--8<-- "python/python/tests/docs/test_basic.py:connect"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:imports"
|
||||
|
||||
--8<-- "python/python/tests/docs/test_basic.py:set_uri"
|
||||
--8<-- "python/python/tests/docs/test_basic.py:connect_async"
|
||||
```
|
||||
|
||||
@@ -183,19 +192,31 @@ table.
|
||||
|
||||
=== "Python"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table"
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table_async"
|
||||
```
|
||||
|
||||
If the table already exists, LanceDB will raise an error by default.
|
||||
If you want to overwrite the table, you can pass in `mode="overwrite"`
|
||||
to the `create_table` method.
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table"
|
||||
```
|
||||
|
||||
You can also pass in a pandas DataFrame directly:
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table_pandas"
|
||||
```
|
||||
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table_async"
|
||||
```
|
||||
|
||||
You can also pass in a pandas DataFrame directly:
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_table_async_pandas"
|
||||
```
|
||||
|
||||
@@ -247,8 +268,14 @@ similar to a `CREATE TABLE` statement in SQL.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_empty_table"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_empty_table_async"
|
||||
```
|
||||
|
||||
@@ -281,8 +308,14 @@ Once created, you can open a table as follows:
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:open_table"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:open_table_async"
|
||||
```
|
||||
|
||||
@@ -310,8 +343,14 @@ If you forget the name of your table, you can always get a listing of all table
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:table_names"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:table_names_async"
|
||||
```
|
||||
|
||||
@@ -340,8 +379,14 @@ After a table has been created, you can always add more data to it as follows:
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:add_data"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:add_data_async"
|
||||
```
|
||||
|
||||
@@ -370,8 +415,14 @@ Once you've embedded the query, you can find its nearest neighbors as follows:
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:vector_search"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:vector_search_async"
|
||||
```
|
||||
|
||||
@@ -412,8 +463,14 @@ LanceDB allows you to create an ANN index on a table as follows:
|
||||
|
||||
=== "Python"
|
||||
|
||||
```py
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_index"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:create_index_async"
|
||||
```
|
||||
|
||||
@@ -451,8 +508,14 @@ This can delete any number of rows that match the filter.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:delete_rows"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:delete_rows_async"
|
||||
```
|
||||
|
||||
@@ -483,7 +546,10 @@ simple or complex as needed. To see what expressions are supported, see the
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
Read more: [lancedb.table.Table.delete][]
|
||||
=== "Async API"
|
||||
Read more: [lancedb.table.AsyncTable.delete][]
|
||||
|
||||
=== "Typescript[^1]"
|
||||
|
||||
@@ -505,8 +571,14 @@ Use the `drop_table()` method on the database to remove a table.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:drop_table"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:drop_table_async"
|
||||
```
|
||||
|
||||
@@ -543,10 +615,17 @@ You can use the embedding API when working with embedding models. It automatical
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_embeddings_optional.py:imports"
|
||||
|
||||
--8<-- "python/python/tests/docs/test_embeddings_optional.py:openai_embeddings"
|
||||
```
|
||||
=== "Async API"
|
||||
|
||||
Coming soon to the async API.
|
||||
https://github.com/lancedb/lancedb/issues/1938
|
||||
|
||||
=== "Typescript[^1]"
|
||||
|
||||
|
||||
@@ -107,7 +107,6 @@ const example = async () => {
|
||||
// --8<-- [start:search]
|
||||
const query = await tbl.search([100, 100]).limit(2).execute();
|
||||
// --8<-- [end:search]
|
||||
console.log(query);
|
||||
|
||||
// --8<-- [start:delete]
|
||||
await tbl.delete('item = "fizz"');
|
||||
@@ -119,8 +118,9 @@ const example = async () => {
|
||||
};
|
||||
|
||||
async function main() {
|
||||
console.log("basic_legacy.ts: start");
|
||||
await example();
|
||||
console.log("Basic example: done");
|
||||
console.log("basic_legacy.ts: done");
|
||||
}
|
||||
|
||||
main();
|
||||
|
||||
@@ -601,6 +601,38 @@ After a table has been created, you can always add more data to it using the `ad
|
||||
)
|
||||
```
|
||||
|
||||
## Upserting into a table
|
||||
|
||||
Upserting lets you insert new rows or update existing rows in a table. To upsert
|
||||
in LanceDB, use the merge insert API.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic"
|
||||
```
|
||||
**API Reference**: [lancedb.table.Table.merge_insert][]
|
||||
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic_async"
|
||||
```
|
||||
**API Reference**: [lancedb.table.AsyncTable.merge_insert][]
|
||||
|
||||
=== "Typescript[^1]"
|
||||
|
||||
=== "@lancedb/lancedb"
|
||||
|
||||
```typescript
|
||||
--8<-- "nodejs/examples/merge_insert.test.ts:upsert_basic"
|
||||
```
|
||||
**API Reference**: [lancedb.Table.mergeInsert](../js/classes/Table.md/#mergeInsert)
|
||||
|
||||
Read more in the guide on [merge insert](tables/merge_insert.md).
|
||||
|
||||
## Deleting from a table
|
||||
|
||||
Use the `delete()` method on tables to delete rows from a table. To choose which rows to delete, provide a filter that matches on the metadata columns. This can delete any number of rows that match the filter.
|
||||
|
||||
135
docs/src/guides/tables/merge_insert.md
Normal file
135
docs/src/guides/tables/merge_insert.md
Normal file
@@ -0,0 +1,135 @@
|
||||
The merge insert command is a flexible API that can be used to perform:
|
||||
|
||||
1. Upsert
|
||||
2. Insert-if-not-exists
|
||||
3. Replace range
|
||||
|
||||
It works by joining the input data with the target table on a key you provide.
|
||||
Often this key is a unique row id key. You can then specify what to do when
|
||||
there is a match and when there is not a match. For example, for upsert you want
|
||||
to update if the row has a match and insert if the row doesn't have a match.
|
||||
Whereas for insert-if-not-exists you only want to insert if the row doesn't have
|
||||
a match.
|
||||
|
||||
You can also read more in the API reference:
|
||||
|
||||
* Python
|
||||
* Sync: [lancedb.table.Table.merge_insert][]
|
||||
* Async: [lancedb.table.AsyncTable.merge_insert][]
|
||||
* Typescript: [lancedb.Table.mergeInsert](../../js/classes/Table.md/#mergeinsert)
|
||||
|
||||
!!! tip "Use scalar indices to speed up merge insert"
|
||||
|
||||
The merge insert command needs to perform a join between the input data and the
|
||||
target table on the `on` key you provide. This requires scanning that entire
|
||||
column, which can be expensive for large tables. To speed up this operation,
|
||||
you can create a scalar index on the `on` column, which will allow LanceDB to
|
||||
find matches without having to scan the whole tables.
|
||||
|
||||
Read more about scalar indices in [Building a Scalar Index](../scalar_index.md)
|
||||
guide.
|
||||
|
||||
!!! info "Embedding Functions"
|
||||
|
||||
Like the create table and add APIs, the merge insert API will automatically
|
||||
compute embeddings if the table has a embedding definition in its schema.
|
||||
If the input data doesn't contain the source column, or the vector column
|
||||
is already filled, then the embeddings won't be computed. See the
|
||||
[Embedding Functions](../../embeddings/embedding_functions.md) guide for more
|
||||
information.
|
||||
|
||||
## Upsert
|
||||
|
||||
Upsert updates rows if they exist and inserts them if they don't. To do this
|
||||
with merge insert, enable both `when_matched_update_all()` and
|
||||
`when_not_matched_insert_all()`.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic"
|
||||
```
|
||||
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic_async"
|
||||
```
|
||||
|
||||
=== "Typescript"
|
||||
|
||||
=== "@lancedb/lancedb"
|
||||
|
||||
```typescript
|
||||
--8<-- "nodejs/examples/merge_insert.test.ts:upsert_basic"
|
||||
```
|
||||
|
||||
!!! note "Providing subsets of columns"
|
||||
|
||||
If a column is nullable, it can be omitted from input data and it will be
|
||||
considered `null`. Columns can also be provided in any order.
|
||||
|
||||
## Insert-if-not-exists
|
||||
|
||||
To avoid inserting duplicate rows, you can use the insert-if-not-exists command.
|
||||
This will only insert rows that do not have a match in the target table. To do
|
||||
this with merge insert, enable just `when_not_matched_insert_all()`.
|
||||
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:insert_if_not_exists"
|
||||
```
|
||||
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:insert_if_not_exists_async"
|
||||
```
|
||||
|
||||
=== "Typescript"
|
||||
|
||||
=== "@lancedb/lancedb"
|
||||
|
||||
```typescript
|
||||
--8<-- "nodejs/examples/merge_insert.test.ts:insert_if_not_exists"
|
||||
```
|
||||
|
||||
|
||||
## Replace range
|
||||
|
||||
You can also replace a range of rows in the target table with the input data.
|
||||
For example, if you have a table of document chunks, where each chunk has
|
||||
both a `doc_id` and a `chunk_id`, you can replace all chunks for a given
|
||||
`doc_id` with updated chunks. This can be tricky otherwise because if you
|
||||
try to use upsert when the new data has fewer chunks you will end up with
|
||||
extra chunks. To avoid this, add another clause to delete any chunks for
|
||||
the document that are not in the new data, with
|
||||
`when_not_matched_by_source_delete`.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:replace_range"
|
||||
```
|
||||
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_merge_insert.py:replace_range_async"
|
||||
```
|
||||
|
||||
=== "Typescript"
|
||||
|
||||
=== "@lancedb/lancedb"
|
||||
|
||||
```typescript
|
||||
--8<-- "nodejs/examples/merge_insert.test.ts:replace_range"
|
||||
```
|
||||
@@ -36,41 +36,8 @@ const results = await table.vectorSearch([0.1, 0.3]).limit(20).toArray();
|
||||
console.log(results);
|
||||
```
|
||||
|
||||
The [quickstart](../basic.md) contains a more complete example.
|
||||
The [quickstart](https://lancedb.github.io/lancedb/basic/) contains a more complete example.
|
||||
|
||||
## Development
|
||||
|
||||
```sh
|
||||
npm run build
|
||||
npm run test
|
||||
```
|
||||
|
||||
### Running lint / format
|
||||
|
||||
LanceDb uses [biome](https://biomejs.dev/) for linting and formatting. if you are using VSCode you will need to install the official [Biome](https://marketplace.visualstudio.com/items?itemName=biomejs.biome) extension.
|
||||
To manually lint your code you can run:
|
||||
|
||||
```sh
|
||||
npm run lint
|
||||
```
|
||||
|
||||
to automatically fix all fixable issues:
|
||||
|
||||
```sh
|
||||
npm run lint-fix
|
||||
```
|
||||
|
||||
If you do not have your workspace root set to the `nodejs` directory, unfortunately the extension will not work. You can still run the linting and formatting commands manually.
|
||||
|
||||
### Generating docs
|
||||
|
||||
```sh
|
||||
npm run docs
|
||||
|
||||
cd ../docs
|
||||
# Asssume the virtual environment was created
|
||||
# python3 -m venv venv
|
||||
# pip install -r requirements.txt
|
||||
. ./venv/bin/activate
|
||||
mkdocs build
|
||||
```
|
||||
See [CONTRIBUTING.md](_media/CONTRIBUTING.md) for information on how to contribute to LanceDB.
|
||||
|
||||
76
docs/src/js/_media/CONTRIBUTING.md
Normal file
76
docs/src/js/_media/CONTRIBUTING.md
Normal file
@@ -0,0 +1,76 @@
|
||||
# Contributing to LanceDB Typescript
|
||||
|
||||
This document outlines the process for contributing to LanceDB Typescript.
|
||||
For general contribution guidelines, see [CONTRIBUTING.md](../CONTRIBUTING.md).
|
||||
|
||||
## Project layout
|
||||
|
||||
The Typescript package is a wrapper around the Rust library, `lancedb`. We use
|
||||
the [napi-rs](https://napi.rs/) library to create the bindings between Rust and
|
||||
Typescript.
|
||||
|
||||
* `src/`: Rust bindings source code
|
||||
* `lancedb/`: Typescript package source code
|
||||
* `__test__/`: Unit tests
|
||||
* `examples/`: An npm package with the examples shown in the documentation
|
||||
|
||||
## Development environment
|
||||
|
||||
To set up your development environment, you will need to install the following:
|
||||
|
||||
1. Node.js 14 or later
|
||||
2. Rust's package manager, Cargo. Use [rustup](https://rustup.rs/) to install.
|
||||
3. [protoc](https://grpc.io/docs/protoc-installation/) (Protocol Buffers compiler)
|
||||
|
||||
Initial setup:
|
||||
|
||||
```shell
|
||||
npm install
|
||||
```
|
||||
|
||||
### Commit Hooks
|
||||
|
||||
It is **highly recommended** to install the [pre-commit](https://pre-commit.com/) hooks to ensure that your
|
||||
code is formatted correctly and passes basic checks before committing:
|
||||
|
||||
```shell
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
Most common development commands can be run using the npm scripts.
|
||||
|
||||
Build the package
|
||||
|
||||
```shell
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
Lint:
|
||||
|
||||
```shell
|
||||
npm run lint
|
||||
```
|
||||
|
||||
Format and fix lints:
|
||||
|
||||
```shell
|
||||
npm run lint-fix
|
||||
```
|
||||
|
||||
Run tests:
|
||||
|
||||
```shell
|
||||
npm test
|
||||
```
|
||||
|
||||
To run a single test:
|
||||
|
||||
```shell
|
||||
# Single file: table.test.ts
|
||||
npm test -- table.test.ts
|
||||
# Single test: 'merge insert' in table.test.ts
|
||||
npm test -- table.test.ts --testNamePattern=merge\ insert
|
||||
```
|
||||
@@ -23,18 +23,6 @@ be closed when they are garbage collected.
|
||||
Any created tables are independent and will continue to work even if
|
||||
the underlying connection has been closed.
|
||||
|
||||
## Constructors
|
||||
|
||||
### new Connection()
|
||||
|
||||
```ts
|
||||
new Connection(): Connection
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Connection`](Connection.md)
|
||||
|
||||
## Methods
|
||||
|
||||
### close()
|
||||
@@ -71,7 +59,7 @@ Creates a new empty Table
|
||||
* **name**: `string`
|
||||
The name of the table.
|
||||
|
||||
* **schema**: `SchemaLike`
|
||||
* **schema**: [`SchemaLike`](../type-aliases/SchemaLike.md)
|
||||
The schema of the table
|
||||
|
||||
* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
@@ -117,7 +105,7 @@ Creates a new Table and initialize it with new data.
|
||||
* **name**: `string`
|
||||
The name of the table.
|
||||
|
||||
* **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
* **data**: [`TableLike`](../type-aliases/TableLike.md) \| `Record`<`string`, `unknown`>[]
|
||||
Non-empty Array of Records
|
||||
to be inserted into the table
|
||||
|
||||
@@ -143,6 +131,20 @@ Return a brief description of the connection
|
||||
|
||||
***
|
||||
|
||||
### dropAllTables()
|
||||
|
||||
```ts
|
||||
abstract dropAllTables(): Promise<void>
|
||||
```
|
||||
|
||||
Drop all tables in the database.
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
***
|
||||
|
||||
### dropTable()
|
||||
|
||||
```ts
|
||||
@@ -189,7 +191,7 @@ Open a table in the database.
|
||||
* **name**: `string`
|
||||
The name of the table
|
||||
|
||||
* **options?**: `Partial`<`OpenTableOptions`>
|
||||
* **options?**: `Partial`<[`OpenTableOptions`](../interfaces/OpenTableOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -72,11 +72,9 @@ The results of a full text search are ordered by relevance measured by BM25.
|
||||
|
||||
You can combine filters with full text search.
|
||||
|
||||
For now, the full text search index only supports English, and doesn't support phrase search.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`FtsOptions`>
|
||||
* **options?**: `Partial`<[`FtsOptions`](../interfaces/FtsOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -98,7 +96,7 @@ the vectors.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`HnswPqOptions`>
|
||||
* **options?**: `Partial`<[`HnswPqOptions`](../interfaces/HnswPqOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -120,7 +118,7 @@ the vectors.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`HnswSqOptions`>
|
||||
* **options?**: `Partial`<[`HnswSqOptions`](../interfaces/HnswSqOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
126
docs/src/js/classes/MergeInsertBuilder.md
Normal file
126
docs/src/js/classes/MergeInsertBuilder.md
Normal file
@@ -0,0 +1,126 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / MergeInsertBuilder
|
||||
|
||||
# Class: MergeInsertBuilder
|
||||
|
||||
A builder used to create and run a merge insert operation
|
||||
|
||||
## Constructors
|
||||
|
||||
### new MergeInsertBuilder()
|
||||
|
||||
```ts
|
||||
new MergeInsertBuilder(native, schema): MergeInsertBuilder
|
||||
```
|
||||
|
||||
Construct a MergeInsertBuilder. __Internal use only.__
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **native**: `NativeMergeInsertBuilder`
|
||||
|
||||
* **schema**: `Schema`<`any`> \| `Promise`<`Schema`<`any`>>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`MergeInsertBuilder`](MergeInsertBuilder.md)
|
||||
|
||||
## Methods
|
||||
|
||||
### execute()
|
||||
|
||||
```ts
|
||||
execute(data): Promise<void>
|
||||
```
|
||||
|
||||
Executes the merge insert operation
|
||||
|
||||
Nothing is returned but the `Table` is updated
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **data**: [`Data`](../type-aliases/Data.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
***
|
||||
|
||||
### whenMatchedUpdateAll()
|
||||
|
||||
```ts
|
||||
whenMatchedUpdateAll(options?): MergeInsertBuilder
|
||||
```
|
||||
|
||||
Rows that exist in both the source table (new data) and
|
||||
the target table (old data) will be updated, replacing
|
||||
the old row with the corresponding matching row.
|
||||
|
||||
If there are multiple matches then the behavior is undefined.
|
||||
Currently this causes multiple copies of the row to be created
|
||||
but that behavior is subject to change.
|
||||
|
||||
An optional condition may be specified. If it is, then only
|
||||
matched rows that satisfy the condition will be updated. Any
|
||||
rows that do not satisfy the condition will be left as they
|
||||
are. Failing to satisfy the condition does not cause a
|
||||
"matched row" to become a "not matched" row.
|
||||
|
||||
The condition should be an SQL string. Use the prefix
|
||||
target. to refer to rows in the target table (old data)
|
||||
and the prefix source. to refer to rows in the source
|
||||
table (new data).
|
||||
|
||||
For example, "target.last_update < source.last_update"
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**
|
||||
|
||||
* **options.where?**: `string`
|
||||
|
||||
#### Returns
|
||||
|
||||
[`MergeInsertBuilder`](MergeInsertBuilder.md)
|
||||
|
||||
***
|
||||
|
||||
### whenNotMatchedBySourceDelete()
|
||||
|
||||
```ts
|
||||
whenNotMatchedBySourceDelete(options?): MergeInsertBuilder
|
||||
```
|
||||
|
||||
Rows that exist only in the target table (old data) will be
|
||||
deleted. An optional condition can be provided to limit what
|
||||
data is deleted.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**
|
||||
|
||||
* **options.where?**: `string`
|
||||
An optional condition to limit what data is deleted
|
||||
|
||||
#### Returns
|
||||
|
||||
[`MergeInsertBuilder`](MergeInsertBuilder.md)
|
||||
|
||||
***
|
||||
|
||||
### whenNotMatchedInsertAll()
|
||||
|
||||
```ts
|
||||
whenNotMatchedInsertAll(): MergeInsertBuilder
|
||||
```
|
||||
|
||||
Rows that exist only in the source table (new data) should
|
||||
be inserted into the target table.
|
||||
|
||||
#### Returns
|
||||
|
||||
[`MergeInsertBuilder`](MergeInsertBuilder.md)
|
||||
@@ -8,30 +8,14 @@
|
||||
|
||||
A builder for LanceDB queries.
|
||||
|
||||
## See
|
||||
|
||||
[Table#query](Table.md#query), [Table#search](Table.md#search)
|
||||
|
||||
## Extends
|
||||
|
||||
- [`QueryBase`](QueryBase.md)<`NativeQuery`>
|
||||
|
||||
## Constructors
|
||||
|
||||
### new Query()
|
||||
|
||||
```ts
|
||||
new Query(tbl): Query
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **tbl**: `Table`
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Query`](Query.md)
|
||||
|
||||
#### Overrides
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`constructor`](QueryBase.md#constructors)
|
||||
|
||||
## Properties
|
||||
|
||||
### inner
|
||||
@@ -46,42 +30,6 @@ protected inner: Query | Promise<Query>;
|
||||
|
||||
## Methods
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
`AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`[asyncIterator]`](QueryBase.md#%5Basynciterator%5D)
|
||||
|
||||
***
|
||||
|
||||
### doCall()
|
||||
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
`void`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`doCall`](QueryBase.md#docall)
|
||||
|
||||
***
|
||||
|
||||
### execute()
|
||||
|
||||
```ts
|
||||
@@ -92,7 +40,7 @@ Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -161,7 +109,7 @@ fastSearch(): this
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
Use [Table#optimize](Table.md#optimize) to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -189,7 +137,7 @@ A filter statement to be applied to this query.
|
||||
|
||||
`this`
|
||||
|
||||
#### Alias
|
||||
#### See
|
||||
|
||||
where
|
||||
|
||||
@@ -213,7 +161,7 @@ fullTextSearch(query, options?): this
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
* **options?**: `Partial`<[`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -250,26 +198,6 @@ called then every valid row from the table will be returned.
|
||||
|
||||
***
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`RecordBatchIterator`>
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`nativeExecute`](QueryBase.md#nativeexecute)
|
||||
|
||||
***
|
||||
|
||||
### nearestTo()
|
||||
|
||||
```ts
|
||||
@@ -294,7 +222,7 @@ If there is more than one vector column you must use
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **vector**: `IntoVector`
|
||||
* **vector**: [`IntoVector`](../type-aliases/IntoVector.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -427,7 +355,7 @@ Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -449,7 +377,7 @@ Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -8,6 +8,11 @@
|
||||
|
||||
Common methods supported by all query types
|
||||
|
||||
## See
|
||||
|
||||
- [Query](Query.md)
|
||||
- [VectorQuery](VectorQuery.md)
|
||||
|
||||
## Extended by
|
||||
|
||||
- [`Query`](Query.md)
|
||||
@@ -21,22 +26,6 @@ Common methods supported by all query types
|
||||
|
||||
- `AsyncIterable`<`RecordBatch`>
|
||||
|
||||
## Constructors
|
||||
|
||||
### new QueryBase()
|
||||
|
||||
```ts
|
||||
protected new QueryBase<NativeQueryType>(inner): QueryBase<NativeQueryType>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **inner**: `NativeQueryType` \| `Promise`<`NativeQueryType`>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`QueryBase`](QueryBase.md)<`NativeQueryType`>
|
||||
|
||||
## Properties
|
||||
|
||||
### inner
|
||||
@@ -47,38 +36,6 @@ protected inner: NativeQueryType | Promise<NativeQueryType>;
|
||||
|
||||
## Methods
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
`AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
|
||||
#### Implementation of
|
||||
|
||||
`AsyncIterable.[asyncIterator]`
|
||||
|
||||
***
|
||||
|
||||
### doCall()
|
||||
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
`void`
|
||||
|
||||
***
|
||||
|
||||
### execute()
|
||||
|
||||
```ts
|
||||
@@ -89,7 +46,7 @@ Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -150,7 +107,7 @@ fastSearch(): this
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
Use [Table#optimize](Table.md#optimize) to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -174,7 +131,7 @@ A filter statement to be applied to this query.
|
||||
|
||||
`this`
|
||||
|
||||
#### Alias
|
||||
#### See
|
||||
|
||||
where
|
||||
|
||||
@@ -194,7 +151,7 @@ fullTextSearch(query, options?): this
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
* **options?**: `Partial`<[`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -223,22 +180,6 @@ called then every valid row from the table will be returned.
|
||||
|
||||
***
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`RecordBatchIterator`>
|
||||
|
||||
***
|
||||
|
||||
### offset()
|
||||
|
||||
```ts
|
||||
@@ -314,7 +255,7 @@ Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -332,7 +273,7 @@ Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -14,21 +14,13 @@ will be freed when the Table is garbage collected. To eagerly free the cache yo
|
||||
can call the `close` method. Once the Table is closed, it cannot be used for any
|
||||
further operations.
|
||||
|
||||
Tables are created using the methods [Connection#createTable](Connection.md#createtable)
|
||||
and [Connection#createEmptyTable](Connection.md#createemptytable). Existing tables are opened
|
||||
using [Connection#openTable](Connection.md#opentable).
|
||||
|
||||
Closing a table is optional. If not closed, it will be closed when it is garbage
|
||||
collected.
|
||||
|
||||
## Constructors
|
||||
|
||||
### new Table()
|
||||
|
||||
```ts
|
||||
new Table(): Table
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
[`Table`](Table.md)
|
||||
|
||||
## Accessors
|
||||
|
||||
### name
|
||||
@@ -216,6 +208,9 @@ Indices on vector columns will speed up vector searches.
|
||||
Indices on scalar columns will speed up filtering (in both
|
||||
vector and non-vector searches)
|
||||
|
||||
We currently don't support custom named indexes.
|
||||
The index name will always be `${column}_idx`.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **column**: `string`
|
||||
@@ -226,11 +221,6 @@ vector and non-vector searches)
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
#### Note
|
||||
|
||||
We currently don't support custom named indexes,
|
||||
The index name will always be `${column}_idx`
|
||||
|
||||
#### Examples
|
||||
|
||||
```ts
|
||||
@@ -317,6 +307,28 @@ then call ``cleanup_files`` to remove the old files.
|
||||
|
||||
***
|
||||
|
||||
### dropIndex()
|
||||
|
||||
```ts
|
||||
abstract dropIndex(name): Promise<void>
|
||||
```
|
||||
|
||||
Drop an index from the table.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **name**: `string`
|
||||
The name of the index.
|
||||
This does not delete the index from disk, it just removes it from the table.
|
||||
To delete the index, run [Table#optimize](Table.md#optimize) after dropping the index.
|
||||
Use [Table.listIndices](Table.md#listindices) to find the names of the indices.
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`void`>
|
||||
|
||||
***
|
||||
|
||||
### indexStats()
|
||||
|
||||
```ts
|
||||
@@ -336,6 +348,8 @@ List all the stats of a specified index
|
||||
|
||||
The stats of the index. If the index does not exist, it will return undefined
|
||||
|
||||
Use [Table.listIndices](Table.md#listindices) to find the names of the indices.
|
||||
|
||||
***
|
||||
|
||||
### isOpen()
|
||||
@@ -376,7 +390,7 @@ List all the versions of the table
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`Version`[]>
|
||||
`Promise`<[`Version`](../interfaces/Version.md)[]>
|
||||
|
||||
***
|
||||
|
||||
@@ -392,7 +406,7 @@ abstract mergeInsert(on): MergeInsertBuilder
|
||||
|
||||
#### Returns
|
||||
|
||||
`MergeInsertBuilder`
|
||||
[`MergeInsertBuilder`](MergeInsertBuilder.md)
|
||||
|
||||
***
|
||||
|
||||
@@ -436,7 +450,7 @@ Modeled after ``VACUUM`` in PostgreSQL.
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`OptimizeStats`>
|
||||
`Promise`<[`OptimizeStats`](../interfaces/OptimizeStats.md)>
|
||||
|
||||
***
|
||||
|
||||
@@ -553,7 +567,7 @@ Get the schema of the table.
|
||||
abstract search(
|
||||
query,
|
||||
queryType?,
|
||||
ftsColumns?): VectorQuery | Query
|
||||
ftsColumns?): Query | VectorQuery
|
||||
```
|
||||
|
||||
Create a search query to find the nearest neighbors
|
||||
@@ -561,7 +575,7 @@ of the given query
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string` \| `IntoVector`
|
||||
* **query**: `string` \| [`IntoVector`](../type-aliases/IntoVector.md)
|
||||
the query, a vector or string
|
||||
|
||||
* **queryType?**: `string`
|
||||
@@ -575,7 +589,7 @@ of the given query
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md) \| [`Query`](Query.md)
|
||||
[`Query`](Query.md) \| [`VectorQuery`](VectorQuery.md)
|
||||
|
||||
***
|
||||
|
||||
@@ -694,7 +708,7 @@ by `query`.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **vector**: `IntoVector`
|
||||
* **vector**: [`IntoVector`](../type-aliases/IntoVector.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -717,38 +731,3 @@ Retrieve the version of the table
|
||||
#### Returns
|
||||
|
||||
`Promise`<`number`>
|
||||
|
||||
***
|
||||
|
||||
### parseTableData()
|
||||
|
||||
```ts
|
||||
static parseTableData(
|
||||
data,
|
||||
options?,
|
||||
streaming?): Promise<object>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
|
||||
|
||||
* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
|
||||
|
||||
* **streaming?**: `boolean` = `false`
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`object`>
|
||||
|
||||
##### buf
|
||||
|
||||
```ts
|
||||
buf: Buffer;
|
||||
```
|
||||
|
||||
##### mode
|
||||
|
||||
```ts
|
||||
mode: string;
|
||||
```
|
||||
|
||||
@@ -10,30 +10,14 @@ A builder used to construct a vector search
|
||||
|
||||
This builder can be reused to execute the query many times.
|
||||
|
||||
## See
|
||||
|
||||
[Query#nearestTo](Query.md#nearestto)
|
||||
|
||||
## Extends
|
||||
|
||||
- [`QueryBase`](QueryBase.md)<`NativeVectorQuery`>
|
||||
|
||||
## Constructors
|
||||
|
||||
### new VectorQuery()
|
||||
|
||||
```ts
|
||||
new VectorQuery(inner): VectorQuery
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **inner**: `VectorQuery` \| `Promise`<`VectorQuery`>
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
#### Overrides
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`constructor`](QueryBase.md#constructors)
|
||||
|
||||
## Properties
|
||||
|
||||
### inner
|
||||
@@ -48,22 +32,6 @@ protected inner: VectorQuery | Promise<VectorQuery>;
|
||||
|
||||
## Methods
|
||||
|
||||
### \[asyncIterator\]()
|
||||
|
||||
```ts
|
||||
asyncIterator: AsyncIterator<RecordBatch<any>, any, undefined>
|
||||
```
|
||||
|
||||
#### Returns
|
||||
|
||||
`AsyncIterator`<`RecordBatch`<`any`>, `any`, `undefined`>
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`[asyncIterator]`](QueryBase.md#%5Basynciterator%5D)
|
||||
|
||||
***
|
||||
|
||||
### addQueryVector()
|
||||
|
||||
```ts
|
||||
@@ -72,7 +40,7 @@ addQueryVector(vector): VectorQuery
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **vector**: `IntoVector`
|
||||
* **vector**: [`IntoVector`](../type-aliases/IntoVector.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -128,6 +96,24 @@ whose data type is a fixed-size-list of floats.
|
||||
|
||||
***
|
||||
|
||||
### distanceRange()
|
||||
|
||||
```ts
|
||||
distanceRange(lowerBound?, upperBound?): VectorQuery
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **lowerBound?**: `number`
|
||||
|
||||
* **upperBound?**: `number`
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
***
|
||||
|
||||
### distanceType()
|
||||
|
||||
```ts
|
||||
@@ -161,26 +147,6 @@ By default "l2" is used.
|
||||
|
||||
***
|
||||
|
||||
### doCall()
|
||||
|
||||
```ts
|
||||
protected doCall(fn): void
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **fn**
|
||||
|
||||
#### Returns
|
||||
|
||||
`void`
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`doCall`](QueryBase.md#docall)
|
||||
|
||||
***
|
||||
|
||||
### ef()
|
||||
|
||||
```ts
|
||||
@@ -215,7 +181,7 @@ Execute the query and return the results as an
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -284,7 +250,7 @@ fastSearch(): this
|
||||
Skip searching un-indexed data. This can make search faster, but will miss
|
||||
any data that is not yet indexed.
|
||||
|
||||
Use lancedb.Table#optimize to index all un-indexed data.
|
||||
Use [Table#optimize](Table.md#optimize) to index all un-indexed data.
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -312,7 +278,7 @@ A filter statement to be applied to this query.
|
||||
|
||||
`this`
|
||||
|
||||
#### Alias
|
||||
#### See
|
||||
|
||||
where
|
||||
|
||||
@@ -336,7 +302,7 @@ fullTextSearch(query, options?): this
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **options?**: `Partial`<`FullTextSearchOptions`>
|
||||
* **options?**: `Partial`<[`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -373,26 +339,6 @@ called then every valid row from the table will be returned.
|
||||
|
||||
***
|
||||
|
||||
### nativeExecute()
|
||||
|
||||
```ts
|
||||
protected nativeExecute(options?): Promise<RecordBatchIterator>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`RecordBatchIterator`>
|
||||
|
||||
#### Inherited from
|
||||
|
||||
[`QueryBase`](QueryBase.md).[`nativeExecute`](QueryBase.md#nativeexecute)
|
||||
|
||||
***
|
||||
|
||||
### nprobes()
|
||||
|
||||
```ts
|
||||
@@ -528,6 +474,22 @@ distance between the query vector and the actual uncompressed vector.
|
||||
|
||||
***
|
||||
|
||||
### rerank()
|
||||
|
||||
```ts
|
||||
rerank(reranker): VectorQuery
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **reranker**: [`Reranker`](../namespaces/rerankers/interfaces/Reranker.md)
|
||||
|
||||
#### Returns
|
||||
|
||||
[`VectorQuery`](VectorQuery.md)
|
||||
|
||||
***
|
||||
|
||||
### select()
|
||||
|
||||
```ts
|
||||
@@ -591,7 +553,7 @@ Collect the results as an array of objects.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
@@ -613,7 +575,7 @@ Collect the results as an Arrow
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `Partial`<`QueryExecutionOptions`>
|
||||
* **options?**: `Partial`<[`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md)>
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / WriteMode
|
||||
|
||||
# Enumeration: WriteMode
|
||||
|
||||
Write mode for writing a table.
|
||||
|
||||
## Enumeration Members
|
||||
|
||||
### Append
|
||||
|
||||
```ts
|
||||
Append: "Append";
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### Create
|
||||
|
||||
```ts
|
||||
Create: "Create";
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### Overwrite
|
||||
|
||||
```ts
|
||||
Overwrite: "Overwrite";
|
||||
```
|
||||
@@ -6,10 +6,10 @@
|
||||
|
||||
# Function: connect()
|
||||
|
||||
## connect(uri, opts)
|
||||
## connect(uri, options)
|
||||
|
||||
```ts
|
||||
function connect(uri, opts?): Promise<Connection>
|
||||
function connect(uri, options?): Promise<Connection>
|
||||
```
|
||||
|
||||
Connect to a LanceDB instance at the given URI.
|
||||
@@ -26,7 +26,8 @@ Accepted formats:
|
||||
The uri of the database. If the database uri starts
|
||||
with `db://` then it connects to a remote database.
|
||||
|
||||
* **opts?**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)>
|
||||
* **options?**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)>
|
||||
The options to use when connecting to the database
|
||||
|
||||
### Returns
|
||||
|
||||
@@ -49,10 +50,10 @@ const conn = await connect(
|
||||
});
|
||||
```
|
||||
|
||||
## connect(opts)
|
||||
## connect(options)
|
||||
|
||||
```ts
|
||||
function connect(opts): Promise<Connection>
|
||||
function connect(options): Promise<Connection>
|
||||
```
|
||||
|
||||
Connect to a LanceDB instance at the given URI.
|
||||
@@ -65,7 +66,8 @@ Accepted formats:
|
||||
|
||||
### Parameters
|
||||
|
||||
* **opts**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)> & `object`
|
||||
* **options**: `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)> & `object`
|
||||
The options to use when connecting to the database
|
||||
|
||||
### Returns
|
||||
|
||||
|
||||
@@ -22,8 +22,6 @@ when creating a table or adding data to it)
|
||||
This function converts an array of Record<String, any> (row-major JS objects)
|
||||
to an Arrow Table (a columnar structure)
|
||||
|
||||
Note that it currently does not support nulls.
|
||||
|
||||
If a schema is provided then it will be used to determine the resulting array
|
||||
types. Fields will also be reordered to fit the order defined by the schema.
|
||||
|
||||
@@ -31,6 +29,9 @@ If a schema is not provided then the types will be inferred and the field order
|
||||
will be controlled by the order of properties in the first record. If a type
|
||||
is inferred it will always be nullable.
|
||||
|
||||
If not all fields are found in the data, then a subset of the schema will be
|
||||
returned.
|
||||
|
||||
If the input is empty then a schema must be provided to create an empty table.
|
||||
|
||||
When a schema is not specified then data types will be inferred. The inference
|
||||
@@ -38,6 +39,7 @@ rules are as follows:
|
||||
|
||||
- boolean => Bool
|
||||
- number => Float64
|
||||
- bigint => Int64
|
||||
- String => Utf8
|
||||
- Buffer => Binary
|
||||
- Record<String, any> => Struct
|
||||
@@ -57,6 +59,7 @@ rules are as follows:
|
||||
|
||||
## Example
|
||||
|
||||
```ts
|
||||
import { fromTableToBuffer, makeArrowTable } from "../arrow";
|
||||
import { Field, FixedSizeList, Float16, Float32, Int32, Schema } from "apache-arrow";
|
||||
|
||||
@@ -78,7 +81,6 @@ The `vectorColumns` option can be used to support other vector column
|
||||
names and data types.
|
||||
|
||||
```ts
|
||||
|
||||
const schema = new Schema([
|
||||
new Field("a", new Float64()),
|
||||
new Field("b", new Float64()),
|
||||
@@ -97,8 +99,7 @@ const schema = new Schema([
|
||||
|
||||
You can specify the vector column types and names using the options as well
|
||||
|
||||
```typescript
|
||||
|
||||
```ts
|
||||
const schema = new Schema([
|
||||
new Field('a', new Float64()),
|
||||
new Field('b', new Float64()),
|
||||
|
||||
@@ -7,16 +7,14 @@
|
||||
## Namespaces
|
||||
|
||||
- [embedding](namespaces/embedding/README.md)
|
||||
|
||||
## Enumerations
|
||||
|
||||
- [WriteMode](enumerations/WriteMode.md)
|
||||
- [rerankers](namespaces/rerankers/README.md)
|
||||
|
||||
## Classes
|
||||
|
||||
- [Connection](classes/Connection.md)
|
||||
- [Index](classes/Index.md)
|
||||
- [MakeArrowTableOptions](classes/MakeArrowTableOptions.md)
|
||||
- [MergeInsertBuilder](classes/MergeInsertBuilder.md)
|
||||
- [Query](classes/Query.md)
|
||||
- [QueryBase](classes/QueryBase.md)
|
||||
- [RecordBatchIterator](classes/RecordBatchIterator.md)
|
||||
@@ -30,23 +28,39 @@
|
||||
- [AddDataOptions](interfaces/AddDataOptions.md)
|
||||
- [ClientConfig](interfaces/ClientConfig.md)
|
||||
- [ColumnAlteration](interfaces/ColumnAlteration.md)
|
||||
- [CompactionStats](interfaces/CompactionStats.md)
|
||||
- [ConnectionOptions](interfaces/ConnectionOptions.md)
|
||||
- [CreateTableOptions](interfaces/CreateTableOptions.md)
|
||||
- [ExecutableQuery](interfaces/ExecutableQuery.md)
|
||||
- [FtsOptions](interfaces/FtsOptions.md)
|
||||
- [FullTextSearchOptions](interfaces/FullTextSearchOptions.md)
|
||||
- [HnswPqOptions](interfaces/HnswPqOptions.md)
|
||||
- [HnswSqOptions](interfaces/HnswSqOptions.md)
|
||||
- [IndexConfig](interfaces/IndexConfig.md)
|
||||
- [IndexOptions](interfaces/IndexOptions.md)
|
||||
- [IndexStatistics](interfaces/IndexStatistics.md)
|
||||
- [IvfPqOptions](interfaces/IvfPqOptions.md)
|
||||
- [OpenTableOptions](interfaces/OpenTableOptions.md)
|
||||
- [OptimizeOptions](interfaces/OptimizeOptions.md)
|
||||
- [OptimizeStats](interfaces/OptimizeStats.md)
|
||||
- [QueryExecutionOptions](interfaces/QueryExecutionOptions.md)
|
||||
- [RemovalStats](interfaces/RemovalStats.md)
|
||||
- [RetryConfig](interfaces/RetryConfig.md)
|
||||
- [TableNamesOptions](interfaces/TableNamesOptions.md)
|
||||
- [TimeoutConfig](interfaces/TimeoutConfig.md)
|
||||
- [UpdateOptions](interfaces/UpdateOptions.md)
|
||||
- [WriteOptions](interfaces/WriteOptions.md)
|
||||
- [Version](interfaces/Version.md)
|
||||
|
||||
## Type Aliases
|
||||
|
||||
- [Data](type-aliases/Data.md)
|
||||
- [DataLike](type-aliases/DataLike.md)
|
||||
- [FieldLike](type-aliases/FieldLike.md)
|
||||
- [IntoSql](type-aliases/IntoSql.md)
|
||||
- [IntoVector](type-aliases/IntoVector.md)
|
||||
- [RecordBatchLike](type-aliases/RecordBatchLike.md)
|
||||
- [SchemaLike](type-aliases/SchemaLike.md)
|
||||
- [TableLike](type-aliases/TableLike.md)
|
||||
|
||||
## Functions
|
||||
|
||||
|
||||
@@ -8,6 +8,14 @@
|
||||
|
||||
## Properties
|
||||
|
||||
### extraHeaders?
|
||||
|
||||
```ts
|
||||
optional extraHeaders: Record<string, string>;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### retryConfig?
|
||||
|
||||
```ts
|
||||
|
||||
49
docs/src/js/interfaces/CompactionStats.md
Normal file
49
docs/src/js/interfaces/CompactionStats.md
Normal file
@@ -0,0 +1,49 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / CompactionStats
|
||||
|
||||
# Interface: CompactionStats
|
||||
|
||||
Statistics about a compaction operation.
|
||||
|
||||
## Properties
|
||||
|
||||
### filesAdded
|
||||
|
||||
```ts
|
||||
filesAdded: number;
|
||||
```
|
||||
|
||||
The number of new, compacted data files added
|
||||
|
||||
***
|
||||
|
||||
### filesRemoved
|
||||
|
||||
```ts
|
||||
filesRemoved: number;
|
||||
```
|
||||
|
||||
The number of data files removed
|
||||
|
||||
***
|
||||
|
||||
### fragmentsAdded
|
||||
|
||||
```ts
|
||||
fragmentsAdded: number;
|
||||
```
|
||||
|
||||
The number of new, compacted fragments added
|
||||
|
||||
***
|
||||
|
||||
### fragmentsRemoved
|
||||
|
||||
```ts
|
||||
fragmentsRemoved: number;
|
||||
```
|
||||
|
||||
The number of fragments removed
|
||||
@@ -8,7 +8,7 @@
|
||||
|
||||
## Properties
|
||||
|
||||
### dataStorageVersion?
|
||||
### ~~dataStorageVersion?~~
|
||||
|
||||
```ts
|
||||
optional dataStorageVersion: string;
|
||||
@@ -19,6 +19,10 @@ The version of the data storage format to use.
|
||||
The default is `stable`.
|
||||
Set to "legacy" to use the old format.
|
||||
|
||||
#### Deprecated
|
||||
|
||||
Pass `new_table_data_storage_version` to storageOptions instead.
|
||||
|
||||
***
|
||||
|
||||
### embeddingFunction?
|
||||
@@ -29,7 +33,7 @@ optional embeddingFunction: EmbeddingFunctionConfig;
|
||||
|
||||
***
|
||||
|
||||
### enableV2ManifestPaths?
|
||||
### ~~enableV2ManifestPaths?~~
|
||||
|
||||
```ts
|
||||
optional enableV2ManifestPaths: boolean;
|
||||
@@ -41,6 +45,10 @@ turning this on will make the dataset unreadable for older versions
|
||||
of LanceDB (prior to 0.10.0). To migrate an existing dataset, instead
|
||||
use the LocalTable#migrateManifestPathsV2 method.
|
||||
|
||||
#### Deprecated
|
||||
|
||||
Pass `new_table_enable_v2_manifest_paths` to storageOptions instead.
|
||||
|
||||
***
|
||||
|
||||
### existOk
|
||||
@@ -90,17 +98,3 @@ Options already set on the connection will be inherited by the table,
|
||||
but can be overridden here.
|
||||
|
||||
The available options are described at https://lancedb.github.io/lancedb/guides/storage/
|
||||
|
||||
***
|
||||
|
||||
### useLegacyFormat?
|
||||
|
||||
```ts
|
||||
optional useLegacyFormat: boolean;
|
||||
```
|
||||
|
||||
If true then data files will be written with the legacy format
|
||||
|
||||
The default is false.
|
||||
|
||||
Deprecated. Use data storage version instead.
|
||||
|
||||
103
docs/src/js/interfaces/FtsOptions.md
Normal file
103
docs/src/js/interfaces/FtsOptions.md
Normal file
@@ -0,0 +1,103 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / FtsOptions
|
||||
|
||||
# Interface: FtsOptions
|
||||
|
||||
Options to create a full text search index
|
||||
|
||||
## Properties
|
||||
|
||||
### asciiFolding?
|
||||
|
||||
```ts
|
||||
optional asciiFolding: boolean;
|
||||
```
|
||||
|
||||
whether to fold non-ASCII characters (e.g. accented characters) into their closest ASCII equivalent
|
||||
|
||||
***
|
||||
|
||||
### baseTokenizer?
|
||||
|
||||
```ts
|
||||
optional baseTokenizer: "raw" | "simple" | "whitespace";
|
||||
```
|
||||
|
||||
The tokenizer to use when building the index.
|
||||
The default is "simple".
|
||||
|
||||
The following tokenizers are available:
|
||||
|
||||
"simple" - Simple tokenizer. This tokenizer splits the text into tokens using whitespace and punctuation as a delimiter.
|
||||
|
||||
"whitespace" - Whitespace tokenizer. This tokenizer splits the text into tokens using whitespace as a delimiter.
|
||||
|
||||
"raw" - Raw tokenizer. This tokenizer does not split the text into tokens and indexes the entire text as a single token.
|
||||
|
||||
***
|
||||
|
||||
### language?
|
||||
|
||||
```ts
|
||||
optional language: string;
|
||||
```
|
||||
|
||||
language for stemming and stop words
|
||||
this is only used when `stem` or `removeStopWords` is true
|
||||
|
||||
***
|
||||
|
||||
### lowercase?
|
||||
|
||||
```ts
|
||||
optional lowercase: boolean;
|
||||
```
|
||||
|
||||
whether to lowercase tokens
|
||||
|
||||
***
|
||||
|
||||
### maxTokenLength?
|
||||
|
||||
```ts
|
||||
optional maxTokenLength: number;
|
||||
```
|
||||
|
||||
maximum token length
|
||||
tokens longer than this length will be ignored
|
||||
|
||||
***
|
||||
|
||||
### removeStopWords?
|
||||
|
||||
```ts
|
||||
optional removeStopWords: boolean;
|
||||
```
|
||||
|
||||
whether to remove stop words
|
||||
|
||||
***
|
||||
|
||||
### stem?
|
||||
|
||||
```ts
|
||||
optional stem: boolean;
|
||||
```
|
||||
|
||||
whether to stem tokens
|
||||
|
||||
***
|
||||
|
||||
### withPosition?
|
||||
|
||||
```ts
|
||||
optional withPosition: boolean;
|
||||
```
|
||||
|
||||
Whether to build the index with positions.
|
||||
True by default.
|
||||
If set to false, the index will not store the positions of the tokens in the text,
|
||||
which will make the index smaller and faster to build, but will not support phrase queries.
|
||||
22
docs/src/js/interfaces/FullTextSearchOptions.md
Normal file
22
docs/src/js/interfaces/FullTextSearchOptions.md
Normal file
@@ -0,0 +1,22 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / FullTextSearchOptions
|
||||
|
||||
# Interface: FullTextSearchOptions
|
||||
|
||||
Options that control the behavior of a full text search
|
||||
|
||||
## Properties
|
||||
|
||||
### columns?
|
||||
|
||||
```ts
|
||||
optional columns: string | string[];
|
||||
```
|
||||
|
||||
The columns to search
|
||||
|
||||
If not specified, all indexed columns will be searched.
|
||||
For now, only one column can be searched.
|
||||
149
docs/src/js/interfaces/HnswPqOptions.md
Normal file
149
docs/src/js/interfaces/HnswPqOptions.md
Normal file
@@ -0,0 +1,149 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / HnswPqOptions
|
||||
|
||||
# Interface: HnswPqOptions
|
||||
|
||||
Options to create an `HNSW_PQ` index
|
||||
|
||||
## Properties
|
||||
|
||||
### distanceType?
|
||||
|
||||
```ts
|
||||
optional distanceType: "l2" | "cosine" | "dot";
|
||||
```
|
||||
|
||||
The distance metric used to train the index.
|
||||
|
||||
Default value is "l2".
|
||||
|
||||
The following distance types are available:
|
||||
|
||||
"l2" - Euclidean distance. This is a very common distance metric that
|
||||
accounts for both magnitude and direction when determining the distance
|
||||
between vectors. L2 distance has a range of [0, ∞).
|
||||
|
||||
"cosine" - Cosine distance. Cosine distance is a distance metric
|
||||
calculated from the cosine similarity between two vectors. Cosine
|
||||
similarity is a measure of similarity between two non-zero vectors of an
|
||||
inner product space. It is defined to equal the cosine of the angle
|
||||
between them. Unlike L2, the cosine distance is not affected by the
|
||||
magnitude of the vectors. Cosine distance has a range of [0, 2].
|
||||
|
||||
"dot" - Dot product. Dot distance is the dot product of two vectors. Dot
|
||||
distance has a range of (-∞, ∞). If the vectors are normalized (i.e. their
|
||||
L2 norm is 1), then dot distance is equivalent to the cosine distance.
|
||||
|
||||
***
|
||||
|
||||
### efConstruction?
|
||||
|
||||
```ts
|
||||
optional efConstruction: number;
|
||||
```
|
||||
|
||||
The number of candidates to evaluate during the construction of the HNSW graph.
|
||||
|
||||
The default value is 300.
|
||||
|
||||
This value controls the tradeoff between build speed and accuracy.
|
||||
The higher the value the more accurate the build but the slower it will be.
|
||||
150 to 300 is the typical range. 100 is a minimum for good quality search
|
||||
results. In most cases, there is no benefit to setting this higher than 500.
|
||||
This value should be set to a value that is not less than `ef` in the search phase.
|
||||
|
||||
***
|
||||
|
||||
### m?
|
||||
|
||||
```ts
|
||||
optional m: number;
|
||||
```
|
||||
|
||||
The number of neighbors to select for each vector in the HNSW graph.
|
||||
|
||||
The default value is 20.
|
||||
|
||||
This value controls the tradeoff between search speed and accuracy.
|
||||
The higher the value the more accurate the search but the slower it will be.
|
||||
|
||||
***
|
||||
|
||||
### maxIterations?
|
||||
|
||||
```ts
|
||||
optional maxIterations: number;
|
||||
```
|
||||
|
||||
Max iterations to train kmeans.
|
||||
|
||||
The default value is 50.
|
||||
|
||||
When training an IVF index we use kmeans to calculate the partitions. This parameter
|
||||
controls how many iterations of kmeans to run.
|
||||
|
||||
Increasing this might improve the quality of the index but in most cases the parameter
|
||||
is unused because kmeans will converge with fewer iterations. The parameter is only
|
||||
used in cases where kmeans does not appear to converge. In those cases it is unlikely
|
||||
that setting this larger will lead to the index converging anyways.
|
||||
|
||||
***
|
||||
|
||||
### numPartitions?
|
||||
|
||||
```ts
|
||||
optional numPartitions: number;
|
||||
```
|
||||
|
||||
The number of IVF partitions to create.
|
||||
|
||||
For HNSW, we recommend a small number of partitions. Setting this to 1 works
|
||||
well for most tables. For very large tables, training just one HNSW graph
|
||||
will require too much memory. Each partition becomes its own HNSW graph, so
|
||||
setting this value higher reduces the peak memory use of training.
|
||||
|
||||
***
|
||||
|
||||
### numSubVectors?
|
||||
|
||||
```ts
|
||||
optional numSubVectors: number;
|
||||
```
|
||||
|
||||
Number of sub-vectors of PQ.
|
||||
|
||||
This value controls how much the vector is compressed during the quantization step.
|
||||
The more sub vectors there are the less the vector is compressed. The default is
|
||||
the dimension of the vector divided by 16. If the dimension is not evenly divisible
|
||||
by 16 we use the dimension divided by 8.
|
||||
|
||||
The above two cases are highly preferred. Having 8 or 16 values per subvector allows
|
||||
us to use efficient SIMD instructions.
|
||||
|
||||
If the dimension is not divisible by 8 then we use 1 subvector. This is not ideal and
|
||||
will likely result in poor performance.
|
||||
|
||||
***
|
||||
|
||||
### sampleRate?
|
||||
|
||||
```ts
|
||||
optional sampleRate: number;
|
||||
```
|
||||
|
||||
The rate used to calculate the number of training vectors for kmeans.
|
||||
|
||||
Default value is 256.
|
||||
|
||||
When an IVF index is trained, we need to calculate partitions. These are groups
|
||||
of vectors that are similar to each other. To do this we use an algorithm called kmeans.
|
||||
|
||||
Running kmeans on a large dataset can be slow. To speed this up we run kmeans on a
|
||||
random sample of the data. This parameter controls the size of the sample. The total
|
||||
number of vectors used to train the index is `sample_rate * num_partitions`.
|
||||
|
||||
Increasing this value might improve the quality of the index but in most cases the
|
||||
default should be sufficient.
|
||||
128
docs/src/js/interfaces/HnswSqOptions.md
Normal file
128
docs/src/js/interfaces/HnswSqOptions.md
Normal file
@@ -0,0 +1,128 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / HnswSqOptions
|
||||
|
||||
# Interface: HnswSqOptions
|
||||
|
||||
Options to create an `HNSW_SQ` index
|
||||
|
||||
## Properties
|
||||
|
||||
### distanceType?
|
||||
|
||||
```ts
|
||||
optional distanceType: "l2" | "cosine" | "dot";
|
||||
```
|
||||
|
||||
The distance metric used to train the index.
|
||||
|
||||
Default value is "l2".
|
||||
|
||||
The following distance types are available:
|
||||
|
||||
"l2" - Euclidean distance. This is a very common distance metric that
|
||||
accounts for both magnitude and direction when determining the distance
|
||||
between vectors. L2 distance has a range of [0, ∞).
|
||||
|
||||
"cosine" - Cosine distance. Cosine distance is a distance metric
|
||||
calculated from the cosine similarity between two vectors. Cosine
|
||||
similarity is a measure of similarity between two non-zero vectors of an
|
||||
inner product space. It is defined to equal the cosine of the angle
|
||||
between them. Unlike L2, the cosine distance is not affected by the
|
||||
magnitude of the vectors. Cosine distance has a range of [0, 2].
|
||||
|
||||
"dot" - Dot product. Dot distance is the dot product of two vectors. Dot
|
||||
distance has a range of (-∞, ∞). If the vectors are normalized (i.e. their
|
||||
L2 norm is 1), then dot distance is equivalent to the cosine distance.
|
||||
|
||||
***
|
||||
|
||||
### efConstruction?
|
||||
|
||||
```ts
|
||||
optional efConstruction: number;
|
||||
```
|
||||
|
||||
The number of candidates to evaluate during the construction of the HNSW graph.
|
||||
|
||||
The default value is 300.
|
||||
|
||||
This value controls the tradeoff between build speed and accuracy.
|
||||
The higher the value the more accurate the build but the slower it will be.
|
||||
150 to 300 is the typical range. 100 is a minimum for good quality search
|
||||
results. In most cases, there is no benefit to setting this higher than 500.
|
||||
This value should be set to a value that is not less than `ef` in the search phase.
|
||||
|
||||
***
|
||||
|
||||
### m?
|
||||
|
||||
```ts
|
||||
optional m: number;
|
||||
```
|
||||
|
||||
The number of neighbors to select for each vector in the HNSW graph.
|
||||
|
||||
The default value is 20.
|
||||
|
||||
This value controls the tradeoff between search speed and accuracy.
|
||||
The higher the value the more accurate the search but the slower it will be.
|
||||
|
||||
***
|
||||
|
||||
### maxIterations?
|
||||
|
||||
```ts
|
||||
optional maxIterations: number;
|
||||
```
|
||||
|
||||
Max iterations to train kmeans.
|
||||
|
||||
The default value is 50.
|
||||
|
||||
When training an IVF index we use kmeans to calculate the partitions. This parameter
|
||||
controls how many iterations of kmeans to run.
|
||||
|
||||
Increasing this might improve the quality of the index but in most cases the parameter
|
||||
is unused because kmeans will converge with fewer iterations. The parameter is only
|
||||
used in cases where kmeans does not appear to converge. In those cases it is unlikely
|
||||
that setting this larger will lead to the index converging anyways.
|
||||
|
||||
***
|
||||
|
||||
### numPartitions?
|
||||
|
||||
```ts
|
||||
optional numPartitions: number;
|
||||
```
|
||||
|
||||
The number of IVF partitions to create.
|
||||
|
||||
For HNSW, we recommend a small number of partitions. Setting this to 1 works
|
||||
well for most tables. For very large tables, training just one HNSW graph
|
||||
will require too much memory. Each partition becomes its own HNSW graph, so
|
||||
setting this value higher reduces the peak memory use of training.
|
||||
|
||||
***
|
||||
|
||||
### sampleRate?
|
||||
|
||||
```ts
|
||||
optional sampleRate: number;
|
||||
```
|
||||
|
||||
The rate used to calculate the number of training vectors for kmeans.
|
||||
|
||||
Default value is 256.
|
||||
|
||||
When an IVF index is trained, we need to calculate partitions. These are groups
|
||||
of vectors that are similar to each other. To do this we use an algorithm called kmeans.
|
||||
|
||||
Running kmeans on a large dataset can be slow. To speed this up we run kmeans on a
|
||||
random sample of the data. This parameter controls the size of the sample. The total
|
||||
number of vectors used to train the index is `sample_rate * num_partitions`.
|
||||
|
||||
Increasing this value might improve the quality of the index but in most cases the
|
||||
default should be sufficient.
|
||||
@@ -68,6 +68,21 @@ The default value is 50.
|
||||
|
||||
***
|
||||
|
||||
### numBits?
|
||||
|
||||
```ts
|
||||
optional numBits: number;
|
||||
```
|
||||
|
||||
Number of bits per sub-vector.
|
||||
|
||||
This value controls how much each subvector is compressed. The more bits the more
|
||||
accurate the index will be but the slower search. The default is 8 bits.
|
||||
|
||||
The number of bits must be 4 or 8.
|
||||
|
||||
***
|
||||
|
||||
### numPartitions?
|
||||
|
||||
```ts
|
||||
|
||||
40
docs/src/js/interfaces/OpenTableOptions.md
Normal file
40
docs/src/js/interfaces/OpenTableOptions.md
Normal file
@@ -0,0 +1,40 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / OpenTableOptions
|
||||
|
||||
# Interface: OpenTableOptions
|
||||
|
||||
## Properties
|
||||
|
||||
### indexCacheSize?
|
||||
|
||||
```ts
|
||||
optional indexCacheSize: number;
|
||||
```
|
||||
|
||||
Set the size of the index cache, specified as a number of entries
|
||||
|
||||
The exact meaning of an "entry" will depend on the type of index:
|
||||
- IVF: there is one entry for each IVF partition
|
||||
- BTREE: there is one entry for the entire index
|
||||
|
||||
This cache applies to the entire opened table, across all indices.
|
||||
Setting this value higher will increase performance on larger datasets
|
||||
at the expense of more RAM.
|
||||
|
||||
***
|
||||
|
||||
### storageOptions?
|
||||
|
||||
```ts
|
||||
optional storageOptions: Record<string, string>;
|
||||
```
|
||||
|
||||
Configuration for object storage.
|
||||
|
||||
Options already set on the connection will be inherited by the table,
|
||||
but can be overridden here.
|
||||
|
||||
The available options are described at https://lancedb.github.io/lancedb/guides/storage/
|
||||
29
docs/src/js/interfaces/OptimizeStats.md
Normal file
29
docs/src/js/interfaces/OptimizeStats.md
Normal file
@@ -0,0 +1,29 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / OptimizeStats
|
||||
|
||||
# Interface: OptimizeStats
|
||||
|
||||
Statistics about an optimize operation
|
||||
|
||||
## Properties
|
||||
|
||||
### compaction
|
||||
|
||||
```ts
|
||||
compaction: CompactionStats;
|
||||
```
|
||||
|
||||
Statistics about the compaction operation
|
||||
|
||||
***
|
||||
|
||||
### prune
|
||||
|
||||
```ts
|
||||
prune: RemovalStats;
|
||||
```
|
||||
|
||||
Statistics about the removal operation
|
||||
22
docs/src/js/interfaces/QueryExecutionOptions.md
Normal file
22
docs/src/js/interfaces/QueryExecutionOptions.md
Normal file
@@ -0,0 +1,22 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / QueryExecutionOptions
|
||||
|
||||
# Interface: QueryExecutionOptions
|
||||
|
||||
Options that control the behavior of a particular query execution
|
||||
|
||||
## Properties
|
||||
|
||||
### maxBatchLength?
|
||||
|
||||
```ts
|
||||
optional maxBatchLength: number;
|
||||
```
|
||||
|
||||
The maximum number of rows to return in a single batch
|
||||
|
||||
Batches may have fewer rows if the underlying data is stored
|
||||
in smaller chunks.
|
||||
29
docs/src/js/interfaces/RemovalStats.md
Normal file
29
docs/src/js/interfaces/RemovalStats.md
Normal file
@@ -0,0 +1,29 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / RemovalStats
|
||||
|
||||
# Interface: RemovalStats
|
||||
|
||||
Statistics about a cleanup operation
|
||||
|
||||
## Properties
|
||||
|
||||
### bytesRemoved
|
||||
|
||||
```ts
|
||||
bytesRemoved: number;
|
||||
```
|
||||
|
||||
The number of bytes removed
|
||||
|
||||
***
|
||||
|
||||
### oldVersionsRemoved
|
||||
|
||||
```ts
|
||||
oldVersionsRemoved: number;
|
||||
```
|
||||
|
||||
The number of old versions removed
|
||||
31
docs/src/js/interfaces/Version.md
Normal file
31
docs/src/js/interfaces/Version.md
Normal file
@@ -0,0 +1,31 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / Version
|
||||
|
||||
# Interface: Version
|
||||
|
||||
## Properties
|
||||
|
||||
### metadata
|
||||
|
||||
```ts
|
||||
metadata: Record<string, string>;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### timestamp
|
||||
|
||||
```ts
|
||||
timestamp: Date;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### version
|
||||
|
||||
```ts
|
||||
version: number;
|
||||
```
|
||||
@@ -1,19 +0,0 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / WriteOptions
|
||||
|
||||
# Interface: WriteOptions
|
||||
|
||||
Write options when creating a Table.
|
||||
|
||||
## Properties
|
||||
|
||||
### mode?
|
||||
|
||||
```ts
|
||||
optional mode: WriteMode;
|
||||
```
|
||||
|
||||
Write mode for writing to a table.
|
||||
@@ -17,6 +17,14 @@
|
||||
### Interfaces
|
||||
|
||||
- [EmbeddingFunctionConfig](interfaces/EmbeddingFunctionConfig.md)
|
||||
- [EmbeddingFunctionConstructor](interfaces/EmbeddingFunctionConstructor.md)
|
||||
- [EmbeddingFunctionCreate](interfaces/EmbeddingFunctionCreate.md)
|
||||
- [FieldOptions](interfaces/FieldOptions.md)
|
||||
- [FunctionOptions](interfaces/FunctionOptions.md)
|
||||
|
||||
### Type Aliases
|
||||
|
||||
- [CreateReturnType](type-aliases/CreateReturnType.md)
|
||||
|
||||
### Functions
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ An embedding function that automatically creates vector representation for a giv
|
||||
|
||||
• **T** = `any`
|
||||
|
||||
• **M** *extends* `FunctionOptions` = `FunctionOptions`
|
||||
• **M** *extends* [`FunctionOptions`](../interfaces/FunctionOptions.md) = [`FunctionOptions`](../interfaces/FunctionOptions.md)
|
||||
|
||||
## Constructors
|
||||
|
||||
@@ -118,16 +118,16 @@ sourceField is used in combination with `LanceSchema` to provide a declarative d
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
* **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
|
||||
The options for the field or the datatype
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]
|
||||
|
||||
#### See
|
||||
|
||||
lancedb.LanceSchema
|
||||
[LanceSchema](../functions/LanceSchema.md)
|
||||
|
||||
***
|
||||
|
||||
@@ -178,12 +178,13 @@ vectorField is used in combination with `LanceSchema` to provide a declarative d
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
|
||||
The options for the field
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]
|
||||
|
||||
#### See
|
||||
|
||||
lancedb.LanceSchema
|
||||
[LanceSchema](../functions/LanceSchema.md)
|
||||
|
||||
@@ -51,7 +51,7 @@ Fetch an embedding function by name
|
||||
|
||||
#### Type Parameters
|
||||
|
||||
• **T** *extends* [`EmbeddingFunction`](EmbeddingFunction.md)<`unknown`, `FunctionOptions`>
|
||||
• **T** *extends* [`EmbeddingFunction`](EmbeddingFunction.md)<`unknown`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>
|
||||
|
||||
#### Parameters
|
||||
|
||||
@@ -60,7 +60,7 @@ Fetch an embedding function by name
|
||||
|
||||
#### Returns
|
||||
|
||||
`undefined` \| `EmbeddingFunctionCreate`<`T`>
|
||||
`undefined` \| [`EmbeddingFunctionCreate`](../interfaces/EmbeddingFunctionCreate.md)<`T`>
|
||||
|
||||
***
|
||||
|
||||
@@ -104,7 +104,7 @@ Register an embedding function
|
||||
|
||||
#### Type Parameters
|
||||
|
||||
• **T** *extends* `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>> = `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>
|
||||
• **T** *extends* [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>> = [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>
|
||||
|
||||
#### Parameters
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ an abstract class for implementing embedding functions that take text as input
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **M** *extends* `FunctionOptions` = `FunctionOptions`
|
||||
• **M** *extends* [`FunctionOptions`](../interfaces/FunctionOptions.md) = [`FunctionOptions`](../interfaces/FunctionOptions.md)
|
||||
|
||||
## Constructors
|
||||
|
||||
@@ -158,11 +158,11 @@ sourceField is used in combination with `LanceSchema` to provide a declarative d
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]
|
||||
|
||||
#### See
|
||||
|
||||
lancedb.LanceSchema
|
||||
[LanceSchema](../functions/LanceSchema.md)
|
||||
|
||||
#### Overrides
|
||||
|
||||
@@ -221,15 +221,16 @@ vectorField is used in combination with `LanceSchema` to provide a declarative d
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
|
||||
* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
|
||||
The options for the field
|
||||
|
||||
#### Returns
|
||||
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
|
||||
[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]
|
||||
|
||||
#### See
|
||||
|
||||
lancedb.LanceSchema
|
||||
[LanceSchema](../functions/LanceSchema.md)
|
||||
|
||||
#### Inherited from
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ Create a schema with embedding functions.
|
||||
|
||||
## Parameters
|
||||
|
||||
* **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>]>
|
||||
* **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]>
|
||||
|
||||
## Returns
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ function register(name?): (ctor) => any
|
||||
|
||||
### Parameters
|
||||
|
||||
* **ctor**: `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>
|
||||
* **ctor**: [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>
|
||||
|
||||
### Returns
|
||||
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / EmbeddingFunctionConstructor
|
||||
|
||||
# Interface: EmbeddingFunctionConstructor<T>
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **T** *extends* [`EmbeddingFunction`](../classes/EmbeddingFunction.md) = [`EmbeddingFunction`](../classes/EmbeddingFunction.md)
|
||||
|
||||
## Constructors
|
||||
|
||||
### new EmbeddingFunctionConstructor()
|
||||
|
||||
```ts
|
||||
new EmbeddingFunctionConstructor(modelOptions?): T
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **modelOptions?**: `T`\[`"TOptions"`\]
|
||||
|
||||
#### Returns
|
||||
|
||||
`T`
|
||||
@@ -0,0 +1,27 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / EmbeddingFunctionCreate
|
||||
|
||||
# Interface: EmbeddingFunctionCreate<T>
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **T** *extends* [`EmbeddingFunction`](../classes/EmbeddingFunction.md)
|
||||
|
||||
## Methods
|
||||
|
||||
### create()
|
||||
|
||||
```ts
|
||||
create(options?): CreateReturnType<T>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **options?**: `T`\[`"TOptions"`\]
|
||||
|
||||
#### Returns
|
||||
|
||||
[`CreateReturnType`](../type-aliases/CreateReturnType.md)<`T`>
|
||||
27
docs/src/js/namespaces/embedding/interfaces/FieldOptions.md
Normal file
27
docs/src/js/namespaces/embedding/interfaces/FieldOptions.md
Normal file
@@ -0,0 +1,27 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / FieldOptions
|
||||
|
||||
# Interface: FieldOptions<T>
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **T** *extends* `DataType` = `DataType`
|
||||
|
||||
## Properties
|
||||
|
||||
### datatype
|
||||
|
||||
```ts
|
||||
datatype: T;
|
||||
```
|
||||
|
||||
***
|
||||
|
||||
### dims?
|
||||
|
||||
```ts
|
||||
optional dims: number;
|
||||
```
|
||||
@@ -0,0 +1,13 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / FunctionOptions
|
||||
|
||||
# Interface: FunctionOptions
|
||||
|
||||
Options for a given embedding function
|
||||
|
||||
## Indexable
|
||||
|
||||
\[`key`: `string`\]: `any`
|
||||
@@ -0,0 +1,15 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / CreateReturnType
|
||||
|
||||
# Type Alias: CreateReturnType<T>
|
||||
|
||||
```ts
|
||||
type CreateReturnType<T>: T extends object ? Promise<T> : T;
|
||||
```
|
||||
|
||||
## Type Parameters
|
||||
|
||||
• **T**
|
||||
17
docs/src/js/namespaces/rerankers/README.md
Normal file
17
docs/src/js/namespaces/rerankers/README.md
Normal file
@@ -0,0 +1,17 @@
|
||||
[**@lancedb/lancedb**](../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../globals.md) / rerankers
|
||||
|
||||
# rerankers
|
||||
|
||||
## Index
|
||||
|
||||
### Classes
|
||||
|
||||
- [RRFReranker](classes/RRFReranker.md)
|
||||
|
||||
### Interfaces
|
||||
|
||||
- [Reranker](interfaces/Reranker.md)
|
||||
48
docs/src/js/namespaces/rerankers/classes/RRFReranker.md
Normal file
48
docs/src/js/namespaces/rerankers/classes/RRFReranker.md
Normal file
@@ -0,0 +1,48 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [rerankers](../README.md) / RRFReranker
|
||||
|
||||
# Class: RRFReranker
|
||||
|
||||
Reranks the results using the Reciprocal Rank Fusion (RRF) algorithm.
|
||||
|
||||
## Methods
|
||||
|
||||
### rerankHybrid()
|
||||
|
||||
```ts
|
||||
rerankHybrid(
|
||||
query,
|
||||
vecResults,
|
||||
ftsResults): Promise<RecordBatch<any>>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **vecResults**: `RecordBatch`<`any`>
|
||||
|
||||
* **ftsResults**: `RecordBatch`<`any`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`RecordBatch`<`any`>>
|
||||
|
||||
***
|
||||
|
||||
### create()
|
||||
|
||||
```ts
|
||||
static create(k): Promise<RRFReranker>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **k**: `number` = `60`
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<[`RRFReranker`](RRFReranker.md)>
|
||||
30
docs/src/js/namespaces/rerankers/interfaces/Reranker.md
Normal file
30
docs/src/js/namespaces/rerankers/interfaces/Reranker.md
Normal file
@@ -0,0 +1,30 @@
|
||||
[**@lancedb/lancedb**](../../../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../../../globals.md) / [rerankers](../README.md) / Reranker
|
||||
|
||||
# Interface: Reranker
|
||||
|
||||
## Methods
|
||||
|
||||
### rerankHybrid()
|
||||
|
||||
```ts
|
||||
rerankHybrid(
|
||||
query,
|
||||
vecResults,
|
||||
ftsResults): Promise<RecordBatch<any>>
|
||||
```
|
||||
|
||||
#### Parameters
|
||||
|
||||
* **query**: `string`
|
||||
|
||||
* **vecResults**: `RecordBatch`<`any`>
|
||||
|
||||
* **ftsResults**: `RecordBatch`<`any`>
|
||||
|
||||
#### Returns
|
||||
|
||||
`Promise`<`RecordBatch`<`any`>>
|
||||
11
docs/src/js/type-aliases/DataLike.md
Normal file
11
docs/src/js/type-aliases/DataLike.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / DataLike
|
||||
|
||||
# Type Alias: DataLike
|
||||
|
||||
```ts
|
||||
type DataLike: Data | object;
|
||||
```
|
||||
11
docs/src/js/type-aliases/FieldLike.md
Normal file
11
docs/src/js/type-aliases/FieldLike.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / FieldLike
|
||||
|
||||
# Type Alias: FieldLike
|
||||
|
||||
```ts
|
||||
type FieldLike: Field | object;
|
||||
```
|
||||
19
docs/src/js/type-aliases/IntoSql.md
Normal file
19
docs/src/js/type-aliases/IntoSql.md
Normal file
@@ -0,0 +1,19 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / IntoSql
|
||||
|
||||
# Type Alias: IntoSql
|
||||
|
||||
```ts
|
||||
type IntoSql:
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| null
|
||||
| Date
|
||||
| ArrayBufferLike
|
||||
| Buffer
|
||||
| IntoSql[];
|
||||
```
|
||||
11
docs/src/js/type-aliases/IntoVector.md
Normal file
11
docs/src/js/type-aliases/IntoVector.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / IntoVector
|
||||
|
||||
# Type Alias: IntoVector
|
||||
|
||||
```ts
|
||||
type IntoVector: Float32Array | Float64Array | number[] | Promise<Float32Array | Float64Array | number[]>;
|
||||
```
|
||||
11
docs/src/js/type-aliases/RecordBatchLike.md
Normal file
11
docs/src/js/type-aliases/RecordBatchLike.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / RecordBatchLike
|
||||
|
||||
# Type Alias: RecordBatchLike
|
||||
|
||||
```ts
|
||||
type RecordBatchLike: RecordBatch | object;
|
||||
```
|
||||
11
docs/src/js/type-aliases/SchemaLike.md
Normal file
11
docs/src/js/type-aliases/SchemaLike.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / SchemaLike
|
||||
|
||||
# Type Alias: SchemaLike
|
||||
|
||||
```ts
|
||||
type SchemaLike: Schema | object;
|
||||
```
|
||||
11
docs/src/js/type-aliases/TableLike.md
Normal file
11
docs/src/js/type-aliases/TableLike.md
Normal file
@@ -0,0 +1,11 @@
|
||||
[**@lancedb/lancedb**](../README.md) • **Docs**
|
||||
|
||||
***
|
||||
|
||||
[@lancedb/lancedb](../globals.md) / TableLike
|
||||
|
||||
# Type Alias: TableLike
|
||||
|
||||
```ts
|
||||
type TableLike: ArrowTable | object;
|
||||
```
|
||||
@@ -1,17 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2023 LanceDB Developers
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
"""Dataset hf://poloclub/diffusiondb
|
||||
"""
|
||||
|
||||
@@ -114,14 +114,17 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"data = [\n",
|
||||
" {\"vector\": [1.1, 1.2], \"lat\": 45.5, \"long\": -122.7},\n",
|
||||
" {\"vector\": [0.2, 1.8], \"lat\": 40.1, \"long\": -74.1},\n",
|
||||
"]\n",
|
||||
"import pandas as pd\n",
|
||||
"\n",
|
||||
"db.create_table(\"table2\", data)\n",
|
||||
"\n",
|
||||
"db[\"table2\"].head() "
|
||||
"data = pd.DataFrame(\n",
|
||||
" {\n",
|
||||
" \"vector\": [[1.1, 1.2, 1.3, 1.4], [0.2, 1.8, 0.4, 3.6]],\n",
|
||||
" \"lat\": [45.5, 40.1],\n",
|
||||
" \"long\": [-122.7, -74.1],\n",
|
||||
" }\n",
|
||||
")\n",
|
||||
"db.create_table(\"my_table_pandas\", data)\n",
|
||||
"db[\"my_table_pandas\"].head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -164,7 +167,7 @@
|
||||
"import pyarrow as pa\n",
|
||||
"\n",
|
||||
"custom_schema = pa.schema([\n",
|
||||
"pa.field(\"vector\", pa.list_(pa.float32(), 2)),\n",
|
||||
"pa.field(\"vector\", pa.list_(pa.float32(), 4)),\n",
|
||||
"pa.field(\"lat\", pa.float32()),\n",
|
||||
"pa.field(\"long\", pa.float32())\n",
|
||||
"])\n",
|
||||
|
||||
@@ -147,8 +147,19 @@ to return the entire (typically filtered) table. Vector searches return the
|
||||
rows nearest to a query vector and can be created with the
|
||||
[AsyncTable.vector_search][lancedb.table.AsyncTable.vector_search] method.
|
||||
|
||||
::: lancedb.query.AsyncQueryBase
|
||||
|
||||
::: lancedb.query.AsyncQuery
|
||||
options:
|
||||
inherited_members: true
|
||||
|
||||
::: lancedb.query.AsyncVectorQuery
|
||||
options:
|
||||
inherited_members: true
|
||||
|
||||
::: lancedb.query.AsyncFTSQuery
|
||||
options:
|
||||
inherited_members: true
|
||||
|
||||
::: lancedb.query.AsyncHybridQuery
|
||||
options:
|
||||
inherited_members: true
|
||||
|
||||
@@ -122,7 +122,7 @@ LanceDB supports binary vectors as a data type, and has the ability to search bi
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "sync API"
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_binary_vector.py:imports"
|
||||
@@ -130,7 +130,7 @@ LanceDB supports binary vectors as a data type, and has the ability to search bi
|
||||
--8<-- "python/python/tests/docs/test_binary_vector.py:sync_binary_vector"
|
||||
```
|
||||
|
||||
=== "async API"
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_binary_vector.py:imports"
|
||||
@@ -149,10 +149,11 @@ You can index on a column with multivector type and search on it, the query can
|
||||
where `sim` is the similarity function (e.g. cosine).
|
||||
|
||||
For now, only `cosine` metric is supported for multivector search.
|
||||
The vector value type can be `float16`, `float32` or `float64`.
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "sync API"
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_multivector.py:imports"
|
||||
@@ -160,7 +161,7 @@ For now, only `cosine` metric is supported for multivector search.
|
||||
--8<-- "python/python/tests/docs/test_multivector.py:sync_multivector"
|
||||
```
|
||||
|
||||
=== "async API"
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_multivector.py:imports"
|
||||
@@ -174,7 +175,7 @@ You can also search for vectors within a specific distance range from the query
|
||||
|
||||
=== "Python"
|
||||
|
||||
=== "sync API"
|
||||
=== "Sync API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_distance_range.py:imports"
|
||||
@@ -182,7 +183,7 @@ You can also search for vectors within a specific distance range from the query
|
||||
--8<-- "python/python/tests/docs/test_distance_range.py:sync_distance_range"
|
||||
```
|
||||
|
||||
=== "async API"
|
||||
=== "Async API"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_distance_range.py:imports"
|
||||
|
||||
@@ -20,6 +20,7 @@ async function setup() {
|
||||
}
|
||||
|
||||
async () => {
|
||||
console.log("search_legacy.ts: start");
|
||||
await setup();
|
||||
|
||||
// --8<-- [start:search1]
|
||||
@@ -37,5 +38,5 @@ async () => {
|
||||
.execute();
|
||||
// --8<-- [end:search2]
|
||||
|
||||
console.log("search: done");
|
||||
console.log("search_legacy.ts: done");
|
||||
};
|
||||
|
||||
@@ -64,6 +64,7 @@ const tbl = await db.createTable('myVectors', data)
|
||||
```ts
|
||||
--8<-- "docs/src/sql_legacy.ts:search"
|
||||
```
|
||||
|
||||
!!! note
|
||||
|
||||
Creating a [scalar index](guides/scalar_index.md) accelerates filtering.
|
||||
@@ -118,14 +119,14 @@ For example, the following filter string is acceptable:
|
||||
--8<-- "docs/src/sql_legacy.ts:vec_search"
|
||||
```
|
||||
|
||||
If your column name contains special characters or is a [SQL Keyword](https://docs.rs/sqlparser/latest/sqlparser/keywords/index.html),
|
||||
If your column name contains special characters, upper-case characters, or is a [SQL Keyword](https://docs.rs/sqlparser/latest/sqlparser/keywords/index.html),
|
||||
you can use backtick (`` ` ``) to escape it. For nested fields, each segment of the
|
||||
path must be wrapped in backticks.
|
||||
|
||||
=== "SQL"
|
||||
|
||||
```sql
|
||||
`CUBE` = 10 AND `column name with space` IS NOT NULL
|
||||
`CUBE` = 10 AND `UpperCaseName` = '3' AND `column name with space` IS NOT NULL
|
||||
AND `nested with space`.`inner with space` < 2
|
||||
```
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as vectordb from "vectordb";
|
||||
|
||||
(async () => {
|
||||
console.log("sql_legacy.ts: start");
|
||||
const db = await vectordb.connect("data/sample-lancedb");
|
||||
|
||||
let data = [];
|
||||
@@ -34,5 +35,5 @@ import * as vectordb from "vectordb";
|
||||
await tbl.filter("id = 10").limit(10).execute();
|
||||
// --8<-- [end:sql_search]
|
||||
|
||||
console.log("SQL search: done");
|
||||
console.log("sql_legacy.ts: done");
|
||||
})();
|
||||
|
||||
@@ -11,6 +11,7 @@ excluded_globs = [
|
||||
"../src/examples/*.md",
|
||||
"../src/integrations/*.md",
|
||||
"../src/guides/tables.md",
|
||||
"../src/guides/tables/merge_insert.md",
|
||||
"../src/python/duckdb.md",
|
||||
"../src/python/pandas_and_pyarrow.md",
|
||||
"../src/python/polars_arrow.md",
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
use crate::ffi::JNIEnvExt;
|
||||
use crate::traits::IntoJava;
|
||||
use crate::{Error, RT};
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
use std::str::Utf8Error;
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
use core::slice;
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
use jni::objects::{JMap, JObject, JString, JValue};
|
||||
use jni::JNIEnv;
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
<parent>
|
||||
<groupId>com.lancedb</groupId>
|
||||
<artifactId>lancedb-parent</artifactId>
|
||||
<version>0.15.0-beta.0</version>
|
||||
<version>0.16.1-beta.2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
/*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
package com.lancedb.lancedb;
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
/*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
package com.lancedb.lancedb;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
4
java/license_header.txt
Normal file
4
java/license_header.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
* SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
*/
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
<groupId>com.lancedb</groupId>
|
||||
<artifactId>lancedb-parent</artifactId>
|
||||
<version>0.15.0-beta.0</version>
|
||||
<version>0.16.1-beta.2</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>LanceDB Parent</name>
|
||||
|
||||
124
node/package-lock.json
generated
124
node/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "vectordb",
|
||||
"version": "0.15.0-beta.0",
|
||||
"version": "0.16.1-beta.2",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "vectordb",
|
||||
"version": "0.15.0-beta.0",
|
||||
"version": "0.16.1-beta.2",
|
||||
"cpu": [
|
||||
"x64",
|
||||
"arm64"
|
||||
@@ -52,14 +52,14 @@
|
||||
"uuid": "^9.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@lancedb/vectordb-darwin-arm64": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-darwin-x64": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.15.0-beta.0"
|
||||
"@lancedb/vectordb-darwin-arm64": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-darwin-x64": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.16.1-beta.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@apache-arrow/ts": "^14.0.2",
|
||||
@@ -329,110 +329,6 @@
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-darwin-arm64": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-4sPAW4p1YFVfURyf0k017l6LRCz+VmN9fVUBy7W27b6EOQ3xuIb3t5xq3JAtslMPWBP3wxP8rKXXDmlbqDg3+g==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-darwin-x64": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-uzGINrBBsZattB4/ZYxdGNkTxNh3MqE6Y4nF762qo0zWWSiu+QNHQ+ZyLAZ2lwrEvwxs8LUaJNmnpn3nocHc1A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-arm64-gnu": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-bgphfea8h65vJ+bAL+vb+XEfmjskLZ+trZ3GN4n6SICU7XMGSFPl9xzPLGAj1WsoFCTJHe87DRYQpsWGlOI/LQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-arm64-musl": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-musl/-/vectordb-linux-arm64-musl-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-GpmVgqMS9ztNX53z8v0JdZiG6K1cK+mJnGZd3Gzguiavrly4mkYZ8IKNwWP9RmewUMNsFWR0IzD4VR+ojVpjlQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-x64-gnu": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-6Y/39TDv4UDVWnl8UpUJ8mqv9rUNc9Q5VR510I7w34c0ChdWvjqdcy+JFnGrraamE1DA8E6wGEz+5oG0zprkNg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-linux-x64-musl": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-musl/-/vectordb-linux-x64-musl-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-GRdW2dhf6DmynhRojjtQjs8DeARM1WpbZZKXukeofOSMv6JoRBSWKw2DzW5sF/285IMU81B0OXZE75QjLp+VJg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-win32-arm64-msvc": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-arm64-msvc/-/vectordb-win32-arm64-msvc-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-2EmRHuqqj8kC5ArUZztUWWTfNd774zL68btOlyhYL1CAiet5jIeGuFWJifdh+PXfQeLoa4GLW5LwyudIR4IHwA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@lancedb/vectordb-win32-x64-msvc": {
|
||||
"version": "0.15.0-beta.0",
|
||||
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.15.0-beta.0.tgz",
|
||||
"integrity": "sha512-lWq9b7LnWMGO0zDsp3rsLYyAzLooV7zQP77ph9Qv9fF0e4egD5l6SmMsAdQqLQnlhbQjkRjt3XRoDsqI809fcw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@neon-rs/cli": {
|
||||
"version": "0.0.160",
|
||||
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vectordb",
|
||||
"version": "0.15.0-beta.0",
|
||||
"version": "0.16.1-beta.2",
|
||||
"description": " Serverless, low-latency vector database for AI applications",
|
||||
"private": false,
|
||||
"main": "dist/index.js",
|
||||
@@ -92,13 +92,13 @@
|
||||
}
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@lancedb/vectordb-darwin-x64": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-darwin-arm64": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.15.0-beta.0",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.15.0-beta.0"
|
||||
"@lancedb/vectordb-darwin-x64": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-darwin-arm64": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-x64-gnu": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-arm64-gnu": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-x64-musl": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-linux-arm64-musl": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-win32-x64-msvc": "0.16.1-beta.2",
|
||||
"@lancedb/vectordb-win32-arm64-msvc": "0.16.1-beta.2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,7 +47,8 @@ const {
|
||||
tableSchema,
|
||||
tableAddColumns,
|
||||
tableAlterColumns,
|
||||
tableDropColumns
|
||||
tableDropColumns,
|
||||
tableDropIndex
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
} = require("../native.js");
|
||||
|
||||
@@ -604,6 +605,13 @@ export interface Table<T = number[]> {
|
||||
*/
|
||||
dropColumns(columnNames: string[]): Promise<void>
|
||||
|
||||
/**
|
||||
* Drop an index from the table
|
||||
*
|
||||
* @param indexName The name of the index to drop
|
||||
*/
|
||||
dropIndex(indexName: string): Promise<void>
|
||||
|
||||
/**
|
||||
* Instrument the behavior of this Table with middleware.
|
||||
*
|
||||
@@ -1206,6 +1214,10 @@ export class LocalTable<T = number[]> implements Table<T> {
|
||||
return tableDropColumns.call(this._tbl, columnNames);
|
||||
}
|
||||
|
||||
async dropIndex(indexName: string): Promise<void> {
|
||||
return tableDropIndex.call(this._tbl, indexName);
|
||||
}
|
||||
|
||||
withMiddleware(middleware: HttpMiddleware): Table<T> {
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -471,6 +471,18 @@ export class RemoteTable<T = number[]> implements Table<T> {
|
||||
)
|
||||
}
|
||||
}
|
||||
async dropIndex (index_name: string): Promise<void> {
|
||||
const res = await this._client.post(
|
||||
`/v1/table/${encodeURIComponent(this._name)}/index/${encodeURIComponent(index_name)}/drop/`
|
||||
)
|
||||
if (res.status !== 200) {
|
||||
throw new Error(
|
||||
`Server Error, status: ${res.status}, ` +
|
||||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
|
||||
`message: ${res.statusText}: ${await res.body()}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async countRows (filter?: string): Promise<number> {
|
||||
const result = await this._client.post(`/v1/table/${encodeURIComponent(this._name)}/count_rows/`, {
|
||||
|
||||
@@ -894,6 +894,27 @@ describe("LanceDB client", function () {
|
||||
expect(stats.distanceType).to.equal("l2");
|
||||
expect(stats.numIndices).to.equal(1);
|
||||
}).timeout(50_000);
|
||||
|
||||
// not yet implemented
|
||||
// it("can drop index", async function () {
|
||||
// const uri = await createTestDB(32, 300);
|
||||
// const con = await lancedb.connect(uri);
|
||||
// const table = await con.openTable("vectors");
|
||||
// await table.createIndex({
|
||||
// type: "ivf_pq",
|
||||
// column: "vector",
|
||||
// num_partitions: 2,
|
||||
// max_iters: 2,
|
||||
// num_sub_vectors: 2
|
||||
// });
|
||||
//
|
||||
// const indices = await table.listIndices();
|
||||
// expect(indices).to.have.lengthOf(1);
|
||||
// expect(indices[0].name).to.equal("vector_idx");
|
||||
//
|
||||
// await table.dropIndex("vector_idx");
|
||||
// expect(await table.listIndices()).to.have.lengthOf(0);
|
||||
// }).timeout(50_000);
|
||||
});
|
||||
|
||||
describe("when using a custom embedding function", function () {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "lancedb-nodejs"
|
||||
edition.workspace = true
|
||||
version = "0.15.0-beta.0"
|
||||
version = "0.16.1-beta.2"
|
||||
license.workspace = true
|
||||
description.workspace = true
|
||||
repository.workspace = true
|
||||
|
||||
@@ -32,7 +32,7 @@ const results = await table.vectorSearch([0.1, 0.3]).limit(20).toArray();
|
||||
console.log(results);
|
||||
```
|
||||
|
||||
The [quickstart](../basic.md) contains a more complete example.
|
||||
The [quickstart](https://lancedb.github.io/lancedb/basic/) contains a more complete example.
|
||||
|
||||
## Development
|
||||
|
||||
|
||||
@@ -1,17 +1,7 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import { Schema } from "apache-arrow";
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import * as arrow15 from "apache-arrow-15";
|
||||
import * as arrow16 from "apache-arrow-16";
|
||||
@@ -65,6 +55,7 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
Float64,
|
||||
Struct,
|
||||
List,
|
||||
Int16,
|
||||
Int32,
|
||||
Int64,
|
||||
Float,
|
||||
@@ -118,13 +109,16 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
false,
|
||||
),
|
||||
]);
|
||||
|
||||
const table = (await tableCreationMethod(
|
||||
records,
|
||||
recordsReversed,
|
||||
schema,
|
||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
)) as any;
|
||||
|
||||
// We expect deterministic ordering of the fields
|
||||
expect(table.schema.names).toEqual(schema.names);
|
||||
|
||||
schema.fields.forEach(
|
||||
(
|
||||
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
|
||||
@@ -151,13 +145,13 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
describe("The function makeArrowTable", function () {
|
||||
it("will use data types from a provided schema instead of inference", async function () {
|
||||
const schema = new Schema([
|
||||
new Field("a", new Int32()),
|
||||
new Field("b", new Float32()),
|
||||
new Field("a", new Int32(), false),
|
||||
new Field("b", new Float32(), true),
|
||||
new Field(
|
||||
"c",
|
||||
new FixedSizeList(3, new Field("item", new Float16())),
|
||||
),
|
||||
new Field("d", new Int64()),
|
||||
new Field("d", new Int64(), true),
|
||||
]);
|
||||
const table = makeArrowTable(
|
||||
[
|
||||
@@ -175,12 +169,15 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
expect(actual.numRows).toBe(3);
|
||||
const actualSchema = actual.schema;
|
||||
expect(actualSchema).toEqual(schema);
|
||||
expect(table.getChild("a")?.toJSON()).toEqual([1, 4, 7]);
|
||||
expect(table.getChild("b")?.toJSON()).toEqual([2, 5, 8]);
|
||||
expect(table.getChild("d")?.toJSON()).toEqual([9n, 10n, null]);
|
||||
});
|
||||
|
||||
it("will assume the column `vector` is FixedSizeList<Float32> by default", async function () {
|
||||
const schema = new Schema([
|
||||
new Field("a", new Float(Precision.DOUBLE), true),
|
||||
new Field("b", new Float(Precision.DOUBLE), true),
|
||||
new Field("b", new Int64(), true),
|
||||
new Field(
|
||||
"vector",
|
||||
new FixedSizeList(
|
||||
@@ -191,9 +188,9 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
),
|
||||
]);
|
||||
const table = makeArrowTable([
|
||||
{ a: 1, b: 2, vector: [1, 2, 3] },
|
||||
{ a: 4, b: 5, vector: [4, 5, 6] },
|
||||
{ a: 7, b: 8, vector: [7, 8, 9] },
|
||||
{ a: 1, b: 2n, vector: [1, 2, 3] },
|
||||
{ a: 4, b: 5n, vector: [4, 5, 6] },
|
||||
{ a: 7, b: 8n, vector: [7, 8, 9] },
|
||||
]);
|
||||
|
||||
const buf = await fromTableToBuffer(table);
|
||||
@@ -203,6 +200,19 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
expect(actual.numRows).toBe(3);
|
||||
const actualSchema = actual.schema;
|
||||
expect(actualSchema).toEqual(schema);
|
||||
|
||||
expect(table.getChild("a")?.toJSON()).toEqual([1, 4, 7]);
|
||||
expect(table.getChild("b")?.toJSON()).toEqual([2n, 5n, 8n]);
|
||||
expect(
|
||||
table
|
||||
.getChild("vector")
|
||||
?.toJSON()
|
||||
.map((v) => v.toJSON()),
|
||||
).toEqual([
|
||||
[1, 2, 3],
|
||||
[4, 5, 6],
|
||||
[7, 8, 9],
|
||||
]);
|
||||
});
|
||||
|
||||
it("can support multiple vector columns", async function () {
|
||||
@@ -216,7 +226,7 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
),
|
||||
new Field(
|
||||
"vec2",
|
||||
new FixedSizeList(3, new Field("item", new Float16(), true)),
|
||||
new FixedSizeList(3, new Field("item", new Float64(), true)),
|
||||
true,
|
||||
),
|
||||
]);
|
||||
@@ -229,7 +239,7 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
{
|
||||
vectorColumns: {
|
||||
vec1: { type: new Float16() },
|
||||
vec2: { type: new Float16() },
|
||||
vec2: { type: new Float64() },
|
||||
},
|
||||
},
|
||||
);
|
||||
@@ -317,6 +327,53 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it("will allow subsets of columns if nullable", async function () {
|
||||
const schema = new Schema([
|
||||
new Field("a", new Int64(), true),
|
||||
new Field(
|
||||
"s",
|
||||
new Struct([
|
||||
new Field("x", new Int32(), true),
|
||||
new Field("y", new Int32(), true),
|
||||
]),
|
||||
true,
|
||||
),
|
||||
new Field("d", new Int16(), true),
|
||||
]);
|
||||
|
||||
const table = makeArrowTable([{ a: 1n }], { schema });
|
||||
expect(table.numCols).toBe(1);
|
||||
expect(table.numRows).toBe(1);
|
||||
|
||||
const table2 = makeArrowTable([{ a: 1n, d: 2 }], { schema });
|
||||
expect(table2.numCols).toBe(2);
|
||||
|
||||
const table3 = makeArrowTable([{ s: { y: 3 } }], { schema });
|
||||
expect(table3.numCols).toBe(1);
|
||||
const expectedSchema = new Schema([
|
||||
new Field("s", new Struct([new Field("y", new Int32(), true)]), true),
|
||||
]);
|
||||
expect(table3.schema).toEqual(expectedSchema);
|
||||
});
|
||||
|
||||
it("will work even if columns are sparsely provided", async function () {
|
||||
const sparseRecords = [{ a: 1n }, { b: 2n }, { c: 3n }, { d: 4n }];
|
||||
const table = makeArrowTable(sparseRecords);
|
||||
expect(table.numCols).toBe(4);
|
||||
expect(table.numRows).toBe(4);
|
||||
|
||||
const schema = new Schema([
|
||||
new Field("a", new Int64(), true),
|
||||
new Field("b", new Int32(), true),
|
||||
new Field("c", new Int64(), true),
|
||||
new Field("d", new Int16(), true),
|
||||
]);
|
||||
const table2 = makeArrowTable(sparseRecords, { schema });
|
||||
expect(table2.numCols).toBe(4);
|
||||
expect(table2.numRows).toBe(4);
|
||||
expect(table2.schema).toEqual(schema);
|
||||
});
|
||||
});
|
||||
|
||||
class DummyEmbedding extends EmbeddingFunction<string> {
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import { readdirSync } from "fs";
|
||||
import { Field, Float64, Schema } from "apache-arrow";
|
||||
@@ -28,14 +17,14 @@ describe("when connecting", () => {
|
||||
it("should connect", async () => {
|
||||
const db = await connect(tmpDir.name);
|
||||
expect(db.display()).toBe(
|
||||
`NativeDatabase(uri=${tmpDir.name}, read_consistency_interval=None)`,
|
||||
`ListingDatabase(uri=${tmpDir.name}, read_consistency_interval=None)`,
|
||||
);
|
||||
});
|
||||
|
||||
it("should allow read consistency interval to be specified", async () => {
|
||||
const db = await connect(tmpDir.name, { readConsistencyInterval: 5 });
|
||||
expect(db.display()).toBe(
|
||||
`NativeDatabase(uri=${tmpDir.name}, read_consistency_interval=5s)`,
|
||||
`ListingDatabase(uri=${tmpDir.name}, read_consistency_interval=5s)`,
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -72,6 +61,26 @@ describe("given a connection", () => {
|
||||
await expect(tbl.countRows()).resolves.toBe(1);
|
||||
});
|
||||
|
||||
it("should be able to drop tables`", async () => {
|
||||
await db.createTable("test", [{ id: 1 }, { id: 2 }]);
|
||||
await db.createTable("test2", [{ id: 1 }, { id: 2 }]);
|
||||
await db.createTable("test3", [{ id: 1 }, { id: 2 }]);
|
||||
|
||||
await expect(db.tableNames()).resolves.toEqual(["test", "test2", "test3"]);
|
||||
|
||||
await db.dropTable("test2");
|
||||
|
||||
await expect(db.tableNames()).resolves.toEqual(["test", "test3"]);
|
||||
|
||||
await db.dropAllTables();
|
||||
|
||||
await expect(db.tableNames()).resolves.toEqual([]);
|
||||
|
||||
// Make sure we can still create more tables after dropping all
|
||||
|
||||
await db.createTable("test4", [{ id: 1 }, { id: 2 }]);
|
||||
});
|
||||
|
||||
it("should fail if creating table twice, unless overwrite is true", async () => {
|
||||
let tbl = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
|
||||
await expect(tbl.countRows()).resolves.toBe(2);
|
||||
@@ -107,14 +116,15 @@ describe("given a connection", () => {
|
||||
const data = [...Array(10000).keys()].map((i) => ({ id: i }));
|
||||
|
||||
// Create in v1 mode
|
||||
let table = await db.createTable("test", data, { useLegacyFormat: true });
|
||||
let table = await db.createTable("test", data, {
|
||||
storageOptions: { newTableDataStorageVersion: "legacy" },
|
||||
});
|
||||
|
||||
const isV2 = async (table: Table) => {
|
||||
const data = await table
|
||||
.query()
|
||||
.limit(10000)
|
||||
.toArrow({ maxBatchLength: 100000 });
|
||||
console.log(data.batches.length);
|
||||
return data.batches.length < 5;
|
||||
};
|
||||
|
||||
@@ -133,7 +143,7 @@ describe("given a connection", () => {
|
||||
const schema = new Schema([new Field("id", new Float64(), true)]);
|
||||
|
||||
table = await db.createEmptyTable("test_v2_empty", schema, {
|
||||
useLegacyFormat: false,
|
||||
storageOptions: { newTableDataStorageVersion: "stable" },
|
||||
});
|
||||
|
||||
await table.add(data);
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import * as tmp from "tmp";
|
||||
|
||||
@@ -83,6 +72,74 @@ describe("embedding functions", () => {
|
||||
expect(vector0).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it("should be able to append and upsert using embedding function", async () => {
|
||||
@register()
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {};
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType(): Float {
|
||||
return new Float32();
|
||||
}
|
||||
async computeQueryEmbeddings(_data: string) {
|
||||
return [1, 2, 3];
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return Array.from({ length: data.length }).fill([
|
||||
1, 2, 3,
|
||||
]) as number[][];
|
||||
}
|
||||
}
|
||||
const func = new MockEmbeddingFunction();
|
||||
const db = await connect(tmpDir.name);
|
||||
const table = await db.createTable(
|
||||
"test",
|
||||
[
|
||||
{ id: 1, text: "hello" },
|
||||
{ id: 2, text: "world" },
|
||||
],
|
||||
{
|
||||
embeddingFunction: {
|
||||
function: func,
|
||||
sourceColumn: "text",
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const schema = await table.schema();
|
||||
expect(schema.metadata.get("embedding_functions")).toBeDefined();
|
||||
|
||||
// Append some new data
|
||||
const data1 = [
|
||||
{ id: 3, text: "forest" },
|
||||
{ id: 4, text: "mountain" },
|
||||
];
|
||||
await table.add(data1);
|
||||
|
||||
// Upsert some data
|
||||
const data2 = [
|
||||
{ id: 5, text: "river" },
|
||||
{ id: 2, text: "canyon" },
|
||||
];
|
||||
await table
|
||||
.mergeInsert("id")
|
||||
.whenMatchedUpdateAll()
|
||||
.whenNotMatchedInsertAll()
|
||||
.execute(data2);
|
||||
|
||||
const rows = await table.query().toArray();
|
||||
rows.sort((a, b) => a.id - b.id);
|
||||
const texts = rows.map((row) => row.text);
|
||||
expect(texts).toEqual(["hello", "canyon", "forest", "mountain", "river"]);
|
||||
const vectorsDefined = rows.map(
|
||||
(row) => row.vector !== undefined && row.vector !== null,
|
||||
);
|
||||
expect(vectorsDefined).toEqual(new Array(5).fill(true));
|
||||
});
|
||||
|
||||
it("should be able to create an empty table with an embedding function", async () => {
|
||||
@register()
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
|
||||
@@ -1,17 +1,8 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import * as apiArrow from "apache-arrow";
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import * as arrow15 from "apache-arrow-15";
|
||||
import * as arrow16 from "apache-arrow-16";
|
||||
import * as arrow17 from "apache-arrow-17";
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import * as http from "http";
|
||||
import { RequestListener } from "http";
|
||||
@@ -115,4 +104,26 @@ describe("remote connection", () => {
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it("should pass on requested extra headers", async () => {
|
||||
await withMockDatabase(
|
||||
(req, res) => {
|
||||
expect(req.headers["x-my-header"]).toEqual("my-value");
|
||||
|
||||
const body = JSON.stringify({ tables: [] });
|
||||
res.writeHead(200, { "Content-Type": "application/json" }).end(body);
|
||||
},
|
||||
async (db) => {
|
||||
const tableNames = await db.tableNames();
|
||||
expect(tableNames).toEqual([]);
|
||||
},
|
||||
{
|
||||
clientConfig: {
|
||||
extraHeaders: {
|
||||
"x-my-header": "my-value",
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
/* eslint-disable @typescript-eslint/naming-convention */
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
@@ -264,6 +253,31 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
|
||||
const arrowTbl = await table.toArrow();
|
||||
expect(arrowTbl).toBeInstanceOf(ArrowTable);
|
||||
});
|
||||
|
||||
it("should be able to handle missing fields", async () => {
|
||||
const schema = new arrow.Schema([
|
||||
new arrow.Field("id", new arrow.Int32(), true),
|
||||
new arrow.Field("y", new arrow.Int32(), true),
|
||||
new arrow.Field("z", new arrow.Int64(), true),
|
||||
]);
|
||||
const db = await connect(tmpDir.name);
|
||||
const table = await db.createEmptyTable("testNull", schema);
|
||||
await table.add([{ id: 1, y: 2 }]);
|
||||
await table.add([{ id: 2 }]);
|
||||
|
||||
await table
|
||||
.mergeInsert("id")
|
||||
.whenNotMatchedInsertAll()
|
||||
.execute([
|
||||
{ id: 3, z: 3 },
|
||||
{ id: 4, z: 5 },
|
||||
]);
|
||||
|
||||
const res = await table.query().toArrow();
|
||||
expect(res.getChild("id")?.toJSON()).toEqual([1, 2, 3, 4]);
|
||||
expect(res.getChild("y")?.toJSON()).toEqual([2, null, null, null]);
|
||||
expect(res.getChild("z")?.toJSON()).toEqual([null, null, 3n, 5n]);
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
@@ -473,6 +487,10 @@ describe("When creating an index", () => {
|
||||
// test offset
|
||||
rst = await tbl.query().limit(2).offset(1).nearestTo(queryVec).toArrow();
|
||||
expect(rst.numRows).toBe(1);
|
||||
|
||||
await tbl.dropIndex("vec_idx");
|
||||
const indices2 = await tbl.listIndices();
|
||||
expect(indices2.length).toBe(0);
|
||||
});
|
||||
|
||||
it("should search with distance range", async () => {
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
import { IntoSql, toSQL } from "../lancedb/util";
|
||||
test.each([
|
||||
["string", "'string'"],
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
|
||||
|
||||
extern crate napi_build;
|
||||
|
||||
fn main() {
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import { expect, test } from "@jest/globals";
|
||||
// --8<-- [start:import]
|
||||
import * as lancedb from "@lancedb/lancedb";
|
||||
import { VectorQuery } from "@lancedb/lancedb";
|
||||
import type { VectorQuery } from "@lancedb/lancedb";
|
||||
// --8<-- [end:import]
|
||||
import { withTempDirectory } from "./util.ts";
|
||||
|
||||
|
||||
@@ -117,7 +117,6 @@ test("basic table examples", async () => {
|
||||
// --8<-- [end:add_data]
|
||||
}
|
||||
|
||||
{
|
||||
// --8<-- [start:add_columns]
|
||||
await tbl.addColumns([
|
||||
{ name: "double_price", valueSql: "cast((price * 2) as Float)" },
|
||||
@@ -136,7 +135,6 @@ test("basic table examples", async () => {
|
||||
// --8<-- [start:drop_columns]
|
||||
await tbl.dropColumns(["dbl_price"]);
|
||||
// --8<-- [end:drop_columns]
|
||||
}
|
||||
|
||||
{
|
||||
// --8<-- [start:vector_search]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user