mirror of
https://github.com/lancedb/lancedb.git
synced 2025-12-23 05:19:58 +00:00
Compare commits
87 Commits
v0.1.10-py
...
python-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b06e214d29 | ||
|
|
c1f8feb6ed | ||
|
|
cada35d5b7 | ||
|
|
2d25c263e9 | ||
|
|
bcd7f66dc7 | ||
|
|
1daecac648 | ||
|
|
b8e656b2a7 | ||
|
|
ff7c1193a7 | ||
|
|
6d70e7c29b | ||
|
|
73cc12ecc5 | ||
|
|
6036cf48a7 | ||
|
|
15f4787cc8 | ||
|
|
0e4050e706 | ||
|
|
147796ffcd | ||
|
|
6fd465ceef | ||
|
|
e2e5a0fb83 | ||
|
|
ff8d5a6d51 | ||
|
|
8829988ada | ||
|
|
80a32be121 | ||
|
|
8325979bb8 | ||
|
|
ed5ff5a482 | ||
|
|
2c9371dcc4 | ||
|
|
6d5621da4a | ||
|
|
380c1572f3 | ||
|
|
4383848d53 | ||
|
|
473c43860c | ||
|
|
17cf244e53 | ||
|
|
0b60694df4 | ||
|
|
600da476e8 | ||
|
|
458217783c | ||
|
|
21b1a71a6b | ||
|
|
2d899675e8 | ||
|
|
1cbfc1bbf4 | ||
|
|
a2bb497135 | ||
|
|
0cf40c8da3 | ||
|
|
8233c689c3 | ||
|
|
6e24e731b8 | ||
|
|
f4ce86e12c | ||
|
|
0664eaec82 | ||
|
|
63acdc2069 | ||
|
|
a636bb1075 | ||
|
|
5e3167da83 | ||
|
|
f09db4a6d6 | ||
|
|
1d343edbd4 | ||
|
|
980f910f50 | ||
|
|
fb97b03a51 | ||
|
|
141b6647a8 | ||
|
|
b45ac4608f | ||
|
|
a86bc05131 | ||
|
|
3537afb2c3 | ||
|
|
23f5dddc7c | ||
|
|
9748406cba | ||
|
|
6271949d38 | ||
|
|
131ad09ab3 | ||
|
|
030f07e7f0 | ||
|
|
72afa06b7a | ||
|
|
088e745e1d | ||
|
|
7a57cddb2c | ||
|
|
8ff5f88916 | ||
|
|
028a6e433d | ||
|
|
04c6814fb1 | ||
|
|
c62e4ca1eb | ||
|
|
aecc5fc42b | ||
|
|
2fdcb307eb | ||
|
|
ad18826579 | ||
|
|
a8a50591d7 | ||
|
|
6dfe7fabc2 | ||
|
|
2b108e1c80 | ||
|
|
8c9edafccc | ||
|
|
0590413b96 | ||
|
|
bd2d40a927 | ||
|
|
08944bf4fd | ||
|
|
826dc90151 | ||
|
|
08cc483ec9 | ||
|
|
ff1d206182 | ||
|
|
c385c55629 | ||
|
|
0a03f7ca5a | ||
|
|
88be978e87 | ||
|
|
98b12caa06 | ||
|
|
091dffb171 | ||
|
|
ace6aa883a | ||
|
|
80c25f9896 | ||
|
|
caf22fdb71 | ||
|
|
0e7ae5dfbf | ||
|
|
b261e27222 | ||
|
|
9f603f73a9 | ||
|
|
9ef846929b |
@@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 0.1.10
|
current_version = 0.1.19
|
||||||
commit = True
|
commit = True
|
||||||
message = Bump version: {current_version} → {new_version}
|
message = Bump version: {current_version} → {new_version}
|
||||||
tag = True
|
tag = True
|
||||||
|
|||||||
2
.github/workflows/docs_test.yml
vendored
2
.github/workflows/docs_test.yml
vendored
@@ -81,7 +81,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
cd docs/test/node_modules/vectordb
|
cd docs/test/node_modules/vectordb
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
npm run build-release
|
||||||
npm run tsc
|
npm run tsc
|
||||||
- name: Create test files
|
- name: Create test files
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
4
.github/workflows/make-release-commit.yml
vendored
4
.github/workflows/make-release-commit.yml
vendored
@@ -52,4 +52,8 @@ jobs:
|
|||||||
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
branch: main
|
branch: main
|
||||||
tags: true
|
tags: true
|
||||||
|
- uses: ./.github/workflows/update_package_lock
|
||||||
|
if: ${{ inputs.dry_run }} == "false"
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
|
|
||||||
|
|||||||
12
.github/workflows/node.yml
vendored
12
.github/workflows/node.yml
vendored
@@ -67,8 +67,12 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
|
||||||
npm run tsc
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
npm run pack-build
|
||||||
|
npm install --no-save ./dist/lancedb-vectordb-*.tgz
|
||||||
|
# Remove index.node to test with dependency installed
|
||||||
|
rm index.node
|
||||||
- name: Test
|
- name: Test
|
||||||
run: npm run test
|
run: npm run test
|
||||||
macos:
|
macos:
|
||||||
@@ -94,8 +98,12 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
|
||||||
npm run tsc
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
npm run pack-build
|
||||||
|
npm install --no-save ./dist/lancedb-vectordb-*.tgz
|
||||||
|
# Remove index.node to test with dependency installed
|
||||||
|
rm index.node
|
||||||
- name: Test
|
- name: Test
|
||||||
run: |
|
run: |
|
||||||
npm run test
|
npm run test
|
||||||
|
|||||||
163
.github/workflows/npm-publish.yml
vendored
Normal file
163
.github/workflows/npm-publish.yml
vendored
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
name: NPM Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ published ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
node:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: node
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: node/package-lock.json
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y protobuf-compiler libssl-dev
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
npm ci
|
||||||
|
npm run tsc
|
||||||
|
npm pack
|
||||||
|
- name: Upload Linux Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: node-package
|
||||||
|
path: |
|
||||||
|
node/vectordb-*.tgz
|
||||||
|
|
||||||
|
node-macos:
|
||||||
|
runs-on: macos-12
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
target: [x86_64-apple-darwin, aarch64-apple-darwin]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: brew install protobuf
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: |
|
||||||
|
cd node
|
||||||
|
npm ci
|
||||||
|
- name: Install rustup target
|
||||||
|
if: ${{ matrix.target == 'aarch64-apple-darwin' }}
|
||||||
|
run: rustup target add aarch64-apple-darwin
|
||||||
|
- name: Build MacOS native node modules
|
||||||
|
run: bash ci/build_macos_artifacts.sh ${{ matrix.target }}
|
||||||
|
- name: Upload Darwin Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-darwin
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-darwin*.tgz
|
||||||
|
|
||||||
|
node-linux:
|
||||||
|
name: node-linux (${{ matrix.config.arch}}-unknown-linux-gnu
|
||||||
|
runs-on: ${{ matrix.config.runner }}
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
config:
|
||||||
|
- arch: x86_64
|
||||||
|
runner: ubuntu-latest
|
||||||
|
- arch: aarch64
|
||||||
|
runner: buildjet-4vcpu-ubuntu-2204-arm
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Build Linux Artifacts
|
||||||
|
run: |
|
||||||
|
bash ci/build_linux_artifacts.sh ${{ matrix.config.arch }}
|
||||||
|
- name: Upload Linux Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-linux
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-linux*.tgz
|
||||||
|
|
||||||
|
node-windows:
|
||||||
|
runs-on: windows-2022
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
target: [x86_64-pc-windows-msvc]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install Protoc v21.12
|
||||||
|
working-directory: C:\
|
||||||
|
run: |
|
||||||
|
New-Item -Path 'C:\protoc' -ItemType Directory
|
||||||
|
Set-Location C:\protoc
|
||||||
|
Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
|
||||||
|
7z x protoc.zip
|
||||||
|
Add-Content $env:GITHUB_PATH "C:\protoc\bin"
|
||||||
|
shell: powershell
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: |
|
||||||
|
cd node
|
||||||
|
npm ci
|
||||||
|
- name: Build Windows native node modules
|
||||||
|
run: .\ci\build_windows_artifacts.ps1 ${{ matrix.target }}
|
||||||
|
- name: Upload Windows Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-windows
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-win32*.tgz
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: [node, node-macos, node-linux, node-windows]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
- name: Display structure of downloaded files
|
||||||
|
run: ls -R
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
registry-url: 'https://registry.npmjs.org'
|
||||||
|
- name: Publish to NPM
|
||||||
|
env:
|
||||||
|
NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
|
||||||
|
run: |
|
||||||
|
mv */*.tgz .
|
||||||
|
for filename in *.tgz; do
|
||||||
|
npm publish $filename
|
||||||
|
done
|
||||||
|
|
||||||
|
update-package-lock:
|
||||||
|
needs: [release]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
persist-credentials: false
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- uses: ./.github/workflows/update_package_lock
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
4
.github/workflows/python.yml
vendored
4
.github/workflows/python.yml
vendored
@@ -30,7 +30,7 @@ jobs:
|
|||||||
python-version: 3.${{ matrix.python-minor-version }}
|
python-version: 3.${{ matrix.python-minor-version }}
|
||||||
- name: Install lancedb
|
- name: Install lancedb
|
||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .[tests]
|
||||||
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
||||||
pip install pytest pytest-mock black isort
|
pip install pytest pytest-mock black isort
|
||||||
- name: Black
|
- name: Black
|
||||||
@@ -59,7 +59,7 @@ jobs:
|
|||||||
python-version: "3.11"
|
python-version: "3.11"
|
||||||
- name: Install lancedb
|
- name: Install lancedb
|
||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .[tests]
|
||||||
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
||||||
pip install pytest pytest-mock black
|
pip install pytest pytest-mock black
|
||||||
- name: Black
|
- name: Black
|
||||||
|
|||||||
22
.github/workflows/rust.yml
vendored
22
.github/workflows/rust.yml
vendored
@@ -6,6 +6,7 @@ on:
|
|||||||
- main
|
- main
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
|
- Cargo.toml
|
||||||
- rust/**
|
- rust/**
|
||||||
- .github/workflows/rust.yml
|
- .github/workflows/rust.yml
|
||||||
|
|
||||||
@@ -65,3 +66,24 @@ jobs:
|
|||||||
run: cargo build --all-features
|
run: cargo build --all-features
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: cargo test --all-features
|
run: cargo test --all-features
|
||||||
|
windows:
|
||||||
|
runs-on: windows-2022
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: rust
|
||||||
|
- name: Install Protoc v21.12
|
||||||
|
working-directory: C:\
|
||||||
|
run: |
|
||||||
|
New-Item -Path 'C:\protoc' -ItemType Directory
|
||||||
|
Set-Location C:\protoc
|
||||||
|
Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
|
||||||
|
7z x protoc.zip
|
||||||
|
Add-Content $env:GITHUB_PATH "C:\protoc\bin"
|
||||||
|
shell: powershell
|
||||||
|
- name: Run tests
|
||||||
|
run: |
|
||||||
|
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
|
||||||
|
cargo build
|
||||||
|
cargo test
|
||||||
|
|||||||
33
.github/workflows/update_package_lock/action.yml
vendored
Normal file
33
.github/workflows/update_package_lock/action.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: update_package_lock
|
||||||
|
description: "Update node's package.lock"
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
github_token:
|
||||||
|
required: true
|
||||||
|
description: "github token for the repo"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
- name: Set git configs
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
git config user.name 'Lance Release'
|
||||||
|
git config user.email 'lance-dev@lancedb.com'
|
||||||
|
- name: Update package-lock.json file
|
||||||
|
working-directory: ./node
|
||||||
|
run: |
|
||||||
|
npm install
|
||||||
|
git add package-lock.json
|
||||||
|
git commit -m "Updating package-lock.json"
|
||||||
|
shell: bash
|
||||||
|
- name: Push changes
|
||||||
|
if: ${{ inputs.dry_run }} == "false"
|
||||||
|
uses: ad-m/github-push-action@master
|
||||||
|
with:
|
||||||
|
github_token: ${{ inputs.github_token }}
|
||||||
|
branch: main
|
||||||
|
tags: true
|
||||||
19
.github/workflows/update_package_lock_run.yml
vendored
Normal file
19
.github/workflows/update_package_lock_run.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
name: Update package-lock.json
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
persist-credentials: false
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- uses: ./.github/workflows/update_package_lock
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -5,6 +5,8 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
venv
|
venv
|
||||||
|
|
||||||
|
.vscode
|
||||||
|
|
||||||
rust/target
|
rust/target
|
||||||
rust/Cargo.lock
|
rust/Cargo.lock
|
||||||
|
|
||||||
|
|||||||
13
Cargo.toml
13
Cargo.toml
@@ -6,9 +6,12 @@ members = [
|
|||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
lance = "0.5.3"
|
lance = "=0.5.9"
|
||||||
arrow-array = "40.0"
|
arrow-array = "42.0"
|
||||||
arrow-data = "40.0"
|
arrow-data = "42.0"
|
||||||
arrow-schema = "40.0"
|
arrow-schema = "42.0"
|
||||||
arrow-ipc = "40.0"
|
arrow-ipc = "42.0"
|
||||||
|
half = { "version" = "=2.2.1", default-features = false }
|
||||||
object_store = "0.6.1"
|
object_store = "0.6.1"
|
||||||
|
snafu = "0.7.4"
|
||||||
|
|
||||||
|
|||||||
19
ci/build_linux_artifacts.sh
Executable file
19
ci/build_linux_artifacts.sh
Executable file
@@ -0,0 +1,19 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
ARCH=${1:-x86_64}
|
||||||
|
|
||||||
|
# We pass down the current user so that when we later mount the local files
|
||||||
|
# into the container, the files are accessible by the current user.
|
||||||
|
pushd ci/manylinux_node
|
||||||
|
docker build \
|
||||||
|
-t lancedb-node-manylinux \
|
||||||
|
--build-arg="ARCH=$ARCH" \
|
||||||
|
--build-arg="DOCKER_USER=$(id -u)" \
|
||||||
|
--progress=plain \
|
||||||
|
.
|
||||||
|
popd
|
||||||
|
|
||||||
|
docker run \
|
||||||
|
-v $(pwd):/io -w /io \
|
||||||
|
lancedb-node-manylinux \
|
||||||
|
bash ci/manylinux_node/build.sh $ARCH
|
||||||
33
ci/build_macos_artifacts.sh
Normal file
33
ci/build_macos_artifacts.sh
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# Builds the macOS artifacts (node binaries).
|
||||||
|
# Usage: ./ci/build_macos_artifacts.sh [target]
|
||||||
|
# Targets supported: x86_64-apple-darwin aarch64-apple-darwin
|
||||||
|
|
||||||
|
prebuild_rust() {
|
||||||
|
# Building here for the sake of easier debugging.
|
||||||
|
pushd rust/ffi/node
|
||||||
|
echo "Building rust library for $1"
|
||||||
|
export RUST_BACKTRACE=1
|
||||||
|
cargo build --release --target $1
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
build_node_binaries() {
|
||||||
|
pushd node
|
||||||
|
echo "Building node library for $1"
|
||||||
|
npm run build-release -- --target $1
|
||||||
|
npm run pack-build -- --target $1
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -n "$1" ]; then
|
||||||
|
targets=$1
|
||||||
|
else
|
||||||
|
targets="x86_64-apple-darwin aarch64-apple-darwin"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Building artifacts for targets: $targets"
|
||||||
|
for target in $targets
|
||||||
|
do
|
||||||
|
prebuild_rust $target
|
||||||
|
build_node_binaries $target
|
||||||
|
done
|
||||||
41
ci/build_windows_artifacts.ps1
Normal file
41
ci/build_windows_artifacts.ps1
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Builds the Windows artifacts (node binaries).
|
||||||
|
# Usage: .\ci\build_windows_artifacts.ps1 [target]
|
||||||
|
# Targets supported:
|
||||||
|
# - x86_64-pc-windows-msvc
|
||||||
|
# - i686-pc-windows-msvc
|
||||||
|
|
||||||
|
function Prebuild-Rust {
|
||||||
|
param (
|
||||||
|
[string]$target
|
||||||
|
)
|
||||||
|
|
||||||
|
# Building here for the sake of easier debugging.
|
||||||
|
Push-Location -Path "rust/ffi/node"
|
||||||
|
Write-Host "Building rust library for $target"
|
||||||
|
$env:RUST_BACKTRACE=1
|
||||||
|
cargo build --release --target $target
|
||||||
|
Pop-Location
|
||||||
|
}
|
||||||
|
|
||||||
|
function Build-NodeBinaries {
|
||||||
|
param (
|
||||||
|
[string]$target
|
||||||
|
)
|
||||||
|
|
||||||
|
Push-Location -Path "node"
|
||||||
|
Write-Host "Building node library for $target"
|
||||||
|
npm run build-release -- --target $target
|
||||||
|
npm run pack-build -- --target $target
|
||||||
|
Pop-Location
|
||||||
|
}
|
||||||
|
|
||||||
|
$targets = $args[0]
|
||||||
|
if (-not $targets) {
|
||||||
|
$targets = "x86_64-pc-windows-msvc"
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Host "Building artifacts for targets: $targets"
|
||||||
|
foreach ($target in $targets) {
|
||||||
|
Prebuild-Rust $target
|
||||||
|
Build-NodeBinaries $target
|
||||||
|
}
|
||||||
31
ci/manylinux_node/Dockerfile
Normal file
31
ci/manylinux_node/Dockerfile
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Many linux dockerfile with Rust, Node, and Lance dependencies installed.
|
||||||
|
# This container allows building the node modules native libraries in an
|
||||||
|
# environment with a very old glibc, so that we are compatible with a wide
|
||||||
|
# range of linux distributions.
|
||||||
|
ARG ARCH=x86_64
|
||||||
|
|
||||||
|
FROM quay.io/pypa/manylinux2014_${ARCH}
|
||||||
|
|
||||||
|
ARG ARCH=x86_64
|
||||||
|
ARG DOCKER_USER=default_user
|
||||||
|
|
||||||
|
# Install static openssl
|
||||||
|
COPY install_openssl.sh install_openssl.sh
|
||||||
|
RUN ./install_openssl.sh ${ARCH} > /dev/null
|
||||||
|
|
||||||
|
# Protobuf is also installed as root.
|
||||||
|
COPY install_protobuf.sh install_protobuf.sh
|
||||||
|
RUN ./install_protobuf.sh ${ARCH}
|
||||||
|
|
||||||
|
ENV DOCKER_USER=${DOCKER_USER}
|
||||||
|
# Create a group and user
|
||||||
|
RUN echo ${ARCH} && adduser --user-group --create-home --uid ${DOCKER_USER} build_user
|
||||||
|
|
||||||
|
# We switch to the user to install Rust and Node, since those like to be
|
||||||
|
# installed at the user level.
|
||||||
|
USER ${DOCKER_USER}
|
||||||
|
|
||||||
|
COPY prepare_manylinux_node.sh prepare_manylinux_node.sh
|
||||||
|
RUN cp /prepare_manylinux_node.sh $HOME/ && \
|
||||||
|
cd $HOME && \
|
||||||
|
./prepare_manylinux_node.sh ${ARCH}
|
||||||
19
ci/manylinux_node/build.sh
Executable file
19
ci/manylinux_node/build.sh
Executable file
@@ -0,0 +1,19 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Builds the node module for manylinux. Invoked by ci/build_linux_artifacts.sh.
|
||||||
|
set -e
|
||||||
|
ARCH=${1:-x86_64}
|
||||||
|
|
||||||
|
if [ "$ARCH" = "x86_64" ]; then
|
||||||
|
export OPENSSL_LIB_DIR=/usr/local/lib64/
|
||||||
|
else
|
||||||
|
export OPENSSL_LIB_DIR=/usr/local/lib/
|
||||||
|
fi
|
||||||
|
export OPENSSL_STATIC=1
|
||||||
|
export OPENSSL_INCLUDE_DIR=/usr/local/include/openssl
|
||||||
|
|
||||||
|
source $HOME/.bashrc
|
||||||
|
|
||||||
|
cd node
|
||||||
|
npm ci
|
||||||
|
npm run build-release
|
||||||
|
npm run pack-build
|
||||||
26
ci/manylinux_node/install_openssl.sh
Executable file
26
ci/manylinux_node/install_openssl.sh
Executable file
@@ -0,0 +1,26 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Builds openssl from source so we can statically link to it
|
||||||
|
|
||||||
|
# this is to avoid the error we get with the system installation:
|
||||||
|
# /usr/bin/ld: <library>: version node not found for symbol SSLeay@@OPENSSL_1.0.1
|
||||||
|
# /usr/bin/ld: failed to set dynamic section sizes: Bad value
|
||||||
|
set -e
|
||||||
|
|
||||||
|
git clone -b OpenSSL_1_1_1u \
|
||||||
|
--single-branch \
|
||||||
|
https://github.com/openssl/openssl.git
|
||||||
|
|
||||||
|
pushd openssl
|
||||||
|
|
||||||
|
if [[ $1 == x86_64* ]]; then
|
||||||
|
ARCH=linux-x86_64
|
||||||
|
else
|
||||||
|
# gnu target
|
||||||
|
ARCH=linux-aarch64
|
||||||
|
fi
|
||||||
|
|
||||||
|
./Configure no-shared $ARCH
|
||||||
|
|
||||||
|
make
|
||||||
|
|
||||||
|
make install
|
||||||
15
ci/manylinux_node/install_protobuf.sh
Executable file
15
ci/manylinux_node/install_protobuf.sh
Executable file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Installs protobuf compiler. Should be run as root.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [[ $1 == x86_64* ]]; then
|
||||||
|
ARCH=x86_64
|
||||||
|
else
|
||||||
|
# gnu target
|
||||||
|
ARCH=aarch_64
|
||||||
|
fi
|
||||||
|
|
||||||
|
PB_REL=https://github.com/protocolbuffers/protobuf/releases
|
||||||
|
PB_VERSION=23.1
|
||||||
|
curl -LO $PB_REL/download/v$PB_VERSION/protoc-$PB_VERSION-linux-$ARCH.zip
|
||||||
|
unzip protoc-$PB_VERSION-linux-$ARCH.zip -d /usr/local
|
||||||
21
ci/manylinux_node/prepare_manylinux_node.sh
Executable file
21
ci/manylinux_node/prepare_manylinux_node.sh
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
install_node() {
|
||||||
|
echo "Installing node..."
|
||||||
|
|
||||||
|
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh | bash
|
||||||
|
|
||||||
|
source "$HOME"/.bashrc
|
||||||
|
|
||||||
|
nvm install --no-progress 16
|
||||||
|
}
|
||||||
|
|
||||||
|
install_rust() {
|
||||||
|
echo "Installing rust..."
|
||||||
|
curl https://sh.rustup.rs -sSf | bash -s -- -y
|
||||||
|
export PATH="$PATH:/root/.cargo/bin"
|
||||||
|
}
|
||||||
|
|
||||||
|
install_node
|
||||||
|
install_rust
|
||||||
@@ -50,13 +50,21 @@ markdown_extensions:
|
|||||||
- pymdownx.superfences
|
- pymdownx.superfences
|
||||||
- pymdownx.tabbed:
|
- pymdownx.tabbed:
|
||||||
alternate_style: true
|
alternate_style: true
|
||||||
|
- md_in_html
|
||||||
|
|
||||||
nav:
|
nav:
|
||||||
- Home: index.md
|
- Home: index.md
|
||||||
- Basics: basic.md
|
- Basics: basic.md
|
||||||
- Embeddings: embedding.md
|
- Embeddings: embedding.md
|
||||||
- Python full-text search: fts.md
|
- Python full-text search: fts.md
|
||||||
- Python integrations: integrations.md
|
- Integrations:
|
||||||
|
- Pandas and PyArrow: python/arrow.md
|
||||||
|
- DuckDB: python/duckdb.md
|
||||||
|
- LangChain 🦜️🔗: https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lancedb.html
|
||||||
|
- LangChain JS/TS 🦜️🔗: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/lancedb
|
||||||
|
- LlamaIndex 🦙: https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html
|
||||||
|
- Pydantic: python/pydantic.md
|
||||||
|
- Voxel51: integrations/voxel51.md
|
||||||
- Python examples:
|
- Python examples:
|
||||||
- YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
|
- YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
|
||||||
- Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
|
- Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
|
||||||
@@ -65,6 +73,8 @@ nav:
|
|||||||
- Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
|
- Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
|
||||||
- Javascript examples:
|
- Javascript examples:
|
||||||
- YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
|
- YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
|
||||||
|
- TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md
|
||||||
|
|
||||||
- References:
|
- References:
|
||||||
- Vector Search: search.md
|
- Vector Search: search.md
|
||||||
- SQL filters: sql.md
|
- SQL filters: sql.md
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# ANN (Approximate Nearest Neighbor) Indexes
|
# ANN (Approximate Nearest Neighbor) Indexes
|
||||||
|
|
||||||
You can create an index over your vector data to make search faster.
|
You can create an index over your vector data to make search faster.
|
||||||
Vector indexes are faster but less accurate than exhaustive search.
|
Vector indexes are faster but less accurate than exhaustive search (KNN or Flat Search).
|
||||||
LanceDB provides many parameters to fine-tune the index's size, the speed of queries, and the accuracy of results.
|
LanceDB provides many parameters to fine-tune the index's size, the speed of queries, and the accuracy of results.
|
||||||
|
|
||||||
Currently, LanceDB does *not* automatically create the ANN index.
|
Currently, LanceDB does *not* automatically create the ANN index.
|
||||||
@@ -10,7 +10,18 @@ If you can live with <100ms latency, skipping index creation is a simpler workfl
|
|||||||
|
|
||||||
In the future we will look to automatically create and configure the ANN index.
|
In the future we will look to automatically create and configure the ANN index.
|
||||||
|
|
||||||
## Creating an ANN Index
|
## Types of Index
|
||||||
|
|
||||||
|
Lance can support multiple index types, the most widely used one is `IVF_PQ`.
|
||||||
|
|
||||||
|
* `IVF_PQ`: use **Inverted File Index (IVF)** to first divide the dataset into `N` partitions,
|
||||||
|
and then use **Product Quantization** to compress vectors in each partition.
|
||||||
|
* `DISKANN` (**Experimental**): organize the vector as a on-disk graph, where the vertices approximately
|
||||||
|
represent the nearest neighbors of each vector.
|
||||||
|
|
||||||
|
## Creating an IVF_PQ Index
|
||||||
|
|
||||||
|
Lance supports `IVF_PQ` index type by default.
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
||||||
@@ -45,15 +56,18 @@ In the future we will look to automatically create and configure the ANN index.
|
|||||||
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
|
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
|
||||||
```
|
```
|
||||||
|
|
||||||
Since `create_index` has a training step, it can take a few minutes to finish for large tables. You can control the index
|
- **metric** (default: "L2"): The distance metric to use. By default it uses euclidean distance "`L2`".
|
||||||
creation by providing the following parameters:
|
We also support "cosine" and "dot" distance as well.
|
||||||
|
- **num_partitions** (default: 256): The number of partitions of the index.
|
||||||
|
- **num_sub_vectors** (default: 96): The number of sub-vectors (M) that will be created during Product Quantization (PQ).
|
||||||
|
For D dimensional vector, it will be divided into `M` of `D/M` sub-vectors, each of which is presented by
|
||||||
|
a single PQ code.
|
||||||
|
|
||||||
|
<figure markdown>
|
||||||
|

|
||||||
|
<figcaption>IVF_PQ index with <code>num_partitions=2, num_sub_vectors=4</code></figcaption>
|
||||||
|
</figure>
|
||||||
|
|
||||||
- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support "cosine" distance.
|
|
||||||
- **num_partitions** (default: 256): The number of partitions of the index. The number of partitions should be configured so each partition has 3-5K vectors. For example, a table
|
|
||||||
with ~1M vectors should use 256 partitions. You can specify arbitrary number of partitions but powers of 2 is most conventional.
|
|
||||||
A higher number leads to faster queries, but it makes index generation slower.
|
|
||||||
- **num_sub_vectors** (default: 96): The number of subvectors (M) that will be created during Product Quantization (PQ). A larger number makes
|
|
||||||
search more accurate, but also makes the index larger and slower to build.
|
|
||||||
|
|
||||||
## Querying an ANN Index
|
## Querying an ANN Index
|
||||||
|
|
||||||
@@ -138,3 +152,31 @@ You can select the columns returned by the query using a select clause.
|
|||||||
.select(["id"])
|
.select(["id"])
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
### When is it necessary to create an ANN vector index.
|
||||||
|
|
||||||
|
`LanceDB` has manually tuned SIMD code for computing vector distances.
|
||||||
|
In our benchmarks, computing 100K pairs of 1K dimension vectors only take less than 20ms.
|
||||||
|
For small dataset (<100K rows) or the applications which can accept 100ms latency, vector indices are usually not necessary.
|
||||||
|
|
||||||
|
For large-scale or higher dimension vectors, it is beneficial to create vector index.
|
||||||
|
|
||||||
|
### How big is my index, and how many memory will it take.
|
||||||
|
|
||||||
|
In LanceDB, all vector indices are disk-based, meaning that when responding to a vector query, only the relevant pages from the index file are loaded from disk and cached in memory. Additionally, each sub-vector is usually encoded into 1 byte PQ code.
|
||||||
|
|
||||||
|
For example, with a 1024-dimension dataset, if we choose `num_sub_vectors=64`, each sub-vector has `1024 / 64 = 16` float32 numbers.
|
||||||
|
Product quantization can lead to approximately `16 * sizeof(float32) / 1 = 64` times of space reduction.
|
||||||
|
|
||||||
|
### How to choose `num_partitions` and `num_sub_vectors` for `IVF_PQ` index.
|
||||||
|
|
||||||
|
`num_partitions` is used to decide how many partitions the first level `IVF` index uses.
|
||||||
|
Higher number of partitions could lead to more efficient I/O during queries and better accuracy, but it takes much more time to train.
|
||||||
|
On `SIFT-1M` dataset, our benchmark shows that keeping each partition 1K-4K rows lead to a good latency / recall.
|
||||||
|
|
||||||
|
`num_sub_vectors` decides how many Product Quantization code to generate on each vector. Because
|
||||||
|
Product Quantization is a lossy compression of the original vector, the more `num_sub_vectors` usually results to
|
||||||
|
less space distortion, and thus yield better accuracy. However, similarly, more `num_sub_vectors` causes heavier I/O and
|
||||||
|
more PQ computation, thus, higher latency. `dimension / num_sub_vectors` should be aligned with 8 for better SIMD efficiency.
|
||||||
BIN
docs/src/assets/ivf_pq.png
Normal file
BIN
docs/src/assets/ivf_pq.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 266 KiB |
BIN
docs/src/assets/voxel.gif
Normal file
BIN
docs/src/assets/voxel.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 953 KiB |
@@ -122,6 +122,35 @@ After a table has been created, you can always add more data to it using
|
|||||||
{vector: [9.5, 56.2], item: "buzz", price: 200.0}])
|
{vector: [9.5, 56.2], item: "buzz", price: 200.0}])
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## How to delete rows from a table
|
||||||
|
|
||||||
|
Use the `delete()` method on tables to delete rows from a table. To choose
|
||||||
|
which rows to delete, provide a filter that matches on the metadata columns.
|
||||||
|
This can delete any number of rows that match the filter.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
```python
|
||||||
|
tbl.delete('item = "fizz"')
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
await tbl.delete('item = "fizz"')
|
||||||
|
```
|
||||||
|
|
||||||
|
The deletion predicate is a SQL expression that supports the same expressions
|
||||||
|
as the `where()` clause on a search. They can be as simple or complex as needed.
|
||||||
|
To see what expressions are supported, see the [SQL filters](sql.md) section.
|
||||||
|
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
Read more: [lancedb.table.Table.delete][]
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
|
||||||
|
Read more: [vectordb.Table.delete](javascript/interfaces/Table.md#delete)
|
||||||
|
|
||||||
## How to search for (approximate) nearest neighbors
|
## How to search for (approximate) nearest neighbors
|
||||||
|
|
||||||
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
||||||
|
|||||||
@@ -126,7 +126,7 @@ belong in the same latent space and your results will be nonsensical.
|
|||||||
=== "Javascript"
|
=== "Javascript"
|
||||||
```javascript
|
```javascript
|
||||||
const results = await table
|
const results = await table
|
||||||
.search('What's the best pizza topping?')
|
.search("What's the best pizza topping?")
|
||||||
.limit(10)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|||||||
121
docs/src/examples/transformerjs_embedding_search_nodejs.md
Normal file
121
docs/src/examples/transformerjs_embedding_search_nodejs.md
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
# Vector embedding search using TransformersJS
|
||||||
|
|
||||||
|
## Embed and query data from LanceDB using TransformersJS
|
||||||
|
|
||||||
|
<img id="splash" width="400" alt="transformersjs" src="https://github.com/lancedb/lancedb/assets/43097991/88a31e30-3d6f-4eef-9216-4b7c688f1b4f">
|
||||||
|
|
||||||
|
This example shows how to use the [transformers.js](https://github.com/xenova/transformers.js) library to perform vector embedding search using LanceDB's Javascript API.
|
||||||
|
|
||||||
|
|
||||||
|
### Setting up
|
||||||
|
First, install the dependencies:
|
||||||
|
```bash
|
||||||
|
npm install vectordb
|
||||||
|
npm i @xenova/transformers
|
||||||
|
```
|
||||||
|
|
||||||
|
We will also be using the [all-MiniLM-L6-v2](https://huggingface.co/Xenova/all-MiniLM-L6-v2) model to make it compatible with Transformers.js
|
||||||
|
|
||||||
|
Within our `index.js` file we will import the necessary libraries and define our model and database:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
const { pipeline } = await import('@xenova/transformers')
|
||||||
|
const pipe = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating the embedding function
|
||||||
|
|
||||||
|
Next, we will create a function that will take in a string and return the vector embedding of that string. We will use the `pipe` function we defined earlier to get the vector embedding of the string.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Define the function. `sourceColumn` is required for LanceDB to know
|
||||||
|
// which column to use as input.
|
||||||
|
const embed_fun = {}
|
||||||
|
embed_fun.sourceColumn = 'text'
|
||||||
|
embed_fun.embed = async function (batch) {
|
||||||
|
let result = []
|
||||||
|
// Given a batch of strings, we will use the `pipe` function to get
|
||||||
|
// the vector embedding of each string.
|
||||||
|
for (let text of batch) {
|
||||||
|
// 'mean' pooling and normalizing allows the embeddings to share the
|
||||||
|
// same length.
|
||||||
|
const res = await pipe(text, { pooling: 'mean', normalize: true })
|
||||||
|
result.push(Array.from(res['data']))
|
||||||
|
}
|
||||||
|
return (result)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating the database
|
||||||
|
|
||||||
|
Now, we will create the LanceDB database and add the embedding function we defined earlier.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Link a folder and create a table with data
|
||||||
|
const db = await lancedb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
// You can also import any other data, but make sure that you have a column
|
||||||
|
// for the embedding function to use.
|
||||||
|
const data = [
|
||||||
|
{ id: 1, text: 'Cherry', type: 'fruit' },
|
||||||
|
{ id: 2, text: 'Carrot', type: 'vegetable' },
|
||||||
|
{ id: 3, text: 'Potato', type: 'vegetable' },
|
||||||
|
{ id: 4, text: 'Apple', type: 'fruit' },
|
||||||
|
{ id: 5, text: 'Banana', type: 'fruit' }
|
||||||
|
]
|
||||||
|
|
||||||
|
// Create the table with the embedding function
|
||||||
|
const table = await db.createTable('food_table', data, "create", embed_fun)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Performing the search
|
||||||
|
|
||||||
|
Now, we can perform the search using the `search` function. LanceDB automatically uses the embedding function we defined earlier to get the vector embedding of the query string.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Query the table
|
||||||
|
const results = await table
|
||||||
|
.search("a sweet fruit to eat")
|
||||||
|
.metricType("cosine")
|
||||||
|
.limit(2)
|
||||||
|
.execute()
|
||||||
|
console.log(results.map(r => r.text))
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
[ 'Banana', 'Cherry' ]
|
||||||
|
```
|
||||||
|
|
||||||
|
Output of `results`:
|
||||||
|
```bash
|
||||||
|
[
|
||||||
|
{
|
||||||
|
vector: Float32Array(384) [
|
||||||
|
-0.057455405592918396,
|
||||||
|
0.03617725893855095,
|
||||||
|
-0.0367760956287384,
|
||||||
|
... 381 more items
|
||||||
|
],
|
||||||
|
id: 5,
|
||||||
|
text: 'Banana',
|
||||||
|
type: 'fruit',
|
||||||
|
score: 0.4919965863227844
|
||||||
|
},
|
||||||
|
{
|
||||||
|
vector: Float32Array(384) [
|
||||||
|
0.0009714411571621895,
|
||||||
|
0.008223623037338257,
|
||||||
|
0.009571489877998829,
|
||||||
|
... 381 more items
|
||||||
|
],
|
||||||
|
id: 1,
|
||||||
|
text: 'Cherry',
|
||||||
|
type: 'fruit',
|
||||||
|
score: 0.5540297031402588
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Wrapping it up
|
||||||
|
|
||||||
|
In this example, we showed how to use the `transformers.js` library to perform vector embedding search using LanceDB's Javascript API. You can find the full code for this example on [Github](https://github.com/lancedb/lancedb/blob/main/node/examples/js-transformers/index.js)!
|
||||||
@@ -4,4 +4,10 @@
|
|||||||
|
|
||||||
<img id="splash" width="400" alt="youtube transcript search" src="https://user-images.githubusercontent.com/917119/236965568-def7394d-171c-45f2-939d-8edfeaadd88c.png">
|
<img id="splash" width="400" alt="youtube transcript search" src="https://user-images.githubusercontent.com/917119/236965568-def7394d-171c-45f2-939d-8edfeaadd88c.png">
|
||||||
|
|
||||||
|
|
||||||
|
<a href="https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/youtube_bot/main.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab">
|
||||||
|
|
||||||
|
Scripts - [](./examples/youtube_bot/main.py) [](./examples/youtube_bot/index.js)
|
||||||
|
|
||||||
|
|
||||||
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb)
|
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb)
|
||||||
|
|||||||
@@ -67,6 +67,6 @@ LanceDB's core is written in Rust 🦀 and is built using <a href="https://githu
|
|||||||
* [`Embedding Functions`](embedding.md) - functions for working with embeddings.
|
* [`Embedding Functions`](embedding.md) - functions for working with embeddings.
|
||||||
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
||||||
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
||||||
* [`Ecosystem Integrations`](integrations.md) - integrating LanceDB with python data tooling ecosystem.
|
* [`Ecosystem Integrations`](python/integration.md) - integrating LanceDB with python data tooling ecosystem.
|
||||||
* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
|
* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
|
||||||
* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB JavaScript (Node) SDK.
|
* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB JavaScript (Node) SDK.
|
||||||
|
|||||||
@@ -1,116 +0,0 @@
|
|||||||
# Integrations
|
|
||||||
|
|
||||||
Built on top of Apache Arrow, `LanceDB` is easy to integrate with the Python ecosystem, including Pandas, PyArrow and DuckDB.
|
|
||||||
|
|
||||||
## Pandas and PyArrow
|
|
||||||
|
|
||||||
First, we need to connect to a `LanceDB` database.
|
|
||||||
|
|
||||||
```py
|
|
||||||
|
|
||||||
import lancedb
|
|
||||||
|
|
||||||
db = lancedb.connect("data/sample-lancedb")
|
|
||||||
```
|
|
||||||
|
|
||||||
And write a `Pandas DataFrame` to LanceDB directly.
|
|
||||||
|
|
||||||
```py
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
data = pd.DataFrame({
|
|
||||||
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
|
||||||
"item": ["foo", "bar"],
|
|
||||||
"price": [10.0, 20.0]
|
|
||||||
})
|
|
||||||
table = db.create_table("pd_table", data=data)
|
|
||||||
```
|
|
||||||
|
|
||||||
You will find detailed instructions of creating dataset and index in [Basic Operations](basic.md) and [Indexing](ann_indexes.md)
|
|
||||||
sections.
|
|
||||||
|
|
||||||
|
|
||||||
We can now perform similarity searches via `LanceDB`.
|
|
||||||
|
|
||||||
```py
|
|
||||||
# Open the table previously created.
|
|
||||||
table = db.open_table("pd_table")
|
|
||||||
|
|
||||||
query_vector = [100, 100]
|
|
||||||
# Pandas DataFrame
|
|
||||||
df = table.search(query_vector).limit(1).to_df()
|
|
||||||
print(df)
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
vector item price score
|
|
||||||
0 [5.9, 26.5] bar 20.0 14257.05957
|
|
||||||
```
|
|
||||||
|
|
||||||
If you have a simple filter, it's faster to provide a where clause to `LanceDB`'s search query.
|
|
||||||
If you have more complex criteria, you can always apply the filter to the resulting pandas `DataFrame` from the search query.
|
|
||||||
|
|
||||||
```python
|
|
||||||
|
|
||||||
# Apply the filter via LanceDB
|
|
||||||
results = table.search([100, 100]).where("price < 15").to_df()
|
|
||||||
assert len(results) == 1
|
|
||||||
assert results["item"].iloc[0] == "foo"
|
|
||||||
|
|
||||||
# Apply the filter via Pandas
|
|
||||||
df = results = table.search([100, 100]).to_df()
|
|
||||||
results = df[df.price < 15]
|
|
||||||
assert len(results) == 1
|
|
||||||
assert results["item"].iloc[0] == "foo"
|
|
||||||
```
|
|
||||||
|
|
||||||
## DuckDB
|
|
||||||
|
|
||||||
`LanceDB` works with `DuckDB` via [PyArrow integration](https://duckdb.org/docs/guides/python/sql_on_arrow).
|
|
||||||
|
|
||||||
Let us start with installing `duckdb` and `lancedb`.
|
|
||||||
|
|
||||||
```shell
|
|
||||||
pip install duckdb lancedb
|
|
||||||
```
|
|
||||||
|
|
||||||
We will re-use the dataset created previously
|
|
||||||
|
|
||||||
```python
|
|
||||||
import lancedb
|
|
||||||
|
|
||||||
db = lancedb.connect("data/sample-lancedb")
|
|
||||||
table = db.open_table("pd_table")
|
|
||||||
arrow_table = table.to_arrow()
|
|
||||||
```
|
|
||||||
|
|
||||||
`DuckDB` can directly query the `arrow_table`:
|
|
||||||
|
|
||||||
```python
|
|
||||||
import duckdb
|
|
||||||
|
|
||||||
duckdb.query("SELECT * FROM arrow_table")
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────┬─────────┬────────┐
|
|
||||||
│ vector │ item │ price │
|
|
||||||
│ float[] │ varchar │ double │
|
|
||||||
├─────────────┼─────────┼────────┤
|
|
||||||
│ [3.1, 4.1] │ foo │ 10.0 │
|
|
||||||
│ [5.9, 26.5] │ bar │ 20.0 │
|
|
||||||
└─────────────┴─────────┴────────┘
|
|
||||||
```
|
|
||||||
```python
|
|
||||||
duckdb.query("SELECT mean(price) FROM arrow_table")
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
Out[16]:
|
|
||||||
┌─────────────┐
|
|
||||||
│ mean(price) │
|
|
||||||
│ double │
|
|
||||||
├─────────────┤
|
|
||||||
│ 15.0 │
|
|
||||||
└─────────────┘
|
|
||||||
```
|
|
||||||
71
docs/src/integrations/voxel51.md
Normal file
71
docs/src/integrations/voxel51.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|

|
||||||
|
|
||||||
|
Basic recipe
|
||||||
|
____________
|
||||||
|
|
||||||
|
The basic workflow to use LanceDB to create a similarity index on your FiftyOne
|
||||||
|
datasets and use this to query your data is as follows:
|
||||||
|
|
||||||
|
1) Load a dataset into FiftyOne
|
||||||
|
|
||||||
|
2) Compute embedding vectors for samples or patches in your dataset, or select
|
||||||
|
a model to use to generate embeddings
|
||||||
|
|
||||||
|
3) Use the `compute_similarity()`
|
||||||
|
method to generate a LanceDB table for the samples or object
|
||||||
|
patches embeddings in a dataset by setting the parameter `backend="lancedb"` and
|
||||||
|
specifying a `brain_key` of your choice
|
||||||
|
|
||||||
|
4) Use this LanceDB table to query your data with
|
||||||
|
`sort_by_similarity()`
|
||||||
|
|
||||||
|
5) If desired, delete the table
|
||||||
|
|
||||||
|
The example below demonstrates this workflow.
|
||||||
|
|
||||||
|
!!! Note
|
||||||
|
|
||||||
|
You must install the LanceDB Python client to run this
|
||||||
|
```
|
||||||
|
pip install lancedb
|
||||||
|
```
|
||||||
|
|
||||||
|
```python
|
||||||
|
|
||||||
|
import fiftyone as fo
|
||||||
|
import fiftyone.brain as fob
|
||||||
|
import fiftyone.zoo as foz
|
||||||
|
|
||||||
|
# Step 1: Load your data into FiftyOne
|
||||||
|
dataset = foz.load_zoo_dataset("quickstart")
|
||||||
|
|
||||||
|
# Steps 2 and 3: Compute embeddings and create a similarity index
|
||||||
|
lancedb_index = fob.compute_similarity(
|
||||||
|
dataset,
|
||||||
|
model="clip-vit-base32-torch",
|
||||||
|
brain_key="lancedb_index",
|
||||||
|
backend="lancedb",
|
||||||
|
)
|
||||||
|
```
|
||||||
|
Once the similarity index has been generated, we can query our data in FiftyOne
|
||||||
|
by specifying the `brain_key`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Step 4: Query your data
|
||||||
|
query = dataset.first().id # query by sample ID
|
||||||
|
view = dataset.sort_by_similarity(
|
||||||
|
query,
|
||||||
|
brain_key="lancedb_index",
|
||||||
|
k=10, # limit to 10 most similar samples
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 5 (optional): Cleanup
|
||||||
|
|
||||||
|
# Delete the LanceDB table
|
||||||
|
lancedb_index.cleanup()
|
||||||
|
|
||||||
|
# Delete run record from FiftyOne
|
||||||
|
dataset.delete_brain_run("lancedb_index")
|
||||||
|
```
|
||||||
|
|
||||||
|
For a more in-depth walkthrough of the integration, visit the LanceDB guide on Voxel51 - [LanceDB x Voxel51](https://docs.voxel51.com/integrations/lancedb.html)
|
||||||
@@ -10,6 +10,10 @@ A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb)
|
|||||||
npm install vectordb
|
npm install vectordb
|
||||||
```
|
```
|
||||||
|
|
||||||
|
This will download the appropriate native library for your platform. We currently
|
||||||
|
support x86_64 Linux, aarch64 Linux, Intel MacOS, and ARM (M1/M2) MacOS. We do not
|
||||||
|
yet support Windows or musl-based Linux (such as Alpine Linux).
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
### Basic Example
|
### Basic Example
|
||||||
@@ -28,12 +32,34 @@ The [examples](./examples) folder contains complete examples.
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run the tests with
|
To build everything fresh:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you should be able to run the tests with:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm test
|
npm test
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Rebuilding Rust library
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rebuilding Typescript
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run tsc
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fix lints
|
||||||
|
|
||||||
To run the linter and have it automatically fix all errors
|
To run the linter and have it automatically fix all errors
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ A connection to a LanceDB database.
|
|||||||
### Properties
|
### Properties
|
||||||
|
|
||||||
- [\_db](LocalConnection.md#_db)
|
- [\_db](LocalConnection.md#_db)
|
||||||
- [\_uri](LocalConnection.md#_uri)
|
- [\_options](LocalConnection.md#_options)
|
||||||
|
|
||||||
### Accessors
|
### Accessors
|
||||||
|
|
||||||
@@ -35,18 +35,18 @@ A connection to a LanceDB database.
|
|||||||
|
|
||||||
### constructor
|
### constructor
|
||||||
|
|
||||||
• **new LocalConnection**(`db`, `uri`)
|
• **new LocalConnection**(`db`, `options`)
|
||||||
|
|
||||||
#### Parameters
|
#### Parameters
|
||||||
|
|
||||||
| Name | Type |
|
| Name | Type |
|
||||||
| :------ | :------ |
|
| :------ | :------ |
|
||||||
| `db` | `any` |
|
| `db` | `any` |
|
||||||
| `uri` | `string` |
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:132](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L132)
|
[index.ts:184](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L184)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -56,17 +56,17 @@ A connection to a LanceDB database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:130](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L130)
|
[index.ts:182](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L182)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
### \_uri
|
### \_options
|
||||||
|
|
||||||
• `Private` `Readonly` **\_uri**: `string`
|
• `Private` `Readonly` **\_options**: [`ConnectionOptions`](../interfaces/ConnectionOptions.md)
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:129](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L129)
|
[index.ts:181](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L181)
|
||||||
|
|
||||||
## Accessors
|
## Accessors
|
||||||
|
|
||||||
@@ -84,7 +84,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:137](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L137)
|
[index.ts:189](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L189)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ Creates a new Table and initialize it with new data.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:177](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L177)
|
[index.ts:230](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L230)
|
||||||
|
|
||||||
▸ **createTable**(`name`, `data`, `mode`): `Promise`<[`Table`](../interfaces/Table.md)<`number`[]\>\>
|
▸ **createTable**(`name`, `data`, `mode`): `Promise`<[`Table`](../interfaces/Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
@@ -134,7 +134,7 @@ Connection.createTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:178](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L178)
|
[index.ts:231](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L231)
|
||||||
|
|
||||||
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
@@ -165,7 +165,36 @@ Connection.createTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:188](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L188)
|
[index.ts:241](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L241)
|
||||||
|
|
||||||
|
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings?`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `name` | `string` |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] |
|
||||||
|
| `mode` | [`WriteMode`](../enums/WriteMode.md) |
|
||||||
|
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
Connection.createTable
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:242](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L242)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -190,7 +219,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:201](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L201)
|
[index.ts:266](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L266)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -216,7 +245,7 @@ Drop an existing table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:211](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L211)
|
[index.ts:276](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L276)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -242,7 +271,7 @@ Open a table in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:153](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L153)
|
[index.ts:205](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L205)
|
||||||
|
|
||||||
▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
@@ -271,7 +300,34 @@ Connection.openTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:160](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L160)
|
[index.ts:212](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L212)
|
||||||
|
|
||||||
|
▸ **openTable**<`T`\>(`name`, `embeddings?`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `name` | `string` |
|
||||||
|
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
Connection.openTable
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:213](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L213)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -291,4 +347,4 @@ Get the names of all tables in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:144](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L144)
|
[index.ts:196](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L196)
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
- [\_embeddings](LocalTable.md#_embeddings)
|
- [\_embeddings](LocalTable.md#_embeddings)
|
||||||
- [\_name](LocalTable.md#_name)
|
- [\_name](LocalTable.md#_name)
|
||||||
|
- [\_options](LocalTable.md#_options)
|
||||||
- [\_tbl](LocalTable.md#_tbl)
|
- [\_tbl](LocalTable.md#_tbl)
|
||||||
|
|
||||||
### Accessors
|
### Accessors
|
||||||
@@ -43,7 +44,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
### constructor
|
### constructor
|
||||||
|
|
||||||
• **new LocalTable**<`T`\>(`tbl`, `name`)
|
• **new LocalTable**<`T`\>(`tbl`, `name`, `options`)
|
||||||
|
|
||||||
#### Type parameters
|
#### Type parameters
|
||||||
|
|
||||||
@@ -57,12 +58,13 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
| :------ | :------ |
|
| :------ | :------ |
|
||||||
| `tbl` | `any` |
|
| `tbl` | `any` |
|
||||||
| `name` | `string` |
|
| `name` | `string` |
|
||||||
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:221](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L221)
|
[index.ts:287](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L287)
|
||||||
|
|
||||||
• **new LocalTable**<`T`\>(`tbl`, `name`, `embeddings`)
|
• **new LocalTable**<`T`\>(`tbl`, `name`, `options`, `embeddings`)
|
||||||
|
|
||||||
#### Type parameters
|
#### Type parameters
|
||||||
|
|
||||||
@@ -76,11 +78,12 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `tbl` | `any` | |
|
| `tbl` | `any` | |
|
||||||
| `name` | `string` | |
|
| `name` | `string` | |
|
||||||
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) | |
|
||||||
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |
|
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:227](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L227)
|
[index.ts:294](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L294)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -90,7 +93,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:219](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L219)
|
[index.ts:284](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L284)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -100,7 +103,17 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:218](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L218)
|
[index.ts:283](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L283)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_options
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_options**: [`ConnectionOptions`](../interfaces/ConnectionOptions.md)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:285](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L285)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -110,7 +123,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:217](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L217)
|
[index.ts:282](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L282)
|
||||||
|
|
||||||
## Accessors
|
## Accessors
|
||||||
|
|
||||||
@@ -128,7 +141,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:234](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L234)
|
[index.ts:302](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L302)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -156,7 +169,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:252](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L252)
|
[index.ts:320](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L320)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -176,7 +189,7 @@ Returns the number of rows in this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:278](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L278)
|
[index.ts:362](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L362)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -194,7 +207,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index, |
|
| `indexParams` | [`IvfPQIndexConfig`](../interfaces/IvfPQIndexConfig.md) | The parameters of this Index, |
|
||||||
|
|
||||||
#### Returns
|
#### Returns
|
||||||
|
|
||||||
@@ -206,7 +219,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:271](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L271)
|
[index.ts:355](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L355)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -232,7 +245,7 @@ Delete rows from this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:287](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L287)
|
[index.ts:371](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L371)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -260,7 +273,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:262](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L262)
|
[index.ts:338](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L338)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -286,4 +299,4 @@ Creates a search query to find the nearest neighbors of the given search term
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:242](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L242)
|
[index.ts:310](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L310)
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ An embedding function that automatically creates vector representation for a giv
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L21)
|
[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L21)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -50,7 +50,7 @@ An embedding function that automatically creates vector representation for a giv
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L19)
|
[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L19)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -60,7 +60,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L18)
|
[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L18)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -76,7 +76,7 @@ The name of the column that will be used as input for the Embedding Function.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L50)
|
[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L50)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -102,4 +102,4 @@ Creates a vector representation for the given values.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L38)
|
[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L38)
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ A builder for nearest neighbor queries for LanceDB.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:362](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L362)
|
[index.ts:448](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L448)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ A builder for nearest neighbor queries for LanceDB.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:360](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L360)
|
[index.ts:446](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L446)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -82,7 +82,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:358](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L358)
|
[index.ts:444](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L444)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -92,7 +92,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:354](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L354)
|
[index.ts:440](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L440)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -102,7 +102,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:359](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L359)
|
[index.ts:445](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L445)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:356](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L356)
|
[index.ts:442](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L442)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -122,7 +122,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:352](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L352)
|
[index.ts:438](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L438)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -132,7 +132,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:353](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L353)
|
[index.ts:439](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L439)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -142,7 +142,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:355](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L355)
|
[index.ts:441](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L441)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -152,7 +152,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:357](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L357)
|
[index.ts:443](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L443)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -162,7 +162,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:351](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L351)
|
[index.ts:437](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L437)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -188,7 +188,7 @@ A filter statement to be applied to this query.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:410](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L410)
|
[index.ts:496](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L496)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -210,7 +210,7 @@ Execute the query and return the results as an Array of Objects
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:433](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L433)
|
[index.ts:519](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L519)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -232,7 +232,7 @@ A filter statement to be applied to this query.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:405](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L405)
|
[index.ts:491](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L491)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -254,7 +254,7 @@ Sets the number of results that will be returned
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:378](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L378)
|
[index.ts:464](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L464)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -280,7 +280,7 @@ MetricType for the different options
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:425](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L425)
|
[index.ts:511](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L511)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -302,7 +302,7 @@ The number of probes used. A higher number makes search more accurate but also s
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:396](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L396)
|
[index.ts:482](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L482)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -324,7 +324,7 @@ Refine the results by reading extra elements and re-ranking them in memory.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:387](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L387)
|
[index.ts:473](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L473)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -346,4 +346,4 @@ Return only the specified columns.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:416](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L416)
|
[index.ts:502](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L502)
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ Cosine distance
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:481](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L481)
|
[index.ts:567](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L567)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -34,7 +34,7 @@ Dot product
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:486](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L486)
|
[index.ts:572](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L572)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -46,4 +46,4 @@ Euclidean distance
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:476](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L476)
|
[index.ts:562](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L562)
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ Append new data to the table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:466](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L466)
|
[index.ts:552](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L552)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -34,7 +34,7 @@ Create a new [Table](../interfaces/Table.md).
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:462](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L462)
|
[index.ts:548](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L548)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -46,4 +46,4 @@ Overwrite the existing [Table](../interfaces/Table.md) if presented.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:464](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L464)
|
[index.ts:550](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L550)
|
||||||
|
|||||||
41
docs/src/javascript/interfaces/AwsCredentials.md
Normal file
41
docs/src/javascript/interfaces/AwsCredentials.md
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / AwsCredentials
|
||||||
|
|
||||||
|
# Interface: AwsCredentials
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [accessKeyId](AwsCredentials.md#accesskeyid)
|
||||||
|
- [secretKey](AwsCredentials.md#secretkey)
|
||||||
|
- [sessionToken](AwsCredentials.md#sessiontoken)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### accessKeyId
|
||||||
|
|
||||||
|
• **accessKeyId**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:31](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L31)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### secretKey
|
||||||
|
|
||||||
|
• **secretKey**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:33](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L33)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### sessionToken
|
||||||
|
|
||||||
|
• `Optional` **sessionToken**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:35](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L35)
|
||||||
@@ -32,7 +32,7 @@ Connection could be local against filesystem or remote against a server.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:45](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L45)
|
[index.ts:70](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L70)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -63,7 +63,7 @@ Creates a new Table and initialize it with new data.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:65](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L65)
|
[index.ts:90](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L90)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -84,7 +84,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:67](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L67)
|
[index.ts:92](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L92)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -106,7 +106,7 @@ Drop an existing table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:73](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L73)
|
[index.ts:98](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L98)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -135,7 +135,7 @@ Open a table in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:55](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L55)
|
[index.ts:80](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L80)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -149,4 +149,4 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:47](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L47)
|
[index.ts:72](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L72)
|
||||||
|
|||||||
30
docs/src/javascript/interfaces/ConnectionOptions.md
Normal file
30
docs/src/javascript/interfaces/ConnectionOptions.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / ConnectionOptions
|
||||||
|
|
||||||
|
# Interface: ConnectionOptions
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [awsCredentials](ConnectionOptions.md#awscredentials)
|
||||||
|
- [uri](ConnectionOptions.md#uri)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### awsCredentials
|
||||||
|
|
||||||
|
• `Optional` **awsCredentials**: [`AwsCredentials`](AwsCredentials.md)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:40](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L40)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### uri
|
||||||
|
|
||||||
|
• **uri**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:39](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L39)
|
||||||
@@ -45,7 +45,7 @@ Creates a vector representation for the given values.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/embedding_function.ts#L27)
|
[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/embedding_function.ts#L27)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -57,4 +57,4 @@ The name of the column that will be used as input for the Embedding Function.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/embedding_function.ts#L22)
|
[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/embedding_function.ts#L22)
|
||||||
|
|||||||
149
docs/src/javascript/interfaces/IvfPQIndexConfig.md
Normal file
149
docs/src/javascript/interfaces/IvfPQIndexConfig.md
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / IvfPQIndexConfig
|
||||||
|
|
||||||
|
# Interface: IvfPQIndexConfig
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [column](IvfPQIndexConfig.md#column)
|
||||||
|
- [index\_name](IvfPQIndexConfig.md#index_name)
|
||||||
|
- [max\_iters](IvfPQIndexConfig.md#max_iters)
|
||||||
|
- [max\_opq\_iters](IvfPQIndexConfig.md#max_opq_iters)
|
||||||
|
- [metric\_type](IvfPQIndexConfig.md#metric_type)
|
||||||
|
- [num\_bits](IvfPQIndexConfig.md#num_bits)
|
||||||
|
- [num\_partitions](IvfPQIndexConfig.md#num_partitions)
|
||||||
|
- [num\_sub\_vectors](IvfPQIndexConfig.md#num_sub_vectors)
|
||||||
|
- [replace](IvfPQIndexConfig.md#replace)
|
||||||
|
- [type](IvfPQIndexConfig.md#type)
|
||||||
|
- [use\_opq](IvfPQIndexConfig.md#use_opq)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### column
|
||||||
|
|
||||||
|
• `Optional` **column**: `string`
|
||||||
|
|
||||||
|
The column to be indexed
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:382](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L382)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### index\_name
|
||||||
|
|
||||||
|
• `Optional` **index\_name**: `string`
|
||||||
|
|
||||||
|
A unique name for the index
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:387](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L387)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### max\_iters
|
||||||
|
|
||||||
|
• `Optional` **max\_iters**: `number`
|
||||||
|
|
||||||
|
The max number of iterations for kmeans training.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:402](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L402)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### max\_opq\_iters
|
||||||
|
|
||||||
|
• `Optional` **max\_opq\_iters**: `number`
|
||||||
|
|
||||||
|
Max number of iterations to train OPQ, if `use_opq` is true.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:421](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L421)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### metric\_type
|
||||||
|
|
||||||
|
• `Optional` **metric\_type**: [`MetricType`](../enums/MetricType.md)
|
||||||
|
|
||||||
|
Metric type, L2 or Cosine
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:392](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L392)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_bits
|
||||||
|
|
||||||
|
• `Optional` **num\_bits**: `number`
|
||||||
|
|
||||||
|
The number of bits to present one PQ centroid.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:416](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L416)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_partitions
|
||||||
|
|
||||||
|
• `Optional` **num\_partitions**: `number`
|
||||||
|
|
||||||
|
The number of partitions this index
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:397](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L397)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_sub\_vectors
|
||||||
|
|
||||||
|
• `Optional` **num\_sub\_vectors**: `number`
|
||||||
|
|
||||||
|
Number of subvectors to build PQ code
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:412](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L412)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### replace
|
||||||
|
|
||||||
|
• `Optional` **replace**: `boolean`
|
||||||
|
|
||||||
|
Replace an existing index with the same name if it exists.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:426](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L426)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### type
|
||||||
|
|
||||||
|
• **type**: ``"ivf_pq"``
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:428](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L428)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### use\_opq
|
||||||
|
|
||||||
|
• `Optional` **use\_opq**: `boolean`
|
||||||
|
|
||||||
|
Train as optimized product quantization.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:407](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L407)
|
||||||
@@ -52,7 +52,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:95](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L95)
|
[index.ts:120](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L120)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -72,13 +72,13 @@ Returns the number of rows in this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:115](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L115)
|
[index.ts:140](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L140)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
### createIndex
|
### createIndex
|
||||||
|
|
||||||
• **createIndex**: (`indexParams`: `IvfPQIndexConfig`) => `Promise`<`any`\>
|
• **createIndex**: (`indexParams`: [`IvfPQIndexConfig`](IvfPQIndexConfig.md)) => `Promise`<`any`\>
|
||||||
|
|
||||||
#### Type declaration
|
#### Type declaration
|
||||||
|
|
||||||
@@ -94,7 +94,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index, |
|
| `indexParams` | [`IvfPQIndexConfig`](IvfPQIndexConfig.md) | The parameters of this Index, |
|
||||||
|
|
||||||
##### Returns
|
##### Returns
|
||||||
|
|
||||||
@@ -102,7 +102,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:110](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L110)
|
[index.ts:135](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L135)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -116,11 +116,37 @@ ___
|
|||||||
|
|
||||||
Delete rows from this table.
|
Delete rows from this table.
|
||||||
|
|
||||||
|
This can be used to delete a single row, many rows, all rows, or
|
||||||
|
sometimes no rows (if your predicate matches nothing).
|
||||||
|
|
||||||
|
**`Examples`**
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const con = await lancedb.connect("./.lancedb")
|
||||||
|
const data = [
|
||||||
|
{id: 1, vector: [1, 2]},
|
||||||
|
{id: 2, vector: [3, 4]},
|
||||||
|
{id: 3, vector: [5, 6]},
|
||||||
|
];
|
||||||
|
const tbl = await con.createTable("my_table", data)
|
||||||
|
await tbl.delete("id = 2")
|
||||||
|
await tbl.countRows() // Returns 2
|
||||||
|
```
|
||||||
|
|
||||||
|
If you have a list of values to delete, you can combine them into a
|
||||||
|
stringified list and use the `IN` operator:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const to_remove = [1, 5];
|
||||||
|
await tbl.delete(`id IN (${to_remove.join(",")})`)
|
||||||
|
await tbl.countRows() // Returns 1
|
||||||
|
```
|
||||||
|
|
||||||
##### Parameters
|
##### Parameters
|
||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `filter` | `string` | A filter in the same format used by a sql WHERE clause. |
|
| `filter` | `string` | A filter in the same format used by a sql WHERE clause. The filter must not be empty. |
|
||||||
|
|
||||||
##### Returns
|
##### Returns
|
||||||
|
|
||||||
@@ -128,7 +154,7 @@ Delete rows from this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:122](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L122)
|
[index.ts:174](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L174)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -138,7 +164,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:81](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L81)
|
[index.ts:106](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L106)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -166,7 +192,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:103](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L103)
|
[index.ts:128](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L128)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -192,4 +218,4 @@ Creates a search query to find the nearest neighbors of the given search term
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:87](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L87)
|
[index.ts:112](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L112)
|
||||||
|
|||||||
@@ -18,8 +18,11 @@
|
|||||||
|
|
||||||
### Interfaces
|
### Interfaces
|
||||||
|
|
||||||
|
- [AwsCredentials](interfaces/AwsCredentials.md)
|
||||||
- [Connection](interfaces/Connection.md)
|
- [Connection](interfaces/Connection.md)
|
||||||
|
- [ConnectionOptions](interfaces/ConnectionOptions.md)
|
||||||
- [EmbeddingFunction](interfaces/EmbeddingFunction.md)
|
- [EmbeddingFunction](interfaces/EmbeddingFunction.md)
|
||||||
|
- [IvfPQIndexConfig](interfaces/IvfPQIndexConfig.md)
|
||||||
- [Table](interfaces/Table.md)
|
- [Table](interfaces/Table.md)
|
||||||
|
|
||||||
### Type Aliases
|
### Type Aliases
|
||||||
@@ -34,11 +37,11 @@
|
|||||||
|
|
||||||
### VectorIndexParams
|
### VectorIndexParams
|
||||||
|
|
||||||
Ƭ **VectorIndexParams**: `IvfPQIndexConfig`
|
Ƭ **VectorIndexParams**: [`IvfPQIndexConfig`](interfaces/IvfPQIndexConfig.md)
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:345](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L345)
|
[index.ts:431](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L431)
|
||||||
|
|
||||||
## Functions
|
## Functions
|
||||||
|
|
||||||
@@ -60,4 +63,20 @@ Connect to a LanceDB instance at the given URI
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:34](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L34)
|
[index.ts:47](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L47)
|
||||||
|
|
||||||
|
▸ **connect**(`opts`): `Promise`<[`Connection`](interfaces/Connection.md)\>
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `opts` | `Partial`<[`ConnectionOptions`](interfaces/ConnectionOptions.md)\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Connection`](interfaces/Connection.md)\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:48](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L48)
|
||||||
|
|||||||
@@ -10,7 +10,11 @@
|
|||||||
"\n",
|
"\n",
|
||||||
"This Q&A bot will allow you to query your own documentation easily using questions. We'll also demonstrate the use of LangChain and LanceDB using the OpenAI API. \n",
|
"This Q&A bot will allow you to query your own documentation easily using questions. We'll also demonstrate the use of LangChain and LanceDB using the OpenAI API. \n",
|
||||||
"\n",
|
"\n",
|
||||||
"In this example we'll use Pandas 2.0 documentation, but, this could be replaced for your own docs as well"
|
"In this example we'll use Pandas 2.0 documentation, but, this could be replaced for your own docs as well\n",
|
||||||
|
"\n",
|
||||||
|
"<a href=\"https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/Code-Documentation-QA-Bot/main.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
|
||||||
|
"\n",
|
||||||
|
"Scripts - [](./examples/Code-Documentation-QA-Bot/main.py) [](./examples/Code-Documentation-QA-Bot/index.js)"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -181,7 +185,7 @@
|
|||||||
"id": "c3852dd3",
|
"id": "c3852dd3",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"source": [
|
"source": [
|
||||||
"# Generating emebeddings from our docs\n",
|
"# Generating embeddings from our docs\n",
|
||||||
"\n",
|
"\n",
|
||||||
"Now that we have our raw documents loaded, we need to pre-process them to generate embeddings:"
|
"Now that we have our raw documents loaded, we need to pre-process them to generate embeddings:"
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,5 +1,14 @@
|
|||||||
{
|
{
|
||||||
"cells": [
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
" <a href=\"https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/multimodal_clip/main.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>| [](./examples/multimodal_clip/main.py) |"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 2,
|
"execution_count": 2,
|
||||||
@@ -42,6 +51,19 @@
|
|||||||
"## First run setup: Download data and pre-process"
|
"## First run setup: Download data and pre-process"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"### Get dataset\n",
|
||||||
|
"\n",
|
||||||
|
"!wget https://eto-public.s3.us-west-2.amazonaws.com/datasets/diffusiondb_lance.tar.gz\n",
|
||||||
|
"!tar -xvf diffusiondb_lance.tar.gz\n",
|
||||||
|
"!mv diffusiondb_test rawdata.lance\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 30,
|
"execution_count": 30,
|
||||||
@@ -247,7 +269,7 @@
|
|||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"kernelspec": {
|
"kernelspec": {
|
||||||
"display_name": "Python 3 (ipykernel)",
|
"display_name": "Python 3.11.4 64-bit",
|
||||||
"language": "python",
|
"language": "python",
|
||||||
"name": "python3"
|
"name": "python3"
|
||||||
},
|
},
|
||||||
@@ -261,7 +283,12 @@
|
|||||||
"name": "python",
|
"name": "python",
|
||||||
"nbconvert_exporter": "python",
|
"nbconvert_exporter": "python",
|
||||||
"pygments_lexer": "ipython3",
|
"pygments_lexer": "ipython3",
|
||||||
"version": "3.11.3"
|
"version": "3.11.4"
|
||||||
|
},
|
||||||
|
"vscode": {
|
||||||
|
"interpreter": {
|
||||||
|
"hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nbformat": 4,
|
"nbformat": 4,
|
||||||
|
|||||||
@@ -8,7 +8,12 @@
|
|||||||
"source": [
|
"source": [
|
||||||
"# Youtube Transcript Search QA Bot\n",
|
"# Youtube Transcript Search QA Bot\n",
|
||||||
"\n",
|
"\n",
|
||||||
"This Q&A bot will allow you to search through youtube transcripts using natural language! By going through this notebook, we'll introduce how you can use LanceDB to store and manage your data easily."
|
"This Q&A bot will allow you to search through youtube transcripts using natural language! By going through this notebook, we'll introduce how you can use LanceDB to store and manage your data easily.\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"<a href=\"https://colab.research.google.com/github/lancedb/vectordb-recipes/blob/main/examples/youtube_bot/main.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\">\n",
|
||||||
|
"\n",
|
||||||
|
"Scripts - [](./examples/youtube_bot/main.py) [](./examples/youtube_bot/index.js)\n"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
101
docs/src/python/arrow.md
Normal file
101
docs/src/python/arrow.md
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# Pandas and PyArrow
|
||||||
|
|
||||||
|
|
||||||
|
Built on top of [Apache Arrow](https://arrow.apache.org/),
|
||||||
|
`LanceDB` is easy to integrate with the Python ecosystem, including [Pandas](https://pandas.pydata.org/)
|
||||||
|
and PyArrow.
|
||||||
|
|
||||||
|
## Create dataset
|
||||||
|
|
||||||
|
First, we need to connect to a `LanceDB` database.
|
||||||
|
|
||||||
|
```py
|
||||||
|
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
db = lancedb.connect("data/sample-lancedb")
|
||||||
|
```
|
||||||
|
|
||||||
|
Afterwards, we write a `Pandas DataFrame` to LanceDB directly.
|
||||||
|
|
||||||
|
```py
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
data = pd.DataFrame({
|
||||||
|
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
||||||
|
"item": ["foo", "bar"],
|
||||||
|
"price": [10.0, 20.0]
|
||||||
|
})
|
||||||
|
table = db.create_table("pd_table", data=data)
|
||||||
|
```
|
||||||
|
|
||||||
|
Similar to [`pyarrow.write_dataset()`](https://arrow.apache.org/docs/python/generated/pyarrow.dataset.write_dataset.html),
|
||||||
|
[db.create_table()](../python/#lancedb.db.DBConnection.create_table) accepts a wide-range of forms of data.
|
||||||
|
|
||||||
|
For example, if you have a dataset that is larger than memory size, you can create table with `Iterator[pyarrow.RecordBatch]`,
|
||||||
|
to lazily generate data:
|
||||||
|
|
||||||
|
```py
|
||||||
|
|
||||||
|
from typing import Iterable
|
||||||
|
import pyarrow as pa
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
def make_batches() -> Iterable[pa.RecordBatch]:
|
||||||
|
for i in range(5):
|
||||||
|
yield pa.RecordBatch.from_arrays(
|
||||||
|
[
|
||||||
|
pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
pa.array(["foo", "bar"]),
|
||||||
|
pa.array([10.0, 20.0]),
|
||||||
|
],
|
||||||
|
["vector", "item", "price"])
|
||||||
|
|
||||||
|
schema=pa.schema([
|
||||||
|
pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
pa.field("item", pa.utf8()),
|
||||||
|
pa.field("price", pa.float32()),
|
||||||
|
])
|
||||||
|
|
||||||
|
table = db.create_table("iterable_table", data=make_batches(), schema=schema)
|
||||||
|
```
|
||||||
|
|
||||||
|
You will find detailed instructions of creating dataset in
|
||||||
|
[Basic Operations](../basic.md) and [API](../python/#lancedb.db.DBConnection.create_table)
|
||||||
|
sections.
|
||||||
|
|
||||||
|
## Vector Search
|
||||||
|
|
||||||
|
We can now perform similarity search via `LanceDB` Python API.
|
||||||
|
|
||||||
|
```py
|
||||||
|
# Open the table previously created.
|
||||||
|
table = db.open_table("pd_table")
|
||||||
|
|
||||||
|
query_vector = [100, 100]
|
||||||
|
# Pandas DataFrame
|
||||||
|
df = table.search(query_vector).limit(1).to_df()
|
||||||
|
print(df)
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
vector item price score
|
||||||
|
0 [5.9, 26.5] bar 20.0 14257.05957
|
||||||
|
```
|
||||||
|
|
||||||
|
If you have a simple filter, it's faster to provide a `where clause` to `LanceDB`'s search query.
|
||||||
|
If you have more complex criteria, you can always apply the filter to the resulting Pandas `DataFrame`.
|
||||||
|
|
||||||
|
```python
|
||||||
|
|
||||||
|
# Apply the filter via LanceDB
|
||||||
|
results = table.search([100, 100]).where("price < 15").to_df()
|
||||||
|
assert len(results) == 1
|
||||||
|
assert results["item"].iloc[0] == "foo"
|
||||||
|
|
||||||
|
# Apply the filter via Pandas
|
||||||
|
df = results = table.search([100, 100]).to_df()
|
||||||
|
results = df[df.price < 15]
|
||||||
|
assert len(results) == 1
|
||||||
|
assert results["item"].iloc[0] == "foo"
|
||||||
|
```
|
||||||
56
docs/src/python/duckdb.md
Normal file
56
docs/src/python/duckdb.md
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
# DuckDB
|
||||||
|
|
||||||
|
`LanceDB` works with `DuckDB` via [PyArrow integration](https://duckdb.org/docs/guides/python/sql_on_arrow).
|
||||||
|
|
||||||
|
Let us start with installing `duckdb` and `lancedb`.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install duckdb lancedb
|
||||||
|
```
|
||||||
|
|
||||||
|
We will re-use [the dataset created previously](./arrow.md):
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pandas as pd
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
db = lancedb.connect("data/sample-lancedb")
|
||||||
|
data = pd.DataFrame({
|
||||||
|
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
||||||
|
"item": ["foo", "bar"],
|
||||||
|
"price": [10.0, 20.0]
|
||||||
|
})
|
||||||
|
table = db.create_table("pd_table", data=data)
|
||||||
|
arrow_table = table.to_arrow()
|
||||||
|
```
|
||||||
|
|
||||||
|
`DuckDB` can directly query the `arrow_table`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import duckdb
|
||||||
|
|
||||||
|
duckdb.query("SELECT * FROM arrow_table")
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────┬─────────┬────────┐
|
||||||
|
│ vector │ item │ price │
|
||||||
|
│ float[] │ varchar │ double │
|
||||||
|
├─────────────┼─────────┼────────┤
|
||||||
|
│ [3.1, 4.1] │ foo │ 10.0 │
|
||||||
|
│ [5.9, 26.5] │ bar │ 20.0 │
|
||||||
|
└─────────────┴─────────┴────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
```py
|
||||||
|
duckdb.query("SELECT mean(price) FROM arrow_table")
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────┐
|
||||||
|
│ mean(price) │
|
||||||
|
│ double │
|
||||||
|
├─────────────┤
|
||||||
|
│ 15.0 │
|
||||||
|
└─────────────┘
|
||||||
|
```
|
||||||
7
docs/src/python/integration.md
Normal file
7
docs/src/python/integration.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Integration
|
||||||
|
|
||||||
|
Built on top of [Apache Arrow](https://arrow.apache.org/),
|
||||||
|
`LanceDB` is very easy to be integrate with Python ecosystems.
|
||||||
|
|
||||||
|
* [Pandas and Arrow Integration](./arrow.md)
|
||||||
|
* [DuckDB Integration](./duckdb.md)
|
||||||
37
docs/src/python/pydantic.md
Normal file
37
docs/src/python/pydantic.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# Pydantic
|
||||||
|
|
||||||
|
[Pydantic](https://docs.pydantic.dev/latest/) is a data validation library in Python.
|
||||||
|
LanceDB integrates with Pydantic for schema inference, data ingestion, and query result casting.
|
||||||
|
|
||||||
|
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
LanceDB supports to create Apache Arrow Schema from a
|
||||||
|
[Pydantic BaseModel](https://docs.pydantic.dev/latest/api/main/#pydantic.main.BaseModel)
|
||||||
|
via [pydantic_to_schema()](python.md##lancedb.pydantic.pydantic_to_schema) method.
|
||||||
|
|
||||||
|
::: lancedb.pydantic.pydantic_to_schema
|
||||||
|
|
||||||
|
## Vector Field
|
||||||
|
|
||||||
|
LanceDB provides a [`vector(dim)`](python.md#lancedb.pydantic.vector) method to define a
|
||||||
|
vector Field in a Pydantic Model.
|
||||||
|
|
||||||
|
::: lancedb.pydantic.vector
|
||||||
|
|
||||||
|
## Type Conversion
|
||||||
|
|
||||||
|
LanceDB automatically convert Pydantic fields to
|
||||||
|
[Apache Arrow DataType](https://arrow.apache.org/docs/python/generated/pyarrow.DataType.html#pyarrow.DataType).
|
||||||
|
|
||||||
|
Current supported type conversions:
|
||||||
|
|
||||||
|
| Pydantic Field Type | PyArrow Data Type |
|
||||||
|
| ------------------- | ----------------- |
|
||||||
|
| `int` | `pyarrow.int64` |
|
||||||
|
| `float` | `pyarrow.float64` |
|
||||||
|
| `bool` | `pyarrow.bool` |
|
||||||
|
| `str` | `pyarrow.utf8()` |
|
||||||
|
| `list` | `pyarrow.List` |
|
||||||
|
| `BaseModel` | `pyarrow.Struct` |
|
||||||
|
| `vector(n)` | `pyarrow.FixedSizeList(float32, n)` |
|
||||||
@@ -43,3 +43,17 @@ pip install lancedb
|
|||||||
::: lancedb.fts.populate_index
|
::: lancedb.fts.populate_index
|
||||||
|
|
||||||
::: lancedb.fts.search_index
|
::: lancedb.fts.search_index
|
||||||
|
|
||||||
|
## Utilities
|
||||||
|
|
||||||
|
::: lancedb.vector
|
||||||
|
|
||||||
|
## Integrations
|
||||||
|
|
||||||
|
### Pydantic
|
||||||
|
|
||||||
|
::: lancedb.pydantic.pydantic_to_schema
|
||||||
|
|
||||||
|
::: lancedb.pydantic.vector
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -25,9 +25,9 @@ Currently, we support the following metrics:
|
|||||||
|
|
||||||
### Flat Search
|
### Flat Search
|
||||||
|
|
||||||
|
If LanceDB does not create a vector index, LanceDB would need to scan (`Flat Search`) the entire vector column
|
||||||
|
and compute the distance for each vector in order to find the closest matches.
|
||||||
|
|
||||||
If there is no [vector index is created](ann_indexes.md), LanceDB will just brute-force scan
|
|
||||||
the vector column and compute the distance.
|
|
||||||
|
|
||||||
<!-- Setup Code
|
<!-- Setup Code
|
||||||
```python
|
```python
|
||||||
@@ -79,39 +79,43 @@ await db_setup.createTable('my_vectors', data)
|
|||||||
const tbl = await db.openTable("my_vectors")
|
const tbl = await db.openTable("my_vectors")
|
||||||
|
|
||||||
const results_1 = await tbl.search(Array(1536).fill(1.2))
|
const results_1 = await tbl.search(Array(1536).fill(1.2))
|
||||||
.limit(20)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
<!-- Commenting out for now since metricType fails for JS on Ubuntu 22.04.
|
|
||||||
|
|
||||||
By default, `l2` will be used as `Metric` type. You can customize the metric type
|
By default, `l2` will be used as `Metric` type. You can customize the metric type
|
||||||
as well.
|
as well.
|
||||||
-->
|
|
||||||
|
|
||||||
<!--
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
-->
|
|
||||||
<!-- ```python
|
```python
|
||||||
df = tbl.search(np.random.random((1536))) \
|
df = tbl.search(np.random.random((1536))) \
|
||||||
.metric("cosine") \
|
.metric("cosine") \
|
||||||
.limit(10) \
|
.limit(10) \
|
||||||
.to_df()
|
.to_df()
|
||||||
```
|
```
|
||||||
-->
|
|
||||||
<!--
|
|
||||||
=== "JavaScript"
|
|
||||||
-->
|
|
||||||
|
|
||||||
<!-- ```javascript
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
```javascript
|
||||||
const results_2 = await tbl.search(Array(1536).fill(1.2))
|
const results_2 = await tbl.search(Array(1536).fill(1.2))
|
||||||
.metricType("cosine")
|
.metricType("cosine")
|
||||||
.limit(20)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
-->
|
|
||||||
|
|
||||||
### Search with Vector Index.
|
|
||||||
|
### Approximate Nearest Neighbor (ANN) Search with Vector Index.
|
||||||
|
|
||||||
|
To accelerate vector retrievals, it is common to build vector indices.
|
||||||
|
A vector index is a data structure specifically designed to efficiently organize and
|
||||||
|
search vector data based on their similarity or distance metrics.
|
||||||
|
By constructing a vector index, you can reduce the search space and avoid the need
|
||||||
|
for brute-force scanning of the entire vector column.
|
||||||
|
|
||||||
|
However, fast vector search using indices often entails making a trade-off with accuracy to some extent.
|
||||||
|
This is why it is often called **Approximate Nearest Neighbors (ANN)** search, while the Flat Search (KNN)
|
||||||
|
always returns 100% recall.
|
||||||
|
|
||||||
See [ANN Index](ann_indexes.md) for more details.
|
See [ANN Index](ann_indexes.md) for more details.
|
||||||
@@ -7,6 +7,7 @@ const excludedFiles = [
|
|||||||
"../src/embedding.md",
|
"../src/embedding.md",
|
||||||
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
||||||
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
||||||
|
"../src/examples/transformerjs_embedding_search_nodejs.md",
|
||||||
"../src/examples/youtube_transcript_bot_with_nodejs.md",
|
"../src/examples/youtube_transcript_bot_with_nodejs.md",
|
||||||
];
|
];
|
||||||
const nodePrefix = "javascript";
|
const nodePrefix = "javascript";
|
||||||
|
|||||||
@@ -7,7 +7,8 @@ excluded_files = [
|
|||||||
"../src/embedding.md",
|
"../src/embedding.md",
|
||||||
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
||||||
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
||||||
"../src/examples/youtube_transcript_bot_with_nodejs.md"
|
"../src/examples/youtube_transcript_bot_with_nodejs.md",
|
||||||
|
"../src/integrations/voxel51.md",
|
||||||
]
|
]
|
||||||
|
|
||||||
python_prefix = "py"
|
python_prefix = "py"
|
||||||
|
|||||||
4
node/.npmignore
Normal file
4
node/.npmignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
gen_test_data.py
|
||||||
|
index.node
|
||||||
|
dist/lancedb*.tgz
|
||||||
|
vectordb*.tgz
|
||||||
@@ -8,6 +8,10 @@ A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb)
|
|||||||
npm install vectordb
|
npm install vectordb
|
||||||
```
|
```
|
||||||
|
|
||||||
|
This will download the appropriate native library for your platform. We currently
|
||||||
|
support x86_64 Linux, aarch64 Linux, Intel MacOS, and ARM (M1/M2) MacOS. We do not
|
||||||
|
yet support Windows or musl-based Linux (such as Alpine Linux).
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
### Basic Example
|
### Basic Example
|
||||||
@@ -26,12 +30,34 @@ The [examples](./examples) folder contains complete examples.
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run the tests with
|
To build everything fresh:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you should be able to run the tests with:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm test
|
npm test
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Rebuilding Rust library
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rebuilding Typescript
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run tsc
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fix lints
|
||||||
|
|
||||||
To run the linter and have it automatically fix all errors
|
To run the linter and have it automatically fix all errors
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
66
node/examples/js-transformers/index.js
Normal file
66
node/examples/js-transformers/index.js
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
|
||||||
|
async function example() {
|
||||||
|
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
|
||||||
|
// Import transformers and the all-MiniLM-L6-v2 model (https://huggingface.co/Xenova/all-MiniLM-L6-v2)
|
||||||
|
const { pipeline } = await import('@xenova/transformers')
|
||||||
|
const pipe = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
||||||
|
|
||||||
|
|
||||||
|
// Create embedding function from pipeline which returns a list of vectors from batch
|
||||||
|
// sourceColumn is the name of the column in the data to be embedded
|
||||||
|
//
|
||||||
|
// Output of pipe is a Tensor { data: Float32Array(384) }, so filter for the vector
|
||||||
|
const embed_fun = {}
|
||||||
|
embed_fun.sourceColumn = 'text'
|
||||||
|
embed_fun.embed = async function (batch) {
|
||||||
|
let result = []
|
||||||
|
for (let text of batch) {
|
||||||
|
const res = await pipe(text, { pooling: 'mean', normalize: true })
|
||||||
|
result.push(Array.from(res['data']))
|
||||||
|
}
|
||||||
|
return (result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Link a folder and create a table with data
|
||||||
|
const db = await lancedb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ id: 1, text: 'Cherry', type: 'fruit' },
|
||||||
|
{ id: 2, text: 'Carrot', type: 'vegetable' },
|
||||||
|
{ id: 3, text: 'Potato', type: 'vegetable' },
|
||||||
|
{ id: 4, text: 'Apple', type: 'fruit' },
|
||||||
|
{ id: 5, text: 'Banana', type: 'fruit' }
|
||||||
|
]
|
||||||
|
|
||||||
|
const table = await db.createTable('food_table', data, "create", embed_fun)
|
||||||
|
|
||||||
|
|
||||||
|
// Query the table
|
||||||
|
const results = await table
|
||||||
|
.search("a sweet fruit to eat")
|
||||||
|
.metricType("cosine")
|
||||||
|
.limit(2)
|
||||||
|
.execute()
|
||||||
|
console.log(results.map(r => r.text))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
example().then(_ => { console.log("Done!") })
|
||||||
16
node/examples/js-transformers/package.json
Normal file
16
node/examples/js-transformers/package.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"name": "vectordb-example-js-transformers",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Example for using transformers.js with lancedb",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "Lance Devs",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@xenova/transformers": "^2.4.1",
|
||||||
|
"vectordb": "^0.1.12"
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -12,29 +12,26 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
|
const { currentTarget } = require('@neon-rs/load');
|
||||||
|
|
||||||
let nativeLib;
|
let nativeLib;
|
||||||
|
|
||||||
function getPlatformLibrary() {
|
|
||||||
if (process.platform === "darwin" && process.arch == "arm64") {
|
|
||||||
return require('./aarch64-apple-darwin.node');
|
|
||||||
} else if (process.platform === "darwin" && process.arch == "x64") {
|
|
||||||
return require('./x86_64-apple-darwin.node');
|
|
||||||
} else if (process.platform === "linux" && process.arch == "x64") {
|
|
||||||
return require('./x86_64-unknown-linux-gnu.node');
|
|
||||||
} else {
|
|
||||||
throw new Error(`vectordb: unsupported platform ${process.platform}_${process.arch}. Please file a bug report at https://github.com/lancedb/lancedb/issues`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
nativeLib = require('./index.node')
|
nativeLib = require(`@lancedb/vectordb-${currentTarget()}`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e.code === "MODULE_NOT_FOUND") {
|
try {
|
||||||
nativeLib = getPlatformLibrary();
|
// Might be developing locally, so try that. But don't expose that error
|
||||||
} else {
|
// to the user.
|
||||||
throw new Error('vectordb: failed to load native library. Please file a bug report at https://github.com/lancedb/lancedb/issues');
|
nativeLib = require("./index.node");
|
||||||
|
} catch {
|
||||||
|
throw new Error(`vectordb: failed to load native library.
|
||||||
|
You may need to run \`npm install @lancedb/vectordb-${currentTarget()}\`.
|
||||||
|
|
||||||
|
If that does not work, please file a bug report at https://github.com/lancedb/lancedb/issues
|
||||||
|
|
||||||
|
Source error: ${e}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = nativeLib
|
// Dynamic require for runtime.
|
||||||
|
module.exports = nativeLib;
|
||||||
|
|||||||
374
node/package-lock.json
generated
374
node/package-lock.json
generated
@@ -1,18 +1,30 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.9",
|
"version": "0.1.19",
|
||||||
"lockfileVersion": 2,
|
"lockfileVersion": 2,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.9",
|
"version": "0.1.19",
|
||||||
|
"cpu": [
|
||||||
|
"x64",
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
|
"os": [
|
||||||
|
"darwin",
|
||||||
|
"linux",
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@apache-arrow/ts": "^12.0.0",
|
"@apache-arrow/ts": "^12.0.0",
|
||||||
"apache-arrow": "^12.0.0"
|
"@neon-rs/load": "^0.0.74",
|
||||||
|
"apache-arrow": "^12.0.0",
|
||||||
|
"axios": "^1.4.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@neon-rs/cli": "^0.0.160",
|
||||||
"@types/chai": "^4.3.4",
|
"@types/chai": "^4.3.4",
|
||||||
"@types/chai-as-promised": "^7.1.5",
|
"@types/chai-as-promised": "^7.1.5",
|
||||||
"@types/mocha": "^10.0.1",
|
"@types/mocha": "^10.0.1",
|
||||||
@@ -37,6 +49,13 @@
|
|||||||
"typedoc": "^0.24.7",
|
"typedoc": "^0.24.7",
|
||||||
"typedoc-plugin-markdown": "^3.15.3",
|
"typedoc-plugin-markdown": "^3.15.3",
|
||||||
"typescript": "*"
|
"typescript": "*"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@lancedb/vectordb-darwin-arm64": "0.1.19",
|
||||||
|
"@lancedb/vectordb-darwin-x64": "0.1.19",
|
||||||
|
"@lancedb/vectordb-linux-arm64-gnu": "0.1.19",
|
||||||
|
"@lancedb/vectordb-linux-x64-gnu": "0.1.19",
|
||||||
|
"@lancedb/vectordb-win32-x64-msvc": "0.1.19"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@apache-arrow/ts": {
|
"node_modules/@apache-arrow/ts": {
|
||||||
@@ -66,6 +85,97 @@
|
|||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
|
||||||
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
|
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
|
||||||
},
|
},
|
||||||
|
"node_modules/@cargo-messages/android-arm-eabi": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/android-arm-eabi/-/android-arm-eabi-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-PTgCEmBHEPKJbxwlHVXB3aGES+NqpeBvn6hJNYWIkET3ZQCSJnScMlIDQXEkWndK7J+hW3Or3H32a93B/MbbfQ==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"android"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/darwin-arm64": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/darwin-arm64/-/darwin-arm64-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-YSVUuc8TUTi/XmZVg9KrH0bDywKLqC1zeTyZYAYDDmqVDZW9KeTnbBUECKRs56iyHeO+kuEkVW7MKf7j2zb/FA==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/darwin-x64": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/darwin-x64/-/darwin-x64-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-U+YlAR+9tKpBljnNPWMop5YhvtwfIPQSAaUYN2llteC7ZNU5/cv8CGT1vm7uFNxr2LeGuAtRbzIh2gUmTV8mng==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/linux-arm-gnueabihf": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/linux-arm-gnueabihf/-/linux-arm-gnueabihf-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-wqAelTzVv1E7Ls4aviqUbem5xjzCaJQxQtVnLhv6pf1k0UyEHCS2WdufFFmWcojGe7QglI4uve3KTe01MKYj0A==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/linux-x64-gnu": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/linux-x64-gnu/-/linux-x64-gnu-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-LQ6e7O7YYkWfDNIi/53q2QG/+lZok72LOG+NKDVCrrY4TYUcrTqWAybOV6IlkVntKPnpx8YB95umSQGeVuvhpQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/win32-arm64-msvc": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/win32-arm64-msvc/-/win32-arm64-msvc-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-VDMBhyun02gIDwmEhkYP1W9Z0tYqn4drgY5Iua1qV2tYOU58RVkWhzUYxM9rzYbnwKZlltgM46J/j5QZ3VaFrA==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@cargo-messages/win32-x64-msvc": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/win32-x64-msvc/-/win32-x64-msvc-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-vnoglDxF6zj0W/Co9D0H/bgnrhUuO5EumIf9v3ujLtBH94rAX11JsXh/FgC/8wQnQSsLyWSq70YxNS2wdETxjA==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"dev": true,
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
"node_modules/@cspotcode/source-map-support": {
|
"node_modules/@cspotcode/source-map-support": {
|
||||||
"version": "0.8.1",
|
"version": "0.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||||
@@ -204,6 +314,89 @@
|
|||||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@lancedb/vectordb-darwin-arm64": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-efQhJkBKvMNhjFq3Sw3/qHo9D9gb9UqiIr98n3STsbNxBQjMnWemXn91Ckl40siRG1O8qXcINW7Qs/EGmus+kg==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@lancedb/vectordb-darwin-x64": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-r6OZNVyemAssABz2w7CRhe7dyREwBEfTytn+ux1zzTnzsgMgDovCQ0rQ3WZcxWvcy7SFCxiemA9IP1b/lsb4tQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@lancedb/vectordb-linux-arm64-gnu": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-mL/hRmZp6Kw7hmGJBdOZfp/tTYiCdlOcs8DA/+nr2eiXERv0gIhyiKvr2P5DwbBmut3qXEkDalMHTo95BSdL2A==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@lancedb/vectordb-linux-x64-gnu": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-AG0FHksbbr+cHVKPi4B8cmBtqb6T9E0uaK4kyZkXrX52/xtv9RYVZcykaB/tSSm0XNFPWWRnx9R8UqNZV/hxMA==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@lancedb/vectordb-win32-x64-msvc": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-PDWZ2hvLVXH4Z4WIO1rsWY8ev3NpNm7aXlaey32P+l1Iz9Hia9+F2GBpp2UiEQKfvbk82ucAvBLRmpSsHY8Tlw==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@neon-rs/cli": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-GQjzHPJVTOARbX3nP/fAWqBq7JlQ8XgfYlCa+iwzIXf0LC1EyfJTX+vqGD/36b9lKoyY01Z/aDUB9o/qF6ztHA==",
|
||||||
|
"dev": true,
|
||||||
|
"bin": {
|
||||||
|
"neon": "index.js"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@cargo-messages/android-arm-eabi": "0.0.160",
|
||||||
|
"@cargo-messages/darwin-arm64": "0.0.160",
|
||||||
|
"@cargo-messages/darwin-x64": "0.0.160",
|
||||||
|
"@cargo-messages/linux-arm-gnueabihf": "0.0.160",
|
||||||
|
"@cargo-messages/linux-x64-gnu": "0.0.160",
|
||||||
|
"@cargo-messages/win32-arm64-msvc": "0.0.160",
|
||||||
|
"@cargo-messages/win32-x64-msvc": "0.0.160"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@neon-rs/load": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-/cPZD907UNz55yrc/ud4wDgQKtU1TvkD9jeqZWG6J4IMmZkp6zgjkQcKA8UvpkZlcpPHvc8J17sGzLFbP/LUYg=="
|
||||||
|
},
|
||||||
"node_modules/@nodelib/fs.scandir": {
|
"node_modules/@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -810,8 +1003,7 @@
|
|||||||
"node_modules/asynckit": {
|
"node_modules/asynckit": {
|
||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"node_modules/available-typed-arrays": {
|
"node_modules/available-typed-arrays": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
@@ -826,12 +1018,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/axios": {
|
"node_modules/axios": {
|
||||||
"version": "0.26.1",
|
"version": "1.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"follow-redirects": "^1.14.8"
|
"follow-redirects": "^1.15.0",
|
||||||
|
"form-data": "^4.0.0",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/balanced-match": {
|
"node_modules/balanced-match": {
|
||||||
@@ -1062,7 +1255,6 @@
|
|||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"delayed-stream": "~1.0.0"
|
"delayed-stream": "~1.0.0"
|
||||||
},
|
},
|
||||||
@@ -1285,7 +1477,6 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.4.0"
|
"node": ">=0.4.0"
|
||||||
}
|
}
|
||||||
@@ -2052,7 +2243,6 @@
|
|||||||
"version": "1.15.2",
|
"version": "1.15.2",
|
||||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
||||||
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
||||||
"dev": true,
|
|
||||||
"funding": [
|
"funding": [
|
||||||
{
|
{
|
||||||
"type": "individual",
|
"type": "individual",
|
||||||
@@ -2081,7 +2271,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -2955,7 +3144,6 @@
|
|||||||
"version": "1.52.0",
|
"version": "1.52.0",
|
||||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -2964,7 +3152,6 @@
|
|||||||
"version": "2.1.35",
|
"version": "2.1.35",
|
||||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"mime-db": "1.52.0"
|
"mime-db": "1.52.0"
|
||||||
},
|
},
|
||||||
@@ -3258,6 +3445,15 @@
|
|||||||
"form-data": "^4.0.0"
|
"form-data": "^4.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/openai/node_modules/axios": {
|
||||||
|
"version": "0.26.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||||
|
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"follow-redirects": "^1.14.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/optionator": {
|
"node_modules/optionator": {
|
||||||
"version": "0.9.1",
|
"version": "0.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
||||||
@@ -3409,6 +3605,11 @@
|
|||||||
"node": ">= 0.8.0"
|
"node": ">= 0.8.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||||
|
},
|
||||||
"node_modules/punycode": {
|
"node_modules/punycode": {
|
||||||
"version": "2.3.0",
|
"version": "2.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||||
@@ -4501,6 +4702,55 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"@cargo-messages/android-arm-eabi": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/android-arm-eabi/-/android-arm-eabi-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-PTgCEmBHEPKJbxwlHVXB3aGES+NqpeBvn6hJNYWIkET3ZQCSJnScMlIDQXEkWndK7J+hW3Or3H32a93B/MbbfQ==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/darwin-arm64": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/darwin-arm64/-/darwin-arm64-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-YSVUuc8TUTi/XmZVg9KrH0bDywKLqC1zeTyZYAYDDmqVDZW9KeTnbBUECKRs56iyHeO+kuEkVW7MKf7j2zb/FA==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/darwin-x64": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/darwin-x64/-/darwin-x64-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-U+YlAR+9tKpBljnNPWMop5YhvtwfIPQSAaUYN2llteC7ZNU5/cv8CGT1vm7uFNxr2LeGuAtRbzIh2gUmTV8mng==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/linux-arm-gnueabihf": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/linux-arm-gnueabihf/-/linux-arm-gnueabihf-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-wqAelTzVv1E7Ls4aviqUbem5xjzCaJQxQtVnLhv6pf1k0UyEHCS2WdufFFmWcojGe7QglI4uve3KTe01MKYj0A==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/linux-x64-gnu": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/linux-x64-gnu/-/linux-x64-gnu-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-LQ6e7O7YYkWfDNIi/53q2QG/+lZok72LOG+NKDVCrrY4TYUcrTqWAybOV6IlkVntKPnpx8YB95umSQGeVuvhpQ==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/win32-arm64-msvc": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/win32-arm64-msvc/-/win32-arm64-msvc-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-VDMBhyun02gIDwmEhkYP1W9Z0tYqn4drgY5Iua1qV2tYOU58RVkWhzUYxM9rzYbnwKZlltgM46J/j5QZ3VaFrA==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@cargo-messages/win32-x64-msvc": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@cargo-messages/win32-x64-msvc/-/win32-x64-msvc-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-vnoglDxF6zj0W/Co9D0H/bgnrhUuO5EumIf9v3ujLtBH94rAX11JsXh/FgC/8wQnQSsLyWSq70YxNS2wdETxjA==",
|
||||||
|
"dev": true,
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
"@cspotcode/source-map-support": {
|
"@cspotcode/source-map-support": {
|
||||||
"version": "0.8.1",
|
"version": "0.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||||
@@ -4601,6 +4851,56 @@
|
|||||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"@lancedb/vectordb-darwin-arm64": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-efQhJkBKvMNhjFq3Sw3/qHo9D9gb9UqiIr98n3STsbNxBQjMnWemXn91Ckl40siRG1O8qXcINW7Qs/EGmus+kg==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@lancedb/vectordb-darwin-x64": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-r6OZNVyemAssABz2w7CRhe7dyREwBEfTytn+ux1zzTnzsgMgDovCQ0rQ3WZcxWvcy7SFCxiemA9IP1b/lsb4tQ==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@lancedb/vectordb-linux-arm64-gnu": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-mL/hRmZp6Kw7hmGJBdOZfp/tTYiCdlOcs8DA/+nr2eiXERv0gIhyiKvr2P5DwbBmut3qXEkDalMHTo95BSdL2A==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@lancedb/vectordb-linux-x64-gnu": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-AG0FHksbbr+cHVKPi4B8cmBtqb6T9E0uaK4kyZkXrX52/xtv9RYVZcykaB/tSSm0XNFPWWRnx9R8UqNZV/hxMA==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@lancedb/vectordb-win32-x64-msvc": {
|
||||||
|
"version": "0.1.19",
|
||||||
|
"resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.1.19.tgz",
|
||||||
|
"integrity": "sha512-PDWZ2hvLVXH4Z4WIO1rsWY8ev3NpNm7aXlaey32P+l1Iz9Hia9+F2GBpp2UiEQKfvbk82ucAvBLRmpSsHY8Tlw==",
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@neon-rs/cli": {
|
||||||
|
"version": "0.0.160",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
|
||||||
|
"integrity": "sha512-GQjzHPJVTOARbX3nP/fAWqBq7JlQ8XgfYlCa+iwzIXf0LC1EyfJTX+vqGD/36b9lKoyY01Z/aDUB9o/qF6ztHA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"@cargo-messages/android-arm-eabi": "0.0.160",
|
||||||
|
"@cargo-messages/darwin-arm64": "0.0.160",
|
||||||
|
"@cargo-messages/darwin-x64": "0.0.160",
|
||||||
|
"@cargo-messages/linux-arm-gnueabihf": "0.0.160",
|
||||||
|
"@cargo-messages/linux-x64-gnu": "0.0.160",
|
||||||
|
"@cargo-messages/win32-arm64-msvc": "0.0.160",
|
||||||
|
"@cargo-messages/win32-x64-msvc": "0.0.160"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@neon-rs/load": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-/cPZD907UNz55yrc/ud4wDgQKtU1TvkD9jeqZWG6J4IMmZkp6zgjkQcKA8UvpkZlcpPHvc8J17sGzLFbP/LUYg=="
|
||||||
|
},
|
||||||
"@nodelib/fs.scandir": {
|
"@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -5056,8 +5356,7 @@
|
|||||||
"asynckit": {
|
"asynckit": {
|
||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"available-typed-arrays": {
|
"available-typed-arrays": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
@@ -5066,12 +5365,13 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"axios": {
|
"axios": {
|
||||||
"version": "0.26.1",
|
"version": "1.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"follow-redirects": "^1.14.8"
|
"follow-redirects": "^1.15.0",
|
||||||
|
"form-data": "^4.0.0",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"balanced-match": {
|
"balanced-match": {
|
||||||
@@ -5251,7 +5551,6 @@
|
|||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"delayed-stream": "~1.0.0"
|
"delayed-stream": "~1.0.0"
|
||||||
}
|
}
|
||||||
@@ -5418,8 +5717,7 @@
|
|||||||
"delayed-stream": {
|
"delayed-stream": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"diff": {
|
"diff": {
|
||||||
"version": "4.0.2",
|
"version": "4.0.2",
|
||||||
@@ -5989,8 +6287,7 @@
|
|||||||
"follow-redirects": {
|
"follow-redirects": {
|
||||||
"version": "1.15.2",
|
"version": "1.15.2",
|
||||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
||||||
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"for-each": {
|
"for-each": {
|
||||||
"version": "0.3.3",
|
"version": "0.3.3",
|
||||||
@@ -6005,7 +6302,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -6619,14 +6915,12 @@
|
|||||||
"mime-db": {
|
"mime-db": {
|
||||||
"version": "1.52.0",
|
"version": "1.52.0",
|
||||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"mime-types": {
|
"mime-types": {
|
||||||
"version": "2.1.35",
|
"version": "2.1.35",
|
||||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"mime-db": "1.52.0"
|
"mime-db": "1.52.0"
|
||||||
}
|
}
|
||||||
@@ -6852,6 +7146,17 @@
|
|||||||
"requires": {
|
"requires": {
|
||||||
"axios": "^0.26.0",
|
"axios": "^0.26.0",
|
||||||
"form-data": "^4.0.0"
|
"form-data": "^4.0.0"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"axios": {
|
||||||
|
"version": "0.26.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||||
|
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"follow-redirects": "^1.14.8"
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"optionator": {
|
"optionator": {
|
||||||
@@ -6960,6 +7265,11 @@
|
|||||||
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||||
|
},
|
||||||
"punycode": {
|
"punycode": {
|
||||||
"version": "2.3.0",
|
"version": "2.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||||
|
|||||||
@@ -1,16 +1,18 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.10",
|
"version": "0.1.19",
|
||||||
"description": " Serverless, low-latency vector database for AI applications",
|
"description": " Serverless, low-latency vector database for AI applications",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"tsc": "tsc -b",
|
"tsc": "tsc -b",
|
||||||
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json-render-diagnostics",
|
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json",
|
||||||
"build-release": "npm run build -- --release",
|
"build-release": "npm run build -- --release",
|
||||||
"test": "npm run tsc; mocha -recursive dist/test",
|
"test": "npm run tsc && mocha -recursive dist/test",
|
||||||
"lint": "eslint src --ext .js,.ts",
|
"lint": "eslint src --ext .js,.ts",
|
||||||
"clean": "rm -rf node_modules *.node dist/"
|
"clean": "rm -rf node_modules *.node dist/",
|
||||||
|
"pack-build": "neon pack-build",
|
||||||
|
"check-npm": "printenv && which node && which npm && npm --version"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -25,6 +27,7 @@
|
|||||||
"author": "Lance Devs",
|
"author": "Lance Devs",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@neon-rs/cli": "^0.0.160",
|
||||||
"@types/chai": "^4.3.4",
|
"@types/chai": "^4.3.4",
|
||||||
"@types/chai-as-promised": "^7.1.5",
|
"@types/chai-as-promised": "^7.1.5",
|
||||||
"@types/mocha": "^10.0.1",
|
"@types/mocha": "^10.0.1",
|
||||||
@@ -52,6 +55,33 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@apache-arrow/ts": "^12.0.0",
|
"@apache-arrow/ts": "^12.0.0",
|
||||||
"apache-arrow": "^12.0.0"
|
"@neon-rs/load": "^0.0.74",
|
||||||
|
"apache-arrow": "^12.0.0",
|
||||||
|
"axios": "^1.4.0"
|
||||||
|
},
|
||||||
|
"os": [
|
||||||
|
"darwin",
|
||||||
|
"linux",
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"cpu": [
|
||||||
|
"x64",
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"neon": {
|
||||||
|
"targets": {
|
||||||
|
"x86_64-apple-darwin": "@lancedb/vectordb-darwin-x64",
|
||||||
|
"aarch64-apple-darwin": "@lancedb/vectordb-darwin-arm64",
|
||||||
|
"x86_64-unknown-linux-gnu": "@lancedb/vectordb-linux-x64-gnu",
|
||||||
|
"aarch64-unknown-linux-gnu": "@lancedb/vectordb-linux-arm64-gnu",
|
||||||
|
"x86_64-pc-windows-msvc": "@lancedb/vectordb-win32-x64-msvc"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@lancedb/vectordb-darwin-arm64": "0.1.19",
|
||||||
|
"@lancedb/vectordb-darwin-x64": "0.1.19",
|
||||||
|
"@lancedb/vectordb-linux-arm64-gnu": "0.1.19",
|
||||||
|
"@lancedb/vectordb-linux-x64-gnu": "0.1.19",
|
||||||
|
"@lancedb/vectordb-win32-x64-msvc": "0.1.19"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,3 +26,8 @@ export interface EmbeddingFunction<T> {
|
|||||||
*/
|
*/
|
||||||
embed: (data: T[]) => Promise<number[][]>
|
embed: (data: T[]) => Promise<number[][]>
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function isEmbeddingFunction<T> (value: any): value is EmbeddingFunction<T> {
|
||||||
|
return typeof value.sourceColumn === 'string' &&
|
||||||
|
typeof value.embed === 'function'
|
||||||
|
}
|
||||||
|
|||||||
@@ -14,26 +14,69 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
RecordBatchFileWriter,
|
RecordBatchFileWriter,
|
||||||
type Table as ArrowTable,
|
type Table as ArrowTable
|
||||||
tableFromIPC,
|
|
||||||
Vector
|
|
||||||
} from 'apache-arrow'
|
} from 'apache-arrow'
|
||||||
import { fromRecordsToBuffer } from './arrow'
|
import { fromRecordsToBuffer } from './arrow'
|
||||||
import type { EmbeddingFunction } from './embedding/embedding_function'
|
import type { EmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
import { RemoteConnection } from './remote'
|
||||||
|
import { Query } from './query'
|
||||||
|
import { isEmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
const { databaseNew, databaseTableNames, databaseOpenTable, databaseDropTable, tableCreate, tableSearch, tableAdd, tableCreateVectorIndex, tableCountRows, tableDelete } = require('../native.js')
|
const { databaseNew, databaseTableNames, databaseOpenTable, databaseDropTable, tableCreate, tableAdd, tableCreateVectorIndex, tableCountRows, tableDelete } = require('../native.js')
|
||||||
|
|
||||||
|
export { Query }
|
||||||
export type { EmbeddingFunction }
|
export type { EmbeddingFunction }
|
||||||
export { OpenAIEmbeddingFunction } from './embedding/openai'
|
export { OpenAIEmbeddingFunction } from './embedding/openai'
|
||||||
|
|
||||||
|
export interface AwsCredentials {
|
||||||
|
accessKeyId: string
|
||||||
|
|
||||||
|
secretKey: string
|
||||||
|
|
||||||
|
sessionToken?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ConnectionOptions {
|
||||||
|
uri: string
|
||||||
|
|
||||||
|
awsCredentials?: AwsCredentials
|
||||||
|
|
||||||
|
// API key for the remote connections
|
||||||
|
apiKey?: string
|
||||||
|
// Region to connect
|
||||||
|
region?: string
|
||||||
|
|
||||||
|
// override the host for the remote connections
|
||||||
|
hostOverride?: string
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Connect to a LanceDB instance at the given URI
|
* Connect to a LanceDB instance at the given URI
|
||||||
* @param uri The uri of the database.
|
* @param uri The uri of the database.
|
||||||
*/
|
*/
|
||||||
export async function connect (uri: string): Promise<Connection> {
|
export async function connect (uri: string): Promise<Connection>
|
||||||
const db = await databaseNew(uri)
|
export async function connect (opts: Partial<ConnectionOptions>): Promise<Connection>
|
||||||
return new LocalConnection(db, uri)
|
export async function connect (arg: string | Partial<ConnectionOptions>): Promise<Connection> {
|
||||||
|
let opts: ConnectionOptions
|
||||||
|
if (typeof arg === 'string') {
|
||||||
|
opts = { uri: arg }
|
||||||
|
} else {
|
||||||
|
// opts = { uri: arg.uri, awsCredentials = arg.awsCredentials }
|
||||||
|
opts = Object.assign({
|
||||||
|
uri: '',
|
||||||
|
awsCredentials: undefined,
|
||||||
|
apiKey: undefined,
|
||||||
|
region: 'us-west-2'
|
||||||
|
}, arg)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opts.uri.startsWith('db://')) {
|
||||||
|
// Remote connection
|
||||||
|
return new RemoteConnection(opts)
|
||||||
|
}
|
||||||
|
const db = await databaseNew(opts.uri)
|
||||||
|
return new LocalConnection(db, opts)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -59,10 +102,35 @@ export interface Connection {
|
|||||||
*
|
*
|
||||||
* @param {string} name - The name of the table.
|
* @param {string} name - The name of the table.
|
||||||
* @param data - Non-empty Array of Records to be inserted into the table
|
* @param data - Non-empty Array of Records to be inserted into the table
|
||||||
* @param {WriteMode} mode - The write mode to use when creating the table.
|
*/
|
||||||
|
createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new Table and initialize it with new data.
|
||||||
|
*
|
||||||
|
* @param {string} name - The name of the table.
|
||||||
|
* @param data - Non-empty Array of Records to be inserted into the table
|
||||||
|
* @param {WriteOptions} options - The write options to use when creating the table.
|
||||||
|
*/
|
||||||
|
createTable (name: string, data: Array<Record<string, unknown>>, options: WriteOptions): Promise<Table>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new Table and initialize it with new data.
|
||||||
|
*
|
||||||
|
* @param {string} name - The name of the table.
|
||||||
|
* @param data - Non-empty Array of Records to be inserted into the table
|
||||||
* @param {EmbeddingFunction} embeddings - An embedding function to use on this table
|
* @param {EmbeddingFunction} embeddings - An embedding function to use on this table
|
||||||
*/
|
*/
|
||||||
createTable<T>(name: string, data: Array<Record<string, unknown>>, mode?: WriteMode, embeddings?: EmbeddingFunction<T>): Promise<Table<T>>
|
createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
/**
|
||||||
|
* Creates a new Table and initialize it with new data.
|
||||||
|
*
|
||||||
|
* @param {string} name - The name of the table.
|
||||||
|
* @param data - Non-empty Array of Records to be inserted into the table
|
||||||
|
* @param {EmbeddingFunction} embeddings - An embedding function to use on this table
|
||||||
|
* @param {WriteOptions} options - The write options to use when creating the table.
|
||||||
|
*/
|
||||||
|
createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>, options: WriteOptions): Promise<Table<T>>
|
||||||
|
|
||||||
createTableArrow(name: string, table: ArrowTable): Promise<Table>
|
createTableArrow(name: string, table: ArrowTable): Promise<Table>
|
||||||
|
|
||||||
@@ -117,7 +185,34 @@ export interface Table<T = number[]> {
|
|||||||
/**
|
/**
|
||||||
* Delete rows from this table.
|
* Delete rows from this table.
|
||||||
*
|
*
|
||||||
* @param filter A filter in the same format used by a sql WHERE clause.
|
* This can be used to delete a single row, many rows, all rows, or
|
||||||
|
* sometimes no rows (if your predicate matches nothing).
|
||||||
|
*
|
||||||
|
* @param filter A filter in the same format used by a sql WHERE clause. The
|
||||||
|
* filter must not be empty.
|
||||||
|
*
|
||||||
|
* @examples
|
||||||
|
*
|
||||||
|
* ```ts
|
||||||
|
* const con = await lancedb.connect("./.lancedb")
|
||||||
|
* const data = [
|
||||||
|
* {id: 1, vector: [1, 2]},
|
||||||
|
* {id: 2, vector: [3, 4]},
|
||||||
|
* {id: 3, vector: [5, 6]},
|
||||||
|
* ];
|
||||||
|
* const tbl = await con.createTable("my_table", data)
|
||||||
|
* await tbl.delete("id = 2")
|
||||||
|
* await tbl.countRows() // Returns 2
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* If you have a list of values to delete, you can combine them into a
|
||||||
|
* stringified list and use the `IN` operator:
|
||||||
|
*
|
||||||
|
* ```ts
|
||||||
|
* const to_remove = [1, 5];
|
||||||
|
* await tbl.delete(`id IN (${to_remove.join(",")})`)
|
||||||
|
* await tbl.countRows() // Returns 1
|
||||||
|
* ```
|
||||||
*/
|
*/
|
||||||
delete: (filter: string) => Promise<void>
|
delete: (filter: string) => Promise<void>
|
||||||
}
|
}
|
||||||
@@ -126,16 +221,16 @@ export interface Table<T = number[]> {
|
|||||||
* A connection to a LanceDB database.
|
* A connection to a LanceDB database.
|
||||||
*/
|
*/
|
||||||
export class LocalConnection implements Connection {
|
export class LocalConnection implements Connection {
|
||||||
private readonly _uri: string
|
private readonly _options: ConnectionOptions
|
||||||
private readonly _db: any
|
private readonly _db: any
|
||||||
|
|
||||||
constructor (db: any, uri: string) {
|
constructor (db: any, options: ConnectionOptions) {
|
||||||
this._uri = uri
|
this._options = options
|
||||||
this._db = db
|
this._db = db
|
||||||
}
|
}
|
||||||
|
|
||||||
get uri (): string {
|
get uri (): string {
|
||||||
return this._uri
|
return this._options.uri
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -151,6 +246,7 @@ export class LocalConnection implements Connection {
|
|||||||
* @param name The name of the table.
|
* @param name The name of the table.
|
||||||
*/
|
*/
|
||||||
async openTable (name: string): Promise<Table>
|
async openTable (name: string): Promise<Table>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Open a table in the database.
|
* Open a table in the database.
|
||||||
*
|
*
|
||||||
@@ -158,43 +254,43 @@ export class LocalConnection implements Connection {
|
|||||||
* @param embeddings An embedding function to use on this Table
|
* @param embeddings An embedding function to use on this Table
|
||||||
*/
|
*/
|
||||||
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
const tbl = await databaseOpenTable.call(this._db, name)
|
const tbl = await databaseOpenTable.call(this._db, name)
|
||||||
if (embeddings !== undefined) {
|
if (embeddings !== undefined) {
|
||||||
return new LocalTable(tbl, name, embeddings)
|
return new LocalTable(tbl, name, this._options, embeddings)
|
||||||
} else {
|
} else {
|
||||||
return new LocalTable(tbl, name)
|
return new LocalTable(tbl, name, this._options)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, optsOrEmbedding?: WriteOptions | EmbeddingFunction<T>, opt?: WriteOptions): Promise<Table<T>> {
|
||||||
* Creates a new Table and initialize it with new data.
|
let writeOptions: WriteOptions = new DefaultWriteOptions()
|
||||||
*
|
if (opt !== undefined && isWriteOptions(opt)) {
|
||||||
* @param name The name of the table.
|
writeOptions = opt
|
||||||
* @param data Non-empty Array of Records to be inserted into the Table
|
} else if (optsOrEmbedding !== undefined && isWriteOptions(optsOrEmbedding)) {
|
||||||
* @param mode The write mode to use when creating the table.
|
writeOptions = optsOrEmbedding
|
||||||
*/
|
|
||||||
async createTable (name: string, data: Array<Record<string, unknown>>, mode?: WriteMode): Promise<Table>
|
|
||||||
async createTable (name: string, data: Array<Record<string, unknown>>, mode: WriteMode): Promise<Table>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a new Table and initialize it with new data.
|
|
||||||
*
|
|
||||||
* @param name The name of the table.
|
|
||||||
* @param data Non-empty Array of Records to be inserted into the Table
|
|
||||||
* @param mode The write mode to use when creating the table.
|
|
||||||
* @param embeddings An embedding function to use on this Table
|
|
||||||
*/
|
|
||||||
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
|
||||||
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
|
||||||
if (mode === undefined) {
|
|
||||||
mode = WriteMode.Create
|
|
||||||
}
|
}
|
||||||
const tbl = await tableCreate.call(this._db, name, await fromRecordsToBuffer(data, embeddings), mode.toLowerCase())
|
|
||||||
|
let embeddings: undefined | EmbeddingFunction<T>
|
||||||
|
if (optsOrEmbedding !== undefined && isEmbeddingFunction(optsOrEmbedding)) {
|
||||||
|
embeddings = optsOrEmbedding
|
||||||
|
}
|
||||||
|
const createArgs = [this._db, name, await fromRecordsToBuffer(data, embeddings), writeOptions.writeMode?.toString()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
createArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
createArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
createArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const tbl = await tableCreate.call(...createArgs)
|
||||||
|
|
||||||
if (embeddings !== undefined) {
|
if (embeddings !== undefined) {
|
||||||
return new LocalTable(tbl, name, embeddings)
|
return new LocalTable(tbl, name, this._options, embeddings)
|
||||||
} else {
|
} else {
|
||||||
return new LocalTable(tbl, name)
|
return new LocalTable(tbl, name, this._options)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -217,18 +313,21 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
private readonly _tbl: any
|
private readonly _tbl: any
|
||||||
private readonly _name: string
|
private readonly _name: string
|
||||||
private readonly _embeddings?: EmbeddingFunction<T>
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
private readonly _options: ConnectionOptions
|
||||||
|
|
||||||
constructor (tbl: any, name: string)
|
constructor (tbl: any, name: string, options: ConnectionOptions)
|
||||||
/**
|
/**
|
||||||
* @param tbl
|
* @param tbl
|
||||||
* @param name
|
* @param name
|
||||||
|
* @param options
|
||||||
* @param embeddings An embedding function to use when interacting with this table
|
* @param embeddings An embedding function to use when interacting with this table
|
||||||
*/
|
*/
|
||||||
constructor (tbl: any, name: string, embeddings: EmbeddingFunction<T>)
|
constructor (tbl: any, name: string, options: ConnectionOptions, embeddings: EmbeddingFunction<T>)
|
||||||
constructor (tbl: any, name: string, embeddings?: EmbeddingFunction<T>) {
|
constructor (tbl: any, name: string, options: ConnectionOptions, embeddings?: EmbeddingFunction<T>) {
|
||||||
this._tbl = tbl
|
this._tbl = tbl
|
||||||
this._name = name
|
this._name = name
|
||||||
this._embeddings = embeddings
|
this._embeddings = embeddings
|
||||||
|
this._options = options
|
||||||
}
|
}
|
||||||
|
|
||||||
get name (): string {
|
get name (): string {
|
||||||
@@ -240,7 +339,7 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @param query The query search term
|
* @param query The query search term
|
||||||
*/
|
*/
|
||||||
search (query: T): Query<T> {
|
search (query: T): Query<T> {
|
||||||
return new Query(this._tbl, query, this._embeddings)
|
return new Query(query, this._tbl, this._embeddings)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -250,7 +349,15 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString())
|
const callArgs = [this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
callArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tableAdd.call(...callArgs)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -260,6 +367,14 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
const callArgs = [this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
callArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
|
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -346,116 +461,6 @@ export interface IvfPQIndexConfig {
|
|||||||
|
|
||||||
export type VectorIndexParams = IvfPQIndexConfig
|
export type VectorIndexParams = IvfPQIndexConfig
|
||||||
|
|
||||||
/**
|
|
||||||
* A builder for nearest neighbor queries for LanceDB.
|
|
||||||
*/
|
|
||||||
export class Query<T = number[]> {
|
|
||||||
private readonly _tbl: any
|
|
||||||
private readonly _query: T
|
|
||||||
private _queryVector?: number[]
|
|
||||||
private _limit: number
|
|
||||||
private _refineFactor?: number
|
|
||||||
private _nprobes: number
|
|
||||||
private _select?: string[]
|
|
||||||
private _filter?: string
|
|
||||||
private _metricType?: MetricType
|
|
||||||
private readonly _embeddings?: EmbeddingFunction<T>
|
|
||||||
|
|
||||||
constructor (tbl: any, query: T, embeddings?: EmbeddingFunction<T>) {
|
|
||||||
this._tbl = tbl
|
|
||||||
this._query = query
|
|
||||||
this._limit = 10
|
|
||||||
this._nprobes = 20
|
|
||||||
this._refineFactor = undefined
|
|
||||||
this._select = undefined
|
|
||||||
this._filter = undefined
|
|
||||||
this._metricType = undefined
|
|
||||||
this._embeddings = embeddings
|
|
||||||
}
|
|
||||||
|
|
||||||
/***
|
|
||||||
* Sets the number of results that will be returned
|
|
||||||
* @param value number of results
|
|
||||||
*/
|
|
||||||
limit (value: number): Query<T> {
|
|
||||||
this._limit = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Refine the results by reading extra elements and re-ranking them in memory.
|
|
||||||
* @param value refine factor to use in this query.
|
|
||||||
*/
|
|
||||||
refineFactor (value: number): Query<T> {
|
|
||||||
this._refineFactor = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The number of probes used. A higher number makes search more accurate but also slower.
|
|
||||||
* @param value The number of probes used.
|
|
||||||
*/
|
|
||||||
nprobes (value: number): Query<T> {
|
|
||||||
this._nprobes = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A filter statement to be applied to this query.
|
|
||||||
* @param value A filter in the same format used by a sql WHERE clause.
|
|
||||||
*/
|
|
||||||
filter (value: string): Query<T> {
|
|
||||||
this._filter = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
where = this.filter
|
|
||||||
|
|
||||||
/** Return only the specified columns.
|
|
||||||
*
|
|
||||||
* @param value Only select the specified columns. If not specified, all columns will be returned.
|
|
||||||
*/
|
|
||||||
select (value: string[]): Query<T> {
|
|
||||||
this._select = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The MetricType used for this Query.
|
|
||||||
* @param value The metric to the. @see MetricType for the different options
|
|
||||||
*/
|
|
||||||
metricType (value: MetricType): Query<T> {
|
|
||||||
this._metricType = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the query and return the results as an Array of Objects
|
|
||||||
*/
|
|
||||||
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
|
||||||
if (this._embeddings !== undefined) {
|
|
||||||
this._queryVector = (await this._embeddings.embed([this._query]))[0]
|
|
||||||
} else {
|
|
||||||
this._queryVector = this._query as number[]
|
|
||||||
}
|
|
||||||
|
|
||||||
const buffer = await tableSearch.call(this._tbl, this)
|
|
||||||
const data = tableFromIPC(buffer)
|
|
||||||
|
|
||||||
return data.toArray().map((entry: Record<string, unknown>) => {
|
|
||||||
const newObject: Record<string, unknown> = {}
|
|
||||||
Object.keys(entry).forEach((key: string) => {
|
|
||||||
if (entry[key] instanceof Vector) {
|
|
||||||
newObject[key] = (entry[key] as Vector).toArray()
|
|
||||||
} else {
|
|
||||||
newObject[key] = entry[key]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return newObject as unknown as T
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Write mode for writing a table.
|
* Write mode for writing a table.
|
||||||
*/
|
*/
|
||||||
@@ -468,6 +473,23 @@ export enum WriteMode {
|
|||||||
Append = 'append'
|
Append = 'append'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write options when creating a Table.
|
||||||
|
*/
|
||||||
|
export interface WriteOptions {
|
||||||
|
/** A {@link WriteMode} to use on this operation */
|
||||||
|
writeMode?: WriteMode
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DefaultWriteOptions implements WriteOptions {
|
||||||
|
writeMode = WriteMode.Create
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isWriteOptions (value: any): value is WriteOptions {
|
||||||
|
return Object.keys(value).length === 1 &&
|
||||||
|
(value.writeMode === undefined || typeof value.writeMode === 'string')
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Distance metrics type.
|
* Distance metrics type.
|
||||||
*/
|
*/
|
||||||
|
|||||||
130
node/src/query.ts
Normal file
130
node/src/query.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import { Vector, tableFromIPC } from 'apache-arrow'
|
||||||
|
import { type EmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
import { type MetricType } from '.'
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
|
const { tableSearch } = require('../native.js')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A builder for nearest neighbor queries for LanceDB.
|
||||||
|
*/
|
||||||
|
export class Query<T = number[]> {
|
||||||
|
private readonly _query: T
|
||||||
|
private readonly _tbl?: any
|
||||||
|
private _queryVector?: number[]
|
||||||
|
private _limit: number
|
||||||
|
private _refineFactor?: number
|
||||||
|
private _nprobes: number
|
||||||
|
private _select?: string[]
|
||||||
|
private _filter?: string
|
||||||
|
private _metricType?: MetricType
|
||||||
|
protected readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
|
||||||
|
constructor (query: T, tbl?: any, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
this._tbl = tbl
|
||||||
|
this._query = query
|
||||||
|
this._limit = 10
|
||||||
|
this._nprobes = 20
|
||||||
|
this._refineFactor = undefined
|
||||||
|
this._select = undefined
|
||||||
|
this._filter = undefined
|
||||||
|
this._metricType = undefined
|
||||||
|
this._embeddings = embeddings
|
||||||
|
}
|
||||||
|
|
||||||
|
/***
|
||||||
|
* Sets the number of results that will be returned
|
||||||
|
* @param value number of results
|
||||||
|
*/
|
||||||
|
limit (value: number): Query<T> {
|
||||||
|
this._limit = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refine the results by reading extra elements and re-ranking them in memory.
|
||||||
|
* @param value refine factor to use in this query.
|
||||||
|
*/
|
||||||
|
refineFactor (value: number): Query<T> {
|
||||||
|
this._refineFactor = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The number of probes used. A higher number makes search more accurate but also slower.
|
||||||
|
* @param value The number of probes used.
|
||||||
|
*/
|
||||||
|
nprobes (value: number): Query<T> {
|
||||||
|
this._nprobes = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A filter statement to be applied to this query.
|
||||||
|
* @param value A filter in the same format used by a sql WHERE clause.
|
||||||
|
*/
|
||||||
|
filter (value: string): Query<T> {
|
||||||
|
this._filter = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
where = this.filter
|
||||||
|
|
||||||
|
/** Return only the specified columns.
|
||||||
|
*
|
||||||
|
* @param value Only select the specified columns. If not specified, all columns will be returned.
|
||||||
|
*/
|
||||||
|
select (value: string[]): Query<T> {
|
||||||
|
this._select = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The MetricType used for this Query.
|
||||||
|
* @param value The metric to the. @see MetricType for the different options
|
||||||
|
*/
|
||||||
|
metricType (value: MetricType): Query<T> {
|
||||||
|
this._metricType = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the query and return the results as an Array of Objects
|
||||||
|
*/
|
||||||
|
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
||||||
|
if (this._embeddings !== undefined) {
|
||||||
|
this._queryVector = (await this._embeddings.embed([this._query]))[0]
|
||||||
|
} else {
|
||||||
|
this._queryVector = this._query as number[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const buffer = await tableSearch.call(this._tbl, this)
|
||||||
|
const data = tableFromIPC(buffer)
|
||||||
|
|
||||||
|
return data.toArray().map((entry: Record<string, unknown>) => {
|
||||||
|
const newObject: Record<string, unknown> = {}
|
||||||
|
Object.keys(entry).forEach((key: string) => {
|
||||||
|
if (entry[key] instanceof Vector) {
|
||||||
|
newObject[key] = (entry[key] as Vector).toArray()
|
||||||
|
} else {
|
||||||
|
newObject[key] = entry[key]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return newObject as unknown as T
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
107
node/src/remote/client.ts
Normal file
107
node/src/remote/client.ts
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import axios, { type AxiosResponse } from 'axios'
|
||||||
|
|
||||||
|
import { tableFromIPC, type Table as ArrowTable } from 'apache-arrow'
|
||||||
|
|
||||||
|
export class HttpLancedbClient {
|
||||||
|
private readonly _url: string
|
||||||
|
private readonly _apiKey: () => string
|
||||||
|
|
||||||
|
public constructor (
|
||||||
|
url: string,
|
||||||
|
apiKey: string,
|
||||||
|
private readonly _dbName?: string
|
||||||
|
) {
|
||||||
|
this._url = url
|
||||||
|
this._apiKey = () => apiKey
|
||||||
|
}
|
||||||
|
|
||||||
|
get uri (): string {
|
||||||
|
return this._url
|
||||||
|
}
|
||||||
|
|
||||||
|
public async search (
|
||||||
|
tableName: string,
|
||||||
|
vector: number[],
|
||||||
|
k: number,
|
||||||
|
nprobes: number,
|
||||||
|
refineFactor?: number,
|
||||||
|
columns?: string[],
|
||||||
|
filter?: string
|
||||||
|
): Promise<ArrowTable<any>> {
|
||||||
|
const response = await axios.post(
|
||||||
|
`${this._url}/v1/table/${tableName}/query/`,
|
||||||
|
{
|
||||||
|
vector,
|
||||||
|
k,
|
||||||
|
nprobes,
|
||||||
|
refineFactor,
|
||||||
|
columns,
|
||||||
|
filter
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': this._apiKey(),
|
||||||
|
...(this._dbName !== undefined ? { 'x-lancedb-database': this._dbName } : {})
|
||||||
|
},
|
||||||
|
responseType: 'arraybuffer',
|
||||||
|
timeout: 10000
|
||||||
|
}
|
||||||
|
).catch((err) => {
|
||||||
|
console.error('error: ', err)
|
||||||
|
return err.response
|
||||||
|
})
|
||||||
|
if (response.status !== 200) {
|
||||||
|
const errorData = new TextDecoder().decode(response.data)
|
||||||
|
throw new Error(
|
||||||
|
`Server Error, status: ${response.status as number}, ` +
|
||||||
|
`message: ${response.statusText as string}: ${errorData}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const table = tableFromIPC(response.data)
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sent GET request.
|
||||||
|
*/
|
||||||
|
public async get (path: string, params?: Record<string, string | number>): Promise<AxiosResponse> {
|
||||||
|
const response = await axios.get(
|
||||||
|
`${this._url}${path}`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': this._apiKey()
|
||||||
|
},
|
||||||
|
params,
|
||||||
|
timeout: 10000
|
||||||
|
}
|
||||||
|
).catch((err) => {
|
||||||
|
console.error('error: ', err)
|
||||||
|
return err.response
|
||||||
|
})
|
||||||
|
if (response.status !== 200) {
|
||||||
|
const errorData = new TextDecoder().decode(response.data)
|
||||||
|
throw new Error(
|
||||||
|
`Server Error, status: ${response.status as number}, ` +
|
||||||
|
`message: ${response.statusText as string}: ${errorData}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
}
|
||||||
168
node/src/remote/index.ts
Normal file
168
node/src/remote/index.ts
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import {
|
||||||
|
type EmbeddingFunction, type Table, type VectorIndexParams, type Connection,
|
||||||
|
type ConnectionOptions
|
||||||
|
} from '../index'
|
||||||
|
import { Query } from '../query'
|
||||||
|
|
||||||
|
import { type Table as ArrowTable, Vector } from 'apache-arrow'
|
||||||
|
import { HttpLancedbClient } from './client'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remote connection.
|
||||||
|
*/
|
||||||
|
export class RemoteConnection implements Connection {
|
||||||
|
private readonly _client: HttpLancedbClient
|
||||||
|
private readonly _dbName: string
|
||||||
|
|
||||||
|
constructor (opts: ConnectionOptions) {
|
||||||
|
if (!opts.uri.startsWith('db://')) {
|
||||||
|
throw new Error(`Invalid remote DB URI: ${opts.uri}`)
|
||||||
|
}
|
||||||
|
if (opts.apiKey === undefined || opts.region === undefined) {
|
||||||
|
throw new Error('API key and region are not supported for remote connections')
|
||||||
|
}
|
||||||
|
|
||||||
|
this._dbName = opts.uri.slice('db://'.length)
|
||||||
|
let server: string
|
||||||
|
if (opts.hostOverride === undefined) {
|
||||||
|
server = `https://${this._dbName}.${opts.region}.api.lancedb.com`
|
||||||
|
} else {
|
||||||
|
server = opts.hostOverride
|
||||||
|
}
|
||||||
|
this._client = new HttpLancedbClient(server, opts.apiKey, opts.hostOverride === undefined ? undefined : this._dbName)
|
||||||
|
}
|
||||||
|
|
||||||
|
get uri (): string {
|
||||||
|
// add the lancedb+ prefix back
|
||||||
|
return 'db://' + this._client.uri
|
||||||
|
}
|
||||||
|
|
||||||
|
async tableNames (): Promise<string[]> {
|
||||||
|
const response = await this._client.get('/v1/table/')
|
||||||
|
return response.data.tables
|
||||||
|
}
|
||||||
|
|
||||||
|
async openTable (name: string): Promise<Table>
|
||||||
|
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
|
if (embeddings !== undefined) {
|
||||||
|
return new RemoteTable(this._client, name, embeddings)
|
||||||
|
} else {
|
||||||
|
return new RemoteTable(this._client, name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async createTableArrow (name: string, table: ArrowTable): Promise<Table> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async dropTable (name: string): Promise<void> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class RemoteQuery<T = number[]> extends Query<T> {
|
||||||
|
constructor (query: T, private readonly _client: HttpLancedbClient,
|
||||||
|
private readonly _name: string, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
super(query, undefined, embeddings)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: refactor this to a base class + queryImpl pattern
|
||||||
|
async execute<T = Record<string, unknown>>(): Promise<T[]> {
|
||||||
|
const embeddings = this._embeddings
|
||||||
|
const query = (this as any)._query
|
||||||
|
let queryVector: number[]
|
||||||
|
|
||||||
|
if (embeddings !== undefined) {
|
||||||
|
queryVector = (await embeddings.embed([query]))[0]
|
||||||
|
} else {
|
||||||
|
queryVector = query as number[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await this._client.search(
|
||||||
|
this._name,
|
||||||
|
queryVector,
|
||||||
|
(this as any)._limit,
|
||||||
|
(this as any)._nprobes,
|
||||||
|
(this as any)._refineFactor,
|
||||||
|
(this as any)._select,
|
||||||
|
(this as any)._filter
|
||||||
|
)
|
||||||
|
|
||||||
|
return data.toArray().map((entry: Record<string, unknown>) => {
|
||||||
|
const newObject: Record<string, unknown> = {}
|
||||||
|
Object.keys(entry).forEach((key: string) => {
|
||||||
|
if (entry[key] instanceof Vector) {
|
||||||
|
newObject[key] = (entry[key] as Vector).toArray()
|
||||||
|
} else {
|
||||||
|
newObject[key] = entry[key]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return newObject as unknown as T
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// we are using extend until we have next next version release
|
||||||
|
// Table and Connection has both been refactored to interfaces
|
||||||
|
export class RemoteTable<T = number[]> implements Table<T> {
|
||||||
|
private readonly _client: HttpLancedbClient
|
||||||
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
private readonly _name: string
|
||||||
|
|
||||||
|
constructor (client: HttpLancedbClient, name: string)
|
||||||
|
constructor (client: HttpLancedbClient, name: string, embeddings: EmbeddingFunction<T>)
|
||||||
|
constructor (client: HttpLancedbClient, name: string, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
this._client = client
|
||||||
|
this._name = name
|
||||||
|
this._embeddings = embeddings
|
||||||
|
}
|
||||||
|
|
||||||
|
get name (): string {
|
||||||
|
return this._name
|
||||||
|
}
|
||||||
|
|
||||||
|
search (query: T): Query<T> {
|
||||||
|
return new RemoteQuery(query, this._client, this._name)//, this._embeddings_new)
|
||||||
|
}
|
||||||
|
|
||||||
|
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async createIndex (indexParams: VectorIndexParams): Promise<any> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async countRows (): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete (filter: string): Promise<void> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -16,6 +16,7 @@ import { describe } from 'mocha'
|
|||||||
import { assert } from 'chai'
|
import { assert } from 'chai'
|
||||||
|
|
||||||
import { OpenAIEmbeddingFunction } from '../../embedding/openai'
|
import { OpenAIEmbeddingFunction } from '../../embedding/openai'
|
||||||
|
import { isEmbeddingFunction } from '../../embedding/embedding_function'
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
const { OpenAIApi } = require('openai')
|
const { OpenAIApi } = require('openai')
|
||||||
@@ -47,4 +48,10 @@ describe('OpenAPIEmbeddings', function () {
|
|||||||
assert.deepEqual(vectors[1], stubValue.data.data[1].embedding)
|
assert.deepEqual(vectors[1], stubValue.data.data[1].embedding)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('isEmbeddingFunction', function () {
|
||||||
|
it('should match the isEmbeddingFunction guard', function () {
|
||||||
|
assert.isTrue(isEmbeddingFunction(new OpenAIEmbeddingFunction('text', 'sk-key')))
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -18,26 +18,48 @@ import { describe } from 'mocha'
|
|||||||
import { assert } from 'chai'
|
import { assert } from 'chai'
|
||||||
|
|
||||||
import * as lancedb from '../index'
|
import * as lancedb from '../index'
|
||||||
|
import { type ConnectionOptions } from '../index'
|
||||||
|
|
||||||
describe('LanceDB S3 client', function () {
|
describe('LanceDB S3 client', function () {
|
||||||
if (process.env.TEST_S3_BASE_URL != null) {
|
if (process.env.TEST_S3_BASE_URL != null) {
|
||||||
const baseUri = process.env.TEST_S3_BASE_URL
|
const baseUri = process.env.TEST_S3_BASE_URL
|
||||||
it('should have a valid url', async function () {
|
it('should have a valid url', async function () {
|
||||||
const uri = `${baseUri}/valid_url`
|
const opts = { uri: `${baseUri}/valid_url` }
|
||||||
const table = await createTestDB(uri, 2, 20)
|
const table = await createTestDB(opts, 2, 20)
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(opts)
|
||||||
assert.equal(con.uri, uri)
|
assert.equal(con.uri, opts.uri)
|
||||||
|
|
||||||
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
||||||
assert.equal(results.length, 5)
|
assert.equal(results.length, 5)
|
||||||
})
|
}).timeout(10_000)
|
||||||
|
} else {
|
||||||
|
describe.skip('Skip S3 test', function () {})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.TEST_S3_BASE_URL != null && process.env.TEST_AWS_ACCESS_KEY_ID != null && process.env.TEST_AWS_SECRET_ACCESS_KEY != null) {
|
||||||
|
const baseUri = process.env.TEST_S3_BASE_URL
|
||||||
|
it('use custom credentials', async function () {
|
||||||
|
const opts: ConnectionOptions = {
|
||||||
|
uri: `${baseUri}/custom_credentials`,
|
||||||
|
awsCredentials: {
|
||||||
|
accessKeyId: process.env.TEST_AWS_ACCESS_KEY_ID as string,
|
||||||
|
secretKey: process.env.TEST_AWS_SECRET_ACCESS_KEY as string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const table = await createTestDB(opts, 2, 20)
|
||||||
|
const con = await lancedb.connect(opts)
|
||||||
|
assert.equal(con.uri, opts.uri)
|
||||||
|
|
||||||
|
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
||||||
|
assert.equal(results.length, 5)
|
||||||
|
}).timeout(10_000)
|
||||||
} else {
|
} else {
|
||||||
describe.skip('Skip S3 test', function () {})
|
describe.skip('Skip S3 test', function () {})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
async function createTestDB (uri: string, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
|
async function createTestDB (opts: ConnectionOptions, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(opts)
|
||||||
|
|
||||||
const data = []
|
const data = []
|
||||||
for (let i = 0; i < numRows; i++) {
|
for (let i = 0; i < numRows; i++) {
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ import * as chai from 'chai'
|
|||||||
import * as chaiAsPromised from 'chai-as-promised'
|
import * as chaiAsPromised from 'chai-as-promised'
|
||||||
|
|
||||||
import * as lancedb from '../index'
|
import * as lancedb from '../index'
|
||||||
import { type EmbeddingFunction, MetricType, Query, WriteMode } from '../index'
|
import { type AwsCredentials, type EmbeddingFunction, MetricType, Query, WriteMode, DefaultWriteOptions, isWriteOptions } from '../index'
|
||||||
|
|
||||||
const expect = chai.expect
|
const expect = chai.expect
|
||||||
const assert = chai.assert
|
const assert = chai.assert
|
||||||
@@ -32,6 +32,22 @@ describe('LanceDB client', function () {
|
|||||||
assert.equal(con.uri, uri)
|
assert.equal(con.uri, uri)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should accept an options object', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const con = await lancedb.connect({ uri })
|
||||||
|
assert.equal(con.uri, uri)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should accept custom aws credentials', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const awsCredentials: AwsCredentials = {
|
||||||
|
accessKeyId: '',
|
||||||
|
secretKey: ''
|
||||||
|
}
|
||||||
|
const con = await lancedb.connect({ uri, awsCredentials })
|
||||||
|
assert.equal(con.uri, uri)
|
||||||
|
})
|
||||||
|
|
||||||
it('should return the existing table names', async function () {
|
it('should return the existing table names', async function () {
|
||||||
const uri = await createTestDB()
|
const uri = await createTestDB()
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(uri)
|
||||||
@@ -118,6 +134,18 @@ describe('LanceDB client', function () {
|
|||||||
assert.equal(await table.countRows(), 2)
|
assert.equal(await table.countRows(), 2)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('fails to create a new table when the vector column is missing', async function () {
|
||||||
|
const dir = await track().mkdir('lancejs')
|
||||||
|
const con = await lancedb.connect(dir)
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ id: 1, price: 10 }
|
||||||
|
]
|
||||||
|
|
||||||
|
const create = con.createTable('missing_vector', data)
|
||||||
|
await expect(create).to.be.rejectedWith(Error, 'column \'vector\' is missing')
|
||||||
|
})
|
||||||
|
|
||||||
it('use overwrite flag to overwrite existing table', async function () {
|
it('use overwrite flag to overwrite existing table', async function () {
|
||||||
const dir = await track().mkdir('lancejs')
|
const dir = await track().mkdir('lancejs')
|
||||||
const con = await lancedb.connect(dir)
|
const con = await lancedb.connect(dir)
|
||||||
@@ -128,7 +156,7 @@ describe('LanceDB client', function () {
|
|||||||
]
|
]
|
||||||
|
|
||||||
const tableName = 'overwrite'
|
const tableName = 'overwrite'
|
||||||
await con.createTable(tableName, data, WriteMode.Create)
|
await con.createTable(tableName, data, { writeMode: WriteMode.Create })
|
||||||
|
|
||||||
const newData = [
|
const newData = [
|
||||||
{ id: 1, vector: [0.1, 0.2], price: 10 },
|
{ id: 1, vector: [0.1, 0.2], price: 10 },
|
||||||
@@ -138,7 +166,7 @@ describe('LanceDB client', function () {
|
|||||||
|
|
||||||
await expect(con.createTable(tableName, newData)).to.be.rejectedWith(Error, 'already exists')
|
await expect(con.createTable(tableName, newData)).to.be.rejectedWith(Error, 'already exists')
|
||||||
|
|
||||||
const table = await con.createTable(tableName, newData, WriteMode.Overwrite)
|
const table = await con.createTable(tableName, newData, { writeMode: WriteMode.Overwrite })
|
||||||
assert.equal(table.name, tableName)
|
assert.equal(table.name, tableName)
|
||||||
assert.equal(await table.countRows(), 3)
|
assert.equal(await table.countRows(), 3)
|
||||||
})
|
})
|
||||||
@@ -214,6 +242,22 @@ describe('LanceDB client', function () {
|
|||||||
// Default replace = true
|
// Default replace = true
|
||||||
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2, num_sub_vectors: 2 })
|
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2, num_sub_vectors: 2 })
|
||||||
}).timeout(50_000)
|
}).timeout(50_000)
|
||||||
|
|
||||||
|
it('it should fail when the column is not a vector', async function () {
|
||||||
|
const uri = await createTestDB(32, 300)
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
const table = await con.openTable('vectors')
|
||||||
|
const createIndex = table.createIndex({ type: 'ivf_pq', column: 'name', num_partitions: 2, max_iters: 2, num_sub_vectors: 2 })
|
||||||
|
await expect(createIndex).to.be.rejectedWith(/VectorIndex requires the column data type to be fixed size list of float32s/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('it should fail when the column is not a vector', async function () {
|
||||||
|
const uri = await createTestDB(32, 300)
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
const table = await con.openTable('vectors')
|
||||||
|
const createIndex = table.createIndex({ type: 'ivf_pq', column: 'name', num_partitions: -1, max_iters: 2, num_sub_vectors: 2 })
|
||||||
|
await expect(createIndex).to.be.rejectedWith('num_partitions: must be > 0')
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('when using a custom embedding function', function () {
|
describe('when using a custom embedding function', function () {
|
||||||
@@ -243,7 +287,7 @@ describe('LanceDB client', function () {
|
|||||||
{ price: 10, name: 'foo' },
|
{ price: 10, name: 'foo' },
|
||||||
{ price: 50, name: 'bar' }
|
{ price: 50, name: 'bar' }
|
||||||
]
|
]
|
||||||
const table = await con.createTable('vectors', data, WriteMode.Create, embeddings)
|
const table = await con.createTable('vectors', data, embeddings, { writeMode: WriteMode.Create })
|
||||||
const results = await table.search('foo').execute()
|
const results = await table.search('foo').execute()
|
||||||
assert.equal(results.length, 2)
|
assert.equal(results.length, 2)
|
||||||
})
|
})
|
||||||
@@ -252,7 +296,7 @@ describe('LanceDB client', function () {
|
|||||||
|
|
||||||
describe('Query object', function () {
|
describe('Query object', function () {
|
||||||
it('sets custom parameters', async function () {
|
it('sets custom parameters', async function () {
|
||||||
const query = new Query(undefined, [0.1, 0.3])
|
const query = new Query([0.1, 0.3])
|
||||||
.limit(1)
|
.limit(1)
|
||||||
.metricType(MetricType.Cosine)
|
.metricType(MetricType.Cosine)
|
||||||
.refineFactor(100)
|
.refineFactor(100)
|
||||||
@@ -301,3 +345,20 @@ describe('Drop table', function () {
|
|||||||
assert.deepEqual(await con.tableNames(), ['t2'])
|
assert.deepEqual(await con.tableNames(), ['t2'])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('WriteOptions', function () {
|
||||||
|
context('#isWriteOptions', function () {
|
||||||
|
it('should not match empty object', function () {
|
||||||
|
assert.equal(isWriteOptions({}), false)
|
||||||
|
})
|
||||||
|
it('should match write options', function () {
|
||||||
|
assert.equal(isWriteOptions({ writeMode: WriteMode.Create }), true)
|
||||||
|
})
|
||||||
|
it('should match undefined write mode', function () {
|
||||||
|
assert.equal(isWriteOptions({ writeMode: undefined }), true)
|
||||||
|
})
|
||||||
|
it('should match default write options', function () {
|
||||||
|
assert.equal(isWriteOptions(new DefaultWriteOptions()), true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 0.1.8
|
current_version = 0.1.16
|
||||||
commit = True
|
commit = True
|
||||||
message = [python] Bump version: {current_version} → {new_version}
|
message = [python] Bump version: {current_version} → {new_version}
|
||||||
tag = True
|
tag = True
|
||||||
|
|||||||
@@ -15,10 +15,15 @@ from typing import Optional
|
|||||||
|
|
||||||
from .db import URI, DBConnection, LanceDBConnection
|
from .db import URI, DBConnection, LanceDBConnection
|
||||||
from .remote.db import RemoteDBConnection
|
from .remote.db import RemoteDBConnection
|
||||||
|
from .schema import vector
|
||||||
|
|
||||||
|
|
||||||
def connect(
|
def connect(
|
||||||
uri: URI, *, api_key: Optional[str] = None, region: str = "us-west-2"
|
uri: URI,
|
||||||
|
*,
|
||||||
|
api_key: Optional[str] = None,
|
||||||
|
region: str = "us-west-2",
|
||||||
|
host_override: Optional[str] = None,
|
||||||
) -> DBConnection:
|
) -> DBConnection:
|
||||||
"""Connect to a LanceDB database.
|
"""Connect to a LanceDB database.
|
||||||
|
|
||||||
@@ -54,5 +59,5 @@ def connect(
|
|||||||
if isinstance(uri, str) and uri.startswith("db://"):
|
if isinstance(uri, str) and uri.startswith("db://"):
|
||||||
if api_key is None:
|
if api_key is None:
|
||||||
raise ValueError(f"api_key is required to connected LanceDB cloud: {uri}")
|
raise ValueError(f"api_key is required to connected LanceDB cloud: {uri}")
|
||||||
return RemoteDBConnection(uri, api_key, region)
|
return RemoteDBConnection(uri, api_key, region, host_override)
|
||||||
return LanceDBConnection(uri)
|
return LanceDBConnection(uri)
|
||||||
|
|||||||
@@ -11,17 +11,18 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import List, Union
|
from typing import Iterable, List, Union
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from .util import safe_import_pandas
|
||||||
|
|
||||||
|
pd = safe_import_pandas()
|
||||||
|
|
||||||
|
DATA = Union[List[dict], dict, "pd.DataFrame", pa.Table, Iterable[pa.RecordBatch]]
|
||||||
VEC = Union[list, np.ndarray, pa.Array, pa.ChunkedArray]
|
VEC = Union[list, np.ndarray, pa.Array, pa.ChunkedArray]
|
||||||
URI = Union[str, Path]
|
URI = Union[str, Path]
|
||||||
|
|
||||||
# TODO support generator
|
|
||||||
DATA = Union[List[dict], dict, pd.DataFrame]
|
|
||||||
VECTOR_COLUMN_NAME = "vector"
|
VECTOR_COLUMN_NAME = "vector"
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -12,12 +12,13 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
from .exceptions import MissingColumnError, MissingValueError
|
from .exceptions import MissingColumnError, MissingValueError
|
||||||
|
from .util import safe_import_pandas
|
||||||
|
|
||||||
|
pd = safe_import_pandas()
|
||||||
|
|
||||||
|
|
||||||
def contextualize(raw_df: pd.DataFrame) -> Contextualizer:
|
def contextualize(raw_df: "pd.DataFrame") -> Contextualizer:
|
||||||
"""Create a Contextualizer object for the given DataFrame.
|
"""Create a Contextualizer object for the given DataFrame.
|
||||||
|
|
||||||
Used to create context windows. Context windows are rolling subsets of text
|
Used to create context windows. Context windows are rolling subsets of text
|
||||||
@@ -175,8 +176,12 @@ class Contextualizer:
|
|||||||
self._min_window_size = min_window_size
|
self._min_window_size = min_window_size
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def to_df(self) -> pd.DataFrame:
|
def to_df(self) -> "pd.DataFrame":
|
||||||
"""Create the context windows and return a DataFrame."""
|
"""Create the context windows and return a DataFrame."""
|
||||||
|
if pd is None:
|
||||||
|
raise ImportError(
|
||||||
|
"pandas is required to create context windows using lancedb"
|
||||||
|
)
|
||||||
|
|
||||||
if self._text_col not in self._raw_df.columns.tolist():
|
if self._text_col not in self._raw_df.columns.tolist():
|
||||||
raise MissingColumnError(self._text_col)
|
raise MissingColumnError(self._text_col)
|
||||||
|
|||||||
@@ -13,17 +13,17 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import functools
|
|
||||||
import os
|
import os
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
from pyarrow import fs
|
from pyarrow import fs
|
||||||
|
|
||||||
from .common import DATA, URI
|
from .common import DATA, URI
|
||||||
from .table import LanceTable, Table
|
from .table import LanceTable, Table
|
||||||
from .util import get_uri_location, get_uri_scheme
|
from .util import fs_from_uri, get_uri_location, get_uri_scheme
|
||||||
|
|
||||||
|
|
||||||
class DBConnection(ABC):
|
class DBConnection(ABC):
|
||||||
@@ -38,8 +38,8 @@ class DBConnection(ABC):
|
|||||||
def create_table(
|
def create_table(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
data: DATA = None,
|
data: Optional[DATA] = None,
|
||||||
schema: pa.Schema = None,
|
schema: Optional[pa.Schema] = None,
|
||||||
mode: str = "create",
|
mode: str = "create",
|
||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
@@ -51,7 +51,7 @@ class DBConnection(ABC):
|
|||||||
name: str
|
name: str
|
||||||
The name of the table.
|
The name of the table.
|
||||||
data: list, tuple, dict, pd.DataFrame; optional
|
data: list, tuple, dict, pd.DataFrame; optional
|
||||||
The data to insert into the table.
|
The data to initialize the table. User must provide at least one of `data` or `schema`.
|
||||||
schema: pyarrow.Schema; optional
|
schema: pyarrow.Schema; optional
|
||||||
The schema of the table.
|
The schema of the table.
|
||||||
mode: str; default "create"
|
mode: str; default "create"
|
||||||
@@ -64,16 +64,16 @@ class DBConnection(ABC):
|
|||||||
fill_value: float
|
fill_value: float
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
|
|
||||||
Note
|
|
||||||
----
|
|
||||||
The vector index won't be created by default.
|
|
||||||
To create the index, call the `create_index` method on the table.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
LanceTable
|
LanceTable
|
||||||
A reference to the newly created table.
|
A reference to the newly created table.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
The vector index won't be created by default.
|
||||||
|
To create the index, call the `create_index` method on the table.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
|
|
||||||
@@ -119,7 +119,7 @@ class DBConnection(ABC):
|
|||||||
|
|
||||||
Data is converted to Arrow before being written to disk. For maximum
|
Data is converted to Arrow before being written to disk. For maximum
|
||||||
control over how data is saved, either provide the PyArrow schema to
|
control over how data is saved, either provide the PyArrow schema to
|
||||||
convert to or else provide a PyArrow table directly.
|
convert to or else provide a [PyArrow Table](pyarrow.Table) directly.
|
||||||
|
|
||||||
>>> custom_schema = pa.schema([
|
>>> custom_schema = pa.schema([
|
||||||
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
||||||
@@ -138,6 +138,30 @@ class DBConnection(ABC):
|
|||||||
vector: [[[1.1,1.2],[0.2,1.8]]]
|
vector: [[[1.1,1.2],[0.2,1.8]]]
|
||||||
lat: [[45.5,40.1]]
|
lat: [[45.5,40.1]]
|
||||||
long: [[-122.7,-74.1]]
|
long: [[-122.7,-74.1]]
|
||||||
|
|
||||||
|
|
||||||
|
It is also possible to create an table from `[Iterable[pa.RecordBatch]]`:
|
||||||
|
|
||||||
|
|
||||||
|
>>> import pyarrow as pa
|
||||||
|
>>> def make_batches():
|
||||||
|
... for i in range(5):
|
||||||
|
... yield pa.RecordBatch.from_arrays(
|
||||||
|
... [
|
||||||
|
... pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
... pa.array(["foo", "bar"]),
|
||||||
|
... pa.array([10.0, 20.0]),
|
||||||
|
... ],
|
||||||
|
... ["vector", "item", "price"],
|
||||||
|
... )
|
||||||
|
>>> schema=pa.schema([
|
||||||
|
... pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
... pa.field("item", pa.utf8()),
|
||||||
|
... pa.field("price", pa.float32()),
|
||||||
|
... ])
|
||||||
|
>>> db.create_table("table4", make_batches(), schema=schema)
|
||||||
|
LanceTable(table4)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@@ -225,7 +249,7 @@ class LanceDBConnection(DBConnection):
|
|||||||
A list of table names.
|
A list of table names.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
filesystem, path = fs.FileSystem.from_uri(self.uri)
|
filesystem, path = fs_from_uri(self.uri)
|
||||||
except pa.ArrowInvalid:
|
except pa.ArrowInvalid:
|
||||||
raise NotImplementedError("Unsupported scheme: " + self.uri)
|
raise NotImplementedError("Unsupported scheme: " + self.uri)
|
||||||
|
|
||||||
@@ -252,7 +276,7 @@ class LanceDBConnection(DBConnection):
|
|||||||
def create_table(
|
def create_table(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
data: DATA = None,
|
data: Optional[DATA] = None,
|
||||||
schema: pa.Schema = None,
|
schema: pa.Schema = None,
|
||||||
mode: str = "create",
|
mode: str = "create",
|
||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
@@ -260,103 +284,13 @@ class LanceDBConnection(DBConnection):
|
|||||||
) -> LanceTable:
|
) -> LanceTable:
|
||||||
"""Create a table in the database.
|
"""Create a table in the database.
|
||||||
|
|
||||||
Parameters
|
See
|
||||||
----------
|
---
|
||||||
name: str
|
DBConnection.create_table
|
||||||
The name of the table.
|
|
||||||
data: list, tuple, dict, pd.DataFrame; optional
|
|
||||||
The data to insert into the table.
|
|
||||||
schema: pyarrow.Schema; optional
|
|
||||||
The schema of the table.
|
|
||||||
mode: str; default "create"
|
|
||||||
The mode to use when creating the table. Can be either "create" or "overwrite".
|
|
||||||
By default, if the table already exists, an exception is raised.
|
|
||||||
If you want to overwrite the table, use mode="overwrite".
|
|
||||||
on_bad_vectors: str, default "error"
|
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
|
||||||
One of "error", "drop", "fill".
|
|
||||||
fill_value: float
|
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
|
||||||
|
|
||||||
Note
|
|
||||||
----
|
|
||||||
The vector index won't be created by default.
|
|
||||||
To create the index, call the `create_index` method on the table.
|
|
||||||
|
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
LanceTable
|
|
||||||
A reference to the newly created table.
|
|
||||||
|
|
||||||
Examples
|
|
||||||
--------
|
|
||||||
|
|
||||||
Can create with list of tuples or dictionaries:
|
|
||||||
|
|
||||||
>>> import lancedb
|
|
||||||
>>> db = lancedb.connect("./.lancedb")
|
|
||||||
>>> data = [{"vector": [1.1, 1.2], "lat": 45.5, "long": -122.7},
|
|
||||||
... {"vector": [0.2, 1.8], "lat": 40.1, "long": -74.1}]
|
|
||||||
>>> db.create_table("my_table", data)
|
|
||||||
LanceTable(my_table)
|
|
||||||
>>> db["my_table"].head()
|
|
||||||
pyarrow.Table
|
|
||||||
vector: fixed_size_list<item: float>[2]
|
|
||||||
child 0, item: float
|
|
||||||
lat: double
|
|
||||||
long: double
|
|
||||||
----
|
|
||||||
vector: [[[1.1,1.2],[0.2,1.8]]]
|
|
||||||
lat: [[45.5,40.1]]
|
|
||||||
long: [[-122.7,-74.1]]
|
|
||||||
|
|
||||||
You can also pass a pandas DataFrame:
|
|
||||||
|
|
||||||
>>> import pandas as pd
|
|
||||||
>>> data = pd.DataFrame({
|
|
||||||
... "vector": [[1.1, 1.2], [0.2, 1.8]],
|
|
||||||
... "lat": [45.5, 40.1],
|
|
||||||
... "long": [-122.7, -74.1]
|
|
||||||
... })
|
|
||||||
>>> db.create_table("table2", data)
|
|
||||||
LanceTable(table2)
|
|
||||||
>>> db["table2"].head()
|
|
||||||
pyarrow.Table
|
|
||||||
vector: fixed_size_list<item: float>[2]
|
|
||||||
child 0, item: float
|
|
||||||
lat: double
|
|
||||||
long: double
|
|
||||||
----
|
|
||||||
vector: [[[1.1,1.2],[0.2,1.8]]]
|
|
||||||
lat: [[45.5,40.1]]
|
|
||||||
long: [[-122.7,-74.1]]
|
|
||||||
|
|
||||||
Data is converted to Arrow before being written to disk. For maximum
|
|
||||||
control over how data is saved, either provide the PyArrow schema to
|
|
||||||
convert to or else provide a PyArrow table directly.
|
|
||||||
|
|
||||||
>>> custom_schema = pa.schema([
|
|
||||||
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
|
||||||
... pa.field("lat", pa.float32()),
|
|
||||||
... pa.field("long", pa.float32())
|
|
||||||
... ])
|
|
||||||
>>> db.create_table("table3", data, schema = custom_schema)
|
|
||||||
LanceTable(table3)
|
|
||||||
>>> db["table3"].head()
|
|
||||||
pyarrow.Table
|
|
||||||
vector: fixed_size_list<item: float>[2]
|
|
||||||
child 0, item: float
|
|
||||||
lat: float
|
|
||||||
long: float
|
|
||||||
----
|
|
||||||
vector: [[[1.1,1.2],[0.2,1.8]]]
|
|
||||||
lat: [[45.5,40.1]]
|
|
||||||
long: [[-122.7,-74.1]]
|
|
||||||
"""
|
"""
|
||||||
if mode.lower() not in ["create", "overwrite"]:
|
if mode.lower() not in ["create", "overwrite"]:
|
||||||
raise ValueError("mode must be either 'create' or 'overwrite'")
|
raise ValueError("mode must be either 'create' or 'overwrite'")
|
||||||
|
|
||||||
if data is not None:
|
|
||||||
tbl = LanceTable.create(
|
tbl = LanceTable.create(
|
||||||
self,
|
self,
|
||||||
name,
|
name,
|
||||||
@@ -366,8 +300,6 @@ class LanceDBConnection(DBConnection):
|
|||||||
on_bad_vectors=on_bad_vectors,
|
on_bad_vectors=on_bad_vectors,
|
||||||
fill_value=fill_value,
|
fill_value=fill_value,
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
tbl = LanceTable.open(self, name)
|
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
def open_table(self, name: str) -> LanceTable:
|
def open_table(self, name: str) -> LanceTable:
|
||||||
@@ -384,14 +316,20 @@ class LanceDBConnection(DBConnection):
|
|||||||
"""
|
"""
|
||||||
return LanceTable.open(self, name)
|
return LanceTable.open(self, name)
|
||||||
|
|
||||||
def drop_table(self, name: str):
|
def drop_table(self, name: str, ignore_missing: bool = False):
|
||||||
"""Drop a table from the database.
|
"""Drop a table from the database.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
name: str
|
name: str
|
||||||
The name of the table.
|
The name of the table.
|
||||||
|
ignore_missing: bool, default False
|
||||||
|
If True, ignore if the table does not exist.
|
||||||
"""
|
"""
|
||||||
filesystem, path = pa.fs.FileSystem.from_uri(self.uri)
|
try:
|
||||||
|
filesystem, path = fs_from_uri(self.uri)
|
||||||
table_path = os.path.join(path, name + ".lance")
|
table_path = os.path.join(path, name + ".lance")
|
||||||
filesystem.delete_dir(table_path)
|
filesystem.delete_dir(table_path)
|
||||||
|
except FileNotFoundError:
|
||||||
|
if not ignore_missing:
|
||||||
|
raise
|
||||||
|
|||||||
@@ -16,15 +16,19 @@ import sys
|
|||||||
from typing import Callable, Union
|
from typing import Callable, Union
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
from lance.vector import vec_to_table
|
from lance.vector import vec_to_table
|
||||||
from retry import retry
|
from retry import retry
|
||||||
|
|
||||||
|
from .util import safe_import_pandas
|
||||||
|
|
||||||
|
pd = safe_import_pandas()
|
||||||
|
DATA = Union[pa.Table, "pd.DataFrame"]
|
||||||
|
|
||||||
|
|
||||||
def with_embeddings(
|
def with_embeddings(
|
||||||
func: Callable,
|
func: Callable,
|
||||||
data: Union[pa.Table, pd.DataFrame],
|
data: DATA,
|
||||||
column: str = "text",
|
column: str = "text",
|
||||||
wrap_api: bool = True,
|
wrap_api: bool = True,
|
||||||
show_progress: bool = False,
|
show_progress: bool = False,
|
||||||
@@ -60,7 +64,7 @@ def with_embeddings(
|
|||||||
func = func.batch_size(batch_size)
|
func = func.batch_size(batch_size)
|
||||||
if show_progress:
|
if show_progress:
|
||||||
func = func.show_progress()
|
func = func.show_progress()
|
||||||
if isinstance(data, pd.DataFrame):
|
if pd is not None and isinstance(data, pd.DataFrame):
|
||||||
data = pa.Table.from_pandas(data, preserve_index=False)
|
data = pa.Table.from_pandas(data, preserve_index=False)
|
||||||
embeddings = func(data[column].to_numpy())
|
embeddings = func(data[column].to_numpy())
|
||||||
table = vec_to_table(np.array(embeddings))
|
table = vec_to_table(np.array(embeddings))
|
||||||
|
|||||||
284
python/lancedb/pydantic.py
Normal file
284
python/lancedb/pydantic.py
Normal file
@@ -0,0 +1,284 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Pydantic (v1 / v2) adapter for LanceDB"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Callable, Dict, Generator, List, Type, Union, _GenericAlias
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pyarrow as pa
|
||||||
|
import pydantic
|
||||||
|
import semver
|
||||||
|
|
||||||
|
PYDANTIC_VERSION = semver.Version.parse(pydantic.__version__)
|
||||||
|
try:
|
||||||
|
from pydantic_core import CoreSchema, core_schema
|
||||||
|
except ImportError:
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
class FixedSizeListMixin(ABC):
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def dim() -> int:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def value_arrow_type() -> pa.DataType:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
def vector(
|
||||||
|
dim: int, value_type: pa.DataType = pa.float32()
|
||||||
|
) -> Type[FixedSizeListMixin]:
|
||||||
|
"""Pydantic Vector Type.
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
Experimental feature.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
dim : int
|
||||||
|
The dimension of the vector.
|
||||||
|
value_type : pyarrow.DataType, optional
|
||||||
|
The value type of the vector, by default pa.float32()
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> import pydantic
|
||||||
|
>>> from lancedb.pydantic import vector
|
||||||
|
...
|
||||||
|
>>> class MyModel(pydantic.BaseModel):
|
||||||
|
... id: int
|
||||||
|
... url: str
|
||||||
|
... embeddings: vector(768)
|
||||||
|
>>> schema = pydantic_to_schema(MyModel)
|
||||||
|
>>> assert schema == pa.schema([
|
||||||
|
... pa.field("id", pa.int64(), False),
|
||||||
|
... pa.field("url", pa.utf8(), False),
|
||||||
|
... pa.field("embeddings", pa.list_(pa.float32(), 768), False)
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
|
||||||
|
# TODO: make a public parameterized type.
|
||||||
|
class FixedSizeList(list, FixedSizeListMixin):
|
||||||
|
def __repr__(self):
|
||||||
|
return f"FixedSizeList(dim={dim})"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def dim() -> int:
|
||||||
|
return dim
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def value_arrow_type() -> pa.DataType:
|
||||||
|
return value_type
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_pydantic_core_schema__(
|
||||||
|
cls, _source_type: Any, _handler: pydantic.GetCoreSchemaHandler
|
||||||
|
) -> CoreSchema:
|
||||||
|
return core_schema.no_info_after_validator_function(
|
||||||
|
cls,
|
||||||
|
core_schema.list_schema(
|
||||||
|
min_length=dim,
|
||||||
|
max_length=dim,
|
||||||
|
items_schema=core_schema.float_schema(),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_validators__(cls) -> Generator[Callable, None, None]:
|
||||||
|
yield cls.validate
|
||||||
|
|
||||||
|
# For pydantic v1
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, v):
|
||||||
|
if not isinstance(v, (list, range, np.ndarray)) or len(v) != dim:
|
||||||
|
raise TypeError("A list of numbers or numpy.ndarray is needed")
|
||||||
|
return v
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION < (2, 0):
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __modify_schema__(cls, field_schema: Dict[str, Any]):
|
||||||
|
field_schema["items"] = {"type": "number"}
|
||||||
|
field_schema["maxItems"] = dim
|
||||||
|
field_schema["minItems"] = dim
|
||||||
|
|
||||||
|
return FixedSizeList
|
||||||
|
|
||||||
|
|
||||||
|
def _py_type_to_arrow_type(py_type: Type[Any]) -> pa.DataType:
|
||||||
|
"""Convert Python Type to Arrow DataType.
|
||||||
|
|
||||||
|
Raises
|
||||||
|
------
|
||||||
|
TypeError
|
||||||
|
If the type is not supported.
|
||||||
|
"""
|
||||||
|
if py_type == int:
|
||||||
|
return pa.int64()
|
||||||
|
elif py_type == float:
|
||||||
|
return pa.float64()
|
||||||
|
elif py_type == str:
|
||||||
|
return pa.utf8()
|
||||||
|
elif py_type == bool:
|
||||||
|
return pa.bool_()
|
||||||
|
elif py_type == bytes:
|
||||||
|
return pa.binary()
|
||||||
|
raise TypeError(
|
||||||
|
f"Converting Pydantic type to Arrow Type: unsupported type {py_type}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION.major < 2:
|
||||||
|
|
||||||
|
def _pydantic_model_to_fields(model: pydantic.BaseModel) -> List[pa.Field]:
|
||||||
|
return [
|
||||||
|
_pydantic_to_field(name, field) for name, field in model.__fields__.items()
|
||||||
|
]
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def _pydantic_model_to_fields(model: pydantic.BaseModel) -> List[pa.Field]:
|
||||||
|
return [
|
||||||
|
_pydantic_to_field(name, field)
|
||||||
|
for name, field in model.model_fields.items()
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _pydantic_to_arrow_type(field: pydantic.fields.FieldInfo) -> pa.DataType:
|
||||||
|
"""Convert a Pydantic FieldInfo to Arrow DataType"""
|
||||||
|
if isinstance(field.annotation, _GenericAlias) or (
|
||||||
|
sys.version_info > (3, 9) and isinstance(field.annotation, types.GenericAlias)
|
||||||
|
):
|
||||||
|
origin = field.annotation.__origin__
|
||||||
|
args = field.annotation.__args__
|
||||||
|
if origin == list:
|
||||||
|
child = args[0]
|
||||||
|
return pa.list_(_py_type_to_arrow_type(child))
|
||||||
|
elif origin == Union:
|
||||||
|
if len(args) == 2 and args[1] == type(None):
|
||||||
|
return _py_type_to_arrow_type(args[0])
|
||||||
|
elif inspect.isclass(field.annotation):
|
||||||
|
if issubclass(field.annotation, pydantic.BaseModel):
|
||||||
|
# Struct
|
||||||
|
fields = _pydantic_model_to_fields(field.annotation)
|
||||||
|
return pa.struct(fields)
|
||||||
|
elif issubclass(field.annotation, FixedSizeListMixin):
|
||||||
|
return pa.list_(field.annotation.value_arrow_type(), field.annotation.dim())
|
||||||
|
return _py_type_to_arrow_type(field.annotation)
|
||||||
|
|
||||||
|
|
||||||
|
def is_nullable(field: pydantic.fields.FieldInfo) -> bool:
|
||||||
|
"""Check if a Pydantic FieldInfo is nullable."""
|
||||||
|
if isinstance(field.annotation, _GenericAlias):
|
||||||
|
origin = field.annotation.__origin__
|
||||||
|
args = field.annotation.__args__
|
||||||
|
if origin == Union:
|
||||||
|
if len(args) == 2 and args[1] == type(None):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _pydantic_to_field(name: str, field: pydantic.fields.FieldInfo) -> pa.Field:
|
||||||
|
"""Convert a Pydantic field to a PyArrow Field."""
|
||||||
|
dt = _pydantic_to_arrow_type(field)
|
||||||
|
return pa.field(name, dt, is_nullable(field))
|
||||||
|
|
||||||
|
|
||||||
|
def pydantic_to_schema(model: Type[pydantic.BaseModel]) -> pa.Schema:
|
||||||
|
"""Convert a Pydantic model to a PyArrow Schema.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
model : Type[pydantic.BaseModel]
|
||||||
|
The Pydantic BaseModel to convert to Arrow Schema.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pyarrow.Schema
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> from typing import List, Optional
|
||||||
|
>>> import pydantic
|
||||||
|
>>> from lancedb.pydantic import pydantic_to_schema
|
||||||
|
...
|
||||||
|
>>> class InnerModel(pydantic.BaseModel):
|
||||||
|
... a: str
|
||||||
|
... b: Optional[float]
|
||||||
|
>>>
|
||||||
|
>>> class FooModel(pydantic.BaseModel):
|
||||||
|
... id: int
|
||||||
|
... s: Optional[str] = None
|
||||||
|
... vec: List[float]
|
||||||
|
... li: List[int]
|
||||||
|
... inner: InnerModel
|
||||||
|
>>> schema = pydantic_to_schema(FooModel)
|
||||||
|
>>> assert schema == pa.schema([
|
||||||
|
... pa.field("id", pa.int64(), False),
|
||||||
|
... pa.field("s", pa.utf8(), True),
|
||||||
|
... pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
... pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
... pa.field("inner", pa.struct([
|
||||||
|
... pa.field("a", pa.utf8(), False),
|
||||||
|
... pa.field("b", pa.float64(), True),
|
||||||
|
... ]), False),
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
fields = _pydantic_model_to_fields(model)
|
||||||
|
return pa.schema(fields)
|
||||||
|
|
||||||
|
|
||||||
|
class LanceModel(pydantic.BaseModel):
|
||||||
|
"""
|
||||||
|
A Pydantic Model base class that can be converted to a LanceDB Table.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> from lancedb.pydantic import LanceModel, vector
|
||||||
|
>>>
|
||||||
|
>>> class TestModel(LanceModel):
|
||||||
|
... name: str
|
||||||
|
... vector: vector(2)
|
||||||
|
...
|
||||||
|
>>> db = lancedb.connect("/tmp")
|
||||||
|
>>> table = db.create_table("test", schema=TestModel.to_arrow_schema())
|
||||||
|
>>> table.add([
|
||||||
|
... TestModel(name="test", vector=[1.0, 2.0])
|
||||||
|
... ])
|
||||||
|
>>> table.search([0., 0.]).limit(1).to_pydantic(TestModel)
|
||||||
|
[TestModel(name='test', vector=FixedSizeList(dim=2))]
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def to_arrow_schema(cls):
|
||||||
|
return pydantic_to_schema(cls)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def field_names(cls) -> List[str]:
|
||||||
|
if PYDANTIC_VERSION.major < 2:
|
||||||
|
return list(cls.__fields__.keys())
|
||||||
|
return list(cls.model_fields.keys())
|
||||||
@@ -13,17 +13,20 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import List, Literal, Optional, Union
|
from typing import List, Literal, Optional, Type, Union
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
from pydantic import BaseModel
|
import pydantic
|
||||||
|
|
||||||
from .common import VECTOR_COLUMN_NAME
|
from .common import VECTOR_COLUMN_NAME
|
||||||
|
from .pydantic import LanceModel
|
||||||
|
from .util import safe_import_pandas
|
||||||
|
|
||||||
|
pd = safe_import_pandas()
|
||||||
|
|
||||||
|
|
||||||
class Query(BaseModel):
|
class Query(pydantic.BaseModel):
|
||||||
"""A Query"""
|
"""A Query"""
|
||||||
|
|
||||||
vector_column: str = VECTOR_COLUMN_NAME
|
vector_column: str = VECTOR_COLUMN_NAME
|
||||||
@@ -198,7 +201,7 @@ class LanceQueryBuilder:
|
|||||||
self._refine_factor = refine_factor
|
self._refine_factor = refine_factor
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def to_df(self) -> pd.DataFrame:
|
def to_df(self) -> "pd.DataFrame":
|
||||||
"""
|
"""
|
||||||
Execute the query and return the results as a pandas DataFrame.
|
Execute the query and return the results as a pandas DataFrame.
|
||||||
In addition to the selected columns, LanceDB also returns a vector
|
In addition to the selected columns, LanceDB also returns a vector
|
||||||
@@ -226,12 +229,30 @@ class LanceQueryBuilder:
|
|||||||
columns=self._columns,
|
columns=self._columns,
|
||||||
nprobes=self._nprobes,
|
nprobes=self._nprobes,
|
||||||
refine_factor=self._refine_factor,
|
refine_factor=self._refine_factor,
|
||||||
|
vector_column=self._vector_column,
|
||||||
)
|
)
|
||||||
return self._table._execute_query(query)
|
return self._table._execute_query(query)
|
||||||
|
|
||||||
|
def to_pydantic(self, model: Type[LanceModel]) -> List[LanceModel]:
|
||||||
|
"""Return the table as a list of pydantic models.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
model: Type[LanceModel]
|
||||||
|
The pydantic model to use.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
List[LanceModel]
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
model(**{k: v for k, v in row.items() if k in model.field_names()})
|
||||||
|
for row in self.to_arrow().to_pylist()
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
||||||
def to_arrow(self) -> pd.Table:
|
def to_arrow(self) -> pa.Table:
|
||||||
try:
|
try:
|
||||||
import tantivy
|
import tantivy
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|||||||
22
python/lancedb/remote/arrow.py
Normal file
22
python/lancedb/remote/arrow.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
|
||||||
|
def to_ipc_binary(table: pa.Table) -> bytes:
|
||||||
|
"""Serialize a PyArrow Table to IPC binary."""
|
||||||
|
sink = pa.BufferOutputStream()
|
||||||
|
with pa.ipc.new_stream(sink, table.schema) as writer:
|
||||||
|
writer.write_table(table)
|
||||||
|
return sink.getvalue().to_pybytes()
|
||||||
@@ -13,16 +13,19 @@
|
|||||||
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
from typing import Dict
|
from typing import Any, Callable, Dict, Optional, Union
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import attr
|
import attr
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from lancedb.common import Credential
|
from lancedb.common import Credential
|
||||||
from lancedb.remote import VectorQuery, VectorQueryResult
|
from lancedb.remote import VectorQuery, VectorQueryResult
|
||||||
from lancedb.remote.errors import LanceDBClientError
|
from lancedb.remote.errors import LanceDBClientError
|
||||||
|
|
||||||
|
ARROW_STREAM_CONTENT_TYPE = "application/vnd.apache.arrow.stream"
|
||||||
|
|
||||||
|
|
||||||
def _check_not_closed(f):
|
def _check_not_closed(f):
|
||||||
@functools.wraps(f)
|
@functools.wraps(f)
|
||||||
@@ -34,16 +37,27 @@ def _check_not_closed(f):
|
|||||||
return wrapped
|
return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
async def _read_ipc(resp: aiohttp.ClientResponse) -> pa.Table:
|
||||||
|
resp_body = await resp.read()
|
||||||
|
with pa.ipc.open_file(pa.BufferReader(resp_body)) as reader:
|
||||||
|
return reader.read_all()
|
||||||
|
|
||||||
|
|
||||||
@attr.define(slots=False)
|
@attr.define(slots=False)
|
||||||
class RestfulLanceDBClient:
|
class RestfulLanceDBClient:
|
||||||
db_name: str
|
db_name: str
|
||||||
region: str
|
region: str
|
||||||
api_key: Credential
|
api_key: Credential
|
||||||
|
host_override: Optional[str] = attr.field(default=None)
|
||||||
|
|
||||||
closed: bool = attr.field(default=False, init=False)
|
closed: bool = attr.field(default=False, init=False)
|
||||||
|
|
||||||
@functools.cached_property
|
@functools.cached_property
|
||||||
def session(self) -> aiohttp.ClientSession:
|
def session(self) -> aiohttp.ClientSession:
|
||||||
url = f"https://{self.db_name}.{self.region}.api.lancedb.com"
|
url = (
|
||||||
|
self.host_override
|
||||||
|
or f"https://{self.db_name}.{self.region}.api.lancedb.com"
|
||||||
|
)
|
||||||
return aiohttp.ClientSession(url)
|
return aiohttp.ClientSession(url)
|
||||||
|
|
||||||
async def close(self):
|
async def close(self):
|
||||||
@@ -52,32 +66,89 @@ class RestfulLanceDBClient:
|
|||||||
|
|
||||||
@functools.cached_property
|
@functools.cached_property
|
||||||
def headers(self) -> Dict[str, str]:
|
def headers(self) -> Dict[str, str]:
|
||||||
return {
|
headers = {
|
||||||
"x-api-key": self.api_key,
|
"x-api-key": self.api_key,
|
||||||
}
|
}
|
||||||
|
if self.region == "local": # Local test mode
|
||||||
|
headers["Host"] = f"{self.db_name}.{self.region}.api.lancedb.com"
|
||||||
|
if self.host_override:
|
||||||
|
headers["x-lancedb-database"] = self.db_name
|
||||||
|
return headers
|
||||||
|
|
||||||
@_check_not_closed
|
@staticmethod
|
||||||
async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
|
async def _check_status(resp: aiohttp.ClientResponse):
|
||||||
async with self.session.post(
|
if resp.status == 404:
|
||||||
f"/1/table/{table_name}/",
|
raise LanceDBClientError(f"Not found: {await resp.text()}")
|
||||||
json=query.dict(exclude_none=True),
|
elif 400 <= resp.status < 500:
|
||||||
headers=self.headers,
|
|
||||||
) as resp:
|
|
||||||
resp: aiohttp.ClientResponse = resp
|
|
||||||
if 400 <= resp.status < 500:
|
|
||||||
raise LanceDBClientError(
|
raise LanceDBClientError(
|
||||||
f"Bad Request: {resp.status}, error: {await resp.text()}"
|
f"Bad Request: {resp.status}, error: {await resp.text()}"
|
||||||
)
|
)
|
||||||
if 500 <= resp.status < 600:
|
elif 500 <= resp.status < 600:
|
||||||
raise LanceDBClientError(
|
raise LanceDBClientError(
|
||||||
f"Internal Server Error: {resp.status}, error: {await resp.text()}"
|
f"Internal Server Error: {resp.status}, error: {await resp.text()}"
|
||||||
)
|
)
|
||||||
if resp.status != 200:
|
elif resp.status != 200:
|
||||||
raise LanceDBClientError(
|
raise LanceDBClientError(
|
||||||
f"Unknown Error: {resp.status}, error: {await resp.text()}"
|
f"Unknown Error: {resp.status}, error: {await resp.text()}"
|
||||||
)
|
)
|
||||||
|
|
||||||
resp_body = await resp.read()
|
@_check_not_closed
|
||||||
with pa.ipc.open_file(pa.BufferReader(resp_body)) as reader:
|
async def get(self, uri: str, params: Union[Dict[str, Any], BaseModel] = None):
|
||||||
tbl = reader.read_all()
|
"""Send a GET request and returns the deserialized response payload."""
|
||||||
|
if isinstance(params, BaseModel):
|
||||||
|
params: Dict[str, Any] = params.dict(exclude_none=True)
|
||||||
|
async with self.session.get(uri, params=params, headers=self.headers) as resp:
|
||||||
|
await self._check_status(resp)
|
||||||
|
return await resp.json()
|
||||||
|
|
||||||
|
@_check_not_closed
|
||||||
|
async def post(
|
||||||
|
self,
|
||||||
|
uri: str,
|
||||||
|
data: Optional[Union[Dict[str, Any], BaseModel, bytes]] = None,
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
content_type: Optional[str] = None,
|
||||||
|
deserialize: Callable = lambda resp: resp.json(),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Send a POST request and returns the deserialized response payload.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri : str
|
||||||
|
The uri to send the POST request to.
|
||||||
|
data: Union[Dict[str, Any], BaseModel]
|
||||||
|
|
||||||
|
"""
|
||||||
|
if isinstance(data, BaseModel):
|
||||||
|
data: Dict[str, Any] = data.dict(exclude_none=True)
|
||||||
|
if isinstance(data, bytes):
|
||||||
|
req_kwargs = {"data": data}
|
||||||
|
else:
|
||||||
|
req_kwargs = {"json": data}
|
||||||
|
|
||||||
|
headers = self.headers.copy()
|
||||||
|
if content_type is not None:
|
||||||
|
headers["content-type"] = content_type
|
||||||
|
async with self.session.post(
|
||||||
|
uri,
|
||||||
|
headers=headers,
|
||||||
|
params=params,
|
||||||
|
**req_kwargs,
|
||||||
|
) as resp:
|
||||||
|
resp: aiohttp.ClientResponse = resp
|
||||||
|
await self._check_status(resp)
|
||||||
|
return await deserialize(resp)
|
||||||
|
|
||||||
|
@_check_not_closed
|
||||||
|
async def list_tables(self):
|
||||||
|
"""List all tables in the database."""
|
||||||
|
json = await self.get("/v1/table/", {})
|
||||||
|
return json["tables"]
|
||||||
|
|
||||||
|
@_check_not_closed
|
||||||
|
async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
|
||||||
|
"""Query a table."""
|
||||||
|
tbl = await self.post(
|
||||||
|
f"/v1/table/{table_name}/query/", query, deserialize=_read_ipc
|
||||||
|
)
|
||||||
return VectorQueryResult(tbl)
|
return VectorQueryResult(tbl)
|
||||||
|
|||||||
@@ -11,35 +11,52 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from typing import List
|
import asyncio
|
||||||
|
import uuid
|
||||||
|
from typing import List, Optional
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
from lancedb.common import DATA
|
from lancedb.common import DATA
|
||||||
from lancedb.db import DBConnection
|
from lancedb.db import DBConnection
|
||||||
from lancedb.table import Table
|
from lancedb.table import Table, _sanitize_data
|
||||||
|
|
||||||
from .client import RestfulLanceDBClient
|
from .arrow import to_ipc_binary
|
||||||
|
from .client import ARROW_STREAM_CONTENT_TYPE, RestfulLanceDBClient
|
||||||
|
|
||||||
|
|
||||||
class RemoteDBConnection(DBConnection):
|
class RemoteDBConnection(DBConnection):
|
||||||
"""A connection to a remote LanceDB database."""
|
"""A connection to a remote LanceDB database."""
|
||||||
|
|
||||||
def __init__(self, db_url: str, api_key: str, region: str):
|
def __init__(
|
||||||
|
self,
|
||||||
|
db_url: str,
|
||||||
|
api_key: str,
|
||||||
|
region: str,
|
||||||
|
host_override: Optional[str] = None,
|
||||||
|
):
|
||||||
"""Connect to a remote LanceDB database."""
|
"""Connect to a remote LanceDB database."""
|
||||||
parsed = urlparse(db_url)
|
parsed = urlparse(db_url)
|
||||||
if parsed.scheme != "db":
|
if parsed.scheme != "db":
|
||||||
raise ValueError(f"Invalid scheme: {parsed.scheme}, only accepts db://")
|
raise ValueError(f"Invalid scheme: {parsed.scheme}, only accepts db://")
|
||||||
self.db_name = parsed.netloc
|
self.db_name = parsed.netloc
|
||||||
self.api_key = api_key
|
self.api_key = api_key
|
||||||
self._client = RestfulLanceDBClient(self.db_name, region, api_key)
|
self._client = RestfulLanceDBClient(
|
||||||
|
self.db_name, region, api_key, host_override
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
self._loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
self._loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return f"RemoveConnect(name={self.db_name})"
|
return f"RemoveConnect(name={self.db_name})"
|
||||||
|
|
||||||
def table_names(self) -> List[str]:
|
def table_names(self) -> List[str]:
|
||||||
raise NotImplementedError
|
"""List the names of all tables in the database."""
|
||||||
|
result = self._loop.run_until_complete(self._client.list_tables())
|
||||||
|
return result
|
||||||
|
|
||||||
def open_table(self, name: str) -> Table:
|
def open_table(self, name: str) -> Table:
|
||||||
"""Open a Lance Table in the database.
|
"""Open a Lance Table in the database.
|
||||||
@@ -64,8 +81,31 @@ class RemoteDBConnection(DBConnection):
|
|||||||
name: str,
|
name: str,
|
||||||
data: DATA = None,
|
data: DATA = None,
|
||||||
schema: pa.Schema = None,
|
schema: pa.Schema = None,
|
||||||
mode: str = "create",
|
|
||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> Table:
|
) -> Table:
|
||||||
raise NotImplementedError
|
if data is None and schema is None:
|
||||||
|
raise ValueError("Either data or schema must be provided.")
|
||||||
|
if data is not None:
|
||||||
|
data = _sanitize_data(
|
||||||
|
data, schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if schema is None:
|
||||||
|
raise ValueError("Either data or schema must be provided")
|
||||||
|
data = pa.Table.from_pylist([], schema=schema)
|
||||||
|
|
||||||
|
from .table import RemoteTable
|
||||||
|
|
||||||
|
data = to_ipc_binary(data)
|
||||||
|
request_id = uuid.uuid4().hex
|
||||||
|
|
||||||
|
self._loop.run_until_complete(
|
||||||
|
self._client.post(
|
||||||
|
f"/v1/table/{name}/create/",
|
||||||
|
data=data,
|
||||||
|
params={"request_id": request_id},
|
||||||
|
content_type=ARROW_STREAM_CONTENT_TYPE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return RemoteTable(self, name)
|
||||||
|
|||||||
@@ -11,15 +11,19 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import asyncio
|
import uuid
|
||||||
|
from functools import cached_property
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
from lance import json_to_schema
|
||||||
|
|
||||||
from lancedb.common import DATA, VEC, VECTOR_COLUMN_NAME
|
from lancedb.common import DATA, VEC, VECTOR_COLUMN_NAME
|
||||||
|
|
||||||
from ..query import LanceQueryBuilder, Query
|
from ..query import LanceQueryBuilder
|
||||||
from ..table import Query, Table
|
from ..table import Query, Table, _sanitize_data
|
||||||
|
from .arrow import to_ipc_binary
|
||||||
|
from .client import ARROW_STREAM_CONTENT_TYPE
|
||||||
from .db import RemoteDBConnection
|
from .db import RemoteDBConnection
|
||||||
|
|
||||||
|
|
||||||
@@ -29,10 +33,16 @@ class RemoteTable(Table):
|
|||||||
self._name = name
|
self._name = name
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return f"RemoteTable({self._conn.db_name}.{self.name})"
|
return f"RemoteTable({self._conn.db_name}.{self._name})"
|
||||||
|
|
||||||
|
@cached_property
|
||||||
def schema(self) -> pa.Schema:
|
def schema(self) -> pa.Schema:
|
||||||
raise NotImplementedError
|
"""Return the schema of the table."""
|
||||||
|
resp = self._conn._loop.run_until_complete(
|
||||||
|
self._conn._client.post(f"/v1/table/{self._name}/describe/")
|
||||||
|
)
|
||||||
|
schema = json_to_schema(resp["schema"])
|
||||||
|
return schema
|
||||||
|
|
||||||
def to_arrow(self) -> pa.Table:
|
def to_arrow(self) -> pa.Table:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
@@ -54,7 +64,21 @@ class RemoteTable(Table):
|
|||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> int:
|
) -> int:
|
||||||
raise NotImplementedError
|
data = _sanitize_data(
|
||||||
|
data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
|
)
|
||||||
|
payload = to_ipc_binary(data)
|
||||||
|
|
||||||
|
request_id = uuid.uuid4().hex
|
||||||
|
|
||||||
|
self._conn._loop.run_until_complete(
|
||||||
|
self._conn._client.post(
|
||||||
|
f"/v1/table/{self._name}/insert/",
|
||||||
|
data=payload,
|
||||||
|
params={"request_id": request_id, "mode": mode},
|
||||||
|
content_type=ARROW_STREAM_CONTENT_TYPE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def search(
|
def search(
|
||||||
self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
|
self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
|
||||||
@@ -62,9 +86,8 @@ class RemoteTable(Table):
|
|||||||
return LanceQueryBuilder(self, query, vector_column)
|
return LanceQueryBuilder(self, query, vector_column)
|
||||||
|
|
||||||
def _execute_query(self, query: Query) -> pa.Table:
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
try:
|
|
||||||
loop = asyncio.get_running_loop()
|
|
||||||
except RuntimeError:
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
result = self._conn._client.query(self._name, query)
|
result = self._conn._client.query(self._name, query)
|
||||||
return loop.run_until_complete(result).to_arrow()
|
return self._conn._loop.run_until_complete(result).to_arrow()
|
||||||
|
|
||||||
|
def delete(self, predicate: str):
|
||||||
|
raise NotImplementedError
|
||||||
|
|||||||
41
python/lancedb/schema.py
Normal file
41
python/lancedb/schema.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Schema related utilities."""
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
|
||||||
|
def vector(dimension: int, value_type: pa.DataType = pa.float32()) -> pa.DataType:
|
||||||
|
"""A help function to create a vector type.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
dimension: The dimension of the vector.
|
||||||
|
value_type: pa.DataType, optional
|
||||||
|
The type of the value in the vector.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
A PyArrow DataType for vectors.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> import pyarrow as pa
|
||||||
|
>>> import lancedb
|
||||||
|
>>> schema = pa.schema([
|
||||||
|
... pa.field("id", pa.int64()),
|
||||||
|
... pa.field("vector", lancedb.vector(756)),
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
return pa.list_(value_type, dimension)
|
||||||
@@ -16,35 +16,41 @@ from __future__ import annotations
|
|||||||
import os
|
import os
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from typing import List, Union
|
from typing import Iterable, List, Union
|
||||||
|
|
||||||
import lance
|
import lance
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pyarrow.compute as pc
|
import pyarrow.compute as pc
|
||||||
import pyarrow.fs
|
|
||||||
from lance import LanceDataset
|
from lance import LanceDataset
|
||||||
from lance.vector import vec_to_table
|
from lance.vector import vec_to_table
|
||||||
|
|
||||||
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
||||||
|
from .pydantic import LanceModel
|
||||||
from .query import LanceFtsQueryBuilder, LanceQueryBuilder, Query
|
from .query import LanceFtsQueryBuilder, LanceQueryBuilder, Query
|
||||||
|
from .util import fs_from_uri, safe_import_pandas
|
||||||
|
|
||||||
|
pd = safe_import_pandas()
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_data(data, schema, on_bad_vectors, fill_value):
|
def _sanitize_data(data, schema, on_bad_vectors, fill_value):
|
||||||
if isinstance(data, list):
|
if isinstance(data, list):
|
||||||
|
# convert to list of dict if data is a bunch of LanceModels
|
||||||
|
if isinstance(data[0], LanceModel):
|
||||||
|
schema = data[0].__class__.to_arrow_schema()
|
||||||
|
data = [dict(d) for d in data]
|
||||||
data = pa.Table.from_pylist(data)
|
data = pa.Table.from_pylist(data)
|
||||||
data = _sanitize_schema(
|
data = _sanitize_schema(
|
||||||
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
)
|
)
|
||||||
if isinstance(data, dict):
|
if isinstance(data, dict):
|
||||||
data = vec_to_table(data)
|
data = vec_to_table(data)
|
||||||
if isinstance(data, pd.DataFrame):
|
if pd is not None and isinstance(data, pd.DataFrame):
|
||||||
data = pa.Table.from_pandas(data)
|
data = pa.Table.from_pandas(data)
|
||||||
data = _sanitize_schema(
|
data = _sanitize_schema(
|
||||||
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
)
|
)
|
||||||
if not isinstance(data, pa.Table):
|
if not isinstance(data, (pa.Table, Iterable)):
|
||||||
raise TypeError(f"Unsupported data type: {type(data)}")
|
raise TypeError(f"Unsupported data type: {type(data)}")
|
||||||
return data
|
return data
|
||||||
|
|
||||||
@@ -74,7 +80,6 @@ class Table(ABC):
|
|||||||
Can append new data with [Table.add()][lancedb.table.Table.add].
|
Can append new data with [Table.add()][lancedb.table.Table.add].
|
||||||
|
|
||||||
>>> table.add([{"vector": [0.5, 1.3], "b": 4}])
|
>>> table.add([{"vector": [0.5, 1.3], "b": 4}])
|
||||||
2
|
|
||||||
|
|
||||||
Can query the table with [Table.search][lancedb.table.Table.search].
|
Can query the table with [Table.search][lancedb.table.Table.search].
|
||||||
|
|
||||||
@@ -95,7 +100,7 @@ class Table(ABC):
|
|||||||
"""
|
"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def to_pandas(self) -> pd.DataFrame:
|
def to_pandas(self):
|
||||||
"""Return the table as a pandas DataFrame.
|
"""Return the table as a pandas DataFrame.
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
@@ -151,7 +156,7 @@ class Table(ABC):
|
|||||||
mode: str = "append",
|
mode: str = "append",
|
||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> int:
|
):
|
||||||
"""Add more data to the [Table](Table).
|
"""Add more data to the [Table](Table).
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
@@ -167,10 +172,6 @@ class Table(ABC):
|
|||||||
fill_value: float, default 0.
|
fill_value: float, default 0.
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
|
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
int
|
|
||||||
The number of vectors in the table.
|
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@@ -202,6 +203,51 @@ class Table(ABC):
|
|||||||
def _execute_query(self, query: Query) -> pa.Table:
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def delete(self, where: str):
|
||||||
|
"""Delete rows from the table.
|
||||||
|
|
||||||
|
This can be used to delete a single row, many rows, all rows, or
|
||||||
|
sometimes no rows (if your predicate matches nothing).
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
where: str
|
||||||
|
The SQL where clause to use when deleting rows. For example, 'x = 2'
|
||||||
|
or 'x IN (1, 2, 3)'. The filter must not be empty, or it will error.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> import pandas as pd
|
||||||
|
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", data)
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 2 [3.0, 4.0]
|
||||||
|
2 3 [5.0, 6.0]
|
||||||
|
>>> table.delete("x = 2")
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 3 [5.0, 6.0]
|
||||||
|
|
||||||
|
If you have a list of values to delete, you can combine them into a
|
||||||
|
stringified list and use the `IN` operator:
|
||||||
|
|
||||||
|
>>> to_remove = [1, 5]
|
||||||
|
>>> to_remove = ", ".join([str(v) for v in to_remove])
|
||||||
|
>>> to_remove
|
||||||
|
'1, 5'
|
||||||
|
>>> table.delete(f"x IN ({to_remove})")
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 3 [5.0, 6.0]
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class LanceTable(Table):
|
class LanceTable(Table):
|
||||||
"""
|
"""
|
||||||
@@ -262,7 +308,6 @@ class LanceTable(Table):
|
|||||||
vector type
|
vector type
|
||||||
0 [1.1, 0.9] vector
|
0 [1.1, 0.9] vector
|
||||||
>>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
|
>>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
|
||||||
2
|
|
||||||
>>> table.version
|
>>> table.version
|
||||||
2
|
2
|
||||||
>>> table.checkout(1)
|
>>> table.checkout(1)
|
||||||
@@ -289,7 +334,7 @@ class LanceTable(Table):
|
|||||||
"""Return the first n rows of the table."""
|
"""Return the first n rows of the table."""
|
||||||
return self._dataset.head(n)
|
return self._dataset.head(n)
|
||||||
|
|
||||||
def to_pandas(self) -> pd.DataFrame:
|
def to_pandas(self) -> "pd.DataFrame":
|
||||||
"""Return the table as a pandas DataFrame.
|
"""Return the table as a pandas DataFrame.
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
@@ -364,7 +409,7 @@ class LanceTable(Table):
|
|||||||
mode: str = "append",
|
mode: str = "append",
|
||||||
on_bad_vectors: str = "error",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> int:
|
):
|
||||||
"""Add data to the table.
|
"""Add data to the table.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
@@ -391,7 +436,6 @@ class LanceTable(Table):
|
|||||||
)
|
)
|
||||||
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
||||||
self._reset_dataset()
|
self._reset_dataset()
|
||||||
return len(self)
|
|
||||||
|
|
||||||
def search(
|
def search(
|
||||||
self, query: Union[VEC, str], vector_column_name=VECTOR_COLUMN_NAME
|
self, query: Union[VEC, str], vector_column_name=VECTOR_COLUMN_NAME
|
||||||
@@ -483,44 +527,21 @@ class LanceTable(Table):
|
|||||||
if schema is None:
|
if schema is None:
|
||||||
raise ValueError("Either data or schema must be provided")
|
raise ValueError("Either data or schema must be provided")
|
||||||
data = pa.Table.from_pylist([], schema=schema)
|
data = pa.Table.from_pylist([], schema=schema)
|
||||||
lance.write_dataset(data, tbl._dataset_uri, mode=mode)
|
lance.write_dataset(data, tbl._dataset_uri, schema=schema, mode=mode)
|
||||||
return LanceTable(db, name)
|
return LanceTable(db, name)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def open(cls, db, name):
|
def open(cls, db, name):
|
||||||
tbl = cls(db, name)
|
tbl = cls(db, name)
|
||||||
if not os.path.exists(tbl._dataset_uri):
|
fs, path = fs_from_uri(tbl._dataset_uri)
|
||||||
|
file_info = fs.get_file_info(path)
|
||||||
|
if file_info.type != pa.fs.FileType.Directory:
|
||||||
raise FileNotFoundError(
|
raise FileNotFoundError(
|
||||||
f"Table {name} does not exist. Please first call db.create_table({name}, data)"
|
f"Table {name} does not exist. Please first call db.create_table({name}, data)"
|
||||||
)
|
)
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
def delete(self, where: str):
|
def delete(self, where: str):
|
||||||
"""Delete rows from the table.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
where: str
|
|
||||||
The SQL where clause to use when deleting rows.
|
|
||||||
|
|
||||||
Examples
|
|
||||||
--------
|
|
||||||
>>> import lancedb
|
|
||||||
>>> import pandas as pd
|
|
||||||
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
|
|
||||||
>>> db = lancedb.connect("./.lancedb")
|
|
||||||
>>> table = db.create_table("my_table", data)
|
|
||||||
>>> table.to_pandas()
|
|
||||||
x vector
|
|
||||||
0 1 [1.0, 2.0]
|
|
||||||
1 2 [3.0, 4.0]
|
|
||||||
2 3 [5.0, 6.0]
|
|
||||||
>>> table.delete("x = 2")
|
|
||||||
>>> table.to_pandas()
|
|
||||||
x vector
|
|
||||||
0 1 [1.0, 2.0]
|
|
||||||
1 3 [5.0, 6.0]
|
|
||||||
"""
|
|
||||||
self._dataset.delete(where)
|
self._dataset.delete(where)
|
||||||
|
|
||||||
def _execute_query(self, query: Query) -> pa.Table:
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
|
|||||||
@@ -11,8 +11,12 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
from typing import Tuple
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import pyarrow.fs as pa_fs
|
||||||
|
|
||||||
|
|
||||||
def get_uri_scheme(uri: str) -> str:
|
def get_uri_scheme(uri: str) -> str:
|
||||||
"""
|
"""
|
||||||
@@ -59,3 +63,24 @@ def get_uri_location(uri: str) -> str:
|
|||||||
return parsed.path
|
return parsed.path
|
||||||
else:
|
else:
|
||||||
return parsed.netloc + parsed.path
|
return parsed.netloc + parsed.path
|
||||||
|
|
||||||
|
|
||||||
|
def fs_from_uri(uri: str) -> Tuple[pa_fs.FileSystem, str]:
|
||||||
|
"""
|
||||||
|
Get a PyArrow FileSystem from a URI, handling extra environment variables.
|
||||||
|
"""
|
||||||
|
if get_uri_scheme(uri) == "s3":
|
||||||
|
fs = pa_fs.S3FileSystem(endpoint_override=os.environ.get("AWS_ENDPOINT"))
|
||||||
|
path = get_uri_location(uri)
|
||||||
|
return fs, path
|
||||||
|
|
||||||
|
return pa_fs.FileSystem.from_uri(uri)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_import_pandas():
|
||||||
|
try:
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
return pd
|
||||||
|
except ImportError:
|
||||||
|
return None
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "lancedb"
|
name = "lancedb"
|
||||||
version = "0.1.10"
|
version = "0.1.16"
|
||||||
dependencies = ["pylance~=0.5.0", "ratelimiter", "retry", "tqdm", "aiohttp", "pydantic", "attr"]
|
dependencies = ["pylance==0.5.10", "ratelimiter", "retry", "tqdm", "aiohttp", "pydantic", "attr", "semver"]
|
||||||
description = "lancedb"
|
description = "lancedb"
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
||||||
@@ -37,7 +37,7 @@ repository = "https://github.com/lancedb/lancedb"
|
|||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
tests = [
|
tests = [
|
||||||
"pytest", "pytest-mock", "pytest-asyncio"
|
"pandas>=1.4", "pytest", "pytest-mock", "pytest-asyncio"
|
||||||
]
|
]
|
||||||
dev = [
|
dev = [
|
||||||
"ruff", "pre-commit", "black"
|
"ruff", "pre-commit", "black"
|
||||||
@@ -52,3 +52,6 @@ requires = [
|
|||||||
"wheel",
|
"wheel",
|
||||||
]
|
]
|
||||||
build-backend = "setuptools.build_meta"
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.isort]
|
||||||
|
profile = "black"
|
||||||
|
|||||||
@@ -13,6 +13,7 @@
|
|||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import lancedb
|
import lancedb
|
||||||
@@ -75,6 +76,32 @@ def test_ingest_pd(tmp_path):
|
|||||||
assert db.open_table("test").name == db["test"].name
|
assert db.open_table("test").name == db["test"].name
|
||||||
|
|
||||||
|
|
||||||
|
def test_ingest_record_batch_iterator(tmp_path):
|
||||||
|
def batch_reader():
|
||||||
|
for i in range(5):
|
||||||
|
yield pa.RecordBatch.from_arrays(
|
||||||
|
[
|
||||||
|
pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
pa.array(["foo", "bar"]),
|
||||||
|
pa.array([10.0, 20.0]),
|
||||||
|
],
|
||||||
|
["vector", "item", "price"],
|
||||||
|
)
|
||||||
|
|
||||||
|
db = lancedb.connect(tmp_path)
|
||||||
|
tbl = db.create_table(
|
||||||
|
"test",
|
||||||
|
batch_reader(),
|
||||||
|
schema=pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
pa.field("item", pa.utf8()),
|
||||||
|
pa.field("price", pa.float32()),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_create_mode(tmp_path):
|
def test_create_mode(tmp_path):
|
||||||
db = lancedb.connect(tmp_path)
|
db = lancedb.connect(tmp_path)
|
||||||
data = pd.DataFrame(
|
data = pd.DataFrame(
|
||||||
@@ -122,6 +149,10 @@ def test_delete_table(tmp_path):
|
|||||||
db.create_table("test", data=data)
|
db.create_table("test", data=data)
|
||||||
assert db.table_names() == ["test"]
|
assert db.table_names() == ["test"]
|
||||||
|
|
||||||
|
# dropping a table that does not exist should pass
|
||||||
|
# if ignore_missing=True
|
||||||
|
db.drop_table("does_not_exist", ignore_missing=True)
|
||||||
|
|
||||||
|
|
||||||
def test_empty_or_nonexistent_table(tmp_path):
|
def test_empty_or_nonexistent_table(tmp_path):
|
||||||
db = lancedb.connect(tmp_path)
|
db = lancedb.connect(tmp_path)
|
||||||
@@ -131,6 +162,9 @@ def test_empty_or_nonexistent_table(tmp_path):
|
|||||||
with pytest.raises(Exception):
|
with pytest.raises(Exception):
|
||||||
db.open_table("does_not_exist")
|
db.open_table("does_not_exist")
|
||||||
|
|
||||||
|
schema = pa.schema([pa.field("a", pa.int32())])
|
||||||
|
db.create_table("test", schema=schema)
|
||||||
|
|
||||||
|
|
||||||
def test_replace_index(tmp_path):
|
def test_replace_index(tmp_path):
|
||||||
db = lancedb.connect(uri=tmp_path)
|
db = lancedb.connect(uri=tmp_path)
|
||||||
|
|||||||
175
python/tests/test_pydantic.py
Normal file
175
python/tests/test_pydantic.py
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
import pydantic
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from lancedb.pydantic import PYDANTIC_VERSION, LanceModel, pydantic_to_schema, vector
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(
|
||||||
|
sys.version_info < (3, 9),
|
||||||
|
reason="using native type alias requires python3.9 or higher",
|
||||||
|
)
|
||||||
|
def test_pydantic_to_arrow():
|
||||||
|
class StructModel(pydantic.BaseModel):
|
||||||
|
a: str
|
||||||
|
b: Optional[float]
|
||||||
|
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
id: int
|
||||||
|
s: str
|
||||||
|
vec: list[float]
|
||||||
|
li: List[int]
|
||||||
|
opt: Optional[str] = None
|
||||||
|
st: StructModel
|
||||||
|
# d: dict
|
||||||
|
|
||||||
|
m = TestModel(
|
||||||
|
id=1, s="hello", vec=[1.0, 2.0, 3.0], li=[2, 3, 4], st=StructModel(a="a", b=1.0)
|
||||||
|
)
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
|
||||||
|
expect_schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("id", pa.int64(), False),
|
||||||
|
pa.field("s", pa.utf8(), False),
|
||||||
|
pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
pa.field("opt", pa.utf8(), True),
|
||||||
|
pa.field(
|
||||||
|
"st",
|
||||||
|
pa.struct(
|
||||||
|
[pa.field("a", pa.utf8(), False), pa.field("b", pa.float64(), True)]
|
||||||
|
),
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
assert schema == expect_schema
|
||||||
|
|
||||||
|
|
||||||
|
def test_pydantic_to_arrow_py38():
|
||||||
|
class StructModel(pydantic.BaseModel):
|
||||||
|
a: str
|
||||||
|
b: Optional[float]
|
||||||
|
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
id: int
|
||||||
|
s: str
|
||||||
|
vec: List[float]
|
||||||
|
li: List[int]
|
||||||
|
opt: Optional[str] = None
|
||||||
|
st: StructModel
|
||||||
|
# d: dict
|
||||||
|
|
||||||
|
m = TestModel(
|
||||||
|
id=1, s="hello", vec=[1.0, 2.0, 3.0], li=[2, 3, 4], st=StructModel(a="a", b=1.0)
|
||||||
|
)
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
|
||||||
|
expect_schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("id", pa.int64(), False),
|
||||||
|
pa.field("s", pa.utf8(), False),
|
||||||
|
pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
pa.field("opt", pa.utf8(), True),
|
||||||
|
pa.field(
|
||||||
|
"st",
|
||||||
|
pa.struct(
|
||||||
|
[pa.field("a", pa.utf8(), False), pa.field("b", pa.float64(), True)]
|
||||||
|
),
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
assert schema == expect_schema
|
||||||
|
|
||||||
|
|
||||||
|
def test_fixed_size_list_field():
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
vec: vector(16)
|
||||||
|
li: List[int]
|
||||||
|
|
||||||
|
data = TestModel(vec=list(range(16)), li=[1, 2, 3])
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
assert json.loads(data.model_dump_json()) == {
|
||||||
|
"vec": list(range(16)),
|
||||||
|
"li": [1, 2, 3],
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
assert data.dict() == {
|
||||||
|
"vec": list(range(16)),
|
||||||
|
"li": [1, 2, 3],
|
||||||
|
}
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
assert schema == pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vec", pa.list_(pa.float32(), 16), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
json_schema = TestModel.model_json_schema()
|
||||||
|
else:
|
||||||
|
json_schema = TestModel.schema()
|
||||||
|
|
||||||
|
assert json_schema == {
|
||||||
|
"properties": {
|
||||||
|
"vec": {
|
||||||
|
"items": {"type": "number"},
|
||||||
|
"maxItems": 16,
|
||||||
|
"minItems": 16,
|
||||||
|
"title": "Vec",
|
||||||
|
"type": "array",
|
||||||
|
},
|
||||||
|
"li": {"items": {"type": "integer"}, "title": "Li", "type": "array"},
|
||||||
|
},
|
||||||
|
"required": ["vec", "li"],
|
||||||
|
"title": "TestModel",
|
||||||
|
"type": "object",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_fixed_size_list_validation():
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
vec: vector(8)
|
||||||
|
|
||||||
|
with pytest.raises(pydantic.ValidationError):
|
||||||
|
TestModel(vec=range(9))
|
||||||
|
|
||||||
|
with pytest.raises(pydantic.ValidationError):
|
||||||
|
TestModel(vec=range(7))
|
||||||
|
|
||||||
|
TestModel(vec=range(8))
|
||||||
|
|
||||||
|
|
||||||
|
def test_lance_model():
|
||||||
|
class TestModel(LanceModel):
|
||||||
|
vec: vector(16)
|
||||||
|
li: List[int]
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
assert schema == TestModel.to_arrow_schema()
|
||||||
|
assert TestModel.field_names() == ["vec", "li"]
|
||||||
@@ -20,6 +20,7 @@ import pyarrow as pa
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from lancedb.db import LanceDBConnection
|
from lancedb.db import LanceDBConnection
|
||||||
|
from lancedb.pydantic import LanceModel, vector
|
||||||
from lancedb.query import LanceQueryBuilder, Query
|
from lancedb.query import LanceQueryBuilder, Query
|
||||||
from lancedb.table import LanceTable
|
from lancedb.table import LanceTable
|
||||||
|
|
||||||
@@ -64,6 +65,24 @@ def table(tmp_path) -> MockTable:
|
|||||||
return MockTable(tmp_path)
|
return MockTable(tmp_path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cast(table):
|
||||||
|
class TestModel(LanceModel):
|
||||||
|
vector: vector(2)
|
||||||
|
id: int
|
||||||
|
str_field: str
|
||||||
|
float_field: float
|
||||||
|
|
||||||
|
q = LanceQueryBuilder(table, [0, 0], "vector").limit(1)
|
||||||
|
results = q.to_pydantic(TestModel)
|
||||||
|
assert len(results) == 1
|
||||||
|
r0 = results[0]
|
||||||
|
assert isinstance(r0, TestModel)
|
||||||
|
assert r0.id == 1
|
||||||
|
assert r0.vector == [1, 2]
|
||||||
|
assert r0.str_field == "a"
|
||||||
|
assert r0.float_field == 1.0
|
||||||
|
|
||||||
|
|
||||||
def test_query_builder(table):
|
def test_query_builder(table):
|
||||||
df = LanceQueryBuilder(table, [0, 0], "vector").limit(1).select(["id"]).to_df()
|
df = LanceQueryBuilder(table, [0, 0], "vector").limit(1).select(["id"]).to_df()
|
||||||
assert df["id"].values[0] == 1
|
assert df["id"].values[0] == 1
|
||||||
@@ -119,6 +138,7 @@ def test_query_builder_with_different_vector_column():
|
|||||||
columns=["b"],
|
columns=["b"],
|
||||||
nprobes=20,
|
nprobes=20,
|
||||||
refine_factor=None,
|
refine_factor=None,
|
||||||
|
vector_column="foo_vector",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -13,15 +13,16 @@
|
|||||||
|
|
||||||
import functools
|
import functools
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
from unittest.mock import PropertyMock, patch
|
from unittest.mock import PropertyMock, patch
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
from lance.vector import vec_to_table
|
|
||||||
|
|
||||||
from lancedb.db import LanceDBConnection
|
from lancedb.db import LanceDBConnection
|
||||||
|
from lancedb.pydantic import LanceModel, vector
|
||||||
from lancedb.table import LanceTable
|
from lancedb.table import LanceTable
|
||||||
|
|
||||||
|
|
||||||
@@ -135,12 +136,23 @@ def test_add(db):
|
|||||||
_add(table, schema)
|
_add(table, schema)
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_pydantic_model(db):
|
||||||
|
class TestModel(LanceModel):
|
||||||
|
vector: vector(16)
|
||||||
|
li: List[int]
|
||||||
|
|
||||||
|
data = TestModel(vector=list(range(16)), li=[1, 2, 3])
|
||||||
|
table = LanceTable.create(db, "test", data=[data])
|
||||||
|
assert len(table) == 1
|
||||||
|
assert table.schema == TestModel.to_arrow_schema()
|
||||||
|
|
||||||
|
|
||||||
def _add(table, schema):
|
def _add(table, schema):
|
||||||
# table = LanceTable(db, "test")
|
# table = LanceTable(db, "test")
|
||||||
assert len(table) == 2
|
assert len(table) == 2
|
||||||
|
|
||||||
count = table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
|
table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
|
||||||
assert count == 3
|
assert len(table) == 3
|
||||||
|
|
||||||
expected = pa.Table.from_arrays(
|
expected = pa.Table.from_arrays(
|
||||||
[
|
[
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb-node"
|
name = "vectordb-node"
|
||||||
version = "0.1.10"
|
version = "0.1.19"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
@@ -13,9 +13,15 @@ crate-type = ["cdylib"]
|
|||||||
arrow-array = { workspace = true }
|
arrow-array = { workspace = true }
|
||||||
arrow-ipc = { workspace = true }
|
arrow-ipc = { workspace = true }
|
||||||
arrow-schema = { workspace = true }
|
arrow-schema = { workspace = true }
|
||||||
|
conv = "0.3.3"
|
||||||
once_cell = "1"
|
once_cell = "1"
|
||||||
futures = "0.3"
|
futures = "0.3"
|
||||||
|
half = { workspace = true }
|
||||||
lance = { workspace = true }
|
lance = { workspace = true }
|
||||||
vectordb = { path = "../../vectordb" }
|
vectordb = { path = "../../vectordb" }
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
||||||
|
object_store = { workspace = true, features = ["aws"] }
|
||||||
|
snafu = { workspace = true }
|
||||||
|
async-trait = "0"
|
||||||
|
env_logger = "0"
|
||||||
|
|||||||
@@ -17,21 +17,26 @@ use std::ops::Deref;
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use arrow_array::cast::as_list_array;
|
use arrow_array::cast::as_list_array;
|
||||||
use arrow_array::{Array, FixedSizeListArray, RecordBatch};
|
use arrow_array::{Array, ArrayRef, FixedSizeListArray, RecordBatch};
|
||||||
use arrow_ipc::reader::FileReader;
|
use arrow_ipc::reader::FileReader;
|
||||||
|
use arrow_ipc::writer::FileWriter;
|
||||||
use arrow_schema::{DataType, Field, Schema};
|
use arrow_schema::{DataType, Field, Schema};
|
||||||
use lance::arrow::{FixedSizeListArrayExt, RecordBatchExt};
|
use lance::arrow::{FixedSizeListArrayExt, RecordBatchExt};
|
||||||
|
use vectordb::table::VECTOR_COLUMN_NAME;
|
||||||
|
|
||||||
pub(crate) fn convert_record_batch(record_batch: RecordBatch) -> RecordBatch {
|
use crate::error::{MissingColumnSnafu, Result};
|
||||||
let column = record_batch
|
use snafu::prelude::*;
|
||||||
.column_by_name("vector")
|
|
||||||
.expect("vector column is missing");
|
pub(crate) fn convert_record_batch(record_batch: RecordBatch) -> Result<RecordBatch> {
|
||||||
let arr = as_list_array(column.deref());
|
let column = get_column(VECTOR_COLUMN_NAME, &record_batch)?;
|
||||||
|
|
||||||
|
// TODO: we should just consume the underlying js buffer in the future instead of this arrow around a bunch of times
|
||||||
|
let arr = as_list_array(column.as_ref());
|
||||||
let list_size = arr.values().len() / record_batch.num_rows();
|
let list_size = arr.values().len() / record_batch.num_rows();
|
||||||
let r = FixedSizeListArray::try_new(arr.values(), list_size as i32).unwrap();
|
let r = FixedSizeListArray::try_new_from_values(arr.values().to_owned(), list_size as i32)?;
|
||||||
|
|
||||||
let schema = Arc::new(Schema::new(vec![Field::new(
|
let schema = Arc::new(Schema::new(vec![Field::new(
|
||||||
"vector",
|
VECTOR_COLUMN_NAME,
|
||||||
DataType::FixedSizeList(
|
DataType::FixedSizeList(
|
||||||
Arc::new(Field::new("item", DataType::Float32, true)),
|
Arc::new(Field::new("item", DataType::Float32, true)),
|
||||||
list_size as i32,
|
list_size as i32,
|
||||||
@@ -39,22 +44,42 @@ pub(crate) fn convert_record_batch(record_batch: RecordBatch) -> RecordBatch {
|
|||||||
true,
|
true,
|
||||||
)]));
|
)]));
|
||||||
|
|
||||||
let mut new_batch = RecordBatch::try_new(schema.clone(), vec![Arc::new(r)]).unwrap();
|
let mut new_batch = RecordBatch::try_new(schema.clone(), vec![Arc::new(r)])?;
|
||||||
|
|
||||||
if record_batch.num_columns() > 1 {
|
if record_batch.num_columns() > 1 {
|
||||||
let rb = record_batch.drop_column("vector").unwrap();
|
let rb = record_batch.drop_column(VECTOR_COLUMN_NAME)?;
|
||||||
new_batch = new_batch.merge(&rb).unwrap();
|
new_batch = new_batch.merge(&rb)?;
|
||||||
}
|
}
|
||||||
new_batch
|
Ok(new_batch)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn arrow_buffer_to_record_batch(slice: &[u8]) -> Vec<RecordBatch> {
|
fn get_column(column_name: &str, record_batch: &RecordBatch) -> Result<ArrayRef> {
|
||||||
|
record_batch
|
||||||
|
.column_by_name(column_name)
|
||||||
|
.cloned()
|
||||||
|
.context(MissingColumnSnafu { name: column_name })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn arrow_buffer_to_record_batch(slice: &[u8]) -> Result<Vec<RecordBatch>> {
|
||||||
let mut batches: Vec<RecordBatch> = Vec::new();
|
let mut batches: Vec<RecordBatch> = Vec::new();
|
||||||
let fr = FileReader::try_new(Cursor::new(slice), None);
|
let file_reader = FileReader::try_new(Cursor::new(slice), None)?;
|
||||||
let file_reader = fr.unwrap();
|
|
||||||
for b in file_reader {
|
for b in file_reader {
|
||||||
let record_batch = convert_record_batch(b.unwrap());
|
let record_batch = convert_record_batch(b?)?;
|
||||||
batches.push(record_batch);
|
batches.push(record_batch);
|
||||||
}
|
}
|
||||||
batches
|
Ok(batches)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn record_batch_to_buffer(batches: Vec<RecordBatch>) -> Result<Vec<u8>> {
|
||||||
|
if batches.is_empty() {
|
||||||
|
return Ok(Vec::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
let schema = batches.get(0).unwrap().schema();
|
||||||
|
let mut fr = FileWriter::try_new(Vec::new(), schema.deref())?;
|
||||||
|
for batch in batches.iter() {
|
||||||
|
fr.write(batch)?
|
||||||
|
}
|
||||||
|
fr.finish()?;
|
||||||
|
Ok(fr.into_inner()?)
|
||||||
}
|
}
|
||||||
|
|||||||
88
rust/ffi/node/src/error.rs
Normal file
88
rust/ffi/node/src/error.rs
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
use arrow_schema::ArrowError;
|
||||||
|
use neon::context::Context;
|
||||||
|
use neon::prelude::NeonResult;
|
||||||
|
use snafu::Snafu;
|
||||||
|
|
||||||
|
#[derive(Debug, Snafu)]
|
||||||
|
#[snafu(visibility(pub(crate)))]
|
||||||
|
pub enum Error {
|
||||||
|
#[snafu(display("column '{name}' is missing"))]
|
||||||
|
MissingColumn { name: String },
|
||||||
|
#[snafu(display("{name}: {message}"))]
|
||||||
|
RangeError { name: String, message: String },
|
||||||
|
#[snafu(display("{index_type} is not a valid index type"))]
|
||||||
|
InvalidIndexType { index_type: String },
|
||||||
|
|
||||||
|
#[snafu(display("{message}"))]
|
||||||
|
LanceDB { message: String },
|
||||||
|
#[snafu(display("{message}"))]
|
||||||
|
Neon { message: String },
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
|
|
||||||
|
impl From<vectordb::error::Error> for Error {
|
||||||
|
fn from(e: vectordb::error::Error) -> Self {
|
||||||
|
Self::LanceDB {
|
||||||
|
message: e.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<lance::Error> for Error {
|
||||||
|
fn from(e: lance::Error) -> Self {
|
||||||
|
Self::LanceDB {
|
||||||
|
message: e.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ArrowError> for Error {
|
||||||
|
fn from(value: ArrowError) -> Self {
|
||||||
|
Self::LanceDB {
|
||||||
|
message: value.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<neon::result::Throw> for Error {
|
||||||
|
fn from(value: neon::result::Throw) -> Self {
|
||||||
|
Self::Neon {
|
||||||
|
message: value.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ResultExt is used to transform a [`Result`] into a [`NeonResult`],
|
||||||
|
/// so it can be returned as a JavaScript error
|
||||||
|
/// Copied from [Neon](https://github.com/neon-bindings/neon/blob/4c2e455a9e6814f1ba0178616d63caec7f4df317/crates/neon/src/result/mod.rs#L88)
|
||||||
|
pub trait ResultExt<T> {
|
||||||
|
fn or_throw<'a, C: Context<'a>>(self, cx: &mut C) -> NeonResult<T>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Implement ResultExt for the std Result so it can be used any Result type
|
||||||
|
impl<T, E> ResultExt<T> for std::result::Result<T, E>
|
||||||
|
where
|
||||||
|
E: std::fmt::Display,
|
||||||
|
{
|
||||||
|
fn or_throw<'a, C: Context<'a>>(self, cx: &mut C) -> NeonResult<T> {
|
||||||
|
match self {
|
||||||
|
Ok(value) => Ok(value),
|
||||||
|
Err(error) => cx.throw_error(error.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -22,12 +22,15 @@ use neon::prelude::*;
|
|||||||
|
|
||||||
use vectordb::index::vector::{IvfPQIndexBuilder, VectorIndexBuilder};
|
use vectordb::index::vector::{IvfPQIndexBuilder, VectorIndexBuilder};
|
||||||
|
|
||||||
|
use crate::error::Error::InvalidIndexType;
|
||||||
|
use crate::error::ResultExt;
|
||||||
|
use crate::neon_ext::js_object_ext::JsObjectExt;
|
||||||
use crate::{runtime, JsTable};
|
use crate::{runtime, JsTable};
|
||||||
|
|
||||||
pub(crate) fn table_create_vector_index(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
pub(crate) fn table_create_vector_index(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
||||||
let index_params = cx.argument::<JsObject>(0)?;
|
let index_params = cx.argument::<JsObject>(0)?;
|
||||||
let index_params_builder = get_index_params_builder(&mut cx, index_params).unwrap();
|
let index_params_builder = get_index_params_builder(&mut cx, index_params).or_throw(&mut cx)?;
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
let channel = cx.channel();
|
let channel = cx.channel();
|
||||||
@@ -54,27 +57,21 @@ pub(crate) fn table_create_vector_index(mut cx: FunctionContext) -> JsResult<JsP
|
|||||||
fn get_index_params_builder(
|
fn get_index_params_builder(
|
||||||
cx: &mut FunctionContext,
|
cx: &mut FunctionContext,
|
||||||
obj: Handle<JsObject>,
|
obj: Handle<JsObject>,
|
||||||
) -> Result<impl VectorIndexBuilder, String> {
|
) -> crate::error::Result<impl VectorIndexBuilder> {
|
||||||
let idx_type = obj
|
let idx_type = obj.get::<JsString, _, _>(cx, "type")?.value(cx);
|
||||||
.get::<JsString, _, _>(cx, "type")
|
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.value(cx);
|
|
||||||
|
|
||||||
match idx_type.as_str() {
|
match idx_type.as_str() {
|
||||||
"ivf_pq" => {
|
"ivf_pq" => {
|
||||||
let mut index_builder: IvfPQIndexBuilder = IvfPQIndexBuilder::new();
|
let mut index_builder: IvfPQIndexBuilder = IvfPQIndexBuilder::new();
|
||||||
let mut pq_params = PQBuildParams::default();
|
let mut pq_params = PQBuildParams::default();
|
||||||
|
|
||||||
obj.get_opt::<JsString, _, _>(cx, "column")
|
obj.get_opt::<JsString, _, _>(cx, "column")?
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| index_builder.column(s.value(cx)));
|
.map(|s| index_builder.column(s.value(cx)));
|
||||||
|
|
||||||
obj.get_opt::<JsString, _, _>(cx, "index_name")
|
obj.get_opt::<JsString, _, _>(cx, "index_name")?
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| index_builder.index_name(s.value(cx)));
|
.map(|s| index_builder.index_name(s.value(cx)));
|
||||||
|
|
||||||
obj.get_opt::<JsString, _, _>(cx, "metric_type")
|
obj.get_opt::<JsString, _, _>(cx, "metric_type")?
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| MetricType::try_from(s.value(cx).as_str()))
|
.map(|s| MetricType::try_from(s.value(cx).as_str()))
|
||||||
.map(|mt| {
|
.map(|mt| {
|
||||||
let metric_type = mt.unwrap();
|
let metric_type = mt.unwrap();
|
||||||
@@ -82,15 +79,8 @@ fn get_index_params_builder(
|
|||||||
pq_params.metric_type = metric_type;
|
pq_params.metric_type = metric_type;
|
||||||
});
|
});
|
||||||
|
|
||||||
let num_partitions = obj
|
let num_partitions = obj.get_opt_usize(cx, "num_partitions")?;
|
||||||
.get_opt::<JsNumber, _, _>(cx, "num_partitions")
|
let max_iters = obj.get_opt_usize(cx, "max_iters")?;
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| s.value(cx) as usize);
|
|
||||||
|
|
||||||
let max_iters = obj
|
|
||||||
.get_opt::<JsNumber, _, _>(cx, "max_iters")
|
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| s.value(cx) as usize);
|
|
||||||
|
|
||||||
num_partitions.map(|np| {
|
num_partitions.map(|np| {
|
||||||
let max_iters = max_iters.unwrap_or(50);
|
let max_iters = max_iters.unwrap_or(50);
|
||||||
@@ -102,32 +92,28 @@ fn get_index_params_builder(
|
|||||||
index_builder.ivf_params(ivf_params)
|
index_builder.ivf_params(ivf_params)
|
||||||
});
|
});
|
||||||
|
|
||||||
obj.get_opt::<JsBoolean, _, _>(cx, "use_opq")
|
obj.get_opt::<JsBoolean, _, _>(cx, "use_opq")?
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| pq_params.use_opq = s.value(cx));
|
.map(|s| pq_params.use_opq = s.value(cx));
|
||||||
|
|
||||||
obj.get_opt::<JsNumber, _, _>(cx, "num_sub_vectors")
|
obj.get_opt_usize(cx, "num_sub_vectors")?
|
||||||
.map_err(|t| t.to_string())?
|
.map(|s| pq_params.num_sub_vectors = s);
|
||||||
.map(|s| pq_params.num_sub_vectors = s.value(cx) as usize);
|
|
||||||
|
|
||||||
obj.get_opt::<JsNumber, _, _>(cx, "num_bits")
|
obj.get_opt_usize(cx, "num_bits")?
|
||||||
.map_err(|t| t.to_string())?
|
.map(|s| pq_params.num_bits = s);
|
||||||
.map(|s| pq_params.num_bits = s.value(cx) as usize);
|
|
||||||
|
|
||||||
obj.get_opt::<JsNumber, _, _>(cx, "max_iters")
|
obj.get_opt_usize(cx, "max_iters")?
|
||||||
.map_err(|t| t.to_string())?
|
.map(|s| pq_params.max_iters = s);
|
||||||
.map(|s| pq_params.max_iters = s.value(cx) as usize);
|
|
||||||
|
|
||||||
obj.get_opt::<JsNumber, _, _>(cx, "max_opq_iters")
|
obj.get_opt_usize(cx, "max_opq_iters")?
|
||||||
.map_err(|t| t.to_string())?
|
.map(|s| pq_params.max_opq_iters = s);
|
||||||
.map(|s| pq_params.max_opq_iters = s.value(cx) as usize);
|
|
||||||
|
|
||||||
obj.get_opt::<JsBoolean, _, _>(cx, "replace")
|
obj.get_opt::<JsBoolean, _, _>(cx, "replace")?
|
||||||
.map_err(|t| t.to_string())?
|
|
||||||
.map(|s| index_builder.replace(s.value(cx)));
|
.map(|s| index_builder.replace(s.value(cx)));
|
||||||
|
|
||||||
Ok(index_builder)
|
Ok(index_builder)
|
||||||
}
|
}
|
||||||
t => Err(format!("{} is not a valid index type", t).to_string()),
|
index_type => Err(InvalidIndexType {
|
||||||
|
index_type: index_type.into(),
|
||||||
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,25 +17,31 @@ use std::convert::TryFrom;
|
|||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
use arrow_array::{Float32Array, RecordBatchIterator, RecordBatchReader};
|
use arrow_array::{Float32Array, RecordBatchIterator};
|
||||||
use arrow_ipc::writer::FileWriter;
|
use async_trait::async_trait;
|
||||||
use futures::{TryFutureExt, TryStreamExt};
|
use futures::{TryFutureExt, TryStreamExt};
|
||||||
use lance::dataset::{WriteMode, WriteParams};
|
use lance::dataset::{WriteMode, WriteParams};
|
||||||
use lance::index::vector::MetricType;
|
use lance::index::vector::MetricType;
|
||||||
|
use lance::io::object_store::ObjectStoreParams;
|
||||||
use neon::prelude::*;
|
use neon::prelude::*;
|
||||||
use neon::types::buffer::TypedArray;
|
use neon::types::buffer::TypedArray;
|
||||||
|
use object_store::aws::{AwsCredential, AwsCredentialProvider};
|
||||||
|
use object_store::CredentialProvider;
|
||||||
use once_cell::sync::OnceCell;
|
use once_cell::sync::OnceCell;
|
||||||
use tokio::runtime::Runtime;
|
use tokio::runtime::Runtime;
|
||||||
|
|
||||||
use vectordb::database::Database;
|
use vectordb::database::Database;
|
||||||
use vectordb::error::Error;
|
use vectordb::table::{ReadParams, Table};
|
||||||
use vectordb::table::Table;
|
|
||||||
|
|
||||||
use crate::arrow::arrow_buffer_to_record_batch;
|
use crate::arrow::{arrow_buffer_to_record_batch, record_batch_to_buffer};
|
||||||
|
use crate::error::ResultExt;
|
||||||
|
use crate::neon_ext::js_object_ext::JsObjectExt;
|
||||||
|
|
||||||
mod arrow;
|
mod arrow;
|
||||||
mod convert;
|
mod convert;
|
||||||
|
mod error;
|
||||||
mod index;
|
mod index;
|
||||||
|
mod neon_ext;
|
||||||
|
|
||||||
struct JsDatabase {
|
struct JsDatabase {
|
||||||
database: Arc<Database>,
|
database: Arc<Database>,
|
||||||
@@ -49,10 +55,40 @@ struct JsTable {
|
|||||||
|
|
||||||
impl Finalize for JsTable {}
|
impl Finalize for JsTable {}
|
||||||
|
|
||||||
|
// TODO: object_store didn't export this type so I copied it.
|
||||||
|
// Make a request to object_store to export this type
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct StaticCredentialProvider<T> {
|
||||||
|
credential: Arc<T>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> StaticCredentialProvider<T> {
|
||||||
|
pub fn new(credential: T) -> Self {
|
||||||
|
Self {
|
||||||
|
credential: Arc::new(credential),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<T> CredentialProvider for StaticCredentialProvider<T>
|
||||||
|
where
|
||||||
|
T: std::fmt::Debug + Send + Sync,
|
||||||
|
{
|
||||||
|
type Credential = T;
|
||||||
|
|
||||||
|
async fn get_credential(&self) -> object_store::Result<Arc<T>> {
|
||||||
|
Ok(Arc::clone(&self.credential))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
|
fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
|
||||||
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
|
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
|
||||||
|
static LOG: OnceCell<()> = OnceCell::new();
|
||||||
|
|
||||||
RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
|
LOG.get_or_init(|| env_logger::init());
|
||||||
|
|
||||||
|
RUNTIME.get_or_try_init(|| Runtime::new().or_throw(cx))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn database_new(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
fn database_new(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
@@ -67,7 +103,7 @@ fn database_new(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let db = JsDatabase {
|
let db = JsDatabase {
|
||||||
database: Arc::new(database.or_else(|err| cx.throw_error(err.to_string()))?),
|
database: Arc::new(database.or_throw(&mut cx)?),
|
||||||
};
|
};
|
||||||
Ok(cx.boxed(db))
|
Ok(cx.boxed(db))
|
||||||
});
|
});
|
||||||
@@ -89,7 +125,7 @@ fn database_table_names(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let tables_rst = database.table_names().await;
|
let tables_rst = database.table_names().await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let tables = tables_rst.or_else(|err| cx.throw_error(err.to_string()))?;
|
let tables = tables_rst.or_throw(&mut cx)?;
|
||||||
let table_names = convert::vec_str_to_array(&tables, &mut cx);
|
let table_names = convert::vec_str_to_array(&tables, &mut cx);
|
||||||
table_names
|
table_names
|
||||||
});
|
});
|
||||||
@@ -97,24 +133,70 @@ fn database_table_names(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
Ok(promise)
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_aws_creds<T>(
|
||||||
|
cx: &mut FunctionContext,
|
||||||
|
arg_starting_location: i32,
|
||||||
|
) -> Result<Option<AwsCredentialProvider>, NeonResult<T>> {
|
||||||
|
let secret_key_id = cx
|
||||||
|
.argument_opt(arg_starting_location)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
let secret_key = cx
|
||||||
|
.argument_opt(arg_starting_location + 1)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
let temp_token = cx
|
||||||
|
.argument_opt(arg_starting_location + 2)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
match (secret_key_id, secret_key, temp_token) {
|
||||||
|
(Some(key_id), Some(key), optional_token) => Ok(Some(Arc::new(
|
||||||
|
StaticCredentialProvider::new(AwsCredential {
|
||||||
|
key_id: key_id,
|
||||||
|
secret_key: key,
|
||||||
|
token: optional_token,
|
||||||
|
}),
|
||||||
|
))),
|
||||||
|
(None, None, None) => Ok(None),
|
||||||
|
_ => Err(cx.throw_error("Invalid credentials configuration")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let db = cx
|
let db = cx
|
||||||
.this()
|
.this()
|
||||||
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
||||||
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 1) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = ReadParams {
|
||||||
|
store_options: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
..ReadParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
let channel = cx.channel();
|
let channel = cx.channel();
|
||||||
let database = db.database.clone();
|
let database = db.database.clone();
|
||||||
|
|
||||||
let (deferred, promise) = cx.promise();
|
let (deferred, promise) = cx.promise();
|
||||||
rt.spawn(async move {
|
rt.spawn(async move {
|
||||||
let table_rst = database.open_table(&table_name).await;
|
let table_rst = database.open_table_with_params(&table_name, ¶ms).await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let table = Arc::new(Mutex::new(
|
let table = Arc::new(Mutex::new(table_rst.or_throw(&mut cx)?));
|
||||||
table_rst.or_else(|err| cx.throw_error(err.to_string()))?,
|
|
||||||
));
|
|
||||||
Ok(cx.boxed(JsTable { table }))
|
Ok(cx.boxed(JsTable { table }))
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -135,7 +217,7 @@ fn database_drop_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
rt.spawn(async move {
|
rt.spawn(async move {
|
||||||
let result = database.drop_table(&table_name).await;
|
let result = database.drop_table(&table_name).await;
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
result.or_else(|err| cx.throw_error(err.to_string()))?;
|
result.or_throw(&mut cx)?;
|
||||||
Ok(cx.null())
|
Ok(cx.null())
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -164,12 +246,9 @@ fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
.get_opt::<JsString, _, _>(&mut cx, "_filter")?
|
.get_opt::<JsString, _, _>(&mut cx, "_filter")?
|
||||||
.map(|s| s.value(&mut cx));
|
.map(|s| s.value(&mut cx));
|
||||||
let refine_factor = query_obj
|
let refine_factor = query_obj
|
||||||
.get_opt::<JsNumber, _, _>(&mut cx, "_refineFactor")?
|
.get_opt_u32(&mut cx, "_refineFactor")
|
||||||
.map(|s| s.value(&mut cx))
|
.or_throw(&mut cx)?;
|
||||||
.map(|i| i as u32);
|
let nprobes = query_obj.get_usize(&mut cx, "_nprobes").or_throw(&mut cx)?;
|
||||||
let nprobes = query_obj
|
|
||||||
.get::<JsNumber, _, _>(&mut cx, "_nprobes")?
|
|
||||||
.value(&mut cx) as usize;
|
|
||||||
let metric_type = query_obj
|
let metric_type = query_obj
|
||||||
.get_opt::<JsString, _, _>(&mut cx, "_metricType")?
|
.get_opt::<JsString, _, _>(&mut cx, "_metricType")?
|
||||||
.map(|s| s.value(&mut cx))
|
.map(|s| s.value(&mut cx))
|
||||||
@@ -196,30 +275,17 @@ fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
.select(select);
|
.select(select);
|
||||||
let record_batch_stream = builder.execute();
|
let record_batch_stream = builder.execute();
|
||||||
let results = record_batch_stream
|
let results = record_batch_stream
|
||||||
.and_then(|stream| stream.try_collect::<Vec<_>>().map_err(Error::from))
|
.and_then(|stream| {
|
||||||
|
stream
|
||||||
|
.try_collect::<Vec<_>>()
|
||||||
|
.map_err(vectordb::error::Error::from)
|
||||||
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let results = results.or_else(|err| cx.throw_error(err.to_string()))?;
|
let results = results.or_throw(&mut cx)?;
|
||||||
let vector: Vec<u8> = Vec::new();
|
let buffer = record_batch_to_buffer(results).or_throw(&mut cx)?;
|
||||||
|
Ok(JsBuffer::external(&mut cx, buffer))
|
||||||
if results.is_empty() {
|
|
||||||
return cx.buffer(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
let schema = results.get(0).unwrap().schema();
|
|
||||||
let mut fr = FileWriter::try_new(vector, schema.deref())
|
|
||||||
.or_else(|err| cx.throw_error(err.to_string()))?;
|
|
||||||
|
|
||||||
for batch in results.iter() {
|
|
||||||
fr.write(batch)
|
|
||||||
.or_else(|err| cx.throw_error(err.to_string()))?;
|
|
||||||
}
|
|
||||||
fr.finish().or_else(|err| cx.throw_error(err.to_string()))?;
|
|
||||||
let buf = fr
|
|
||||||
.into_inner()
|
|
||||||
.or_else(|err| cx.throw_error(err.to_string()))?;
|
|
||||||
Ok(JsBuffer::external(&mut cx, buf))
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
Ok(promise)
|
Ok(promise)
|
||||||
@@ -231,7 +297,7 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
||||||
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
let buffer = cx.argument::<JsBuffer>(1)?;
|
let buffer = cx.argument::<JsBuffer>(1)?;
|
||||||
let batches = arrow_buffer_to_record_batch(buffer.as_slice(&mut cx));
|
let batches = arrow_buffer_to_record_batch(buffer.as_slice(&mut cx)).or_throw(&mut cx)?;
|
||||||
let schema = batches[0].schema();
|
let schema = batches[0].schema();
|
||||||
|
|
||||||
// Write mode
|
// Write mode
|
||||||
@@ -241,8 +307,6 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
"create" => WriteMode::Create,
|
"create" => WriteMode::Create,
|
||||||
_ => return cx.throw_error("Table::create only supports 'overwrite' and 'create' modes"),
|
_ => return cx.throw_error("Table::create only supports 'overwrite' and 'create' modes"),
|
||||||
};
|
};
|
||||||
let mut params = WriteParams::default();
|
|
||||||
params.mode = mode;
|
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
let channel = cx.channel();
|
let channel = cx.channel();
|
||||||
@@ -250,19 +314,28 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let (deferred, promise) = cx.promise();
|
let (deferred, promise) = cx.promise();
|
||||||
let database = db.database.clone();
|
let database = db.database.clone();
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 3) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = WriteParams {
|
||||||
|
store_params: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
mode: mode,
|
||||||
|
..WriteParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
rt.block_on(async move {
|
rt.block_on(async move {
|
||||||
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let batch_reader = RecordBatchIterator::new(batches.into_iter().map(Ok), schema);
|
||||||
batches.into_iter().map(Ok),
|
|
||||||
schema,
|
|
||||||
));
|
|
||||||
let table_rst = database
|
let table_rst = database
|
||||||
.create_table(&table_name, batch_reader, Some(params))
|
.create_table(&table_name, batch_reader, Some(params))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let table = Arc::new(Mutex::new(
|
let table = Arc::new(Mutex::new(table_rst.or_throw(&mut cx)?));
|
||||||
table_rst.or_else(|err| cx.throw_error(err.to_string()))?,
|
|
||||||
));
|
|
||||||
Ok(cx.boxed(JsTable { table }))
|
Ok(cx.boxed(JsTable { table }))
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -279,7 +352,8 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
||||||
let buffer = cx.argument::<JsBuffer>(0)?;
|
let buffer = cx.argument::<JsBuffer>(0)?;
|
||||||
let write_mode = cx.argument::<JsString>(1)?.value(&mut cx);
|
let write_mode = cx.argument::<JsString>(1)?.value(&mut cx);
|
||||||
let batches = arrow_buffer_to_record_batch(buffer.as_slice(&mut cx));
|
|
||||||
|
let batches = arrow_buffer_to_record_batch(buffer.as_slice(&mut cx)).or_throw(&mut cx)?;
|
||||||
let schema = batches[0].schema();
|
let schema = batches[0].schema();
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
@@ -289,16 +363,27 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let table = js_table.table.clone();
|
let table = js_table.table.clone();
|
||||||
let write_mode = write_mode_map.get(write_mode.as_str()).cloned();
|
let write_mode = write_mode_map.get(write_mode.as_str()).cloned();
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 2) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = WriteParams {
|
||||||
|
store_params: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
mode: write_mode.unwrap_or(WriteMode::Append),
|
||||||
|
..WriteParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
rt.block_on(async move {
|
rt.block_on(async move {
|
||||||
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let batch_reader = RecordBatchIterator::new(batches.into_iter().map(Ok), schema);
|
||||||
batches.into_iter().map(Ok),
|
let add_result = table.lock().unwrap().add(batch_reader, Some(params)).await;
|
||||||
schema,
|
|
||||||
));
|
|
||||||
let add_result = table.lock().unwrap().add(batch_reader, write_mode).await;
|
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let added = add_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
let _added = add_result.or_throw(&mut cx)?;
|
||||||
Ok(cx.number(added as f64))
|
Ok(cx.boolean(true))
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
Ok(promise)
|
Ok(promise)
|
||||||
@@ -316,7 +401,7 @@ fn table_count_rows(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let num_rows_result = table.lock().unwrap().count_rows().await;
|
let num_rows_result = table.lock().unwrap().count_rows().await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let num_rows = num_rows_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
let num_rows = num_rows_result.or_throw(&mut cx)?;
|
||||||
Ok(cx.number(num_rows as f64))
|
Ok(cx.number(num_rows as f64))
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -336,7 +421,7 @@ fn table_delete(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let delete_result = rt.block_on(async move { table.lock().unwrap().delete(&predicate).await });
|
let delete_result = rt.block_on(async move { table.lock().unwrap().delete(&predicate).await });
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
delete_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
delete_result.or_throw(&mut cx)?;
|
||||||
Ok(cx.undefined())
|
Ok(cx.undefined())
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
15
rust/ffi/node/src/neon_ext.rs
Normal file
15
rust/ffi/node/src/neon_ext.rs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
pub mod js_object_ext;
|
||||||
82
rust/ffi/node/src/neon_ext/js_object_ext.rs
Normal file
82
rust/ffi/node/src/neon_ext/js_object_ext.rs
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
use crate::error::{Error, Result};
|
||||||
|
use neon::prelude::*;
|
||||||
|
|
||||||
|
// extends neon's [JsObject] with helper functions to extract properties
|
||||||
|
pub trait JsObjectExt {
|
||||||
|
fn get_opt_u32(&self, cx: &mut FunctionContext, key: &str) -> Result<Option<u32>>;
|
||||||
|
fn get_usize(&self, cx: &mut FunctionContext, key: &str) -> Result<usize>;
|
||||||
|
fn get_opt_usize(&self, cx: &mut FunctionContext, key: &str) -> Result<Option<usize>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JsObjectExt for JsObject {
|
||||||
|
fn get_opt_u32(&self, cx: &mut FunctionContext, key: &str) -> Result<Option<u32>> {
|
||||||
|
let val_opt = self
|
||||||
|
.get_opt::<JsNumber, _, _>(cx, key)?
|
||||||
|
.map(|s| f64_to_u32_safe(s.value(cx), key));
|
||||||
|
val_opt.transpose()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_usize(&self, cx: &mut FunctionContext, key: &str) -> Result<usize> {
|
||||||
|
let val = self.get::<JsNumber, _, _>(cx, key)?.value(cx);
|
||||||
|
f64_to_usize_safe(val, key)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_opt_usize(&self, cx: &mut FunctionContext, key: &str) -> Result<Option<usize>> {
|
||||||
|
let val_opt = self
|
||||||
|
.get_opt::<JsNumber, _, _>(cx, key)?
|
||||||
|
.map(|s| f64_to_usize_safe(s.value(cx), key));
|
||||||
|
val_opt.transpose()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn f64_to_u32_safe(n: f64, key: &str) -> Result<u32> {
|
||||||
|
use conv::*;
|
||||||
|
|
||||||
|
n.approx_as::<u32>().map_err(|e| match e {
|
||||||
|
FloatError::NegOverflow(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: "must be > 0".to_string(),
|
||||||
|
},
|
||||||
|
FloatError::PosOverflow(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: format!("must be < {}", u32::MAX),
|
||||||
|
},
|
||||||
|
FloatError::NotANumber(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: "not a valid number".to_string(),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn f64_to_usize_safe(n: f64, key: &str) -> Result<usize> {
|
||||||
|
use conv::*;
|
||||||
|
|
||||||
|
n.approx_as::<usize>().map_err(|e| match e {
|
||||||
|
FloatError::NegOverflow(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: "must be > 0".to_string(),
|
||||||
|
},
|
||||||
|
FloatError::PosOverflow(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: format!("must be < {}", usize::MAX),
|
||||||
|
},
|
||||||
|
FloatError::NotANumber(_) => Error::RangeError {
|
||||||
|
name: key.into(),
|
||||||
|
message: "not a valid number".to_string(),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb"
|
name = "vectordb"
|
||||||
version = "0.1.10"
|
version = "0.1.19"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
@@ -12,7 +12,8 @@ arrow-array = { workspace = true }
|
|||||||
arrow-data = { workspace = true }
|
arrow-data = { workspace = true }
|
||||||
arrow-schema = { workspace = true }
|
arrow-schema = { workspace = true }
|
||||||
object_store = { workspace = true }
|
object_store = { workspace = true }
|
||||||
snafu = "0.7.4"
|
snafu = { workspace = true }
|
||||||
|
half = { workspace = true }
|
||||||
lance = { workspace = true }
|
lance = { workspace = true }
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
|
|
||||||
|
|||||||
@@ -20,13 +20,16 @@ use lance::dataset::WriteParams;
|
|||||||
use lance::io::object_store::ObjectStore;
|
use lance::io::object_store::ObjectStore;
|
||||||
use snafu::prelude::*;
|
use snafu::prelude::*;
|
||||||
|
|
||||||
use crate::error::{CreateDirSnafu, Result};
|
use crate::error::{CreateDirSnafu, InvalidTableNameSnafu, Result};
|
||||||
use crate::table::{OpenTableParams, Table};
|
use crate::table::{ReadParams, Table};
|
||||||
|
|
||||||
|
pub const LANCE_FILE_EXTENSION: &str = "lance";
|
||||||
|
|
||||||
pub struct Database {
|
pub struct Database {
|
||||||
object_store: ObjectStore,
|
object_store: ObjectStore,
|
||||||
|
|
||||||
pub(crate) uri: String,
|
pub(crate) uri: String,
|
||||||
|
pub(crate) base_path: object_store::path::Path,
|
||||||
}
|
}
|
||||||
|
|
||||||
const LANCE_EXTENSION: &str = "lance";
|
const LANCE_EXTENSION: &str = "lance";
|
||||||
@@ -43,12 +46,13 @@ impl Database {
|
|||||||
///
|
///
|
||||||
/// * A [Database] object.
|
/// * A [Database] object.
|
||||||
pub async fn connect(uri: &str) -> Result<Database> {
|
pub async fn connect(uri: &str) -> Result<Database> {
|
||||||
let (object_store, _) = ObjectStore::from_uri(uri).await?;
|
let (object_store, base_path) = ObjectStore::from_uri(uri).await?;
|
||||||
if object_store.is_local() {
|
if object_store.is_local() {
|
||||||
Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
|
Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
|
||||||
}
|
}
|
||||||
Ok(Database {
|
Ok(Database {
|
||||||
uri: uri.to_string(),
|
uri: uri.to_string(),
|
||||||
|
base_path,
|
||||||
object_store,
|
object_store,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -57,7 +61,7 @@ impl Database {
|
|||||||
fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
|
fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
|
||||||
let path = Path::new(path);
|
let path = Path::new(path);
|
||||||
if !path.try_exists()? {
|
if !path.try_exists()? {
|
||||||
create_dir_all(&path)?;
|
create_dir_all(path)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -70,23 +74,18 @@ impl Database {
|
|||||||
pub async fn table_names(&self) -> Result<Vec<String>> {
|
pub async fn table_names(&self) -> Result<Vec<String>> {
|
||||||
let f = self
|
let f = self
|
||||||
.object_store
|
.object_store
|
||||||
.read_dir(self.uri.as_str())
|
.read_dir(self.base_path.clone())
|
||||||
.await?
|
.await?
|
||||||
.iter()
|
.iter()
|
||||||
.map(|fname| Path::new(fname))
|
.map(Path::new)
|
||||||
.filter(|path| {
|
.filter(|path| {
|
||||||
let is_lance = path
|
let is_lance = path
|
||||||
.extension()
|
.extension()
|
||||||
.map(|e| e.to_str().map(|e| e == LANCE_EXTENSION))
|
.and_then(|e| e.to_str())
|
||||||
.flatten();
|
.map(|e| e == LANCE_EXTENSION);
|
||||||
is_lance.unwrap_or(false)
|
is_lance.unwrap_or(false)
|
||||||
})
|
})
|
||||||
.map(|p| {
|
.filter_map(|p| p.file_stem().and_then(|s| s.to_str().map(String::from)))
|
||||||
p.file_stem()
|
|
||||||
.map(|s| s.to_str().map(|s| String::from(s)))
|
|
||||||
.flatten()
|
|
||||||
})
|
|
||||||
.flatten()
|
|
||||||
.collect();
|
.collect();
|
||||||
Ok(f)
|
Ok(f)
|
||||||
}
|
}
|
||||||
@@ -100,10 +99,11 @@ impl Database {
|
|||||||
pub async fn create_table(
|
pub async fn create_table(
|
||||||
&self,
|
&self,
|
||||||
name: &str,
|
name: &str,
|
||||||
batches: Box<dyn RecordBatchReader>,
|
batches: impl RecordBatchReader + Send + 'static,
|
||||||
params: Option<WriteParams>,
|
params: Option<WriteParams>,
|
||||||
) -> Result<Table> {
|
) -> Result<Table> {
|
||||||
Table::create(&self.uri, name, batches, params).await
|
let table_uri = self.table_uri(name)?;
|
||||||
|
Table::create(&table_uri, name, batches, params).await
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Open a table in the database.
|
/// Open a table in the database.
|
||||||
@@ -115,7 +115,7 @@ impl Database {
|
|||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_table(&self, name: &str) -> Result<Table> {
|
pub async fn open_table(&self, name: &str) -> Result<Table> {
|
||||||
self.open_table_with_params(name, OpenTableParams::default())
|
self.open_table_with_params(name, &ReadParams::default())
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -128,12 +128,9 @@ impl Database {
|
|||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_table_with_params(
|
pub async fn open_table_with_params(&self, name: &str, params: &ReadParams) -> Result<Table> {
|
||||||
&self,
|
let table_uri = self.table_uri(name)?;
|
||||||
name: &str,
|
Table::open_with_params(&table_uri, name, params).await
|
||||||
params: OpenTableParams,
|
|
||||||
) -> Result<Table> {
|
|
||||||
Table::open_with_params(&self.uri, name, params).await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Drop a table in the database.
|
/// Drop a table in the database.
|
||||||
@@ -141,10 +138,23 @@ impl Database {
|
|||||||
/// # Arguments
|
/// # Arguments
|
||||||
/// * `name` - The name of the table.
|
/// * `name` - The name of the table.
|
||||||
pub async fn drop_table(&self, name: &str) -> Result<()> {
|
pub async fn drop_table(&self, name: &str) -> Result<()> {
|
||||||
let dir_name = format!("{}/{}.{}", self.uri, name, LANCE_EXTENSION);
|
let dir_name = format!("{}.{}", name, LANCE_EXTENSION);
|
||||||
self.object_store.remove_dir_all(dir_name).await?;
|
let full_path = self.base_path.child(dir_name.clone());
|
||||||
|
self.object_store.remove_dir_all(full_path).await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the URI of a table in the database.
|
||||||
|
fn table_uri(&self, name: &str) -> Result<String> {
|
||||||
|
let path = Path::new(&self.uri);
|
||||||
|
let table_uri = path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
||||||
|
|
||||||
|
let uri = table_uri
|
||||||
|
.as_path()
|
||||||
|
.to_str()
|
||||||
|
.context(InvalidTableNameSnafu { name })?;
|
||||||
|
Ok(uri.to_string())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -35,6 +35,12 @@ pub struct IvfPQIndexBuilder {
|
|||||||
|
|
||||||
impl IvfPQIndexBuilder {
|
impl IvfPQIndexBuilder {
|
||||||
pub fn new() -> IvfPQIndexBuilder {
|
pub fn new() -> IvfPQIndexBuilder {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for IvfPQIndexBuilder {
|
||||||
|
fn default() -> Self {
|
||||||
IvfPQIndexBuilder {
|
IvfPQIndexBuilder {
|
||||||
column: None,
|
column: None,
|
||||||
index_name: None,
|
index_name: None,
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user