mirror of
https://github.com/lancedb/lancedb.git
synced 2025-12-23 13:29:57 +00:00
Compare commits
60 Commits
v0.1.10
...
python-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1cbfc1bbf4 | ||
|
|
a2bb497135 | ||
|
|
0cf40c8da3 | ||
|
|
8233c689c3 | ||
|
|
6e24e731b8 | ||
|
|
f4ce86e12c | ||
|
|
0664eaec82 | ||
|
|
63acdc2069 | ||
|
|
a636bb1075 | ||
|
|
5e3167da83 | ||
|
|
f09db4a6d6 | ||
|
|
1d343edbd4 | ||
|
|
980f910f50 | ||
|
|
fb97b03a51 | ||
|
|
141b6647a8 | ||
|
|
b45ac4608f | ||
|
|
a86bc05131 | ||
|
|
3537afb2c3 | ||
|
|
23f5dddc7c | ||
|
|
9748406cba | ||
|
|
6271949d38 | ||
|
|
131ad09ab3 | ||
|
|
030f07e7f0 | ||
|
|
72afa06b7a | ||
|
|
088e745e1d | ||
|
|
7a57cddb2c | ||
|
|
8ff5f88916 | ||
|
|
028a6e433d | ||
|
|
04c6814fb1 | ||
|
|
c62e4ca1eb | ||
|
|
aecc5fc42b | ||
|
|
2fdcb307eb | ||
|
|
ad18826579 | ||
|
|
a8a50591d7 | ||
|
|
6dfe7fabc2 | ||
|
|
2b108e1c80 | ||
|
|
8c9edafccc | ||
|
|
0590413b96 | ||
|
|
bd2d40a927 | ||
|
|
08944bf4fd | ||
|
|
826dc90151 | ||
|
|
08cc483ec9 | ||
|
|
ff1d206182 | ||
|
|
c385c55629 | ||
|
|
0a03f7ca5a | ||
|
|
88be978e87 | ||
|
|
98b12caa06 | ||
|
|
091dffb171 | ||
|
|
ace6aa883a | ||
|
|
80c25f9896 | ||
|
|
caf22fdb71 | ||
|
|
0e7ae5dfbf | ||
|
|
b261e27222 | ||
|
|
9f603f73a9 | ||
|
|
9ef846929b | ||
|
|
97364a2514 | ||
|
|
e6c6da6104 | ||
|
|
a5eb665b7d | ||
|
|
e2325c634b | ||
|
|
507eeae9c8 |
@@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 0.1.10
|
current_version = 0.1.15
|
||||||
commit = True
|
commit = True
|
||||||
message = Bump version: {current_version} → {new_version}
|
message = Bump version: {current_version} → {new_version}
|
||||||
tag = True
|
tag = True
|
||||||
|
|||||||
24
.github/workflows/docs.yml
vendored
24
.github/workflows/docs.yml
vendored
@@ -39,6 +39,28 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
python -m pip install -e .
|
python -m pip install -e .
|
||||||
python -m pip install -r ../docs/requirements.txt
|
python -m pip install -r ../docs/requirements.txt
|
||||||
|
- name: Set up node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: ${{ matrix.node-version }}
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: node/package-lock.json
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
- name: Install node dependencies
|
||||||
|
working-directory: node
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y protobuf-compiler libssl-dev
|
||||||
|
- name: Build node
|
||||||
|
working-directory: node
|
||||||
|
run: |
|
||||||
|
npm ci
|
||||||
|
npm run build
|
||||||
|
npm run tsc
|
||||||
|
- name: Create markdown files
|
||||||
|
working-directory: node
|
||||||
|
run: |
|
||||||
|
npx typedoc --plugin typedoc-plugin-markdown --out ../docs/src/javascript src/index.ts
|
||||||
- name: Build docs
|
- name: Build docs
|
||||||
run: |
|
run: |
|
||||||
PYTHONPATH=. mkdocs build -f docs/mkdocs.yml
|
PYTHONPATH=. mkdocs build -f docs/mkdocs.yml
|
||||||
@@ -50,4 +72,4 @@ jobs:
|
|||||||
path: "docs/site"
|
path: "docs/site"
|
||||||
- name: Deploy to GitHub Pages
|
- name: Deploy to GitHub Pages
|
||||||
id: deployment
|
id: deployment
|
||||||
uses: actions/deploy-pages@v1
|
uses: actions/deploy-pages@v1
|
||||||
2
.github/workflows/docs_test.yml
vendored
2
.github/workflows/docs_test.yml
vendored
@@ -81,7 +81,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
cd docs/test/node_modules/vectordb
|
cd docs/test/node_modules/vectordb
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
npm run build-release
|
||||||
npm run tsc
|
npm run tsc
|
||||||
- name: Create test files
|
- name: Create test files
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
48
.github/workflows/make-release-commit.yml
vendored
48
.github/workflows/make-release-commit.yml
vendored
@@ -25,31 +25,25 @@ jobs:
|
|||||||
bump-version:
|
bump-version:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Check out main
|
- name: Check out main
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: main
|
ref: main
|
||||||
persist-credentials: false
|
fetch-depth: 0
|
||||||
fetch-depth: 0
|
lfs: true
|
||||||
lfs: true
|
- name: Set git configs for bumpversion
|
||||||
- name: Set git configs for bumpversion
|
shell: bash
|
||||||
shell: bash
|
run: |
|
||||||
run: |
|
git config user.name 'Lance Release'
|
||||||
git config user.name 'Lance Release'
|
git config user.email 'lance-dev@lancedb.com'
|
||||||
git config user.email 'lance-dev@lancedb.com'
|
- name: Set up Python 3.10
|
||||||
- name: Set up Python 3.10
|
uses: actions/setup-python@v4
|
||||||
uses: actions/setup-python@v4
|
with:
|
||||||
with:
|
python-version: "3.10"
|
||||||
python-version: "3.10"
|
- name: Bump version, create tag and commit
|
||||||
- name: Bump version, create tag and commit
|
run: |
|
||||||
run: |
|
pip install bump2version
|
||||||
pip install bump2version
|
bumpversion --verbose ${{ inputs.part }}
|
||||||
bumpversion --verbose ${{ inputs.part }}
|
git push
|
||||||
- name: Push new version and tag
|
- uses: ./.github/workflows/update_package_lock
|
||||||
if: ${{ inputs.dry_run }} == "false"
|
|
||||||
uses: ad-m/github-push-action@master
|
|
||||||
with:
|
|
||||||
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
|
||||||
branch: main
|
|
||||||
tags: true
|
|
||||||
|
|
||||||
|
|||||||
12
.github/workflows/node.yml
vendored
12
.github/workflows/node.yml
vendored
@@ -67,8 +67,12 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
|
||||||
npm run tsc
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
npm run pack-build
|
||||||
|
npm install --no-save ./dist/lancedb-vectordb-*.tgz
|
||||||
|
# Remove index.node to test with dependency installed
|
||||||
|
rm index.node
|
||||||
- name: Test
|
- name: Test
|
||||||
run: npm run test
|
run: npm run test
|
||||||
macos:
|
macos:
|
||||||
@@ -94,8 +98,12 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
npm ci
|
npm ci
|
||||||
npm run build
|
|
||||||
npm run tsc
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
npm run pack-build
|
||||||
|
npm install --no-save ./dist/lancedb-vectordb-*.tgz
|
||||||
|
# Remove index.node to test with dependency installed
|
||||||
|
rm index.node
|
||||||
- name: Test
|
- name: Test
|
||||||
run: |
|
run: |
|
||||||
npm run test
|
npm run test
|
||||||
|
|||||||
180
.github/workflows/npm-publish.yml
vendored
Normal file
180
.github/workflows/npm-publish.yml
vendored
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
name: NPM Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ published ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
node:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: node
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: node/package-lock.json
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y protobuf-compiler libssl-dev
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
npm ci
|
||||||
|
npm run tsc
|
||||||
|
npm pack
|
||||||
|
- name: Upload Linux Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: node-package
|
||||||
|
path: |
|
||||||
|
node/lancedb-vectordb-*.tgz
|
||||||
|
|
||||||
|
node-macos:
|
||||||
|
runs-on: macos-12
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
target: [x86_64-apple-darwin, aarch64-apple-darwin]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: brew install protobuf
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: |
|
||||||
|
cd node
|
||||||
|
npm ci
|
||||||
|
- name: Install rustup target
|
||||||
|
if: ${{ matrix.target == 'aarch64-apple-darwin' }}
|
||||||
|
run: rustup target add aarch64-apple-darwin
|
||||||
|
- name: Build MacOS native node modules
|
||||||
|
run: bash ci/build_macos_artifacts.sh ${{ matrix.target }}
|
||||||
|
- name: Upload Darwin Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-darwin
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-darwin*.tgz
|
||||||
|
|
||||||
|
node-linux:
|
||||||
|
name: node-linux (${{ matrix.arch}}-unknown-linux-${{ matrix.libc }})
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
libc:
|
||||||
|
- gnu
|
||||||
|
# TODO: re-enable musl once we have refactored to pre-built containers
|
||||||
|
# Right now we have to build node from source which is too expensive.
|
||||||
|
# - musl
|
||||||
|
arch:
|
||||||
|
- x86_64
|
||||||
|
# Building on aarch64 is too slow for now
|
||||||
|
# - aarch64
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Change owner to root (for npm)
|
||||||
|
# The docker container is run as root, so we need the files to be owned by root
|
||||||
|
# Otherwise npm is a nightmare: https://github.com/npm/cli/issues/3773
|
||||||
|
run: sudo chown -R root:root .
|
||||||
|
- name: Set up QEMU
|
||||||
|
if: ${{ matrix.arch == 'aarch64' }}
|
||||||
|
uses: docker/setup-qemu-action@v2
|
||||||
|
with:
|
||||||
|
platforms: arm64
|
||||||
|
- name: Build Linux GNU native node modules
|
||||||
|
if: ${{ matrix.libc == 'gnu' }}
|
||||||
|
run: |
|
||||||
|
docker run \
|
||||||
|
-v $(pwd):/io -w /io \
|
||||||
|
rust:1.70-bookworm \
|
||||||
|
bash ci/build_linux_artifacts.sh ${{ matrix.arch }}-unknown-linux-gnu
|
||||||
|
- name: Build musl Linux native node modules
|
||||||
|
if: ${{ matrix.libc == 'musl' }}
|
||||||
|
run: |
|
||||||
|
docker run --platform linux/arm64/v8 \
|
||||||
|
-v $(pwd):/io -w /io \
|
||||||
|
quay.io/pypa/musllinux_1_1_${{ matrix.arch }} \
|
||||||
|
bash ci/build_linux_artifacts.sh ${{ matrix.arch }}-unknown-linux-musl
|
||||||
|
- name: Upload Linux Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-linux
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-linux*.tgz
|
||||||
|
|
||||||
|
node-windows:
|
||||||
|
runs-on: windows-2022
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
target: [x86_64-pc-windows-msvc]
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install Protoc v21.12
|
||||||
|
working-directory: C:\
|
||||||
|
run: |
|
||||||
|
New-Item -Path 'C:\protoc' -ItemType Directory
|
||||||
|
Set-Location C:\protoc
|
||||||
|
Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
|
||||||
|
7z x protoc.zip
|
||||||
|
Add-Content $env:GITHUB_PATH "C:\protoc\bin"
|
||||||
|
shell: powershell
|
||||||
|
- name: Install npm dependencies
|
||||||
|
run: |
|
||||||
|
cd node
|
||||||
|
npm ci
|
||||||
|
- name: Build Windows native node modules
|
||||||
|
run: .\ci\build_windows_artifacts.ps1 ${{ matrix.target }}
|
||||||
|
- name: Upload Windows Artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: native-windows
|
||||||
|
path: |
|
||||||
|
node/dist/lancedb-vectordb-win32*.tgz
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: [node, node-macos, node-linux, node-windows]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
- name: Display structure of downloaded files
|
||||||
|
run: ls -R
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
registry-url: 'https://registry.npmjs.org'
|
||||||
|
- name: Publish to NPM
|
||||||
|
env:
|
||||||
|
NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
|
||||||
|
run: |
|
||||||
|
mv */*.tgz .
|
||||||
|
for filename in *.tgz; do
|
||||||
|
npm publish $filename
|
||||||
|
done
|
||||||
|
|
||||||
|
update-package-lock:
|
||||||
|
needs: [release]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- uses: ./.github/workflows/update_package_lock
|
||||||
6
.github/workflows/python.yml
vendored
6
.github/workflows/python.yml
vendored
@@ -61,6 +61,8 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .
|
||||||
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
||||||
pip install pytest pytest-mock
|
pip install pytest pytest-mock black
|
||||||
|
- name: Black
|
||||||
|
run: black --check --diff --no-color --quiet .
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: pytest -x -v --durations=30 tests
|
run: pytest -x -v --durations=30 tests
|
||||||
|
|||||||
22
.github/workflows/rust.yml
vendored
22
.github/workflows/rust.yml
vendored
@@ -6,6 +6,7 @@ on:
|
|||||||
- main
|
- main
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
|
- Cargo.toml
|
||||||
- rust/**
|
- rust/**
|
||||||
- .github/workflows/rust.yml
|
- .github/workflows/rust.yml
|
||||||
|
|
||||||
@@ -65,3 +66,24 @@ jobs:
|
|||||||
run: cargo build --all-features
|
run: cargo build --all-features
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: cargo test --all-features
|
run: cargo test --all-features
|
||||||
|
windows:
|
||||||
|
runs-on: windows-2022
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: rust
|
||||||
|
- name: Install Protoc v21.12
|
||||||
|
working-directory: C:\
|
||||||
|
run: |
|
||||||
|
New-Item -Path 'C:\protoc' -ItemType Directory
|
||||||
|
Set-Location C:\protoc
|
||||||
|
Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
|
||||||
|
7z x protoc.zip
|
||||||
|
Add-Content $env:GITHUB_PATH "C:\protoc\bin"
|
||||||
|
shell: powershell
|
||||||
|
- name: Run tests
|
||||||
|
run: |
|
||||||
|
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
|
||||||
|
cargo build
|
||||||
|
cargo test
|
||||||
|
|||||||
22
.github/workflows/update_package_lock/action.yml
vendored
Normal file
22
.github/workflows/update_package_lock/action.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
name: update_package_lock
|
||||||
|
description: "Update node's package.lock"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
- name: Set git configs
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
git config user.name 'Lance Release'
|
||||||
|
git config user.email 'lance-dev@lancedb.com'
|
||||||
|
- name: Update package-lock.json file
|
||||||
|
working-directory: ./node
|
||||||
|
run: |
|
||||||
|
npm install
|
||||||
|
git add package-lock.json
|
||||||
|
git commit -m "Updating package-lock.json"
|
||||||
|
git push
|
||||||
|
shell: bash
|
||||||
12
.github/workflows/update_package_lock_run.yml
vendored
Normal file
12
.github/workflows/update_package_lock_run.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
name: Update package-lock.json
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- uses: ./.github/workflows/update_package_lock
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -5,6 +5,8 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
venv
|
venv
|
||||||
|
|
||||||
|
.vscode
|
||||||
|
|
||||||
rust/target
|
rust/target
|
||||||
rust/Cargo.lock
|
rust/Cargo.lock
|
||||||
|
|
||||||
|
|||||||
12
Cargo.toml
12
Cargo.toml
@@ -6,9 +6,11 @@ members = [
|
|||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
lance = "0.5.3"
|
lance = "=0.5.8"
|
||||||
arrow-array = "40.0"
|
arrow-array = "42.0"
|
||||||
arrow-data = "40.0"
|
arrow-data = "42.0"
|
||||||
arrow-schema = "40.0"
|
arrow-schema = "42.0"
|
||||||
arrow-ipc = "40.0"
|
arrow-ipc = "42.0"
|
||||||
|
half = { "version" = "=2.2.1", default-features = false }
|
||||||
object_store = "0.6.1"
|
object_store = "0.6.1"
|
||||||
|
|
||||||
|
|||||||
72
ci/build_linux_artifacts.sh
Normal file
72
ci/build_linux_artifacts.sh
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Builds the Linux artifacts (node binaries).
|
||||||
|
# Usage: ./build_linux_artifacts.sh [target]
|
||||||
|
# Targets supported:
|
||||||
|
# - x86_64-unknown-linux-gnu:centos
|
||||||
|
# - aarch64-unknown-linux-gnu:centos
|
||||||
|
# - aarch64-unknown-linux-musl
|
||||||
|
# - x86_64-unknown-linux-musl
|
||||||
|
|
||||||
|
# TODO: refactor this into a Docker container we can pull
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
setup_dependencies() {
|
||||||
|
echo "Installing system dependencies..."
|
||||||
|
if [[ $1 == *musl ]]; then
|
||||||
|
# musllinux
|
||||||
|
apk add openssl-dev
|
||||||
|
else
|
||||||
|
# rust / debian
|
||||||
|
apt update
|
||||||
|
apt install -y libssl-dev protobuf-compiler
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
install_node() {
|
||||||
|
echo "Installing node..."
|
||||||
|
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh | bash
|
||||||
|
source "$HOME"/.bashrc
|
||||||
|
|
||||||
|
if [[ $1 == *musl ]]; then
|
||||||
|
# This node version is 15, we need 16 or higher:
|
||||||
|
# apk add nodejs-current npm
|
||||||
|
# So instead we install from source (nvm doesn't provide binaries for musl):
|
||||||
|
nvm install -s --no-progress 17
|
||||||
|
else
|
||||||
|
nvm install --no-progress 17 # latest that supports glibc 2.17
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
build_node_binary() {
|
||||||
|
echo "Building node library for $1..."
|
||||||
|
pushd node
|
||||||
|
|
||||||
|
npm ci
|
||||||
|
|
||||||
|
if [[ $1 == *musl ]]; then
|
||||||
|
# This is needed for cargo to allow build cdylibs with musl
|
||||||
|
export RUSTFLAGS="-C target-feature=-crt-static"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Cargo can run out of memory while pulling dependencies, especially when running
|
||||||
|
# in QEMU. This is a workaround for that.
|
||||||
|
export CARGO_NET_GIT_FETCH_WITH_CLI=true
|
||||||
|
|
||||||
|
# We don't pass in target, since the native target here already matches
|
||||||
|
# We need to pass OPENSSL_LIB_DIR and OPENSSL_INCLUDE_DIR for static build to work https://github.com/sfackler/rust-openssl/issues/877
|
||||||
|
OPENSSL_STATIC=1 OPENSSL_LIB_DIR=/usr/lib/x86_64-linux-gnu OPENSSL_INCLUDE_DIR=/usr/include/openssl/ npm run build-release
|
||||||
|
npm run pack-build
|
||||||
|
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
TARGET=${1:-x86_64-unknown-linux-gnu}
|
||||||
|
# Others:
|
||||||
|
# aarch64-unknown-linux-gnu
|
||||||
|
# x86_64-unknown-linux-musl
|
||||||
|
# aarch64-unknown-linux-musl
|
||||||
|
|
||||||
|
setup_dependencies $TARGET
|
||||||
|
install_node $TARGET
|
||||||
|
build_node_binary $TARGET
|
||||||
33
ci/build_macos_artifacts.sh
Normal file
33
ci/build_macos_artifacts.sh
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# Builds the macOS artifacts (node binaries).
|
||||||
|
# Usage: ./ci/build_macos_artifacts.sh [target]
|
||||||
|
# Targets supported: x86_64-apple-darwin aarch64-apple-darwin
|
||||||
|
|
||||||
|
prebuild_rust() {
|
||||||
|
# Building here for the sake of easier debugging.
|
||||||
|
pushd rust/ffi/node
|
||||||
|
echo "Building rust library for $1"
|
||||||
|
export RUST_BACKTRACE=1
|
||||||
|
cargo build --release --target $1
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
build_node_binaries() {
|
||||||
|
pushd node
|
||||||
|
echo "Building node library for $1"
|
||||||
|
npm run build-release -- --target $1
|
||||||
|
npm run pack-build -- --target $1
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -n "$1" ]; then
|
||||||
|
targets=$1
|
||||||
|
else
|
||||||
|
targets="x86_64-apple-darwin aarch64-apple-darwin"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Building artifacts for targets: $targets"
|
||||||
|
for target in $targets
|
||||||
|
do
|
||||||
|
prebuild_rust $target
|
||||||
|
build_node_binaries $target
|
||||||
|
done
|
||||||
41
ci/build_windows_artifacts.ps1
Normal file
41
ci/build_windows_artifacts.ps1
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Builds the Windows artifacts (node binaries).
|
||||||
|
# Usage: .\ci\build_windows_artifacts.ps1 [target]
|
||||||
|
# Targets supported:
|
||||||
|
# - x86_64-pc-windows-msvc
|
||||||
|
# - i686-pc-windows-msvc
|
||||||
|
|
||||||
|
function Prebuild-Rust {
|
||||||
|
param (
|
||||||
|
[string]$target
|
||||||
|
)
|
||||||
|
|
||||||
|
# Building here for the sake of easier debugging.
|
||||||
|
Push-Location -Path "rust/ffi/node"
|
||||||
|
Write-Host "Building rust library for $target"
|
||||||
|
$env:RUST_BACKTRACE=1
|
||||||
|
cargo build --release --target $target
|
||||||
|
Pop-Location
|
||||||
|
}
|
||||||
|
|
||||||
|
function Build-NodeBinaries {
|
||||||
|
param (
|
||||||
|
[string]$target
|
||||||
|
)
|
||||||
|
|
||||||
|
Push-Location -Path "node"
|
||||||
|
Write-Host "Building node library for $target"
|
||||||
|
npm run build-release -- --target $target
|
||||||
|
npm run pack-build -- --target $target
|
||||||
|
Pop-Location
|
||||||
|
}
|
||||||
|
|
||||||
|
$targets = $args[0]
|
||||||
|
if (-not $targets) {
|
||||||
|
$targets = "x86_64-pc-windows-msvc"
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Host "Building artifacts for targets: $targets"
|
||||||
|
foreach ($target in $targets) {
|
||||||
|
Prebuild-Rust $target
|
||||||
|
Build-NodeBinaries $target
|
||||||
|
}
|
||||||
@@ -50,13 +50,19 @@ markdown_extensions:
|
|||||||
- pymdownx.superfences
|
- pymdownx.superfences
|
||||||
- pymdownx.tabbed:
|
- pymdownx.tabbed:
|
||||||
alternate_style: true
|
alternate_style: true
|
||||||
|
- md_in_html
|
||||||
|
|
||||||
nav:
|
nav:
|
||||||
- Home: index.md
|
- Home: index.md
|
||||||
- Basics: basic.md
|
- Basics: basic.md
|
||||||
- Embeddings: embedding.md
|
- Embeddings: embedding.md
|
||||||
- Python full-text search: fts.md
|
- Python full-text search: fts.md
|
||||||
- Python integrations: integrations.md
|
- Python integrations:
|
||||||
|
- Pandas and PyArrow: python/arrow.md
|
||||||
|
- DuckDB: python/duckdb.md
|
||||||
|
- LangChain 🦜️🔗: https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lancedb.html
|
||||||
|
- LlamaIndex 🦙: https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html
|
||||||
|
- Pydantic: python/pydantic.md
|
||||||
- Python examples:
|
- Python examples:
|
||||||
- YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
|
- YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
|
||||||
- Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
|
- Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
|
||||||
@@ -65,6 +71,7 @@ nav:
|
|||||||
- Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
|
- Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
|
||||||
- Javascript examples:
|
- Javascript examples:
|
||||||
- YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
|
- YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
|
||||||
|
- TransformersJS Embedding Search: examples/transformerjs_embedding_search_nodejs.md
|
||||||
- References:
|
- References:
|
||||||
- Vector Search: search.md
|
- Vector Search: search.md
|
||||||
- SQL filters: sql.md
|
- SQL filters: sql.md
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# ANN (Approximate Nearest Neighbor) Indexes
|
# ANN (Approximate Nearest Neighbor) Indexes
|
||||||
|
|
||||||
You can create an index over your vector data to make search faster.
|
You can create an index over your vector data to make search faster.
|
||||||
Vector indexes are faster but less accurate than exhaustive search.
|
Vector indexes are faster but less accurate than exhaustive search (KNN or Flat Search).
|
||||||
LanceDB provides many parameters to fine-tune the index's size, the speed of queries, and the accuracy of results.
|
LanceDB provides many parameters to fine-tune the index's size, the speed of queries, and the accuracy of results.
|
||||||
|
|
||||||
Currently, LanceDB does *not* automatically create the ANN index.
|
Currently, LanceDB does *not* automatically create the ANN index.
|
||||||
@@ -10,7 +10,18 @@ If you can live with <100ms latency, skipping index creation is a simpler workfl
|
|||||||
|
|
||||||
In the future we will look to automatically create and configure the ANN index.
|
In the future we will look to automatically create and configure the ANN index.
|
||||||
|
|
||||||
## Creating an ANN Index
|
## Types of Index
|
||||||
|
|
||||||
|
Lance can support multiple index types, the most widely used one is `IVF_PQ`.
|
||||||
|
|
||||||
|
* `IVF_PQ`: use **Inverted File Index (IVF)** to first divide the dataset into `N` partitions,
|
||||||
|
and then use **Product Quantization** to compress vectors in each partition.
|
||||||
|
* `DISKANN` (**Experimental**): organize the vector as a on-disk graph, where the vertices approximately
|
||||||
|
represent the nearest neighbors of each vector.
|
||||||
|
|
||||||
|
## Creating an IVF_PQ Index
|
||||||
|
|
||||||
|
Lance supports `IVF_PQ` index type by default.
|
||||||
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
||||||
@@ -45,15 +56,18 @@ In the future we will look to automatically create and configure the ANN index.
|
|||||||
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
|
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
|
||||||
```
|
```
|
||||||
|
|
||||||
Since `create_index` has a training step, it can take a few minutes to finish for large tables. You can control the index
|
- **metric** (default: "L2"): The distance metric to use. By default it uses euclidean distance "`L2`".
|
||||||
creation by providing the following parameters:
|
We also support "cosine" and "dot" distance as well.
|
||||||
|
- **num_partitions** (default: 256): The number of partitions of the index.
|
||||||
|
- **num_sub_vectors** (default: 96): The number of sub-vectors (M) that will be created during Product Quantization (PQ).
|
||||||
|
For D dimensional vector, it will be divided into `M` of `D/M` sub-vectors, each of which is presented by
|
||||||
|
a single PQ code.
|
||||||
|
|
||||||
|
<figure markdown>
|
||||||
|

|
||||||
|
<figcaption>IVF_PQ index with <code>num_partitions=2, num_sub_vectors=4</code></figcaption>
|
||||||
|
</figure>
|
||||||
|
|
||||||
- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support "cosine" distance.
|
|
||||||
- **num_partitions** (default: 256): The number of partitions of the index. The number of partitions should be configured so each partition has 3-5K vectors. For example, a table
|
|
||||||
with ~1M vectors should use 256 partitions. You can specify arbitrary number of partitions but powers of 2 is most conventional.
|
|
||||||
A higher number leads to faster queries, but it makes index generation slower.
|
|
||||||
- **num_sub_vectors** (default: 96): The number of subvectors (M) that will be created during Product Quantization (PQ). A larger number makes
|
|
||||||
search more accurate, but also makes the index larger and slower to build.
|
|
||||||
|
|
||||||
## Querying an ANN Index
|
## Querying an ANN Index
|
||||||
|
|
||||||
@@ -138,3 +152,31 @@ You can select the columns returned by the query using a select clause.
|
|||||||
.select(["id"])
|
.select(["id"])
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
### When is it necessary to create an ANN vector index.
|
||||||
|
|
||||||
|
`LanceDB` has manually tuned SIMD code for computing vector distances.
|
||||||
|
In our benchmarks, computing 100K pairs of 1K dimension vectors only take less than 20ms.
|
||||||
|
For small dataset (<100K rows) or the applications which can accept 100ms latency, vector indices are usually not necessary.
|
||||||
|
|
||||||
|
For large-scale or higher dimension vectors, it is beneficial to create vector index.
|
||||||
|
|
||||||
|
### How big is my index, and how many memory will it take.
|
||||||
|
|
||||||
|
In LanceDB, all vector indices are disk-based, meaning that when responding to a vector query, only the relevant pages from the index file are loaded from disk and cached in memory. Additionally, each sub-vector is usually encoded into 1 byte PQ code.
|
||||||
|
|
||||||
|
For example, with a 1024-dimension dataset, if we choose `num_sub_vectors=64`, each sub-vector has `1024 / 64 = 16` float32 numbers.
|
||||||
|
Product quantization can lead to approximately `16 * sizeof(float32) / 1 = 64` times of space reduction.
|
||||||
|
|
||||||
|
### How to choose `num_partitions` and `num_sub_vectors` for `IVF_PQ` index.
|
||||||
|
|
||||||
|
`num_partitions` is used to decide how many partitions the first level `IVF` index uses.
|
||||||
|
Higher number of partitions could lead to more efficient I/O during queries and better accuracy, but it takes much more time to train.
|
||||||
|
On `SIFT-1M` dataset, our benchmark shows that keeping each partition 1K-4K rows lead to a good latency / recall.
|
||||||
|
|
||||||
|
`num_sub_vectors` decides how many Product Quantization code to generate on each vector. Because
|
||||||
|
Product Quantization is a lossy compression of the original vector, the more `num_sub_vectors` usually results to
|
||||||
|
less space distortion, and thus yield better accuracy. However, similarly, more `num_sub_vectors` causes heavier I/O and
|
||||||
|
more PQ computation, thus, higher latency. `dimension / num_sub_vectors` should be aligned with 8 for better SIMD efficiency.
|
||||||
BIN
docs/src/assets/ivf_pq.png
Normal file
BIN
docs/src/assets/ivf_pq.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 266 KiB |
@@ -122,6 +122,35 @@ After a table has been created, you can always add more data to it using
|
|||||||
{vector: [9.5, 56.2], item: "buzz", price: 200.0}])
|
{vector: [9.5, 56.2], item: "buzz", price: 200.0}])
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## How to delete rows from a table
|
||||||
|
|
||||||
|
Use the `delete()` method on tables to delete rows from a table. To choose
|
||||||
|
which rows to delete, provide a filter that matches on the metadata columns.
|
||||||
|
This can delete any number of rows that match the filter.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
```python
|
||||||
|
tbl.delete('item = "fizz"')
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
await tbl.delete('item = "fizz"')
|
||||||
|
```
|
||||||
|
|
||||||
|
The deletion predicate is a SQL expression that supports the same expressions
|
||||||
|
as the `where()` clause on a search. They can be as simple or complex as needed.
|
||||||
|
To see what expressions are supported, see the [SQL filters](sql.md) section.
|
||||||
|
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
Read more: [lancedb.table.Table.delete][]
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
|
||||||
|
Read more: [vectordb.Table.delete](javascript/interfaces/Table.md#delete)
|
||||||
|
|
||||||
## How to search for (approximate) nearest neighbors
|
## How to search for (approximate) nearest neighbors
|
||||||
|
|
||||||
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
||||||
|
|||||||
@@ -46,7 +46,7 @@ You can also use an external API like OpenAI to generate embeddings
|
|||||||
|
|
||||||
def embed_func(c):
|
def embed_func(c):
|
||||||
rs = openai.Embedding.create(input=c, engine="text-embedding-ada-002")
|
rs = openai.Embedding.create(input=c, engine="text-embedding-ada-002")
|
||||||
return [record["embedding"] for record in rs["data"]]
|
return [record["embedding"] for record in rs["data"]]
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Javascript"
|
=== "Javascript"
|
||||||
@@ -126,7 +126,7 @@ belong in the same latent space and your results will be nonsensical.
|
|||||||
=== "Javascript"
|
=== "Javascript"
|
||||||
```javascript
|
```javascript
|
||||||
const results = await table
|
const results = await table
|
||||||
.search('What's the best pizza topping?')
|
.search("What's the best pizza topping?")
|
||||||
.limit(10)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|||||||
121
docs/src/examples/transformerjs_embedding_search_nodejs.md
Normal file
121
docs/src/examples/transformerjs_embedding_search_nodejs.md
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
# Vector embedding search using TransformersJS
|
||||||
|
|
||||||
|
## Embed and query data from LanceDB using TransformersJS
|
||||||
|
|
||||||
|
<img id="splash" width="400" alt="transformersjs" src="https://github.com/lancedb/lancedb/assets/43097991/88a31e30-3d6f-4eef-9216-4b7c688f1b4f">
|
||||||
|
|
||||||
|
This example shows how to use the [transformers.js](https://github.com/xenova/transformers.js) library to perform vector embedding search using LanceDB's Javascript API.
|
||||||
|
|
||||||
|
|
||||||
|
### Setting up
|
||||||
|
First, install the dependencies:
|
||||||
|
```bash
|
||||||
|
npm install vectordb
|
||||||
|
npm i @xenova/transformers
|
||||||
|
```
|
||||||
|
|
||||||
|
We will also be using the [all-MiniLM-L6-v2](https://huggingface.co/Xenova/all-MiniLM-L6-v2) model to make it compatible with Transformers.js
|
||||||
|
|
||||||
|
Within our `index.js` file we will import the necessary libraries and define our model and database:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
const { pipeline } = await import('@xenova/transformers')
|
||||||
|
const pipe = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating the embedding function
|
||||||
|
|
||||||
|
Next, we will create a function that will take in a string and return the vector embedding of that string. We will use the `pipe` function we defined earlier to get the vector embedding of the string.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Define the function. `sourceColumn` is required for LanceDB to know
|
||||||
|
// which column to use as input.
|
||||||
|
const embed_fun = {}
|
||||||
|
embed_fun.sourceColumn = 'text'
|
||||||
|
embed_fun.embed = async function (batch) {
|
||||||
|
let result = []
|
||||||
|
// Given a batch of strings, we will use the `pipe` function to get
|
||||||
|
// the vector embedding of each string.
|
||||||
|
for (let text of batch) {
|
||||||
|
// 'mean' pooling and normalizing allows the embeddings to share the
|
||||||
|
// same length.
|
||||||
|
const res = await pipe(text, { pooling: 'mean', normalize: true })
|
||||||
|
result.push(Array.from(res['data']))
|
||||||
|
}
|
||||||
|
return (result)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating the database
|
||||||
|
|
||||||
|
Now, we will create the LanceDB database and add the embedding function we defined earlier.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Link a folder and create a table with data
|
||||||
|
const db = await lancedb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
// You can also import any other data, but make sure that you have a column
|
||||||
|
// for the embedding function to use.
|
||||||
|
const data = [
|
||||||
|
{ id: 1, text: 'Cherry', type: 'fruit' },
|
||||||
|
{ id: 2, text: 'Carrot', type: 'vegetable' },
|
||||||
|
{ id: 3, text: 'Potato', type: 'vegetable' },
|
||||||
|
{ id: 4, text: 'Apple', type: 'fruit' },
|
||||||
|
{ id: 5, text: 'Banana', type: 'fruit' }
|
||||||
|
]
|
||||||
|
|
||||||
|
// Create the table with the embedding function
|
||||||
|
const table = await db.createTable('food_table', data, "create", embed_fun)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Performing the search
|
||||||
|
|
||||||
|
Now, we can perform the search using the `search` function. LanceDB automatically uses the embedding function we defined earlier to get the vector embedding of the query string.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Query the table
|
||||||
|
const results = await table
|
||||||
|
.search("a sweet fruit to eat")
|
||||||
|
.metricType("cosine")
|
||||||
|
.limit(2)
|
||||||
|
.execute()
|
||||||
|
console.log(results.map(r => r.text))
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
[ 'Banana', 'Cherry' ]
|
||||||
|
```
|
||||||
|
|
||||||
|
Output of `results`:
|
||||||
|
```bash
|
||||||
|
[
|
||||||
|
{
|
||||||
|
vector: Float32Array(384) [
|
||||||
|
-0.057455405592918396,
|
||||||
|
0.03617725893855095,
|
||||||
|
-0.0367760956287384,
|
||||||
|
... 381 more items
|
||||||
|
],
|
||||||
|
id: 5,
|
||||||
|
text: 'Banana',
|
||||||
|
type: 'fruit',
|
||||||
|
score: 0.4919965863227844
|
||||||
|
},
|
||||||
|
{
|
||||||
|
vector: Float32Array(384) [
|
||||||
|
0.0009714411571621895,
|
||||||
|
0.008223623037338257,
|
||||||
|
0.009571489877998829,
|
||||||
|
... 381 more items
|
||||||
|
],
|
||||||
|
id: 1,
|
||||||
|
text: 'Cherry',
|
||||||
|
type: 'fruit',
|
||||||
|
score: 0.5540297031402588
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Wrapping it up
|
||||||
|
|
||||||
|
In this example, we showed how to use the `transformers.js` library to perform vector embedding search using LanceDB's Javascript API. You can find the full code for this example on [Github](https://github.com/lancedb/lancedb/blob/main/node/examples/js-transformers/index.js)!
|
||||||
@@ -46,7 +46,7 @@ LanceDB's core is written in Rust 🦀 and is built using <a href="https://githu
|
|||||||
|
|
||||||
const uri = "data/sample-lancedb";
|
const uri = "data/sample-lancedb";
|
||||||
const db = await lancedb.connect(uri);
|
const db = await lancedb.connect(uri);
|
||||||
const table = await db.createTable("my_table",
|
const table = await db.createTable("my_table",
|
||||||
[{ id: 1, vector: [3.1, 4.1], item: "foo", price: 10.0 },
|
[{ id: 1, vector: [3.1, 4.1], item: "foo", price: 10.0 },
|
||||||
{ id: 2, vector: [5.9, 26.5], item: "bar", price: 20.0 }])
|
{ id: 2, vector: [5.9, 26.5], item: "bar", price: 20.0 }])
|
||||||
const results = await table.search([100, 100]).limit(2).execute();
|
const results = await table.search([100, 100]).limit(2).execute();
|
||||||
@@ -67,6 +67,6 @@ LanceDB's core is written in Rust 🦀 and is built using <a href="https://githu
|
|||||||
* [`Embedding Functions`](embedding.md) - functions for working with embeddings.
|
* [`Embedding Functions`](embedding.md) - functions for working with embeddings.
|
||||||
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
||||||
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
||||||
* [`Ecosystem Integrations`](integrations.md) - integrating LanceDB with python data tooling ecosystem.
|
* [`Ecosystem Integrations`](python/integration.md) - integrating LanceDB with python data tooling ecosystem.
|
||||||
* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
|
* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
|
||||||
* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB Node SDK.
|
* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB Node SDK.
|
||||||
|
|||||||
@@ -1,116 +0,0 @@
|
|||||||
# Integrations
|
|
||||||
|
|
||||||
Built on top of Apache Arrow, `LanceDB` is easy to integrate with the Python ecosystem, including Pandas, PyArrow and DuckDB.
|
|
||||||
|
|
||||||
## Pandas and PyArrow
|
|
||||||
|
|
||||||
First, we need to connect to a `LanceDB` database.
|
|
||||||
|
|
||||||
```py
|
|
||||||
|
|
||||||
import lancedb
|
|
||||||
|
|
||||||
db = lancedb.connect("data/sample-lancedb")
|
|
||||||
```
|
|
||||||
|
|
||||||
And write a `Pandas DataFrame` to LanceDB directly.
|
|
||||||
|
|
||||||
```py
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
data = pd.DataFrame({
|
|
||||||
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
|
||||||
"item": ["foo", "bar"],
|
|
||||||
"price": [10.0, 20.0]
|
|
||||||
})
|
|
||||||
table = db.create_table("pd_table", data=data)
|
|
||||||
```
|
|
||||||
|
|
||||||
You will find detailed instructions of creating dataset and index in [Basic Operations](basic.md) and [Indexing](ann_indexes.md)
|
|
||||||
sections.
|
|
||||||
|
|
||||||
|
|
||||||
We can now perform similarity searches via `LanceDB`.
|
|
||||||
|
|
||||||
```py
|
|
||||||
# Open the table previously created.
|
|
||||||
table = db.open_table("pd_table")
|
|
||||||
|
|
||||||
query_vector = [100, 100]
|
|
||||||
# Pandas DataFrame
|
|
||||||
df = table.search(query_vector).limit(1).to_df()
|
|
||||||
print(df)
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
vector item price score
|
|
||||||
0 [5.9, 26.5] bar 20.0 14257.05957
|
|
||||||
```
|
|
||||||
|
|
||||||
If you have a simple filter, it's faster to provide a where clause to `LanceDB`'s search query.
|
|
||||||
If you have more complex criteria, you can always apply the filter to the resulting pandas `DataFrame` from the search query.
|
|
||||||
|
|
||||||
```python
|
|
||||||
|
|
||||||
# Apply the filter via LanceDB
|
|
||||||
results = table.search([100, 100]).where("price < 15").to_df()
|
|
||||||
assert len(results) == 1
|
|
||||||
assert results["item"].iloc[0] == "foo"
|
|
||||||
|
|
||||||
# Apply the filter via Pandas
|
|
||||||
df = results = table.search([100, 100]).to_df()
|
|
||||||
results = df[df.price < 15]
|
|
||||||
assert len(results) == 1
|
|
||||||
assert results["item"].iloc[0] == "foo"
|
|
||||||
```
|
|
||||||
|
|
||||||
## DuckDB
|
|
||||||
|
|
||||||
`LanceDB` works with `DuckDB` via [PyArrow integration](https://duckdb.org/docs/guides/python/sql_on_arrow).
|
|
||||||
|
|
||||||
Let us start with installing `duckdb` and `lancedb`.
|
|
||||||
|
|
||||||
```shell
|
|
||||||
pip install duckdb lancedb
|
|
||||||
```
|
|
||||||
|
|
||||||
We will re-use the dataset created previously
|
|
||||||
|
|
||||||
```python
|
|
||||||
import lancedb
|
|
||||||
|
|
||||||
db = lancedb.connect("data/sample-lancedb")
|
|
||||||
table = db.open_table("pd_table")
|
|
||||||
arrow_table = table.to_arrow()
|
|
||||||
```
|
|
||||||
|
|
||||||
`DuckDB` can directly query the `arrow_table`:
|
|
||||||
|
|
||||||
```python
|
|
||||||
import duckdb
|
|
||||||
|
|
||||||
duckdb.query("SELECT * FROM arrow_table")
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────┬─────────┬────────┐
|
|
||||||
│ vector │ item │ price │
|
|
||||||
│ float[] │ varchar │ double │
|
|
||||||
├─────────────┼─────────┼────────┤
|
|
||||||
│ [3.1, 4.1] │ foo │ 10.0 │
|
|
||||||
│ [5.9, 26.5] │ bar │ 20.0 │
|
|
||||||
└─────────────┴─────────┴────────┘
|
|
||||||
```
|
|
||||||
```python
|
|
||||||
duckdb.query("SELECT mean(price) FROM arrow_table")
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
Out[16]:
|
|
||||||
┌─────────────┐
|
|
||||||
│ mean(price) │
|
|
||||||
│ double │
|
|
||||||
├─────────────┤
|
|
||||||
│ 15.0 │
|
|
||||||
└─────────────┘
|
|
||||||
```
|
|
||||||
@@ -10,6 +10,10 @@ A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb)
|
|||||||
npm install vectordb
|
npm install vectordb
|
||||||
```
|
```
|
||||||
|
|
||||||
|
This will download the appropriate native library for your platform. We currently
|
||||||
|
support x86_64 Linux, aarch64 Linux, Intel MacOS, and ARM (M1/M2) MacOS. We do not
|
||||||
|
yet support Windows or musl-based Linux (such as Alpine Linux).
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
### Basic Example
|
### Basic Example
|
||||||
@@ -28,12 +32,34 @@ The [examples](./examples) folder contains complete examples.
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run the tests with
|
To build everything fresh:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you should be able to run the tests with:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm test
|
npm test
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Rebuilding Rust library
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rebuilding Typescript
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run tsc
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fix lints
|
||||||
|
|
||||||
To run the linter and have it automatically fix all errors
|
To run the linter and have it automatically fix all errors
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ A connection to a LanceDB database.
|
|||||||
### Properties
|
### Properties
|
||||||
|
|
||||||
- [\_db](LocalConnection.md#_db)
|
- [\_db](LocalConnection.md#_db)
|
||||||
- [\_uri](LocalConnection.md#_uri)
|
- [\_options](LocalConnection.md#_options)
|
||||||
|
|
||||||
### Accessors
|
### Accessors
|
||||||
|
|
||||||
@@ -35,18 +35,18 @@ A connection to a LanceDB database.
|
|||||||
|
|
||||||
### constructor
|
### constructor
|
||||||
|
|
||||||
• **new LocalConnection**(`db`, `uri`)
|
• **new LocalConnection**(`db`, `options`)
|
||||||
|
|
||||||
#### Parameters
|
#### Parameters
|
||||||
|
|
||||||
| Name | Type |
|
| Name | Type |
|
||||||
| :------ | :------ |
|
| :------ | :------ |
|
||||||
| `db` | `any` |
|
| `db` | `any` |
|
||||||
| `uri` | `string` |
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:132](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L132)
|
[index.ts:184](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L184)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -56,17 +56,17 @@ A connection to a LanceDB database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:130](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L130)
|
[index.ts:182](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L182)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
### \_uri
|
### \_options
|
||||||
|
|
||||||
• `Private` `Readonly` **\_uri**: `string`
|
• `Private` `Readonly` **\_options**: [`ConnectionOptions`](../interfaces/ConnectionOptions.md)
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:129](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L129)
|
[index.ts:181](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L181)
|
||||||
|
|
||||||
## Accessors
|
## Accessors
|
||||||
|
|
||||||
@@ -84,7 +84,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:137](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L137)
|
[index.ts:189](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L189)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ Creates a new Table and initialize it with new data.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:177](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L177)
|
[index.ts:230](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L230)
|
||||||
|
|
||||||
▸ **createTable**(`name`, `data`, `mode`): `Promise`<[`Table`](../interfaces/Table.md)<`number`[]\>\>
|
▸ **createTable**(`name`, `data`, `mode`): `Promise`<[`Table`](../interfaces/Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
@@ -134,7 +134,7 @@ Connection.createTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:178](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L178)
|
[index.ts:231](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L231)
|
||||||
|
|
||||||
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
@@ -165,7 +165,36 @@ Connection.createTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:188](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L188)
|
[index.ts:241](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L241)
|
||||||
|
|
||||||
|
▸ **createTable**<`T`\>(`name`, `data`, `mode`, `embeddings?`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `name` | `string` |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] |
|
||||||
|
| `mode` | [`WriteMode`](../enums/WriteMode.md) |
|
||||||
|
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
Connection.createTable
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:242](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L242)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -190,7 +219,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:201](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L201)
|
[index.ts:266](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L266)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -216,7 +245,7 @@ Drop an existing table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:211](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L211)
|
[index.ts:276](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L276)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -242,7 +271,7 @@ Open a table in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:153](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L153)
|
[index.ts:205](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L205)
|
||||||
|
|
||||||
▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
@@ -271,7 +300,34 @@ Connection.openTable
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:160](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L160)
|
[index.ts:212](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L212)
|
||||||
|
|
||||||
|
▸ **openTable**<`T`\>(`name`, `embeddings?`): `Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `name` | `string` |
|
||||||
|
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](../interfaces/Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
Connection.openTable
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:213](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L213)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -291,4 +347,4 @@ Get the names of all tables in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:144](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L144)
|
[index.ts:196](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L196)
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
- [\_embeddings](LocalTable.md#_embeddings)
|
- [\_embeddings](LocalTable.md#_embeddings)
|
||||||
- [\_name](LocalTable.md#_name)
|
- [\_name](LocalTable.md#_name)
|
||||||
|
- [\_options](LocalTable.md#_options)
|
||||||
- [\_tbl](LocalTable.md#_tbl)
|
- [\_tbl](LocalTable.md#_tbl)
|
||||||
|
|
||||||
### Accessors
|
### Accessors
|
||||||
@@ -43,7 +44,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
### constructor
|
### constructor
|
||||||
|
|
||||||
• **new LocalTable**<`T`\>(`tbl`, `name`)
|
• **new LocalTable**<`T`\>(`tbl`, `name`, `options`)
|
||||||
|
|
||||||
#### Type parameters
|
#### Type parameters
|
||||||
|
|
||||||
@@ -57,12 +58,13 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
| :------ | :------ |
|
| :------ | :------ |
|
||||||
| `tbl` | `any` |
|
| `tbl` | `any` |
|
||||||
| `name` | `string` |
|
| `name` | `string` |
|
||||||
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:221](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L221)
|
[index.ts:287](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L287)
|
||||||
|
|
||||||
• **new LocalTable**<`T`\>(`tbl`, `name`, `embeddings`)
|
• **new LocalTable**<`T`\>(`tbl`, `name`, `options`, `embeddings`)
|
||||||
|
|
||||||
#### Type parameters
|
#### Type parameters
|
||||||
|
|
||||||
@@ -76,11 +78,12 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `tbl` | `any` | |
|
| `tbl` | `any` | |
|
||||||
| `name` | `string` | |
|
| `name` | `string` | |
|
||||||
|
| `options` | [`ConnectionOptions`](../interfaces/ConnectionOptions.md) | |
|
||||||
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |
|
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:227](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L227)
|
[index.ts:294](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L294)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -90,7 +93,7 @@ A LanceDB Table is the collection of Records. Each Record has one or more vector
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:219](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L219)
|
[index.ts:284](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L284)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -100,7 +103,17 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:218](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L218)
|
[index.ts:283](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L283)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_options
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_options**: [`ConnectionOptions`](../interfaces/ConnectionOptions.md)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:285](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L285)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -110,7 +123,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:217](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L217)
|
[index.ts:282](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L282)
|
||||||
|
|
||||||
## Accessors
|
## Accessors
|
||||||
|
|
||||||
@@ -128,7 +141,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:234](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L234)
|
[index.ts:302](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L302)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -156,7 +169,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:252](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L252)
|
[index.ts:320](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L320)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -176,7 +189,7 @@ Returns the number of rows in this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:278](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L278)
|
[index.ts:362](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L362)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -194,7 +207,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index, |
|
| `indexParams` | [`IvfPQIndexConfig`](../interfaces/IvfPQIndexConfig.md) | The parameters of this Index, |
|
||||||
|
|
||||||
#### Returns
|
#### Returns
|
||||||
|
|
||||||
@@ -206,7 +219,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:271](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L271)
|
[index.ts:355](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L355)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -232,7 +245,7 @@ Delete rows from this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:287](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L287)
|
[index.ts:371](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L371)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -260,7 +273,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:262](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L262)
|
[index.ts:338](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L338)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -286,4 +299,4 @@ Creates a search query to find the nearest neighbors of the given search term
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:242](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L242)
|
[index.ts:310](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L310)
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ An embedding function that automatically creates vector representation for a giv
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L21)
|
[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L21)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -50,7 +50,7 @@ An embedding function that automatically creates vector representation for a giv
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L19)
|
[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L19)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -60,7 +60,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L18)
|
[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L18)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -76,7 +76,7 @@ The name of the column that will be used as input for the Embedding Function.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L50)
|
[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L50)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -102,4 +102,4 @@ Creates a vector representation for the given values.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/openai.ts#L38)
|
[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/openai.ts#L38)
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ A builder for nearest neighbor queries for LanceDB.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:362](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L362)
|
[index.ts:448](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L448)
|
||||||
|
|
||||||
## Properties
|
## Properties
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ A builder for nearest neighbor queries for LanceDB.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:360](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L360)
|
[index.ts:446](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L446)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -82,7 +82,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:358](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L358)
|
[index.ts:444](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L444)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -92,7 +92,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:354](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L354)
|
[index.ts:440](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L440)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -102,7 +102,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:359](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L359)
|
[index.ts:445](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L445)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:356](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L356)
|
[index.ts:442](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L442)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -122,7 +122,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:352](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L352)
|
[index.ts:438](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L438)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -132,7 +132,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:353](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L353)
|
[index.ts:439](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L439)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -142,7 +142,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:355](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L355)
|
[index.ts:441](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L441)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -152,7 +152,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:357](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L357)
|
[index.ts:443](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L443)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -162,7 +162,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:351](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L351)
|
[index.ts:437](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L437)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -188,7 +188,7 @@ A filter statement to be applied to this query.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:410](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L410)
|
[index.ts:496](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L496)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -210,7 +210,7 @@ Execute the query and return the results as an Array of Objects
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:433](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L433)
|
[index.ts:519](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L519)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -232,7 +232,7 @@ A filter statement to be applied to this query.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:405](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L405)
|
[index.ts:491](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L491)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -254,7 +254,7 @@ Sets the number of results that will be returned
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:378](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L378)
|
[index.ts:464](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L464)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -280,7 +280,7 @@ MetricType for the different options
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:425](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L425)
|
[index.ts:511](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L511)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -302,7 +302,7 @@ The number of probes used. A higher number makes search more accurate but also s
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:396](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L396)
|
[index.ts:482](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L482)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -324,7 +324,7 @@ Refine the results by reading extra elements and re-ranking them in memory.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:387](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L387)
|
[index.ts:473](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L473)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -346,4 +346,4 @@ Return only the specified columns.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:416](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L416)
|
[index.ts:502](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L502)
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ Cosine distance
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:481](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L481)
|
[index.ts:567](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L567)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -34,7 +34,7 @@ Dot product
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:486](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L486)
|
[index.ts:572](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L572)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -46,4 +46,4 @@ Euclidean distance
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:476](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L476)
|
[index.ts:562](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L562)
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ Append new data to the table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:466](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L466)
|
[index.ts:552](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L552)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -34,7 +34,7 @@ Create a new [Table](../interfaces/Table.md).
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:462](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L462)
|
[index.ts:548](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L548)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -46,4 +46,4 @@ Overwrite the existing [Table](../interfaces/Table.md) if presented.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:464](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L464)
|
[index.ts:550](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L550)
|
||||||
|
|||||||
41
docs/src/javascript/interfaces/AwsCredentials.md
Normal file
41
docs/src/javascript/interfaces/AwsCredentials.md
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / AwsCredentials
|
||||||
|
|
||||||
|
# Interface: AwsCredentials
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [accessKeyId](AwsCredentials.md#accesskeyid)
|
||||||
|
- [secretKey](AwsCredentials.md#secretkey)
|
||||||
|
- [sessionToken](AwsCredentials.md#sessiontoken)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### accessKeyId
|
||||||
|
|
||||||
|
• **accessKeyId**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:31](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L31)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### secretKey
|
||||||
|
|
||||||
|
• **secretKey**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:33](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L33)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### sessionToken
|
||||||
|
|
||||||
|
• `Optional` **sessionToken**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:35](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L35)
|
||||||
@@ -32,7 +32,7 @@ Connection could be local against filesystem or remote against a server.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:45](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L45)
|
[index.ts:70](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L70)
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
@@ -63,7 +63,7 @@ Creates a new Table and initialize it with new data.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:65](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L65)
|
[index.ts:90](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L90)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -84,7 +84,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:67](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L67)
|
[index.ts:92](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L92)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -106,7 +106,7 @@ Drop an existing table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:73](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L73)
|
[index.ts:98](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L98)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -135,7 +135,7 @@ Open a table in the database.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:55](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L55)
|
[index.ts:80](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L80)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -149,4 +149,4 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:47](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L47)
|
[index.ts:72](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L72)
|
||||||
|
|||||||
30
docs/src/javascript/interfaces/ConnectionOptions.md
Normal file
30
docs/src/javascript/interfaces/ConnectionOptions.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / ConnectionOptions
|
||||||
|
|
||||||
|
# Interface: ConnectionOptions
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [awsCredentials](ConnectionOptions.md#awscredentials)
|
||||||
|
- [uri](ConnectionOptions.md#uri)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### awsCredentials
|
||||||
|
|
||||||
|
• `Optional` **awsCredentials**: [`AwsCredentials`](AwsCredentials.md)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:40](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L40)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### uri
|
||||||
|
|
||||||
|
• **uri**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:39](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L39)
|
||||||
@@ -45,7 +45,7 @@ Creates a vector representation for the given values.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/embedding_function.ts#L27)
|
[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/embedding_function.ts#L27)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -57,4 +57,4 @@ The name of the column that will be used as input for the Embedding Function.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/7247834/node/src/embedding/embedding_function.ts#L22)
|
[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/embedding/embedding_function.ts#L22)
|
||||||
|
|||||||
149
docs/src/javascript/interfaces/IvfPQIndexConfig.md
Normal file
149
docs/src/javascript/interfaces/IvfPQIndexConfig.md
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / IvfPQIndexConfig
|
||||||
|
|
||||||
|
# Interface: IvfPQIndexConfig
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [column](IvfPQIndexConfig.md#column)
|
||||||
|
- [index\_name](IvfPQIndexConfig.md#index_name)
|
||||||
|
- [max\_iters](IvfPQIndexConfig.md#max_iters)
|
||||||
|
- [max\_opq\_iters](IvfPQIndexConfig.md#max_opq_iters)
|
||||||
|
- [metric\_type](IvfPQIndexConfig.md#metric_type)
|
||||||
|
- [num\_bits](IvfPQIndexConfig.md#num_bits)
|
||||||
|
- [num\_partitions](IvfPQIndexConfig.md#num_partitions)
|
||||||
|
- [num\_sub\_vectors](IvfPQIndexConfig.md#num_sub_vectors)
|
||||||
|
- [replace](IvfPQIndexConfig.md#replace)
|
||||||
|
- [type](IvfPQIndexConfig.md#type)
|
||||||
|
- [use\_opq](IvfPQIndexConfig.md#use_opq)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### column
|
||||||
|
|
||||||
|
• `Optional` **column**: `string`
|
||||||
|
|
||||||
|
The column to be indexed
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:382](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L382)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### index\_name
|
||||||
|
|
||||||
|
• `Optional` **index\_name**: `string`
|
||||||
|
|
||||||
|
A unique name for the index
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:387](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L387)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### max\_iters
|
||||||
|
|
||||||
|
• `Optional` **max\_iters**: `number`
|
||||||
|
|
||||||
|
The max number of iterations for kmeans training.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:402](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L402)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### max\_opq\_iters
|
||||||
|
|
||||||
|
• `Optional` **max\_opq\_iters**: `number`
|
||||||
|
|
||||||
|
Max number of iterations to train OPQ, if `use_opq` is true.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:421](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L421)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### metric\_type
|
||||||
|
|
||||||
|
• `Optional` **metric\_type**: [`MetricType`](../enums/MetricType.md)
|
||||||
|
|
||||||
|
Metric type, L2 or Cosine
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:392](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L392)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_bits
|
||||||
|
|
||||||
|
• `Optional` **num\_bits**: `number`
|
||||||
|
|
||||||
|
The number of bits to present one PQ centroid.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:416](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L416)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_partitions
|
||||||
|
|
||||||
|
• `Optional` **num\_partitions**: `number`
|
||||||
|
|
||||||
|
The number of partitions this index
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:397](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L397)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### num\_sub\_vectors
|
||||||
|
|
||||||
|
• `Optional` **num\_sub\_vectors**: `number`
|
||||||
|
|
||||||
|
Number of subvectors to build PQ code
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:412](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L412)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### replace
|
||||||
|
|
||||||
|
• `Optional` **replace**: `boolean`
|
||||||
|
|
||||||
|
Replace an existing index with the same name if it exists.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:426](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L426)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### type
|
||||||
|
|
||||||
|
• **type**: ``"ivf_pq"``
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:428](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L428)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### use\_opq
|
||||||
|
|
||||||
|
• `Optional` **use\_opq**: `boolean`
|
||||||
|
|
||||||
|
Train as optimized product quantization.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:407](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L407)
|
||||||
@@ -52,7 +52,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:95](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L95)
|
[index.ts:120](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L120)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -72,13 +72,13 @@ Returns the number of rows in this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:115](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L115)
|
[index.ts:140](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L140)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
### createIndex
|
### createIndex
|
||||||
|
|
||||||
• **createIndex**: (`indexParams`: `IvfPQIndexConfig`) => `Promise`<`any`\>
|
• **createIndex**: (`indexParams`: [`IvfPQIndexConfig`](IvfPQIndexConfig.md)) => `Promise`<`any`\>
|
||||||
|
|
||||||
#### Type declaration
|
#### Type declaration
|
||||||
|
|
||||||
@@ -94,7 +94,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index, |
|
| `indexParams` | [`IvfPQIndexConfig`](IvfPQIndexConfig.md) | The parameters of this Index, |
|
||||||
|
|
||||||
##### Returns
|
##### Returns
|
||||||
|
|
||||||
@@ -102,7 +102,7 @@ VectorIndexParams.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:110](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L110)
|
[index.ts:135](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L135)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -116,11 +116,37 @@ ___
|
|||||||
|
|
||||||
Delete rows from this table.
|
Delete rows from this table.
|
||||||
|
|
||||||
|
This can be used to delete a single row, many rows, all rows, or
|
||||||
|
sometimes no rows (if your predicate matches nothing).
|
||||||
|
|
||||||
|
**`Examples`**
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const con = await lancedb.connect("./.lancedb")
|
||||||
|
const data = [
|
||||||
|
{id: 1, vector: [1, 2]},
|
||||||
|
{id: 2, vector: [3, 4]},
|
||||||
|
{id: 3, vector: [5, 6]},
|
||||||
|
];
|
||||||
|
const tbl = await con.createTable("my_table", data)
|
||||||
|
await tbl.delete("id = 2")
|
||||||
|
await tbl.countRows() // Returns 2
|
||||||
|
```
|
||||||
|
|
||||||
|
If you have a list of values to delete, you can combine them into a
|
||||||
|
stringified list and use the `IN` operator:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const to_remove = [1, 5];
|
||||||
|
await tbl.delete(`id IN (${to_remove.join(",")})`)
|
||||||
|
await tbl.countRows() // Returns 1
|
||||||
|
```
|
||||||
|
|
||||||
##### Parameters
|
##### Parameters
|
||||||
|
|
||||||
| Name | Type | Description |
|
| Name | Type | Description |
|
||||||
| :------ | :------ | :------ |
|
| :------ | :------ | :------ |
|
||||||
| `filter` | `string` | A filter in the same format used by a sql WHERE clause. |
|
| `filter` | `string` | A filter in the same format used by a sql WHERE clause. The filter must not be empty. |
|
||||||
|
|
||||||
##### Returns
|
##### Returns
|
||||||
|
|
||||||
@@ -128,7 +154,7 @@ Delete rows from this table.
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:122](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L122)
|
[index.ts:174](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L174)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -138,7 +164,7 @@ ___
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:81](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L81)
|
[index.ts:106](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L106)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -166,7 +192,7 @@ The number of rows added to the table
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:103](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L103)
|
[index.ts:128](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L128)
|
||||||
|
|
||||||
___
|
___
|
||||||
|
|
||||||
@@ -192,4 +218,4 @@ Creates a search query to find the nearest neighbors of the given search term
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:87](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L87)
|
[index.ts:112](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L112)
|
||||||
|
|||||||
@@ -18,8 +18,11 @@
|
|||||||
|
|
||||||
### Interfaces
|
### Interfaces
|
||||||
|
|
||||||
|
- [AwsCredentials](interfaces/AwsCredentials.md)
|
||||||
- [Connection](interfaces/Connection.md)
|
- [Connection](interfaces/Connection.md)
|
||||||
|
- [ConnectionOptions](interfaces/ConnectionOptions.md)
|
||||||
- [EmbeddingFunction](interfaces/EmbeddingFunction.md)
|
- [EmbeddingFunction](interfaces/EmbeddingFunction.md)
|
||||||
|
- [IvfPQIndexConfig](interfaces/IvfPQIndexConfig.md)
|
||||||
- [Table](interfaces/Table.md)
|
- [Table](interfaces/Table.md)
|
||||||
|
|
||||||
### Type Aliases
|
### Type Aliases
|
||||||
@@ -34,11 +37,11 @@
|
|||||||
|
|
||||||
### VectorIndexParams
|
### VectorIndexParams
|
||||||
|
|
||||||
Ƭ **VectorIndexParams**: `IvfPQIndexConfig`
|
Ƭ **VectorIndexParams**: [`IvfPQIndexConfig`](interfaces/IvfPQIndexConfig.md)
|
||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:345](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L345)
|
[index.ts:431](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L431)
|
||||||
|
|
||||||
## Functions
|
## Functions
|
||||||
|
|
||||||
@@ -60,4 +63,20 @@ Connect to a LanceDB instance at the given URI
|
|||||||
|
|
||||||
#### Defined in
|
#### Defined in
|
||||||
|
|
||||||
[index.ts:34](https://github.com/lancedb/lancedb/blob/7247834/node/src/index.ts#L34)
|
[index.ts:47](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L47)
|
||||||
|
|
||||||
|
▸ **connect**(`opts`): `Promise`<[`Connection`](interfaces/Connection.md)\>
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `opts` | `Partial`<[`ConnectionOptions`](interfaces/ConnectionOptions.md)\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Connection`](interfaces/Connection.md)\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:48](https://github.com/lancedb/lancedb/blob/b1eeb90/node/src/index.ts#L48)
|
||||||
|
|||||||
101
docs/src/python/arrow.md
Normal file
101
docs/src/python/arrow.md
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# Pandas and PyArrow
|
||||||
|
|
||||||
|
|
||||||
|
Built on top of [Apache Arrow](https://arrow.apache.org/),
|
||||||
|
`LanceDB` is easy to integrate with the Python ecosystem, including [Pandas](https://pandas.pydata.org/)
|
||||||
|
and PyArrow.
|
||||||
|
|
||||||
|
## Create dataset
|
||||||
|
|
||||||
|
First, we need to connect to a `LanceDB` database.
|
||||||
|
|
||||||
|
```py
|
||||||
|
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
db = lancedb.connect("data/sample-lancedb")
|
||||||
|
```
|
||||||
|
|
||||||
|
Afterwards, we write a `Pandas DataFrame` to LanceDB directly.
|
||||||
|
|
||||||
|
```py
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
data = pd.DataFrame({
|
||||||
|
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
||||||
|
"item": ["foo", "bar"],
|
||||||
|
"price": [10.0, 20.0]
|
||||||
|
})
|
||||||
|
table = db.create_table("pd_table", data=data)
|
||||||
|
```
|
||||||
|
|
||||||
|
Similar to [`pyarrow.write_dataset()`](https://arrow.apache.org/docs/python/generated/pyarrow.dataset.write_dataset.html),
|
||||||
|
[db.create_table()](../python/#lancedb.db.DBConnection.create_table) accepts a wide-range of forms of data.
|
||||||
|
|
||||||
|
For example, if you have a dataset that is larger than memory size, you can create table with `Iterator[pyarrow.RecordBatch]`,
|
||||||
|
to lazily generate data:
|
||||||
|
|
||||||
|
```py
|
||||||
|
|
||||||
|
from typing import Iterable
|
||||||
|
import pyarrow as pa
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
def make_batches() -> Iterable[pa.RecordBatch]:
|
||||||
|
for i in range(5):
|
||||||
|
yield pa.RecordBatch.from_arrays(
|
||||||
|
[
|
||||||
|
pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
pa.array(["foo", "bar"]),
|
||||||
|
pa.array([10.0, 20.0]),
|
||||||
|
],
|
||||||
|
["vector", "item", "price"])
|
||||||
|
|
||||||
|
schema=pa.schema([
|
||||||
|
pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
pa.field("item", pa.utf8()),
|
||||||
|
pa.field("price", pa.float32()),
|
||||||
|
])
|
||||||
|
|
||||||
|
table = db.create_table("iterable_table", data=make_batches(), schema=schema)
|
||||||
|
```
|
||||||
|
|
||||||
|
You will find detailed instructions of creating dataset in
|
||||||
|
[Basic Operations](../basic.md) and [API](../python/#lancedb.db.DBConnection.create_table)
|
||||||
|
sections.
|
||||||
|
|
||||||
|
## Vector Search
|
||||||
|
|
||||||
|
We can now perform similarity search via `LanceDB` Python API.
|
||||||
|
|
||||||
|
```py
|
||||||
|
# Open the table previously created.
|
||||||
|
table = db.open_table("pd_table")
|
||||||
|
|
||||||
|
query_vector = [100, 100]
|
||||||
|
# Pandas DataFrame
|
||||||
|
df = table.search(query_vector).limit(1).to_df()
|
||||||
|
print(df)
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
vector item price score
|
||||||
|
0 [5.9, 26.5] bar 20.0 14257.05957
|
||||||
|
```
|
||||||
|
|
||||||
|
If you have a simple filter, it's faster to provide a `where clause` to `LanceDB`'s search query.
|
||||||
|
If you have more complex criteria, you can always apply the filter to the resulting Pandas `DataFrame`.
|
||||||
|
|
||||||
|
```python
|
||||||
|
|
||||||
|
# Apply the filter via LanceDB
|
||||||
|
results = table.search([100, 100]).where("price < 15").to_df()
|
||||||
|
assert len(results) == 1
|
||||||
|
assert results["item"].iloc[0] == "foo"
|
||||||
|
|
||||||
|
# Apply the filter via Pandas
|
||||||
|
df = results = table.search([100, 100]).to_df()
|
||||||
|
results = df[df.price < 15]
|
||||||
|
assert len(results) == 1
|
||||||
|
assert results["item"].iloc[0] == "foo"
|
||||||
|
```
|
||||||
56
docs/src/python/duckdb.md
Normal file
56
docs/src/python/duckdb.md
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
# DuckDB
|
||||||
|
|
||||||
|
`LanceDB` works with `DuckDB` via [PyArrow integration](https://duckdb.org/docs/guides/python/sql_on_arrow).
|
||||||
|
|
||||||
|
Let us start with installing `duckdb` and `lancedb`.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install duckdb lancedb
|
||||||
|
```
|
||||||
|
|
||||||
|
We will re-use [the dataset created previously](./arrow.md):
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pandas as pd
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
db = lancedb.connect("data/sample-lancedb")
|
||||||
|
data = pd.DataFrame({
|
||||||
|
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
||||||
|
"item": ["foo", "bar"],
|
||||||
|
"price": [10.0, 20.0]
|
||||||
|
})
|
||||||
|
table = db.create_table("pd_table", data=data)
|
||||||
|
arrow_table = table.to_arrow()
|
||||||
|
```
|
||||||
|
|
||||||
|
`DuckDB` can directly query the `arrow_table`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import duckdb
|
||||||
|
|
||||||
|
duckdb.query("SELECT * FROM arrow_table")
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────┬─────────┬────────┐
|
||||||
|
│ vector │ item │ price │
|
||||||
|
│ float[] │ varchar │ double │
|
||||||
|
├─────────────┼─────────┼────────┤
|
||||||
|
│ [3.1, 4.1] │ foo │ 10.0 │
|
||||||
|
│ [5.9, 26.5] │ bar │ 20.0 │
|
||||||
|
└─────────────┴─────────┴────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
```py
|
||||||
|
duckdb.query("SELECT mean(price) FROM arrow_table")
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────┐
|
||||||
|
│ mean(price) │
|
||||||
|
│ double │
|
||||||
|
├─────────────┤
|
||||||
|
│ 15.0 │
|
||||||
|
└─────────────┘
|
||||||
|
```
|
||||||
7
docs/src/python/integration.md
Normal file
7
docs/src/python/integration.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Integration
|
||||||
|
|
||||||
|
Built on top of [Apache Arrow](https://arrow.apache.org/),
|
||||||
|
`LanceDB` is very easy to be integrate with Python ecosystems.
|
||||||
|
|
||||||
|
* [Pandas and Arrow Integration](./arrow.md)
|
||||||
|
* [DuckDB Integration](./duckdb.md)
|
||||||
35
docs/src/python/pydantic.md
Normal file
35
docs/src/python/pydantic.md
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# Pydantic
|
||||||
|
|
||||||
|
[Pydantic](https://docs.pydantic.dev/latest/) is a data validation library in Python.
|
||||||
|
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
LanceDB supports to create Apache Arrow Schema from a
|
||||||
|
[Pydantic BaseModel](https://docs.pydantic.dev/latest/api/main/#pydantic.main.BaseModel)
|
||||||
|
via [pydantic_to_schema()](python.md##lancedb.pydantic.pydantic_to_schema) method.
|
||||||
|
|
||||||
|
::: lancedb.pydantic.pydantic_to_schema
|
||||||
|
|
||||||
|
## Vector Field
|
||||||
|
|
||||||
|
LanceDB provides a [`vector(dim)`](python.md#lancedb.pydantic.vector) method to define a
|
||||||
|
vector Field in a Pydantic Model.
|
||||||
|
|
||||||
|
::: lancedb.pydantic.vector
|
||||||
|
|
||||||
|
## Type Conversion
|
||||||
|
|
||||||
|
LanceDB automatically convert Pydantic fields to
|
||||||
|
[Apache Arrow DataType](https://arrow.apache.org/docs/python/generated/pyarrow.DataType.html#pyarrow.DataType).
|
||||||
|
|
||||||
|
Current supported type conversions:
|
||||||
|
|
||||||
|
| Pydantic Field Type | PyArrow Data Type |
|
||||||
|
| ------------------- | ----------------- |
|
||||||
|
| `int` | `pyarrow.int64` |
|
||||||
|
| `float` | `pyarrow.float64` |
|
||||||
|
| `bool` | `pyarrow.bool` |
|
||||||
|
| `str` | `pyarrow.utf8()` |
|
||||||
|
| `list` | `pyarrow.List` |
|
||||||
|
| `BaseModel` | `pyarrow.Struct` |
|
||||||
|
| `vector(n)` | `pyarrow.FixedSizeList(float32, n)` |
|
||||||
@@ -10,14 +10,16 @@ pip install lancedb
|
|||||||
|
|
||||||
::: lancedb.connect
|
::: lancedb.connect
|
||||||
|
|
||||||
::: lancedb.LanceDBConnection
|
::: lancedb.db.DBConnection
|
||||||
|
|
||||||
## Table
|
## Table
|
||||||
|
|
||||||
::: lancedb.table.LanceTable
|
::: lancedb.table.Table
|
||||||
|
|
||||||
## Querying
|
## Querying
|
||||||
|
|
||||||
|
::: lancedb.query.Query
|
||||||
|
|
||||||
::: lancedb.query.LanceQueryBuilder
|
::: lancedb.query.LanceQueryBuilder
|
||||||
|
|
||||||
::: lancedb.query.LanceFtsQueryBuilder
|
::: lancedb.query.LanceFtsQueryBuilder
|
||||||
@@ -41,3 +43,17 @@ pip install lancedb
|
|||||||
::: lancedb.fts.populate_index
|
::: lancedb.fts.populate_index
|
||||||
|
|
||||||
::: lancedb.fts.search_index
|
::: lancedb.fts.search_index
|
||||||
|
|
||||||
|
## Utilities
|
||||||
|
|
||||||
|
::: lancedb.vector
|
||||||
|
|
||||||
|
## Integrations
|
||||||
|
|
||||||
|
### Pydantic
|
||||||
|
|
||||||
|
::: lancedb.pydantic.pydantic_to_schema
|
||||||
|
|
||||||
|
::: lancedb.pydantic.vector
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -25,9 +25,9 @@ Currently, we support the following metrics:
|
|||||||
|
|
||||||
### Flat Search
|
### Flat Search
|
||||||
|
|
||||||
|
If LanceDB does not create a vector index, LanceDB would need to scan (`Flat Search`) the entire vector column
|
||||||
|
and compute the distance for each vector in order to find the closest matches.
|
||||||
|
|
||||||
If there is no [vector index is created](ann_indexes.md), LanceDB will just brute-force scan
|
|
||||||
the vector column and compute the distance.
|
|
||||||
|
|
||||||
<!-- Setup Code
|
<!-- Setup Code
|
||||||
```python
|
```python
|
||||||
@@ -79,39 +79,43 @@ await db_setup.createTable('my_vectors', data)
|
|||||||
const tbl = await db.openTable("my_vectors")
|
const tbl = await db.openTable("my_vectors")
|
||||||
|
|
||||||
const results_1 = await tbl.search(Array(1536).fill(1.2))
|
const results_1 = await tbl.search(Array(1536).fill(1.2))
|
||||||
.limit(20)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
<!-- Commenting out for now since metricType fails for JS on Ubuntu 22.04.
|
|
||||||
|
|
||||||
By default, `l2` will be used as `Metric` type. You can customize the metric type
|
By default, `l2` will be used as `Metric` type. You can customize the metric type
|
||||||
as well.
|
as well.
|
||||||
-->
|
|
||||||
|
|
||||||
<!--
|
|
||||||
=== "Python"
|
=== "Python"
|
||||||
-->
|
|
||||||
<!-- ```python
|
```python
|
||||||
df = tbl.search(np.random.random((1536))) \
|
df = tbl.search(np.random.random((1536))) \
|
||||||
.metric("cosine") \
|
.metric("cosine") \
|
||||||
.limit(10) \
|
.limit(10) \
|
||||||
.to_df()
|
.to_df()
|
||||||
```
|
```
|
||||||
-->
|
|
||||||
<!--
|
|
||||||
=== "JavaScript"
|
|
||||||
-->
|
|
||||||
|
|
||||||
<!-- ```javascript
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
```javascript
|
||||||
const results_2 = await tbl.search(Array(1536).fill(1.2))
|
const results_2 = await tbl.search(Array(1536).fill(1.2))
|
||||||
.metricType("cosine")
|
.metricType("cosine")
|
||||||
.limit(20)
|
.limit(10)
|
||||||
.execute()
|
.execute()
|
||||||
```
|
```
|
||||||
-->
|
|
||||||
|
|
||||||
### Search with Vector Index.
|
|
||||||
|
### Approximate Nearest Neighbor (ANN) Search with Vector Index.
|
||||||
|
|
||||||
|
To accelerate vector retrievals, it is common to build vector indices.
|
||||||
|
A vector index is a data structure specifically designed to efficiently organize and
|
||||||
|
search vector data based on their similarity or distance metrics.
|
||||||
|
By constructing a vector index, you can reduce the search space and avoid the need
|
||||||
|
for brute-force scanning of the entire vector column.
|
||||||
|
|
||||||
|
However, fast vector search using indices often entails making a trade-off with accuracy to some extent.
|
||||||
|
This is why it is often called **Approximate Nearest Neighbors (ANN)** search, while the Flat Search (KNN)
|
||||||
|
always returns 100% recall.
|
||||||
|
|
||||||
See [ANN Index](ann_indexes.md) for more details.
|
See [ANN Index](ann_indexes.md) for more details.
|
||||||
@@ -7,6 +7,7 @@ const excludedFiles = [
|
|||||||
"../src/embedding.md",
|
"../src/embedding.md",
|
||||||
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
"../src/examples/serverless_lancedb_with_s3_and_lambda.md",
|
||||||
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
"../src/examples/serverless_qa_bot_with_modal_and_langchain.md",
|
||||||
|
"../src/examples/transformerjs_embedding_search_nodejs.md",
|
||||||
"../src/examples/youtube_transcript_bot_with_nodejs.md",
|
"../src/examples/youtube_transcript_bot_with_nodejs.md",
|
||||||
];
|
];
|
||||||
const nodePrefix = "javascript";
|
const nodePrefix = "javascript";
|
||||||
@@ -48,4 +49,4 @@ for (const file of files.filter((file) => !excludedFiles.includes(file))) {
|
|||||||
fs.mkdirSync(path.dirname(outPath), { recursive: true });
|
fs.mkdirSync(path.dirname(outPath), { recursive: true });
|
||||||
fs.writeFileSync(outPath, asyncPrefix + "\n" + lines.join("\n") + asyncSuffix);
|
fs.writeFileSync(outPath, asyncPrefix + "\n" + lines.join("\n") + asyncSuffix);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
4
node/.npmignore
Normal file
4
node/.npmignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
gen_test_data.py
|
||||||
|
index.node
|
||||||
|
dist/lancedb*.tgz
|
||||||
|
vectordb*.tgz
|
||||||
@@ -8,6 +8,10 @@ A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb)
|
|||||||
npm install vectordb
|
npm install vectordb
|
||||||
```
|
```
|
||||||
|
|
||||||
|
This will download the appropriate native library for your platform. We currently
|
||||||
|
support x86_64 Linux, aarch64 Linux, Intel MacOS, and ARM (M1/M2) MacOS. We do not
|
||||||
|
yet support Windows or musl-based Linux (such as Alpine Linux).
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
### Basic Example
|
### Basic Example
|
||||||
@@ -26,12 +30,34 @@ The [examples](./examples) folder contains complete examples.
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
Run the tests with
|
To build everything fresh:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm run tsc
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you should be able to run the tests with:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm test
|
npm test
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Rebuilding Rust library
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rebuilding Typescript
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run tsc
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fix lints
|
||||||
|
|
||||||
To run the linter and have it automatically fix all errors
|
To run the linter and have it automatically fix all errors
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
66
node/examples/js-transformers/index.js
Normal file
66
node/examples/js-transformers/index.js
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
|
||||||
|
async function example() {
|
||||||
|
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
|
||||||
|
// Import transformers and the all-MiniLM-L6-v2 model (https://huggingface.co/Xenova/all-MiniLM-L6-v2)
|
||||||
|
const { pipeline } = await import('@xenova/transformers')
|
||||||
|
const pipe = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
||||||
|
|
||||||
|
|
||||||
|
// Create embedding function from pipeline which returns a list of vectors from batch
|
||||||
|
// sourceColumn is the name of the column in the data to be embedded
|
||||||
|
//
|
||||||
|
// Output of pipe is a Tensor { data: Float32Array(384) }, so filter for the vector
|
||||||
|
const embed_fun = {}
|
||||||
|
embed_fun.sourceColumn = 'text'
|
||||||
|
embed_fun.embed = async function (batch) {
|
||||||
|
let result = []
|
||||||
|
for (let text of batch) {
|
||||||
|
const res = await pipe(text, { pooling: 'mean', normalize: true })
|
||||||
|
result.push(Array.from(res['data']))
|
||||||
|
}
|
||||||
|
return (result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Link a folder and create a table with data
|
||||||
|
const db = await lancedb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ id: 1, text: 'Cherry', type: 'fruit' },
|
||||||
|
{ id: 2, text: 'Carrot', type: 'vegetable' },
|
||||||
|
{ id: 3, text: 'Potato', type: 'vegetable' },
|
||||||
|
{ id: 4, text: 'Apple', type: 'fruit' },
|
||||||
|
{ id: 5, text: 'Banana', type: 'fruit' }
|
||||||
|
]
|
||||||
|
|
||||||
|
const table = await db.createTable('food_table', data, "create", embed_fun)
|
||||||
|
|
||||||
|
|
||||||
|
// Query the table
|
||||||
|
const results = await table
|
||||||
|
.search("a sweet fruit to eat")
|
||||||
|
.metricType("cosine")
|
||||||
|
.limit(2)
|
||||||
|
.execute()
|
||||||
|
console.log(results.map(r => r.text))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
example().then(_ => { console.log("Done!") })
|
||||||
16
node/examples/js-transformers/package.json
Normal file
16
node/examples/js-transformers/package.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"name": "vectordb-example-js-transformers",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Example for using transformers.js with lancedb",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "Lance Devs",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@xenova/transformers": "^2.4.1",
|
||||||
|
"vectordb": "^0.1.12"
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -12,29 +12,26 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
|
const { currentTarget } = require('@neon-rs/load');
|
||||||
|
|
||||||
let nativeLib;
|
let nativeLib;
|
||||||
|
|
||||||
function getPlatformLibrary() {
|
|
||||||
if (process.platform === "darwin" && process.arch == "arm64") {
|
|
||||||
return require('./aarch64-apple-darwin.node');
|
|
||||||
} else if (process.platform === "darwin" && process.arch == "x64") {
|
|
||||||
return require('./x86_64-apple-darwin.node');
|
|
||||||
} else if (process.platform === "linux" && process.arch == "x64") {
|
|
||||||
return require('./x86_64-unknown-linux-gnu.node');
|
|
||||||
} else {
|
|
||||||
throw new Error(`vectordb: unsupported platform ${process.platform}_${process.arch}. Please file a bug report at https://github.com/lancedb/lancedb/issues`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
nativeLib = require('./index.node')
|
nativeLib = require(`@lancedb/vectordb-${currentTarget()}`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e.code === "MODULE_NOT_FOUND") {
|
try {
|
||||||
nativeLib = getPlatformLibrary();
|
// Might be developing locally, so try that. But don't expose that error
|
||||||
} else {
|
// to the user.
|
||||||
throw new Error('vectordb: failed to load native library. Please file a bug report at https://github.com/lancedb/lancedb/issues');
|
nativeLib = require("./index.node");
|
||||||
|
} catch {
|
||||||
|
throw new Error(`vectordb: failed to load native library.
|
||||||
|
You may need to run \`npm install @lancedb/vectordb-${currentTarget()}\`.
|
||||||
|
|
||||||
|
If that does not work, please file a bug report at https://github.com/lancedb/lancedb/issues
|
||||||
|
|
||||||
|
Source error: ${e}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = nativeLib
|
// Dynamic require for runtime.
|
||||||
|
module.exports = nativeLib;
|
||||||
|
|||||||
126
node/package-lock.json
generated
126
node/package-lock.json
generated
@@ -1,18 +1,30 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.9",
|
"version": "0.1.15",
|
||||||
"lockfileVersion": 2,
|
"lockfileVersion": 2,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.9",
|
"version": "0.1.15",
|
||||||
|
"cpu": [
|
||||||
|
"x64",
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
|
"os": [
|
||||||
|
"darwin",
|
||||||
|
"linux",
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@apache-arrow/ts": "^12.0.0",
|
"@apache-arrow/ts": "^12.0.0",
|
||||||
"apache-arrow": "^12.0.0"
|
"@neon-rs/load": "^0.0.74",
|
||||||
|
"apache-arrow": "^12.0.0",
|
||||||
|
"axios": "^1.4.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@neon-rs/cli": "^0.0.74",
|
||||||
"@types/chai": "^4.3.4",
|
"@types/chai": "^4.3.4",
|
||||||
"@types/chai-as-promised": "^7.1.5",
|
"@types/chai-as-promised": "^7.1.5",
|
||||||
"@types/mocha": "^10.0.1",
|
"@types/mocha": "^10.0.1",
|
||||||
@@ -37,6 +49,13 @@
|
|||||||
"typedoc": "^0.24.7",
|
"typedoc": "^0.24.7",
|
||||||
"typedoc-plugin-markdown": "^3.15.3",
|
"typedoc-plugin-markdown": "^3.15.3",
|
||||||
"typescript": "*"
|
"typescript": "*"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@lancedb/vectordb-darwin-arm64": "0.1.15",
|
||||||
|
"@lancedb/vectordb-darwin-x64": "0.1.15",
|
||||||
|
"@lancedb/vectordb-linux-arm64-gnu": "0.1.15",
|
||||||
|
"@lancedb/vectordb-linux-x64-gnu": "0.1.15",
|
||||||
|
"@lancedb/vectordb-win32-x64-msvc": "0.1.15"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@apache-arrow/ts": {
|
"node_modules/@apache-arrow/ts": {
|
||||||
@@ -204,6 +223,20 @@
|
|||||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@neon-rs/cli": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-9lPmNmjej5iKKOTMPryOMubwkgMRyTWRuaq1yokASvI5mPhr2kzPN7UVjdCOjQvpunNPngR9yAHoirpjiWhUHw==",
|
||||||
|
"dev": true,
|
||||||
|
"bin": {
|
||||||
|
"neon": "index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@neon-rs/load": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-/cPZD907UNz55yrc/ud4wDgQKtU1TvkD9jeqZWG6J4IMmZkp6zgjkQcKA8UvpkZlcpPHvc8J17sGzLFbP/LUYg=="
|
||||||
|
},
|
||||||
"node_modules/@nodelib/fs.scandir": {
|
"node_modules/@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -810,8 +843,7 @@
|
|||||||
"node_modules/asynckit": {
|
"node_modules/asynckit": {
|
||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"node_modules/available-typed-arrays": {
|
"node_modules/available-typed-arrays": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
@@ -826,12 +858,13 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/axios": {
|
"node_modules/axios": {
|
||||||
"version": "0.26.1",
|
"version": "1.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"follow-redirects": "^1.14.8"
|
"follow-redirects": "^1.15.0",
|
||||||
|
"form-data": "^4.0.0",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/balanced-match": {
|
"node_modules/balanced-match": {
|
||||||
@@ -1062,7 +1095,6 @@
|
|||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"delayed-stream": "~1.0.0"
|
"delayed-stream": "~1.0.0"
|
||||||
},
|
},
|
||||||
@@ -1285,7 +1317,6 @@
|
|||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.4.0"
|
"node": ">=0.4.0"
|
||||||
}
|
}
|
||||||
@@ -2052,7 +2083,6 @@
|
|||||||
"version": "1.15.2",
|
"version": "1.15.2",
|
||||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
||||||
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
||||||
"dev": true,
|
|
||||||
"funding": [
|
"funding": [
|
||||||
{
|
{
|
||||||
"type": "individual",
|
"type": "individual",
|
||||||
@@ -2081,7 +2111,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -2955,7 +2984,6 @@
|
|||||||
"version": "1.52.0",
|
"version": "1.52.0",
|
||||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -2964,7 +2992,6 @@
|
|||||||
"version": "2.1.35",
|
"version": "2.1.35",
|
||||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
"dev": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"mime-db": "1.52.0"
|
"mime-db": "1.52.0"
|
||||||
},
|
},
|
||||||
@@ -3258,6 +3285,15 @@
|
|||||||
"form-data": "^4.0.0"
|
"form-data": "^4.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/openai/node_modules/axios": {
|
||||||
|
"version": "0.26.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||||
|
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"follow-redirects": "^1.14.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/optionator": {
|
"node_modules/optionator": {
|
||||||
"version": "0.9.1",
|
"version": "0.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
||||||
@@ -3409,6 +3445,11 @@
|
|||||||
"node": ">= 0.8.0"
|
"node": ">= 0.8.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||||
|
},
|
||||||
"node_modules/punycode": {
|
"node_modules/punycode": {
|
||||||
"version": "2.3.0",
|
"version": "2.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||||
@@ -4601,6 +4642,17 @@
|
|||||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"@neon-rs/cli": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-9lPmNmjej5iKKOTMPryOMubwkgMRyTWRuaq1yokASvI5mPhr2kzPN7UVjdCOjQvpunNPngR9yAHoirpjiWhUHw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"@neon-rs/load": {
|
||||||
|
"version": "0.0.74",
|
||||||
|
"resolved": "https://registry.npmjs.org/@neon-rs/load/-/load-0.0.74.tgz",
|
||||||
|
"integrity": "sha512-/cPZD907UNz55yrc/ud4wDgQKtU1TvkD9jeqZWG6J4IMmZkp6zgjkQcKA8UvpkZlcpPHvc8J17sGzLFbP/LUYg=="
|
||||||
|
},
|
||||||
"@nodelib/fs.scandir": {
|
"@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -5056,8 +5108,7 @@
|
|||||||
"asynckit": {
|
"asynckit": {
|
||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"available-typed-arrays": {
|
"available-typed-arrays": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
@@ -5066,12 +5117,13 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"axios": {
|
"axios": {
|
||||||
"version": "0.26.1",
|
"version": "1.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
|
||||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"follow-redirects": "^1.14.8"
|
"follow-redirects": "^1.15.0",
|
||||||
|
"form-data": "^4.0.0",
|
||||||
|
"proxy-from-env": "^1.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"balanced-match": {
|
"balanced-match": {
|
||||||
@@ -5251,7 +5303,6 @@
|
|||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"delayed-stream": "~1.0.0"
|
"delayed-stream": "~1.0.0"
|
||||||
}
|
}
|
||||||
@@ -5418,8 +5469,7 @@
|
|||||||
"delayed-stream": {
|
"delayed-stream": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"diff": {
|
"diff": {
|
||||||
"version": "4.0.2",
|
"version": "4.0.2",
|
||||||
@@ -5989,8 +6039,7 @@
|
|||||||
"follow-redirects": {
|
"follow-redirects": {
|
||||||
"version": "1.15.2",
|
"version": "1.15.2",
|
||||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
|
||||||
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
|
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"for-each": {
|
"for-each": {
|
||||||
"version": "0.3.3",
|
"version": "0.3.3",
|
||||||
@@ -6005,7 +6054,6 @@
|
|||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"asynckit": "^0.4.0",
|
"asynckit": "^0.4.0",
|
||||||
"combined-stream": "^1.0.8",
|
"combined-stream": "^1.0.8",
|
||||||
@@ -6619,14 +6667,12 @@
|
|||||||
"mime-db": {
|
"mime-db": {
|
||||||
"version": "1.52.0",
|
"version": "1.52.0",
|
||||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
|
||||||
"dev": true
|
|
||||||
},
|
},
|
||||||
"mime-types": {
|
"mime-types": {
|
||||||
"version": "2.1.35",
|
"version": "2.1.35",
|
||||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||||
"dev": true,
|
|
||||||
"requires": {
|
"requires": {
|
||||||
"mime-db": "1.52.0"
|
"mime-db": "1.52.0"
|
||||||
}
|
}
|
||||||
@@ -6852,6 +6898,17 @@
|
|||||||
"requires": {
|
"requires": {
|
||||||
"axios": "^0.26.0",
|
"axios": "^0.26.0",
|
||||||
"form-data": "^4.0.0"
|
"form-data": "^4.0.0"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"axios": {
|
||||||
|
"version": "0.26.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||||
|
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"follow-redirects": "^1.14.8"
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"optionator": {
|
"optionator": {
|
||||||
@@ -6960,6 +7017,11 @@
|
|||||||
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"proxy-from-env": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
||||||
|
},
|
||||||
"punycode": {
|
"punycode": {
|
||||||
"version": "2.3.0",
|
"version": "2.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
|
||||||
|
|||||||
@@ -1,16 +1,18 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.10",
|
"version": "0.1.15",
|
||||||
"description": " Serverless, low-latency vector database for AI applications",
|
"description": " Serverless, low-latency vector database for AI applications",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"tsc": "tsc -b",
|
"tsc": "tsc -b",
|
||||||
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json-render-diagnostics",
|
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json",
|
||||||
"build-release": "npm run build -- --release",
|
"build-release": "npm run build -- --release",
|
||||||
"test": "npm run tsc; mocha -recursive dist/test",
|
"test": "npm run tsc && mocha -recursive dist/test",
|
||||||
"lint": "eslint src --ext .js,.ts",
|
"lint": "eslint src --ext .js,.ts",
|
||||||
"clean": "rm -rf node_modules *.node dist/"
|
"clean": "rm -rf node_modules *.node dist/",
|
||||||
|
"pack-build": "neon pack-build",
|
||||||
|
"check-npm": "printenv && which node && which npm && npm --version"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -25,6 +27,7 @@
|
|||||||
"author": "Lance Devs",
|
"author": "Lance Devs",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@neon-rs/cli": "^0.0.74",
|
||||||
"@types/chai": "^4.3.4",
|
"@types/chai": "^4.3.4",
|
||||||
"@types/chai-as-promised": "^7.1.5",
|
"@types/chai-as-promised": "^7.1.5",
|
||||||
"@types/mocha": "^10.0.1",
|
"@types/mocha": "^10.0.1",
|
||||||
@@ -52,6 +55,33 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@apache-arrow/ts": "^12.0.0",
|
"@apache-arrow/ts": "^12.0.0",
|
||||||
"apache-arrow": "^12.0.0"
|
"@neon-rs/load": "^0.0.74",
|
||||||
|
"apache-arrow": "^12.0.0",
|
||||||
|
"axios": "^1.4.0"
|
||||||
|
},
|
||||||
|
"os": [
|
||||||
|
"darwin",
|
||||||
|
"linux",
|
||||||
|
"win32"
|
||||||
|
],
|
||||||
|
"cpu": [
|
||||||
|
"x64",
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"neon": {
|
||||||
|
"targets": {
|
||||||
|
"x86_64-apple-darwin": "@lancedb/vectordb-darwin-x64",
|
||||||
|
"aarch64-apple-darwin": "@lancedb/vectordb-darwin-arm64",
|
||||||
|
"x86_64-unknown-linux-gnu": "@lancedb/vectordb-linux-x64-gnu",
|
||||||
|
"aarch64-unknown-linux-gnu": "@lancedb/vectordb-linux-arm64-gnu",
|
||||||
|
"x86_64-pc-windows-msvc": "@lancedb/vectordb-win32-x64-msvc"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@lancedb/vectordb-darwin-arm64": "0.1.15",
|
||||||
|
"@lancedb/vectordb-darwin-x64": "0.1.15",
|
||||||
|
"@lancedb/vectordb-linux-arm64-gnu": "0.1.15",
|
||||||
|
"@lancedb/vectordb-linux-x64-gnu": "0.1.15",
|
||||||
|
"@lancedb/vectordb-win32-x64-msvc": "0.1.15"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,26 +14,67 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
RecordBatchFileWriter,
|
RecordBatchFileWriter,
|
||||||
type Table as ArrowTable,
|
type Table as ArrowTable
|
||||||
tableFromIPC,
|
|
||||||
Vector
|
|
||||||
} from 'apache-arrow'
|
} from 'apache-arrow'
|
||||||
import { fromRecordsToBuffer } from './arrow'
|
import { fromRecordsToBuffer } from './arrow'
|
||||||
import type { EmbeddingFunction } from './embedding/embedding_function'
|
import type { EmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
import { RemoteConnection } from './remote'
|
||||||
|
import { Query } from './query'
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
const { databaseNew, databaseTableNames, databaseOpenTable, databaseDropTable, tableCreate, tableSearch, tableAdd, tableCreateVectorIndex, tableCountRows, tableDelete } = require('../native.js')
|
const { databaseNew, databaseTableNames, databaseOpenTable, databaseDropTable, tableCreate, tableAdd, tableCreateVectorIndex, tableCountRows, tableDelete } = require('../native.js')
|
||||||
|
|
||||||
export type { EmbeddingFunction }
|
export type { EmbeddingFunction }
|
||||||
export { OpenAIEmbeddingFunction } from './embedding/openai'
|
export { OpenAIEmbeddingFunction } from './embedding/openai'
|
||||||
|
|
||||||
|
export interface AwsCredentials {
|
||||||
|
accessKeyId: string
|
||||||
|
|
||||||
|
secretKey: string
|
||||||
|
|
||||||
|
sessionToken?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ConnectionOptions {
|
||||||
|
uri: string
|
||||||
|
|
||||||
|
awsCredentials?: AwsCredentials
|
||||||
|
|
||||||
|
// API key for the remote connections
|
||||||
|
apiKey?: string
|
||||||
|
// Region to connect
|
||||||
|
region?: string
|
||||||
|
|
||||||
|
// override the host for the remote connections
|
||||||
|
hostOverride?: string
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Connect to a LanceDB instance at the given URI
|
* Connect to a LanceDB instance at the given URI
|
||||||
* @param uri The uri of the database.
|
* @param uri The uri of the database.
|
||||||
*/
|
*/
|
||||||
export async function connect (uri: string): Promise<Connection> {
|
export async function connect (uri: string): Promise<Connection>
|
||||||
const db = await databaseNew(uri)
|
export async function connect (opts: Partial<ConnectionOptions>): Promise<Connection>
|
||||||
return new LocalConnection(db, uri)
|
export async function connect (arg: string | Partial<ConnectionOptions>): Promise<Connection> {
|
||||||
|
let opts: ConnectionOptions
|
||||||
|
if (typeof arg === 'string') {
|
||||||
|
opts = { uri: arg }
|
||||||
|
} else {
|
||||||
|
// opts = { uri: arg.uri, awsCredentials = arg.awsCredentials }
|
||||||
|
opts = Object.assign({
|
||||||
|
uri: '',
|
||||||
|
awsCredentials: undefined,
|
||||||
|
apiKey: undefined,
|
||||||
|
region: 'us-west-2'
|
||||||
|
}, arg)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opts.uri.startsWith('db://')) {
|
||||||
|
// Remote connection
|
||||||
|
return new RemoteConnection(opts)
|
||||||
|
}
|
||||||
|
const db = await databaseNew(opts.uri)
|
||||||
|
return new LocalConnection(db, opts)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -117,7 +158,34 @@ export interface Table<T = number[]> {
|
|||||||
/**
|
/**
|
||||||
* Delete rows from this table.
|
* Delete rows from this table.
|
||||||
*
|
*
|
||||||
* @param filter A filter in the same format used by a sql WHERE clause.
|
* This can be used to delete a single row, many rows, all rows, or
|
||||||
|
* sometimes no rows (if your predicate matches nothing).
|
||||||
|
*
|
||||||
|
* @param filter A filter in the same format used by a sql WHERE clause. The
|
||||||
|
* filter must not be empty.
|
||||||
|
*
|
||||||
|
* @examples
|
||||||
|
*
|
||||||
|
* ```ts
|
||||||
|
* const con = await lancedb.connect("./.lancedb")
|
||||||
|
* const data = [
|
||||||
|
* {id: 1, vector: [1, 2]},
|
||||||
|
* {id: 2, vector: [3, 4]},
|
||||||
|
* {id: 3, vector: [5, 6]},
|
||||||
|
* ];
|
||||||
|
* const tbl = await con.createTable("my_table", data)
|
||||||
|
* await tbl.delete("id = 2")
|
||||||
|
* await tbl.countRows() // Returns 2
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* If you have a list of values to delete, you can combine them into a
|
||||||
|
* stringified list and use the `IN` operator:
|
||||||
|
*
|
||||||
|
* ```ts
|
||||||
|
* const to_remove = [1, 5];
|
||||||
|
* await tbl.delete(`id IN (${to_remove.join(",")})`)
|
||||||
|
* await tbl.countRows() // Returns 1
|
||||||
|
* ```
|
||||||
*/
|
*/
|
||||||
delete: (filter: string) => Promise<void>
|
delete: (filter: string) => Promise<void>
|
||||||
}
|
}
|
||||||
@@ -126,21 +194,21 @@ export interface Table<T = number[]> {
|
|||||||
* A connection to a LanceDB database.
|
* A connection to a LanceDB database.
|
||||||
*/
|
*/
|
||||||
export class LocalConnection implements Connection {
|
export class LocalConnection implements Connection {
|
||||||
private readonly _uri: string
|
private readonly _options: ConnectionOptions
|
||||||
private readonly _db: any
|
private readonly _db: any
|
||||||
|
|
||||||
constructor (db: any, uri: string) {
|
constructor (db: any, options: ConnectionOptions) {
|
||||||
this._uri = uri
|
this._options = options
|
||||||
this._db = db
|
this._db = db
|
||||||
}
|
}
|
||||||
|
|
||||||
get uri (): string {
|
get uri (): string {
|
||||||
return this._uri
|
return this._options.uri
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the names of all tables in the database.
|
* Get the names of all tables in the database.
|
||||||
*/
|
*/
|
||||||
async tableNames (): Promise<string[]> {
|
async tableNames (): Promise<string[]> {
|
||||||
return databaseTableNames.call(this._db)
|
return databaseTableNames.call(this._db)
|
||||||
}
|
}
|
||||||
@@ -151,6 +219,7 @@ export class LocalConnection implements Connection {
|
|||||||
* @param name The name of the table.
|
* @param name The name of the table.
|
||||||
*/
|
*/
|
||||||
async openTable (name: string): Promise<Table>
|
async openTable (name: string): Promise<Table>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Open a table in the database.
|
* Open a table in the database.
|
||||||
*
|
*
|
||||||
@@ -158,12 +227,13 @@ export class LocalConnection implements Connection {
|
|||||||
* @param embeddings An embedding function to use on this Table
|
* @param embeddings An embedding function to use on this Table
|
||||||
*/
|
*/
|
||||||
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
const tbl = await databaseOpenTable.call(this._db, name)
|
const tbl = await databaseOpenTable.call(this._db, name)
|
||||||
if (embeddings !== undefined) {
|
if (embeddings !== undefined) {
|
||||||
return new LocalTable(tbl, name, embeddings)
|
return new LocalTable(tbl, name, this._options, embeddings)
|
||||||
} else {
|
} else {
|
||||||
return new LocalTable(tbl, name)
|
return new LocalTable(tbl, name, this._options)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -186,15 +256,27 @@ export class LocalConnection implements Connection {
|
|||||||
* @param embeddings An embedding function to use on this Table
|
* @param embeddings An embedding function to use on this Table
|
||||||
*/
|
*/
|
||||||
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings?: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, mode: WriteMode, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
if (mode === undefined) {
|
if (mode === undefined) {
|
||||||
mode = WriteMode.Create
|
mode = WriteMode.Create
|
||||||
}
|
}
|
||||||
const tbl = await tableCreate.call(this._db, name, await fromRecordsToBuffer(data, embeddings), mode.toLowerCase())
|
|
||||||
|
const createArgs = [this._db, name, await fromRecordsToBuffer(data, embeddings), mode.toLowerCase()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
createArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
createArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
createArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const tbl = await tableCreate.call(...createArgs)
|
||||||
|
|
||||||
if (embeddings !== undefined) {
|
if (embeddings !== undefined) {
|
||||||
return new LocalTable(tbl, name, embeddings)
|
return new LocalTable(tbl, name, this._options, embeddings)
|
||||||
} else {
|
} else {
|
||||||
return new LocalTable(tbl, name)
|
return new LocalTable(tbl, name, this._options)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -217,18 +299,21 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
private readonly _tbl: any
|
private readonly _tbl: any
|
||||||
private readonly _name: string
|
private readonly _name: string
|
||||||
private readonly _embeddings?: EmbeddingFunction<T>
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
private readonly _options: ConnectionOptions
|
||||||
|
|
||||||
constructor (tbl: any, name: string)
|
constructor (tbl: any, name: string, options: ConnectionOptions)
|
||||||
/**
|
/**
|
||||||
* @param tbl
|
* @param tbl
|
||||||
* @param name
|
* @param name
|
||||||
|
* @param options
|
||||||
* @param embeddings An embedding function to use when interacting with this table
|
* @param embeddings An embedding function to use when interacting with this table
|
||||||
*/
|
*/
|
||||||
constructor (tbl: any, name: string, embeddings: EmbeddingFunction<T>)
|
constructor (tbl: any, name: string, options: ConnectionOptions, embeddings: EmbeddingFunction<T>)
|
||||||
constructor (tbl: any, name: string, embeddings?: EmbeddingFunction<T>) {
|
constructor (tbl: any, name: string, options: ConnectionOptions, embeddings?: EmbeddingFunction<T>) {
|
||||||
this._tbl = tbl
|
this._tbl = tbl
|
||||||
this._name = name
|
this._name = name
|
||||||
this._embeddings = embeddings
|
this._embeddings = embeddings
|
||||||
|
this._options = options
|
||||||
}
|
}
|
||||||
|
|
||||||
get name (): string {
|
get name (): string {
|
||||||
@@ -240,7 +325,7 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @param query The query search term
|
* @param query The query search term
|
||||||
*/
|
*/
|
||||||
search (query: T): Query<T> {
|
search (query: T): Query<T> {
|
||||||
return new Query(this._tbl, query, this._embeddings)
|
return new Query(query, this._tbl, this._embeddings)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -250,7 +335,15 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString())
|
const callArgs = [this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
callArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tableAdd.call(...callArgs)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -260,6 +353,14 @@ export class LocalTable<T = number[]> implements Table<T> {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
const callArgs = [this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString()]
|
||||||
|
if (this._options.awsCredentials !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.accessKeyId)
|
||||||
|
callArgs.push(this._options.awsCredentials.secretKey)
|
||||||
|
if (this._options.awsCredentials.sessionToken !== undefined) {
|
||||||
|
callArgs.push(this._options.awsCredentials.sessionToken)
|
||||||
|
}
|
||||||
|
}
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
|
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -346,116 +447,6 @@ export interface IvfPQIndexConfig {
|
|||||||
|
|
||||||
export type VectorIndexParams = IvfPQIndexConfig
|
export type VectorIndexParams = IvfPQIndexConfig
|
||||||
|
|
||||||
/**
|
|
||||||
* A builder for nearest neighbor queries for LanceDB.
|
|
||||||
*/
|
|
||||||
export class Query<T = number[]> {
|
|
||||||
private readonly _tbl: any
|
|
||||||
private readonly _query: T
|
|
||||||
private _queryVector?: number[]
|
|
||||||
private _limit: number
|
|
||||||
private _refineFactor?: number
|
|
||||||
private _nprobes: number
|
|
||||||
private _select?: string[]
|
|
||||||
private _filter?: string
|
|
||||||
private _metricType?: MetricType
|
|
||||||
private readonly _embeddings?: EmbeddingFunction<T>
|
|
||||||
|
|
||||||
constructor (tbl: any, query: T, embeddings?: EmbeddingFunction<T>) {
|
|
||||||
this._tbl = tbl
|
|
||||||
this._query = query
|
|
||||||
this._limit = 10
|
|
||||||
this._nprobes = 20
|
|
||||||
this._refineFactor = undefined
|
|
||||||
this._select = undefined
|
|
||||||
this._filter = undefined
|
|
||||||
this._metricType = undefined
|
|
||||||
this._embeddings = embeddings
|
|
||||||
}
|
|
||||||
|
|
||||||
/***
|
|
||||||
* Sets the number of results that will be returned
|
|
||||||
* @param value number of results
|
|
||||||
*/
|
|
||||||
limit (value: number): Query<T> {
|
|
||||||
this._limit = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Refine the results by reading extra elements and re-ranking them in memory.
|
|
||||||
* @param value refine factor to use in this query.
|
|
||||||
*/
|
|
||||||
refineFactor (value: number): Query<T> {
|
|
||||||
this._refineFactor = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The number of probes used. A higher number makes search more accurate but also slower.
|
|
||||||
* @param value The number of probes used.
|
|
||||||
*/
|
|
||||||
nprobes (value: number): Query<T> {
|
|
||||||
this._nprobes = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A filter statement to be applied to this query.
|
|
||||||
* @param value A filter in the same format used by a sql WHERE clause.
|
|
||||||
*/
|
|
||||||
filter (value: string): Query<T> {
|
|
||||||
this._filter = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
where = this.filter
|
|
||||||
|
|
||||||
/** Return only the specified columns.
|
|
||||||
*
|
|
||||||
* @param value Only select the specified columns. If not specified, all columns will be returned.
|
|
||||||
*/
|
|
||||||
select (value: string[]): Query<T> {
|
|
||||||
this._select = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The MetricType used for this Query.
|
|
||||||
* @param value The metric to the. @see MetricType for the different options
|
|
||||||
*/
|
|
||||||
metricType (value: MetricType): Query<T> {
|
|
||||||
this._metricType = value
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the query and return the results as an Array of Objects
|
|
||||||
*/
|
|
||||||
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
|
||||||
if (this._embeddings !== undefined) {
|
|
||||||
this._queryVector = (await this._embeddings.embed([this._query]))[0]
|
|
||||||
} else {
|
|
||||||
this._queryVector = this._query as number[]
|
|
||||||
}
|
|
||||||
|
|
||||||
const buffer = await tableSearch.call(this._tbl, this)
|
|
||||||
const data = tableFromIPC(buffer)
|
|
||||||
|
|
||||||
return data.toArray().map((entry: Record<string, unknown>) => {
|
|
||||||
const newObject: Record<string, unknown> = {}
|
|
||||||
Object.keys(entry).forEach((key: string) => {
|
|
||||||
if (entry[key] instanceof Vector) {
|
|
||||||
newObject[key] = (entry[key] as Vector).toArray()
|
|
||||||
} else {
|
|
||||||
newObject[key] = entry[key]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return newObject as unknown as T
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Write mode for writing a table.
|
* Write mode for writing a table.
|
||||||
*/
|
*/
|
||||||
|
|||||||
130
node/src/query.ts
Normal file
130
node/src/query.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import { Vector, tableFromIPC } from 'apache-arrow'
|
||||||
|
import { type EmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
import { type MetricType } from '.'
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
|
const { tableSearch } = require('../native.js')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A builder for nearest neighbor queries for LanceDB.
|
||||||
|
*/
|
||||||
|
export class Query<T = number[]> {
|
||||||
|
private readonly _query: T
|
||||||
|
private readonly _tbl?: any
|
||||||
|
private _queryVector?: number[]
|
||||||
|
private _limit: number
|
||||||
|
private _refineFactor?: number
|
||||||
|
private _nprobes: number
|
||||||
|
private _select?: string[]
|
||||||
|
private _filter?: string
|
||||||
|
private _metricType?: MetricType
|
||||||
|
protected readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
|
||||||
|
constructor (query: T, tbl?: any, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
this._tbl = tbl
|
||||||
|
this._query = query
|
||||||
|
this._limit = 10
|
||||||
|
this._nprobes = 20
|
||||||
|
this._refineFactor = undefined
|
||||||
|
this._select = undefined
|
||||||
|
this._filter = undefined
|
||||||
|
this._metricType = undefined
|
||||||
|
this._embeddings = embeddings
|
||||||
|
}
|
||||||
|
|
||||||
|
/***
|
||||||
|
* Sets the number of results that will be returned
|
||||||
|
* @param value number of results
|
||||||
|
*/
|
||||||
|
limit (value: number): Query<T> {
|
||||||
|
this._limit = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refine the results by reading extra elements and re-ranking them in memory.
|
||||||
|
* @param value refine factor to use in this query.
|
||||||
|
*/
|
||||||
|
refineFactor (value: number): Query<T> {
|
||||||
|
this._refineFactor = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The number of probes used. A higher number makes search more accurate but also slower.
|
||||||
|
* @param value The number of probes used.
|
||||||
|
*/
|
||||||
|
nprobes (value: number): Query<T> {
|
||||||
|
this._nprobes = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A filter statement to be applied to this query.
|
||||||
|
* @param value A filter in the same format used by a sql WHERE clause.
|
||||||
|
*/
|
||||||
|
filter (value: string): Query<T> {
|
||||||
|
this._filter = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
where = this.filter
|
||||||
|
|
||||||
|
/** Return only the specified columns.
|
||||||
|
*
|
||||||
|
* @param value Only select the specified columns. If not specified, all columns will be returned.
|
||||||
|
*/
|
||||||
|
select (value: string[]): Query<T> {
|
||||||
|
this._select = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The MetricType used for this Query.
|
||||||
|
* @param value The metric to the. @see MetricType for the different options
|
||||||
|
*/
|
||||||
|
metricType (value: MetricType): Query<T> {
|
||||||
|
this._metricType = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the query and return the results as an Array of Objects
|
||||||
|
*/
|
||||||
|
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
||||||
|
if (this._embeddings !== undefined) {
|
||||||
|
this._queryVector = (await this._embeddings.embed([this._query]))[0]
|
||||||
|
} else {
|
||||||
|
this._queryVector = this._query as number[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const buffer = await tableSearch.call(this._tbl, this)
|
||||||
|
const data = tableFromIPC(buffer)
|
||||||
|
|
||||||
|
return data.toArray().map((entry: Record<string, unknown>) => {
|
||||||
|
const newObject: Record<string, unknown> = {}
|
||||||
|
Object.keys(entry).forEach((key: string) => {
|
||||||
|
if (entry[key] instanceof Vector) {
|
||||||
|
newObject[key] = (entry[key] as Vector).toArray()
|
||||||
|
} else {
|
||||||
|
newObject[key] = entry[key]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return newObject as unknown as T
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
105
node/src/remote/client.ts
Normal file
105
node/src/remote/client.ts
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import axios, { type AxiosResponse } from 'axios'
|
||||||
|
|
||||||
|
import { tableFromIPC, type Table as ArrowTable } from 'apache-arrow'
|
||||||
|
|
||||||
|
export class HttpLancedbClient {
|
||||||
|
private readonly _url: string
|
||||||
|
|
||||||
|
public constructor (
|
||||||
|
url: string,
|
||||||
|
private readonly _apiKey: string,
|
||||||
|
private readonly _dbName?: string
|
||||||
|
) {
|
||||||
|
this._url = url
|
||||||
|
}
|
||||||
|
|
||||||
|
get uri (): string {
|
||||||
|
return this._url
|
||||||
|
}
|
||||||
|
|
||||||
|
public async search (
|
||||||
|
tableName: string,
|
||||||
|
vector: number[],
|
||||||
|
k: number,
|
||||||
|
nprobes: number,
|
||||||
|
refineFactor?: number,
|
||||||
|
columns?: string[],
|
||||||
|
filter?: string
|
||||||
|
): Promise<ArrowTable<any>> {
|
||||||
|
const response = await axios.post(
|
||||||
|
`${this._url}/v1/table/${tableName}/query/`,
|
||||||
|
{
|
||||||
|
vector,
|
||||||
|
k,
|
||||||
|
nprobes,
|
||||||
|
refineFactor,
|
||||||
|
columns,
|
||||||
|
filter
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': this._apiKey,
|
||||||
|
...(this._dbName !== undefined ? { 'x-lancedb-database': this._dbName } : {})
|
||||||
|
},
|
||||||
|
responseType: 'arraybuffer',
|
||||||
|
timeout: 10000
|
||||||
|
}
|
||||||
|
).catch((err) => {
|
||||||
|
console.error('error: ', err)
|
||||||
|
return err.response
|
||||||
|
})
|
||||||
|
if (response.status !== 200) {
|
||||||
|
const errorData = new TextDecoder().decode(response.data)
|
||||||
|
throw new Error(
|
||||||
|
`Server Error, status: ${response.status as number}, ` +
|
||||||
|
`message: ${response.statusText as string}: ${errorData}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const table = tableFromIPC(response.data)
|
||||||
|
return table
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sent GET request.
|
||||||
|
*/
|
||||||
|
public async get (path: string, params?: Record<string, string | number>): Promise<AxiosResponse> {
|
||||||
|
const response = await axios.get(
|
||||||
|
`${this._url}${path}`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': this._apiKey
|
||||||
|
},
|
||||||
|
params,
|
||||||
|
timeout: 10000
|
||||||
|
}
|
||||||
|
).catch((err) => {
|
||||||
|
console.error('error: ', err)
|
||||||
|
return err.response
|
||||||
|
})
|
||||||
|
if (response.status !== 200) {
|
||||||
|
const errorData = new TextDecoder().decode(response.data)
|
||||||
|
throw new Error(
|
||||||
|
`Server Error, status: ${response.status as number}, ` +
|
||||||
|
`message: ${response.statusText as string}: ${errorData}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
}
|
||||||
168
node/src/remote/index.ts
Normal file
168
node/src/remote/index.ts
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
// Copyright 2023 LanceDB Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import {
|
||||||
|
type EmbeddingFunction, type Table, type VectorIndexParams, type Connection,
|
||||||
|
type ConnectionOptions
|
||||||
|
} from '../index'
|
||||||
|
import { Query } from '../query'
|
||||||
|
|
||||||
|
import { type Table as ArrowTable, Vector } from 'apache-arrow'
|
||||||
|
import { HttpLancedbClient } from './client'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remote connection.
|
||||||
|
*/
|
||||||
|
export class RemoteConnection implements Connection {
|
||||||
|
private readonly _client: HttpLancedbClient
|
||||||
|
private readonly _dbName: string
|
||||||
|
|
||||||
|
constructor (opts: ConnectionOptions) {
|
||||||
|
if (!opts.uri.startsWith('db://')) {
|
||||||
|
throw new Error(`Invalid remote DB URI: ${opts.uri}`)
|
||||||
|
}
|
||||||
|
if (opts.apiKey === undefined || opts.region === undefined) {
|
||||||
|
throw new Error('API key and region are not supported for remote connections')
|
||||||
|
}
|
||||||
|
|
||||||
|
this._dbName = opts.uri.slice('db://'.length)
|
||||||
|
let server: string
|
||||||
|
if (opts.hostOverride === undefined) {
|
||||||
|
server = `https://${this._dbName}.${opts.region}.api.lancedb.com`
|
||||||
|
} else {
|
||||||
|
server = opts.hostOverride
|
||||||
|
}
|
||||||
|
this._client = new HttpLancedbClient(server, opts.apiKey, opts.hostOverride === undefined ? undefined : this._dbName)
|
||||||
|
}
|
||||||
|
|
||||||
|
get uri (): string {
|
||||||
|
// add the lancedb+ prefix back
|
||||||
|
return 'db://' + this._client.uri
|
||||||
|
}
|
||||||
|
|
||||||
|
async tableNames (): Promise<string[]> {
|
||||||
|
const response = await this._client.get('/v1/table/')
|
||||||
|
return response.data.tables
|
||||||
|
}
|
||||||
|
|
||||||
|
async openTable (name: string): Promise<Table>
|
||||||
|
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
|
if (embeddings !== undefined) {
|
||||||
|
return new RemoteTable(this._client, name, embeddings)
|
||||||
|
} else {
|
||||||
|
return new RemoteTable(this._client, name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async createTableArrow (name: string, table: ArrowTable): Promise<Table> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async dropTable (name: string): Promise<void> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class RemoteQuery<T = number[]> extends Query<T> {
|
||||||
|
constructor (query: T, private readonly _client: HttpLancedbClient,
|
||||||
|
private readonly _name: string, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
super(query, undefined, embeddings)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: refactor this to a base class + queryImpl pattern
|
||||||
|
async execute<T = Record<string, unknown>>(): Promise<T[]> {
|
||||||
|
const embeddings = this._embeddings
|
||||||
|
const query = (this as any)._query
|
||||||
|
let queryVector: number[]
|
||||||
|
|
||||||
|
if (embeddings !== undefined) {
|
||||||
|
queryVector = (await embeddings.embed([query]))[0]
|
||||||
|
} else {
|
||||||
|
queryVector = query as number[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await this._client.search(
|
||||||
|
this._name,
|
||||||
|
queryVector,
|
||||||
|
(this as any)._limit,
|
||||||
|
(this as any)._nprobes,
|
||||||
|
(this as any)._refineFactor,
|
||||||
|
(this as any)._select,
|
||||||
|
(this as any)._filter
|
||||||
|
)
|
||||||
|
|
||||||
|
return data.toArray().map((entry: Record<string, unknown>) => {
|
||||||
|
const newObject: Record<string, unknown> = {}
|
||||||
|
Object.keys(entry).forEach((key: string) => {
|
||||||
|
if (entry[key] instanceof Vector) {
|
||||||
|
newObject[key] = (entry[key] as Vector).toArray()
|
||||||
|
} else {
|
||||||
|
newObject[key] = entry[key]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return newObject as unknown as T
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// we are using extend until we have next next version release
|
||||||
|
// Table and Connection has both been refactored to interfaces
|
||||||
|
export class RemoteTable<T = number[]> implements Table<T> {
|
||||||
|
private readonly _client: HttpLancedbClient
|
||||||
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
private readonly _name: string
|
||||||
|
|
||||||
|
constructor (client: HttpLancedbClient, name: string)
|
||||||
|
constructor (client: HttpLancedbClient, name: string, embeddings: EmbeddingFunction<T>)
|
||||||
|
constructor (client: HttpLancedbClient, name: string, embeddings?: EmbeddingFunction<T>) {
|
||||||
|
this._client = client
|
||||||
|
this._name = name
|
||||||
|
this._embeddings = embeddings
|
||||||
|
}
|
||||||
|
|
||||||
|
get name (): string {
|
||||||
|
return this._name
|
||||||
|
}
|
||||||
|
|
||||||
|
search (query: T): Query<T> {
|
||||||
|
return new RemoteQuery(query, this._client, this._name)//, this._embeddings_new)
|
||||||
|
}
|
||||||
|
|
||||||
|
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async createIndex (indexParams: VectorIndexParams): Promise<any> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async countRows (): Promise<number> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete (filter: string): Promise<void> {
|
||||||
|
throw new Error('Not implemented')
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -18,26 +18,48 @@ import { describe } from 'mocha'
|
|||||||
import { assert } from 'chai'
|
import { assert } from 'chai'
|
||||||
|
|
||||||
import * as lancedb from '../index'
|
import * as lancedb from '../index'
|
||||||
|
import { type ConnectionOptions } from '../index'
|
||||||
|
|
||||||
describe('LanceDB S3 client', function () {
|
describe('LanceDB S3 client', function () {
|
||||||
if (process.env.TEST_S3_BASE_URL != null) {
|
if (process.env.TEST_S3_BASE_URL != null) {
|
||||||
const baseUri = process.env.TEST_S3_BASE_URL
|
const baseUri = process.env.TEST_S3_BASE_URL
|
||||||
it('should have a valid url', async function () {
|
it('should have a valid url', async function () {
|
||||||
const uri = `${baseUri}/valid_url`
|
const opts = { uri: `${baseUri}/valid_url` }
|
||||||
const table = await createTestDB(uri, 2, 20)
|
const table = await createTestDB(opts, 2, 20)
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(opts)
|
||||||
assert.equal(con.uri, uri)
|
assert.equal(con.uri, opts.uri)
|
||||||
|
|
||||||
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
||||||
assert.equal(results.length, 5)
|
assert.equal(results.length, 5)
|
||||||
})
|
}).timeout(10_000)
|
||||||
|
} else {
|
||||||
|
describe.skip('Skip S3 test', function () {})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.TEST_S3_BASE_URL != null && process.env.TEST_AWS_ACCESS_KEY_ID != null && process.env.TEST_AWS_SECRET_ACCESS_KEY != null) {
|
||||||
|
const baseUri = process.env.TEST_S3_BASE_URL
|
||||||
|
it('use custom credentials', async function () {
|
||||||
|
const opts: ConnectionOptions = {
|
||||||
|
uri: `${baseUri}/custom_credentials`,
|
||||||
|
awsCredentials: {
|
||||||
|
accessKeyId: process.env.TEST_AWS_ACCESS_KEY_ID as string,
|
||||||
|
secretKey: process.env.TEST_AWS_SECRET_ACCESS_KEY as string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const table = await createTestDB(opts, 2, 20)
|
||||||
|
const con = await lancedb.connect(opts)
|
||||||
|
assert.equal(con.uri, opts.uri)
|
||||||
|
|
||||||
|
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
||||||
|
assert.equal(results.length, 5)
|
||||||
|
}).timeout(10_000)
|
||||||
} else {
|
} else {
|
||||||
describe.skip('Skip S3 test', function () {})
|
describe.skip('Skip S3 test', function () {})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
async function createTestDB (uri: string, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
|
async function createTestDB (opts: ConnectionOptions, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(opts)
|
||||||
|
|
||||||
const data = []
|
const data = []
|
||||||
for (let i = 0; i < numRows; i++) {
|
for (let i = 0; i < numRows; i++) {
|
||||||
|
|||||||
@@ -18,7 +18,8 @@ import * as chai from 'chai'
|
|||||||
import * as chaiAsPromised from 'chai-as-promised'
|
import * as chaiAsPromised from 'chai-as-promised'
|
||||||
|
|
||||||
import * as lancedb from '../index'
|
import * as lancedb from '../index'
|
||||||
import { type EmbeddingFunction, MetricType, Query, WriteMode } from '../index'
|
import { type AwsCredentials, type EmbeddingFunction, MetricType, WriteMode } from '../index'
|
||||||
|
import { Query } from '../query'
|
||||||
|
|
||||||
const expect = chai.expect
|
const expect = chai.expect
|
||||||
const assert = chai.assert
|
const assert = chai.assert
|
||||||
@@ -32,6 +33,22 @@ describe('LanceDB client', function () {
|
|||||||
assert.equal(con.uri, uri)
|
assert.equal(con.uri, uri)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should accept an options object', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const con = await lancedb.connect({ uri })
|
||||||
|
assert.equal(con.uri, uri)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should accept custom aws credentials', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const awsCredentials: AwsCredentials = {
|
||||||
|
accessKeyId: '',
|
||||||
|
secretKey: ''
|
||||||
|
}
|
||||||
|
const con = await lancedb.connect({ uri, awsCredentials })
|
||||||
|
assert.equal(con.uri, uri)
|
||||||
|
})
|
||||||
|
|
||||||
it('should return the existing table names', async function () {
|
it('should return the existing table names', async function () {
|
||||||
const uri = await createTestDB()
|
const uri = await createTestDB()
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(uri)
|
||||||
@@ -252,7 +269,7 @@ describe('LanceDB client', function () {
|
|||||||
|
|
||||||
describe('Query object', function () {
|
describe('Query object', function () {
|
||||||
it('sets custom parameters', async function () {
|
it('sets custom parameters', async function () {
|
||||||
const query = new Query(undefined, [0.1, 0.3])
|
const query = new Query([0.1, 0.3])
|
||||||
.limit(1)
|
.limit(1)
|
||||||
.metricType(MetricType.Cosine)
|
.metricType(MetricType.Cosine)
|
||||||
.refineFactor(100)
|
.refineFactor(100)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 0.1.8
|
current_version = 0.1.14
|
||||||
commit = True
|
commit = True
|
||||||
message = [python] Bump version: {current_version} → {new_version}
|
message = [python] Bump version: {current_version} → {new_version}
|
||||||
tag = True
|
tag = True
|
||||||
|
|||||||
@@ -11,16 +11,25 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from .db import URI, LanceDBConnection
|
from typing import Optional
|
||||||
|
|
||||||
|
from .db import URI, DBConnection, LanceDBConnection
|
||||||
|
from .remote.db import RemoteDBConnection
|
||||||
|
from .schema import vector
|
||||||
|
|
||||||
|
|
||||||
def connect(uri: URI) -> LanceDBConnection:
|
def connect(
|
||||||
"""Connect to a LanceDB instance at the given URI
|
uri: URI, *, api_key: Optional[str] = None, region: str = "us-west-2"
|
||||||
|
) -> DBConnection:
|
||||||
|
"""Connect to a LanceDB database.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
uri: str or Path
|
uri: str or Path
|
||||||
The uri of the database.
|
The uri of the database.
|
||||||
|
api_token: str, optional
|
||||||
|
If presented, connect to LanceDB cloud.
|
||||||
|
Otherwise, connect to a database on file system or cloud storage.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
@@ -34,9 +43,17 @@ def connect(uri: URI) -> LanceDBConnection:
|
|||||||
|
|
||||||
>>> db = lancedb.connect("s3://my-bucket/lancedb")
|
>>> db = lancedb.connect("s3://my-bucket/lancedb")
|
||||||
|
|
||||||
|
Connect to LancdDB cloud:
|
||||||
|
|
||||||
|
>>> db = lancedb.connect("db://my_database", api_key="ldb_...")
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
conn : LanceDBConnection
|
conn : DBConnection
|
||||||
A connection to a LanceDB database.
|
A connection to a LanceDB database.
|
||||||
"""
|
"""
|
||||||
|
if isinstance(uri, str) and uri.startswith("db://"):
|
||||||
|
if api_key is None:
|
||||||
|
raise ValueError(f"api_key is required to connected LanceDB cloud: {uri}")
|
||||||
|
return RemoteDBConnection(uri, api_key, region)
|
||||||
return LanceDBConnection(uri)
|
return LanceDBConnection(uri)
|
||||||
|
|||||||
@@ -23,3 +23,13 @@ URI = Union[str, Path]
|
|||||||
# TODO support generator
|
# TODO support generator
|
||||||
DATA = Union[List[dict], dict, pd.DataFrame]
|
DATA = Union[List[dict], dict, pd.DataFrame]
|
||||||
VECTOR_COLUMN_NAME = "vector"
|
VECTOR_COLUMN_NAME = "vector"
|
||||||
|
|
||||||
|
|
||||||
|
class Credential(str):
|
||||||
|
"""Credential field"""
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return "********"
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return "********"
|
||||||
|
|||||||
@@ -1,10 +1,8 @@
|
|||||||
import builtins
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
# import lancedb so we don't have to in every example
|
# import lancedb so we don't have to in every example
|
||||||
import lancedb
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
|
|||||||
@@ -13,184 +13,70 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import functools
|
|
||||||
import os
|
import os
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Dict, Iterable, List, Optional, Tuple, Union
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
from pyarrow import fs
|
from pyarrow import fs
|
||||||
|
|
||||||
from .common import DATA, URI
|
from .common import DATA, URI
|
||||||
from .table import LanceTable
|
from .table import LanceTable, Table
|
||||||
from .util import get_uri_location, get_uri_scheme
|
from .util import fs_from_uri, get_uri_location, get_uri_scheme
|
||||||
|
|
||||||
|
|
||||||
class LanceDBConnection:
|
class DBConnection(ABC):
|
||||||
"""
|
"""An active LanceDB connection interface."""
|
||||||
A connection to a LanceDB database.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
uri: str or Path
|
|
||||||
The root uri of the database.
|
|
||||||
|
|
||||||
Examples
|
|
||||||
--------
|
|
||||||
>>> import lancedb
|
|
||||||
>>> db = lancedb.connect("./.lancedb")
|
|
||||||
>>> db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2},
|
|
||||||
... {"vector": [0.5, 1.3], "b": 4}])
|
|
||||||
LanceTable(my_table)
|
|
||||||
>>> db.create_table("another_table", data=[{"vector": [0.4, 0.4], "b": 6}])
|
|
||||||
LanceTable(another_table)
|
|
||||||
>>> sorted(db.table_names())
|
|
||||||
['another_table', 'my_table']
|
|
||||||
>>> len(db)
|
|
||||||
2
|
|
||||||
>>> db["my_table"]
|
|
||||||
LanceTable(my_table)
|
|
||||||
>>> "my_table" in db
|
|
||||||
True
|
|
||||||
>>> db.drop_table("my_table")
|
|
||||||
>>> db.drop_table("another_table")
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, uri: URI):
|
|
||||||
if not isinstance(uri, Path):
|
|
||||||
scheme = get_uri_scheme(uri)
|
|
||||||
is_local = isinstance(uri, Path) or scheme == "file"
|
|
||||||
# managed lancedb remote uses schema like lancedb+[http|grpc|...]://
|
|
||||||
self._is_managed_remote = not is_local and scheme.startswith("lancedb")
|
|
||||||
if self._is_managed_remote:
|
|
||||||
if len(scheme.split("+")) != 2:
|
|
||||||
raise ValueError(
|
|
||||||
f"Invalid LanceDB URI: {uri}, expected uri to have scheme like lancedb+<flavor>://..."
|
|
||||||
)
|
|
||||||
if is_local:
|
|
||||||
if isinstance(uri, str):
|
|
||||||
uri = Path(uri)
|
|
||||||
uri = uri.expanduser().absolute()
|
|
||||||
Path(uri).mkdir(parents=True, exist_ok=True)
|
|
||||||
self._uri = str(uri)
|
|
||||||
|
|
||||||
self._entered = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def uri(self) -> str:
|
|
||||||
return self._uri
|
|
||||||
|
|
||||||
@functools.cached_property
|
|
||||||
def is_managed_remote(self) -> bool:
|
|
||||||
return self._is_managed_remote
|
|
||||||
|
|
||||||
@functools.cached_property
|
|
||||||
def remote_flavor(self) -> str:
|
|
||||||
if not self.is_managed_remote:
|
|
||||||
raise ValueError(
|
|
||||||
"Not a managed remote LanceDB, there should be no server flavor"
|
|
||||||
)
|
|
||||||
return get_uri_scheme(self.uri).split("+")[1]
|
|
||||||
|
|
||||||
@functools.cached_property
|
|
||||||
def _client(self) -> "lancedb.remote.LanceDBClient":
|
|
||||||
if not self.is_managed_remote:
|
|
||||||
raise ValueError("Not a managed remote LanceDB, there should be no client")
|
|
||||||
|
|
||||||
# don't import unless we are really using remote
|
|
||||||
from lancedb.remote.client import RestfulLanceDBClient
|
|
||||||
|
|
||||||
if self.remote_flavor == "http":
|
|
||||||
return RestfulLanceDBClient(self._uri)
|
|
||||||
|
|
||||||
raise ValueError("Unsupported remote flavor: " + self.remote_flavor)
|
|
||||||
|
|
||||||
async def close(self):
|
|
||||||
if self._entered:
|
|
||||||
raise ValueError("Cannot re-enter the same LanceDBConnection twice")
|
|
||||||
self._entered = True
|
|
||||||
await self._client.close()
|
|
||||||
|
|
||||||
async def __aenter__(self) -> LanceDBConnection:
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __aexit__(self, exc_type, exc_value, traceback):
|
|
||||||
await self.close()
|
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
def table_names(self) -> list[str]:
|
def table_names(self) -> list[str]:
|
||||||
"""Get the names of all tables in the database.
|
"""List all table names in the database."""
|
||||||
|
pass
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
list of str
|
|
||||||
A list of table names.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
filesystem, path = fs.FileSystem.from_uri(self.uri)
|
|
||||||
except pa.ArrowInvalid:
|
|
||||||
raise NotImplementedError("Unsupported scheme: " + self.uri)
|
|
||||||
|
|
||||||
try:
|
|
||||||
paths = filesystem.get_file_info(
|
|
||||||
fs.FileSelector(get_uri_location(self.uri))
|
|
||||||
)
|
|
||||||
except FileNotFoundError:
|
|
||||||
# It is ok if the file does not exist since it will be created
|
|
||||||
paths = []
|
|
||||||
tables = [
|
|
||||||
os.path.splitext(file_info.base_name)[0]
|
|
||||||
for file_info in paths
|
|
||||||
if file_info.extension == "lance"
|
|
||||||
]
|
|
||||||
return tables
|
|
||||||
|
|
||||||
def __len__(self) -> int:
|
|
||||||
return len(self.table_names())
|
|
||||||
|
|
||||||
def __contains__(self, name: str) -> bool:
|
|
||||||
return name in self.table_names()
|
|
||||||
|
|
||||||
def __getitem__(self, name: str) -> LanceTable:
|
|
||||||
return self.open_table(name)
|
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
def create_table(
|
def create_table(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
data: DATA = None,
|
data: Optional[
|
||||||
schema: pa.Schema = None,
|
Union[List[dict], dict, pd.DataFrame, pa.Table, Iterable[pa.RecordBatch]],
|
||||||
|
] = None,
|
||||||
|
schema: Optional[pa.Schema] = None,
|
||||||
mode: str = "create",
|
mode: str = "create",
|
||||||
on_bad_vectors: str = "drop",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> LanceTable:
|
) -> Table:
|
||||||
"""Create a table in the database.
|
"""Create a [Table][lancedb.table.Table] in the database.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
name: str
|
name: str
|
||||||
The name of the table.
|
The name of the table.
|
||||||
data: list, tuple, dict, pd.DataFrame; optional
|
data: list, tuple, dict, pd.DataFrame; optional
|
||||||
The data to insert into the table.
|
The data to initialize the table. User must provide at least one of `data` or `schema`.
|
||||||
schema: pyarrow.Schema; optional
|
schema: pyarrow.Schema; optional
|
||||||
The schema of the table.
|
The schema of the table.
|
||||||
mode: str; default "create"
|
mode: str; default "create"
|
||||||
The mode to use when creating the table. Can be either "create" or "overwrite".
|
The mode to use when creating the table. Can be either "create" or "overwrite".
|
||||||
By default, if the table already exists, an exception is raised.
|
By default, if the table already exists, an exception is raised.
|
||||||
If you want to overwrite the table, use mode="overwrite".
|
If you want to overwrite the table, use mode="overwrite".
|
||||||
on_bad_vectors: str
|
on_bad_vectors: str, default "error"
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
One of "raise", "drop", "fill".
|
One of "error", "drop", "fill".
|
||||||
fill_value: float
|
fill_value: float
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
|
|
||||||
Note
|
|
||||||
----
|
|
||||||
The vector index won't be created by default.
|
|
||||||
To create the index, call the `create_index` method on the table.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
LanceTable
|
LanceTable
|
||||||
A reference to the newly created table.
|
A reference to the newly created table.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
The vector index won't be created by default.
|
||||||
|
To create the index, call the `create_index` method on the table.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
|
|
||||||
@@ -236,7 +122,7 @@ class LanceDBConnection:
|
|||||||
|
|
||||||
Data is converted to Arrow before being written to disk. For maximum
|
Data is converted to Arrow before being written to disk. For maximum
|
||||||
control over how data is saved, either provide the PyArrow schema to
|
control over how data is saved, either provide the PyArrow schema to
|
||||||
convert to or else provide a PyArrow table directly.
|
convert to or else provide a [PyArrow Table](pyarrow.Table) directly.
|
||||||
|
|
||||||
>>> custom_schema = pa.schema([
|
>>> custom_schema = pa.schema([
|
||||||
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
||||||
@@ -255,22 +141,168 @@ class LanceDBConnection:
|
|||||||
vector: [[[1.1,1.2],[0.2,1.8]]]
|
vector: [[[1.1,1.2],[0.2,1.8]]]
|
||||||
lat: [[45.5,40.1]]
|
lat: [[45.5,40.1]]
|
||||||
long: [[-122.7,-74.1]]
|
long: [[-122.7,-74.1]]
|
||||||
|
|
||||||
|
|
||||||
|
It is also possible to create an table from `[Iterable[pa.RecordBatch]]`:
|
||||||
|
|
||||||
|
|
||||||
|
>>> import pyarrow as pa
|
||||||
|
>>> def make_batches():
|
||||||
|
... for i in range(5):
|
||||||
|
... yield pa.RecordBatch.from_arrays(
|
||||||
|
... [
|
||||||
|
... pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
... pa.array(["foo", "bar"]),
|
||||||
|
... pa.array([10.0, 20.0]),
|
||||||
|
... ],
|
||||||
|
... ["vector", "item", "price"],
|
||||||
|
... )
|
||||||
|
>>> schema=pa.schema([
|
||||||
|
... pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
... pa.field("item", pa.utf8()),
|
||||||
|
... pa.field("price", pa.float32()),
|
||||||
|
... ])
|
||||||
|
>>> db.create_table("table4", make_batches(), schema=schema)
|
||||||
|
LanceTable(table4)
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def __getitem__(self, name: str) -> LanceTable:
|
||||||
|
return self.open_table(name)
|
||||||
|
|
||||||
|
def open_table(self, name: str) -> Table:
|
||||||
|
"""Open a Lance Table in the database.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
name: str
|
||||||
|
The name of the table.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
A LanceTable object representing the table.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def drop_table(self, name: str):
|
||||||
|
"""Drop a table from the database.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
name: str
|
||||||
|
The name of the table.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class LanceDBConnection(DBConnection):
|
||||||
|
"""
|
||||||
|
A connection to a LanceDB database.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri: str or Path
|
||||||
|
The root uri of the database.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2},
|
||||||
|
... {"vector": [0.5, 1.3], "b": 4}])
|
||||||
|
LanceTable(my_table)
|
||||||
|
>>> db.create_table("another_table", data=[{"vector": [0.4, 0.4], "b": 6}])
|
||||||
|
LanceTable(another_table)
|
||||||
|
>>> sorted(db.table_names())
|
||||||
|
['another_table', 'my_table']
|
||||||
|
>>> len(db)
|
||||||
|
2
|
||||||
|
>>> db["my_table"]
|
||||||
|
LanceTable(my_table)
|
||||||
|
>>> "my_table" in db
|
||||||
|
True
|
||||||
|
>>> db.drop_table("my_table")
|
||||||
|
>>> db.drop_table("another_table")
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, uri: URI):
|
||||||
|
if not isinstance(uri, Path):
|
||||||
|
scheme = get_uri_scheme(uri)
|
||||||
|
is_local = isinstance(uri, Path) or scheme == "file"
|
||||||
|
if is_local:
|
||||||
|
if isinstance(uri, str):
|
||||||
|
uri = Path(uri)
|
||||||
|
uri = uri.expanduser().absolute()
|
||||||
|
Path(uri).mkdir(parents=True, exist_ok=True)
|
||||||
|
self._uri = str(uri)
|
||||||
|
|
||||||
|
self._entered = False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uri(self) -> str:
|
||||||
|
return self._uri
|
||||||
|
|
||||||
|
def table_names(self) -> list[str]:
|
||||||
|
"""Get the names of all tables in the database.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
list of str
|
||||||
|
A list of table names.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
filesystem, path = fs_from_uri(self.uri)
|
||||||
|
except pa.ArrowInvalid:
|
||||||
|
raise NotImplementedError("Unsupported scheme: " + self.uri)
|
||||||
|
|
||||||
|
try:
|
||||||
|
paths = filesystem.get_file_info(
|
||||||
|
fs.FileSelector(get_uri_location(self.uri))
|
||||||
|
)
|
||||||
|
except FileNotFoundError:
|
||||||
|
# It is ok if the file does not exist since it will be created
|
||||||
|
paths = []
|
||||||
|
tables = [
|
||||||
|
os.path.splitext(file_info.base_name)[0]
|
||||||
|
for file_info in paths
|
||||||
|
if file_info.extension == "lance"
|
||||||
|
]
|
||||||
|
return tables
|
||||||
|
|
||||||
|
def __len__(self) -> int:
|
||||||
|
return len(self.table_names())
|
||||||
|
|
||||||
|
def __contains__(self, name: str) -> bool:
|
||||||
|
return name in self.table_names()
|
||||||
|
|
||||||
|
def create_table(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
data: Optional[Union[List[dict], dict, pd.DataFrame]] = None,
|
||||||
|
schema: pa.Schema = None,
|
||||||
|
mode: str = "create",
|
||||||
|
on_bad_vectors: str = "error",
|
||||||
|
fill_value: float = 0.0,
|
||||||
|
) -> LanceTable:
|
||||||
|
"""Create a table in the database.
|
||||||
|
|
||||||
|
See
|
||||||
|
---
|
||||||
|
DBConnection.create_table
|
||||||
"""
|
"""
|
||||||
if mode.lower() not in ["create", "overwrite"]:
|
if mode.lower() not in ["create", "overwrite"]:
|
||||||
raise ValueError("mode must be either 'create' or 'overwrite'")
|
raise ValueError("mode must be either 'create' or 'overwrite'")
|
||||||
|
|
||||||
if data is not None:
|
tbl = LanceTable.create(
|
||||||
tbl = LanceTable.create(
|
self,
|
||||||
self,
|
name,
|
||||||
name,
|
data,
|
||||||
data,
|
schema,
|
||||||
schema,
|
mode=mode,
|
||||||
mode=mode,
|
on_bad_vectors=on_bad_vectors,
|
||||||
on_bad_vectors=on_bad_vectors,
|
fill_value=fill_value,
|
||||||
fill_value=fill_value,
|
)
|
||||||
)
|
|
||||||
else:
|
|
||||||
tbl = LanceTable.open(self, name)
|
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
def open_table(self, name: str) -> LanceTable:
|
def open_table(self, name: str) -> LanceTable:
|
||||||
@@ -295,6 +327,6 @@ class LanceDBConnection:
|
|||||||
name: str
|
name: str
|
||||||
The name of the table.
|
The name of the table.
|
||||||
"""
|
"""
|
||||||
filesystem, path = pa.fs.FileSystem.from_uri(self.uri)
|
filesystem, path = fs_from_uri(self.uri)
|
||||||
table_path = os.path.join(path, name + ".lance")
|
table_path = os.path.join(path, name + ".lance")
|
||||||
filesystem.delete_dir(table_path)
|
filesystem.delete_dir(table_path)
|
||||||
|
|||||||
251
python/lancedb/pydantic.py
Normal file
251
python/lancedb/pydantic.py
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Pydantic (v1 / v2) adapter for LanceDB"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import inspect
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Callable, Dict, Generator, List, Type, Union, _GenericAlias
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pyarrow as pa
|
||||||
|
import pydantic
|
||||||
|
import semver
|
||||||
|
|
||||||
|
PYDANTIC_VERSION = semver.Version.parse(pydantic.__version__)
|
||||||
|
try:
|
||||||
|
from pydantic_core import CoreSchema, core_schema
|
||||||
|
except ImportError:
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
class FixedSizeListMixin(ABC):
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def dim() -> int:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def value_arrow_type() -> pa.DataType:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
def vector(
|
||||||
|
dim: int, value_type: pa.DataType = pa.float32()
|
||||||
|
) -> Type[FixedSizeListMixin]:
|
||||||
|
"""Pydantic Vector Type.
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
Experimental feature.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
dim : int
|
||||||
|
The dimension of the vector.
|
||||||
|
value_type : pyarrow.DataType, optional
|
||||||
|
The value type of the vector, by default pa.float32()
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> import pydantic
|
||||||
|
>>> from lancedb.pydantic import vector
|
||||||
|
...
|
||||||
|
>>> class MyModel(pydantic.BaseModel):
|
||||||
|
... id: int
|
||||||
|
... url: str
|
||||||
|
... embeddings: vector(768)
|
||||||
|
>>> schema = pydantic_to_schema(MyModel)
|
||||||
|
>>> assert schema == pa.schema([
|
||||||
|
... pa.field("id", pa.int64(), False),
|
||||||
|
... pa.field("url", pa.utf8(), False),
|
||||||
|
... pa.field("embeddings", pa.list_(pa.float32(), 768), False)
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
|
||||||
|
# TODO: make a public parameterized type.
|
||||||
|
class FixedSizeList(list, FixedSizeListMixin):
|
||||||
|
def __repr__(self):
|
||||||
|
return f"FixedSizeList(dim={dim})"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def dim() -> int:
|
||||||
|
return dim
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def value_arrow_type() -> pa.DataType:
|
||||||
|
return value_type
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_pydantic_core_schema__(
|
||||||
|
cls, _source_type: Any, _handler: pydantic.GetCoreSchemaHandler
|
||||||
|
) -> CoreSchema:
|
||||||
|
return core_schema.no_info_after_validator_function(
|
||||||
|
cls,
|
||||||
|
core_schema.list_schema(
|
||||||
|
min_length=dim,
|
||||||
|
max_length=dim,
|
||||||
|
items_schema=core_schema.float_schema(),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_validators__(cls) -> Generator[Callable, None, None]:
|
||||||
|
yield cls.validate
|
||||||
|
|
||||||
|
# For pydantic v1
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, v):
|
||||||
|
if not isinstance(v, (list, range, np.ndarray)) or len(v) != dim:
|
||||||
|
raise TypeError("A list of numbers or numpy.ndarray is needed")
|
||||||
|
return v
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION < (2, 0):
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __modify_schema__(cls, field_schema: Dict[str, Any]):
|
||||||
|
field_schema["items"] = {"type": "number"}
|
||||||
|
field_schema["maxItems"] = dim
|
||||||
|
field_schema["minItems"] = dim
|
||||||
|
|
||||||
|
return FixedSizeList
|
||||||
|
|
||||||
|
|
||||||
|
def _py_type_to_arrow_type(py_type: Type[Any]) -> pa.DataType:
|
||||||
|
"""Convert Python Type to Arrow DataType.
|
||||||
|
|
||||||
|
Raises
|
||||||
|
------
|
||||||
|
TypeError
|
||||||
|
If the type is not supported.
|
||||||
|
"""
|
||||||
|
if py_type == int:
|
||||||
|
return pa.int64()
|
||||||
|
elif py_type == float:
|
||||||
|
return pa.float64()
|
||||||
|
elif py_type == str:
|
||||||
|
return pa.utf8()
|
||||||
|
elif py_type == bool:
|
||||||
|
return pa.bool_()
|
||||||
|
elif py_type == bytes:
|
||||||
|
return pa.binary()
|
||||||
|
raise TypeError(
|
||||||
|
f"Converting Pydantic type to Arrow Type: unsupported type {py_type}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION.major < 2:
|
||||||
|
|
||||||
|
def _pydantic_model_to_fields(model: pydantic.BaseModel) -> List[pa.Field]:
|
||||||
|
return [
|
||||||
|
_pydantic_to_field(name, field) for name, field in model.__fields__.items()
|
||||||
|
]
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def _pydantic_model_to_fields(model: pydantic.BaseModel) -> List[pa.Field]:
|
||||||
|
return [
|
||||||
|
_pydantic_to_field(name, field)
|
||||||
|
for name, field in model.model_fields.items()
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _pydantic_to_arrow_type(field: pydantic.fields.FieldInfo) -> pa.DataType:
|
||||||
|
"""Convert a Pydantic FieldInfo to Arrow DataType"""
|
||||||
|
if isinstance(field.annotation, _GenericAlias) or (
|
||||||
|
sys.version_info > (3, 9) and isinstance(field.annotation, types.GenericAlias)
|
||||||
|
):
|
||||||
|
origin = field.annotation.__origin__
|
||||||
|
args = field.annotation.__args__
|
||||||
|
if origin == list:
|
||||||
|
child = args[0]
|
||||||
|
return pa.list_(_py_type_to_arrow_type(child))
|
||||||
|
elif origin == Union:
|
||||||
|
if len(args) == 2 and args[1] == type(None):
|
||||||
|
return _py_type_to_arrow_type(args[0])
|
||||||
|
elif inspect.isclass(field.annotation):
|
||||||
|
if issubclass(field.annotation, pydantic.BaseModel):
|
||||||
|
# Struct
|
||||||
|
fields = _pydantic_model_to_fields(field.annotation)
|
||||||
|
return pa.struct(fields)
|
||||||
|
elif issubclass(field.annotation, FixedSizeListMixin):
|
||||||
|
return pa.list_(field.annotation.value_arrow_type(), field.annotation.dim())
|
||||||
|
return _py_type_to_arrow_type(field.annotation)
|
||||||
|
|
||||||
|
|
||||||
|
def is_nullable(field: pydantic.fields.FieldInfo) -> bool:
|
||||||
|
"""Check if a Pydantic FieldInfo is nullable."""
|
||||||
|
if isinstance(field.annotation, _GenericAlias):
|
||||||
|
origin = field.annotation.__origin__
|
||||||
|
args = field.annotation.__args__
|
||||||
|
if origin == Union:
|
||||||
|
if len(args) == 2 and args[1] == type(None):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _pydantic_to_field(name: str, field: pydantic.fields.FieldInfo) -> pa.Field:
|
||||||
|
"""Convert a Pydantic field to a PyArrow Field."""
|
||||||
|
dt = _pydantic_to_arrow_type(field)
|
||||||
|
return pa.field(name, dt, is_nullable(field))
|
||||||
|
|
||||||
|
|
||||||
|
def pydantic_to_schema(model: Type[pydantic.BaseModel]) -> pa.Schema:
|
||||||
|
"""Convert a Pydantic model to a PyArrow Schema.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
model : Type[pydantic.BaseModel]
|
||||||
|
The Pydantic BaseModel to convert to Arrow Schema.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pyarrow.Schema
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> from typing import List, Optional
|
||||||
|
>>> import pydantic
|
||||||
|
>>> from lancedb.pydantic import pydantic_to_schema
|
||||||
|
...
|
||||||
|
>>> class InnerModel(pydantic.BaseModel):
|
||||||
|
... a: str
|
||||||
|
... b: Optional[float]
|
||||||
|
>>>
|
||||||
|
>>> class FooModel(pydantic.BaseModel):
|
||||||
|
... id: int
|
||||||
|
... s: Optional[str] = None
|
||||||
|
... vec: List[float]
|
||||||
|
... li: List[int]
|
||||||
|
... inner: InnerModel
|
||||||
|
>>> schema = pydantic_to_schema(FooModel)
|
||||||
|
>>> assert schema == pa.schema([
|
||||||
|
... pa.field("id", pa.int64(), False),
|
||||||
|
... pa.field("s", pa.utf8(), True),
|
||||||
|
... pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
... pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
... pa.field("inner", pa.struct([
|
||||||
|
... pa.field("a", pa.utf8(), False),
|
||||||
|
... pa.field("b", pa.float64(), True),
|
||||||
|
... ]), False),
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
fields = _pydantic_model_to_fields(model)
|
||||||
|
return pa.schema(fields)
|
||||||
@@ -10,18 +10,47 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
from typing import List, Literal, Optional, Union
|
||||||
from typing import Awaitable, Literal
|
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from .common import VECTOR_COLUMN_NAME
|
from .common import VECTOR_COLUMN_NAME
|
||||||
|
|
||||||
|
|
||||||
|
class Query(BaseModel):
|
||||||
|
"""A Query"""
|
||||||
|
|
||||||
|
vector_column: str = VECTOR_COLUMN_NAME
|
||||||
|
|
||||||
|
# vector to search for
|
||||||
|
vector: List[float]
|
||||||
|
|
||||||
|
# sql filter to refine the query with
|
||||||
|
filter: Optional[str] = None
|
||||||
|
|
||||||
|
# top k results to return
|
||||||
|
k: int
|
||||||
|
|
||||||
|
# # metrics
|
||||||
|
metric: str = "L2"
|
||||||
|
|
||||||
|
# which columns to return in the results
|
||||||
|
columns: Optional[List[str]] = None
|
||||||
|
|
||||||
|
# optional query parameters for tuning the results,
|
||||||
|
# e.g. `{"nprobes": "10", "refine_factor": "10"}`
|
||||||
|
nprobes: int = 10
|
||||||
|
|
||||||
|
# Refine factor.
|
||||||
|
refine_factor: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
class LanceQueryBuilder:
|
class LanceQueryBuilder:
|
||||||
"""
|
"""
|
||||||
A builder for nearest neighbor queries for LanceDB.
|
A builder for nearest neighbor queries for LanceDB.
|
||||||
@@ -47,9 +76,9 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
table: "lancedb.table.LanceTable",
|
table: "lancedb.table.Table",
|
||||||
query: np.ndarray,
|
query: Union[np.ndarray, str],
|
||||||
vector_column_name: str = VECTOR_COLUMN_NAME,
|
vector_column: str = VECTOR_COLUMN_NAME,
|
||||||
):
|
):
|
||||||
self._metric = "L2"
|
self._metric = "L2"
|
||||||
self._nprobes = 20
|
self._nprobes = 20
|
||||||
@@ -59,7 +88,7 @@ class LanceQueryBuilder:
|
|||||||
self._limit = 10
|
self._limit = 10
|
||||||
self._columns = None
|
self._columns = None
|
||||||
self._where = None
|
self._where = None
|
||||||
self._vector_column_name = vector_column_name
|
self._vector_column = vector_column
|
||||||
|
|
||||||
def limit(self, limit: int) -> LanceQueryBuilder:
|
def limit(self, limit: int) -> LanceQueryBuilder:
|
||||||
"""Set the maximum number of results to return.
|
"""Set the maximum number of results to return.
|
||||||
@@ -181,52 +210,29 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
def to_arrow(self) -> pa.Table:
|
def to_arrow(self) -> pa.Table:
|
||||||
"""
|
"""
|
||||||
Execute the query and return the results as a arrow Table.
|
Execute the query and return the results as an
|
||||||
|
[Apache Arrow Table](https://arrow.apache.org/docs/python/generated/pyarrow.Table.html#pyarrow.Table).
|
||||||
|
|
||||||
In addition to the selected columns, LanceDB also returns a vector
|
In addition to the selected columns, LanceDB also returns a vector
|
||||||
and also the "score" column which is the distance between the query
|
and also the "score" column which is the distance between the query
|
||||||
vector and the returned vector.
|
vector and the returned vectors.
|
||||||
"""
|
"""
|
||||||
if self._table._conn.is_managed_remote:
|
vector = self._query if isinstance(self._query, list) else self._query.tolist()
|
||||||
try:
|
query = Query(
|
||||||
loop = asyncio.get_running_loop()
|
vector=vector,
|
||||||
except RuntimeError:
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
result = self._table._conn._client.query(
|
|
||||||
self._table.name, self.to_remote_query()
|
|
||||||
)
|
|
||||||
return loop.run_until_complete(result).to_arrow()
|
|
||||||
|
|
||||||
ds = self._table.to_lance()
|
|
||||||
return ds.to_table(
|
|
||||||
columns=self._columns,
|
|
||||||
filter=self._where,
|
|
||||||
nearest={
|
|
||||||
"column": self._vector_column_name,
|
|
||||||
"q": self._query,
|
|
||||||
"k": self._limit,
|
|
||||||
"metric": self._metric,
|
|
||||||
"nprobes": self._nprobes,
|
|
||||||
"refine_factor": self._refine_factor,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
def to_remote_query(self) -> "VectorQuery":
|
|
||||||
# don't import unless we are connecting to remote
|
|
||||||
from lancedb.remote.client import VectorQuery
|
|
||||||
|
|
||||||
return VectorQuery(
|
|
||||||
vector=self._query.tolist(),
|
|
||||||
filter=self._where,
|
filter=self._where,
|
||||||
k=self._limit,
|
k=self._limit,
|
||||||
_metric=self._metric,
|
metric=self._metric,
|
||||||
columns=self._columns,
|
columns=self._columns,
|
||||||
nprobes=self._nprobes,
|
nprobes=self._nprobes,
|
||||||
refine_factor=self._refine_factor,
|
refine_factor=self._refine_factor,
|
||||||
|
vector_column=self._vector_column,
|
||||||
)
|
)
|
||||||
|
return self._table._execute_query(query)
|
||||||
|
|
||||||
|
|
||||||
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
||||||
def to_df(self) -> pd.DataFrame:
|
def to_arrow(self) -> pd.Table:
|
||||||
try:
|
try:
|
||||||
import tantivy
|
import tantivy
|
||||||
except ImportError:
|
except ImportError:
|
||||||
@@ -243,8 +249,9 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
|
|||||||
# get the scores and doc ids
|
# get the scores and doc ids
|
||||||
row_ids, scores = search_index(index, self._query, self._limit)
|
row_ids, scores = search_index(index, self._query, self._limit)
|
||||||
if len(row_ids) == 0:
|
if len(row_ids) == 0:
|
||||||
return pd.DataFrame()
|
empty_schema = pa.schema([pa.field("score", pa.float32())])
|
||||||
|
return pa.Table.from_pylist([], schema=empty_schema)
|
||||||
scores = pa.array(scores)
|
scores = pa.array(scores)
|
||||||
output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
|
output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
|
||||||
output_tbl = output_tbl.append_column("score", scores)
|
output_tbl = output_tbl.append_column("score", scores)
|
||||||
return output_tbl.to_pandas()
|
return output_tbl
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ import abc
|
|||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
import pandas as pd
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|||||||
22
python/lancedb/remote/arrow.py
Normal file
22
python/lancedb/remote/arrow.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
|
||||||
|
def to_ipc_binary(table: pa.Table) -> bytes:
|
||||||
|
"""Serialize a PyArrow Table to IPC binary."""
|
||||||
|
sink = pa.BufferOutputStream()
|
||||||
|
with pa.ipc.new_stream(sink, table.schema) as writer:
|
||||||
|
writer.write_table(table)
|
||||||
|
return sink.getvalue().to_pybytes()
|
||||||
@@ -13,15 +13,19 @@
|
|||||||
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
import urllib.parse
|
from typing import Any, Callable, Dict, Optional, Union
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import attr
|
import attr
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from lancedb.common import Credential
|
||||||
from lancedb.remote import VectorQuery, VectorQueryResult
|
from lancedb.remote import VectorQuery, VectorQueryResult
|
||||||
from lancedb.remote.errors import LanceDBClientError
|
from lancedb.remote.errors import LanceDBClientError
|
||||||
|
|
||||||
|
ARROW_STREAM_CONTENT_TYPE = "application/vnd.apache.arrow.stream"
|
||||||
|
|
||||||
|
|
||||||
def _check_not_closed(f):
|
def _check_not_closed(f):
|
||||||
@functools.wraps(f)
|
@functools.wraps(f)
|
||||||
@@ -33,47 +37,111 @@ def _check_not_closed(f):
|
|||||||
return wrapped
|
return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
async def _read_ipc(resp: aiohttp.ClientResponse) -> pa.Table:
|
||||||
|
resp_body = await resp.read()
|
||||||
|
with pa.ipc.open_file(pa.BufferReader(resp_body)) as reader:
|
||||||
|
return reader.read_all()
|
||||||
|
|
||||||
|
|
||||||
@attr.define(slots=False)
|
@attr.define(slots=False)
|
||||||
class RestfulLanceDBClient:
|
class RestfulLanceDBClient:
|
||||||
url: str
|
db_name: str
|
||||||
|
region: str
|
||||||
|
api_key: Credential
|
||||||
closed: bool = attr.field(default=False, init=False)
|
closed: bool = attr.field(default=False, init=False)
|
||||||
|
|
||||||
@functools.cached_property
|
@functools.cached_property
|
||||||
def session(self) -> aiohttp.ClientSession:
|
def session(self) -> aiohttp.ClientSession:
|
||||||
parsed = urllib.parse.urlparse(self.url)
|
url = f"https://{self.db_name}.{self.region}.api.lancedb.com"
|
||||||
scheme = parsed.scheme
|
|
||||||
if not scheme.startswith("lancedb"):
|
|
||||||
raise ValueError(
|
|
||||||
f"Invalid scheme: {scheme}, must be like lancedb+<flavor>://"
|
|
||||||
)
|
|
||||||
flavor = scheme.split("+")[1]
|
|
||||||
url = f"{flavor}://{parsed.hostname}:{parsed.port}"
|
|
||||||
return aiohttp.ClientSession(url)
|
return aiohttp.ClientSession(url)
|
||||||
|
|
||||||
async def close(self):
|
async def close(self):
|
||||||
await self.session.close()
|
await self.session.close()
|
||||||
self.closed = True
|
self.closed = True
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def headers(self) -> Dict[str, str]:
|
||||||
|
headers = {
|
||||||
|
"x-api-key": self.api_key,
|
||||||
|
}
|
||||||
|
if self.region == "local": # Local test mode
|
||||||
|
headers["Host"] = f"{self.db_name}.{self.region}.api.lancedb.com"
|
||||||
|
return headers
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _check_status(resp: aiohttp.ClientResponse):
|
||||||
|
if resp.status == 404:
|
||||||
|
raise LanceDBClientError(f"Not found: {await resp.text()}")
|
||||||
|
elif 400 <= resp.status < 500:
|
||||||
|
raise LanceDBClientError(
|
||||||
|
f"Bad Request: {resp.status}, error: {await resp.text()}"
|
||||||
|
)
|
||||||
|
elif 500 <= resp.status < 600:
|
||||||
|
raise LanceDBClientError(
|
||||||
|
f"Internal Server Error: {resp.status}, error: {await resp.text()}"
|
||||||
|
)
|
||||||
|
elif resp.status != 200:
|
||||||
|
raise LanceDBClientError(
|
||||||
|
f"Unknown Error: {resp.status}, error: {await resp.text()}"
|
||||||
|
)
|
||||||
|
|
||||||
@_check_not_closed
|
@_check_not_closed
|
||||||
async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
|
async def get(self, uri: str, params: Union[Dict[str, Any], BaseModel] = None):
|
||||||
|
"""Send a GET request and returns the deserialized response payload."""
|
||||||
|
if isinstance(params, BaseModel):
|
||||||
|
params: Dict[str, Any] = params.dict(exclude_none=True)
|
||||||
|
async with self.session.get(uri, params=params, headers=self.headers) as resp:
|
||||||
|
await self._check_status(resp)
|
||||||
|
return await resp.json()
|
||||||
|
|
||||||
|
@_check_not_closed
|
||||||
|
async def post(
|
||||||
|
self,
|
||||||
|
uri: str,
|
||||||
|
data: Union[Dict[str, Any], BaseModel, bytes],
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
content_type: Optional[str] = None,
|
||||||
|
deserialize: Callable = lambda resp: resp.json(),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Send a POST request and returns the deserialized response payload.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri : str
|
||||||
|
The uri to send the POST request to.
|
||||||
|
data: Union[Dict[str, Any], BaseModel]
|
||||||
|
|
||||||
|
"""
|
||||||
|
if isinstance(data, BaseModel):
|
||||||
|
data: Dict[str, Any] = data.dict(exclude_none=True)
|
||||||
|
if isinstance(data, bytes):
|
||||||
|
req_kwargs = {"data": data}
|
||||||
|
else:
|
||||||
|
req_kwargs = {"json": data}
|
||||||
|
|
||||||
|
headers = self.headers.copy()
|
||||||
|
if content_type is not None:
|
||||||
|
headers["content-type"] = content_type
|
||||||
async with self.session.post(
|
async with self.session.post(
|
||||||
f"/table/{table_name}/", json=query.dict(exclude_none=True)
|
uri,
|
||||||
|
headers=headers,
|
||||||
|
params=params,
|
||||||
|
**req_kwargs,
|
||||||
) as resp:
|
) as resp:
|
||||||
resp: aiohttp.ClientResponse = resp
|
resp: aiohttp.ClientResponse = resp
|
||||||
if 400 <= resp.status < 500:
|
await self._check_status(resp)
|
||||||
raise LanceDBClientError(
|
return await deserialize(resp)
|
||||||
f"Bad Request: {resp.status}, error: {await resp.text()}"
|
|
||||||
)
|
|
||||||
if 500 <= resp.status < 600:
|
|
||||||
raise LanceDBClientError(
|
|
||||||
f"Internal Server Error: {resp.status}, error: {await resp.text()}"
|
|
||||||
)
|
|
||||||
if resp.status != 200:
|
|
||||||
raise LanceDBClientError(
|
|
||||||
f"Unknown Error: {resp.status}, error: {await resp.text()}"
|
|
||||||
)
|
|
||||||
|
|
||||||
resp_body = await resp.read()
|
@_check_not_closed
|
||||||
with pa.ipc.open_file(pa.BufferReader(resp_body)) as reader:
|
async def list_tables(self):
|
||||||
tbl = reader.read_all()
|
"""List all tables in the database."""
|
||||||
|
json = await self.get("/v1/table/", {})
|
||||||
|
return json["tables"]
|
||||||
|
|
||||||
|
@_check_not_closed
|
||||||
|
async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
|
||||||
|
"""Query a table."""
|
||||||
|
tbl = await self.post(
|
||||||
|
f"/v1/table/{table_name}/query/", query, deserialize=_read_ipc
|
||||||
|
)
|
||||||
return VectorQueryResult(tbl)
|
return VectorQueryResult(tbl)
|
||||||
|
|||||||
104
python/lancedb/remote/db.py
Normal file
104
python/lancedb/remote/db.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import uuid
|
||||||
|
from typing import List
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from lancedb.common import DATA
|
||||||
|
from lancedb.db import DBConnection
|
||||||
|
from lancedb.schema import schema_to_json
|
||||||
|
from lancedb.table import Table, _sanitize_data
|
||||||
|
|
||||||
|
from .arrow import to_ipc_binary
|
||||||
|
from .client import ARROW_STREAM_CONTENT_TYPE, RestfulLanceDBClient
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteDBConnection(DBConnection):
|
||||||
|
"""A connection to a remote LanceDB database."""
|
||||||
|
|
||||||
|
def __init__(self, db_url: str, api_key: str, region: str):
|
||||||
|
"""Connect to a remote LanceDB database."""
|
||||||
|
parsed = urlparse(db_url)
|
||||||
|
if parsed.scheme != "db":
|
||||||
|
raise ValueError(f"Invalid scheme: {parsed.scheme}, only accepts db://")
|
||||||
|
self.db_name = parsed.netloc
|
||||||
|
self.api_key = api_key
|
||||||
|
self._client = RestfulLanceDBClient(self.db_name, region, api_key)
|
||||||
|
try:
|
||||||
|
self._loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
self._loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"RemoveConnect(name={self.db_name})"
|
||||||
|
|
||||||
|
def table_names(self) -> List[str]:
|
||||||
|
"""List the names of all tables in the database."""
|
||||||
|
result = self._loop.run_until_complete(self._client.list_tables())
|
||||||
|
return result
|
||||||
|
|
||||||
|
def open_table(self, name: str) -> Table:
|
||||||
|
"""Open a Lance Table in the database.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
name: str
|
||||||
|
The name of the table.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
A LanceTable object representing the table.
|
||||||
|
"""
|
||||||
|
from .table import RemoteTable
|
||||||
|
|
||||||
|
# TODO: check if table exists
|
||||||
|
|
||||||
|
return RemoteTable(self, name)
|
||||||
|
|
||||||
|
def create_table(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
data: DATA = None,
|
||||||
|
schema: pa.Schema = None,
|
||||||
|
on_bad_vectors: str = "error",
|
||||||
|
fill_value: float = 0.0,
|
||||||
|
) -> Table:
|
||||||
|
if data is None and schema is None:
|
||||||
|
raise ValueError("Either data or schema must be provided.")
|
||||||
|
if data is not None:
|
||||||
|
data = _sanitize_data(
|
||||||
|
data, schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if schema is None:
|
||||||
|
raise ValueError("Either data or schema must be provided")
|
||||||
|
data = pa.Table.from_pylist([], schema=schema)
|
||||||
|
|
||||||
|
from .table import RemoteTable
|
||||||
|
|
||||||
|
data = to_ipc_binary(data)
|
||||||
|
request_id = uuid.uuid4().hex
|
||||||
|
|
||||||
|
self._loop.run_until_complete(
|
||||||
|
self._client.post(
|
||||||
|
f"/v1/table/{name}/create",
|
||||||
|
data=data,
|
||||||
|
params={"request_id": request_id},
|
||||||
|
content_type=ARROW_STREAM_CONTENT_TYPE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return RemoteTable(self, name)
|
||||||
93
python/lancedb/remote/table.py
Normal file
93
python/lancedb/remote/table.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from functools import cached_property
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from lancedb.common import DATA, VEC, VECTOR_COLUMN_NAME
|
||||||
|
|
||||||
|
from ..query import LanceQueryBuilder, Query
|
||||||
|
from ..schema import json_to_schema
|
||||||
|
from ..table import Query, Table, _sanitize_data
|
||||||
|
from .arrow import to_ipc_binary
|
||||||
|
from .client import ARROW_STREAM_CONTENT_TYPE
|
||||||
|
from .db import RemoteDBConnection
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteTable(Table):
|
||||||
|
def __init__(self, conn: RemoteDBConnection, name: str):
|
||||||
|
self._conn = conn
|
||||||
|
self._name = name
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"RemoteTable({self._conn.db_name}.{self._name})"
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def schema(self) -> pa.Schema:
|
||||||
|
"""Return the schema of the table."""
|
||||||
|
resp = self._conn._loop.run_until_complete(
|
||||||
|
self._conn._client.post(f"/v1/table/{self._name}/describe/")
|
||||||
|
)
|
||||||
|
schema = json_to_schema(resp["schema"])
|
||||||
|
return schema
|
||||||
|
|
||||||
|
def to_arrow(self) -> pa.Table:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def create_index(
|
||||||
|
self,
|
||||||
|
metric="L2",
|
||||||
|
num_partitions=256,
|
||||||
|
num_sub_vectors=96,
|
||||||
|
vector_column_name: str = VECTOR_COLUMN_NAME,
|
||||||
|
replace: bool = True,
|
||||||
|
):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def add(
|
||||||
|
self,
|
||||||
|
data: DATA,
|
||||||
|
mode: str = "append",
|
||||||
|
on_bad_vectors: str = "error",
|
||||||
|
fill_value: float = 0.0,
|
||||||
|
) -> int:
|
||||||
|
data = _sanitize_data(
|
||||||
|
data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
|
)
|
||||||
|
payload = to_ipc_binary(data)
|
||||||
|
|
||||||
|
request_id = uuid.uuid4().hex
|
||||||
|
|
||||||
|
self._conn._loop.run_until_complete(
|
||||||
|
self._conn._client.post(
|
||||||
|
f"/v1/table/{self._name}/insert/",
|
||||||
|
data=payload,
|
||||||
|
params={"request_id": request_id, "mode": mode},
|
||||||
|
content_type=ARROW_STREAM_CONTENT_TYPE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def search(
|
||||||
|
self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
|
||||||
|
) -> LanceQueryBuilder:
|
||||||
|
return LanceQueryBuilder(self, query, vector_column)
|
||||||
|
|
||||||
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
|
result = self._conn._client.query(self._name, query)
|
||||||
|
return self._conn._loop.run_until_complete(result).to_arrow()
|
||||||
|
|
||||||
|
def delete(self, predicate: str):
|
||||||
|
raise NotImplementedError
|
||||||
45
python/lancedb/schema.py
Normal file
45
python/lancedb/schema.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Schema related utilities."""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Type
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
from lance import json_to_schema, schema_to_json
|
||||||
|
|
||||||
|
|
||||||
|
def vector(dimension: int, value_type: pa.DataType = pa.float32()) -> pa.DataType:
|
||||||
|
"""A help function to create a vector type.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
dimension: The dimension of the vector.
|
||||||
|
value_type: pa.DataType, optional
|
||||||
|
The type of the value in the vector.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
A PyArrow DataType for vectors.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
>>> import pyarrow as pa
|
||||||
|
>>> import lancedb
|
||||||
|
>>> schema = pa.schema([
|
||||||
|
... pa.field("id", pa.int64()),
|
||||||
|
... pa.field("vector", lancedb.vector(756)),
|
||||||
|
... ])
|
||||||
|
"""
|
||||||
|
return pa.list_(value_type, dimension)
|
||||||
@@ -14,8 +14,9 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from typing import Any, List, Union
|
from typing import Iterable, List, Union
|
||||||
|
|
||||||
import lance
|
import lance
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@@ -26,7 +27,8 @@ from lance import LanceDataset
|
|||||||
from lance.vector import vec_to_table
|
from lance.vector import vec_to_table
|
||||||
|
|
||||||
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
||||||
from .query import LanceFtsQueryBuilder, LanceQueryBuilder
|
from .query import LanceFtsQueryBuilder, LanceQueryBuilder, Query
|
||||||
|
from .util import fs_from_uri
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_data(data, schema, on_bad_vectors, fill_value):
|
def _sanitize_data(data, schema, on_bad_vectors, fill_value):
|
||||||
@@ -42,19 +44,19 @@ def _sanitize_data(data, schema, on_bad_vectors, fill_value):
|
|||||||
data = _sanitize_schema(
|
data = _sanitize_schema(
|
||||||
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
data, schema=schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
)
|
)
|
||||||
if not isinstance(data, pa.Table):
|
if not isinstance(data, (pa.Table, Iterable)):
|
||||||
raise TypeError(f"Unsupported data type: {type(data)}")
|
raise TypeError(f"Unsupported data type: {type(data)}")
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
class LanceTable:
|
class Table(ABC):
|
||||||
"""
|
"""
|
||||||
A table in a LanceDB database.
|
A [Table](Table) is a collection of Records in a LanceDB [Database](Database).
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
|
|
||||||
Create using [LanceDBConnection.create_table][lancedb.LanceDBConnection.create_table]
|
Create using [DBConnection.create_table][lancedb.DBConnection.create_table]
|
||||||
(more examples in that method's documentation).
|
(more examples in that method's documentation).
|
||||||
|
|
||||||
>>> import lancedb
|
>>> import lancedb
|
||||||
@@ -69,12 +71,11 @@ class LanceTable:
|
|||||||
vector: [[[1.1,1.2]]]
|
vector: [[[1.1,1.2]]]
|
||||||
b: [[2]]
|
b: [[2]]
|
||||||
|
|
||||||
Can append new data with [LanceTable.add][lancedb.table.LanceTable.add].
|
Can append new data with [Table.add()][lancedb.table.Table.add].
|
||||||
|
|
||||||
>>> table.add([{"vector": [0.5, 1.3], "b": 4}])
|
>>> table.add([{"vector": [0.5, 1.3], "b": 4}])
|
||||||
2
|
|
||||||
|
|
||||||
Can query the table with [LanceTable.search][lancedb.table.LanceTable.search].
|
Can query the table with [Table.search][lancedb.table.Table.search].
|
||||||
|
|
||||||
>>> table.search([0.4, 0.4]).select(["b"]).to_df()
|
>>> table.search([0.4, 0.4]).select(["b"]).to_df()
|
||||||
b vector score
|
b vector score
|
||||||
@@ -82,8 +83,169 @@ class LanceTable:
|
|||||||
1 2 [1.1, 1.2] 1.13
|
1 2 [1.1, 1.2] 1.13
|
||||||
|
|
||||||
Search queries are much faster when an index is created. See
|
Search queries are much faster when an index is created. See
|
||||||
[LanceTable.create_index][lancedb.table.LanceTable.create_index].
|
[Table.create_index][lancedb.table.Table.create_index].
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def schema(self) -> pa.Schema:
|
||||||
|
"""Return the [Arrow Schema](https://arrow.apache.org/docs/python/api/datatypes.html#) of
|
||||||
|
this [Table](Table)
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def to_pandas(self) -> pd.DataFrame:
|
||||||
|
"""Return the table as a pandas DataFrame.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pd.DataFrame
|
||||||
|
"""
|
||||||
|
return self.to_arrow().to_pandas()
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def to_arrow(self) -> pa.Table:
|
||||||
|
"""Return the table as a pyarrow Table.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pa.Table
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def create_index(
|
||||||
|
self,
|
||||||
|
metric="L2",
|
||||||
|
num_partitions=256,
|
||||||
|
num_sub_vectors=96,
|
||||||
|
vector_column_name: str = VECTOR_COLUMN_NAME,
|
||||||
|
replace: bool = True,
|
||||||
|
):
|
||||||
|
"""Create an index on the table.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
metric: str, default "L2"
|
||||||
|
The distance metric to use when creating the index.
|
||||||
|
Valid values are "L2", "cosine", or "dot".
|
||||||
|
L2 is euclidean distance.
|
||||||
|
num_partitions: int
|
||||||
|
The number of IVF partitions to use when creating the index.
|
||||||
|
Default is 256.
|
||||||
|
num_sub_vectors: int
|
||||||
|
The number of PQ sub-vectors to use when creating the index.
|
||||||
|
Default is 96.
|
||||||
|
vector_column_name: str, default "vector"
|
||||||
|
The vector column name to create the index.
|
||||||
|
replace: bool, default True
|
||||||
|
If True, replace the existing index if it exists.
|
||||||
|
If False, raise an error if duplicate index exists.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def add(
|
||||||
|
self,
|
||||||
|
data: DATA,
|
||||||
|
mode: str = "append",
|
||||||
|
on_bad_vectors: str = "error",
|
||||||
|
fill_value: float = 0.0,
|
||||||
|
):
|
||||||
|
"""Add more data to the [Table](Table).
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
data: list-of-dict, dict, pd.DataFrame
|
||||||
|
The data to insert into the table.
|
||||||
|
mode: str
|
||||||
|
The mode to use when writing the data. Valid values are
|
||||||
|
"append" and "overwrite".
|
||||||
|
on_bad_vectors: str, default "error"
|
||||||
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
|
One of "error", "drop", "fill".
|
||||||
|
fill_value: float, default 0.
|
||||||
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def search(
|
||||||
|
self, query: Union[VEC, str], vector_column: str = VECTOR_COLUMN_NAME
|
||||||
|
) -> LanceQueryBuilder:
|
||||||
|
"""Create a search query to find the nearest neighbors
|
||||||
|
of the given query vector.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
query: list, np.ndarray
|
||||||
|
The query vector.
|
||||||
|
vector_column: str, default "vector"
|
||||||
|
The name of the vector column to search.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
LanceQueryBuilder
|
||||||
|
A query builder object representing the query.
|
||||||
|
Once executed, the query returns selected columns, the vector,
|
||||||
|
and also the "score" column which is the distance between the query
|
||||||
|
vector and the returned vector.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def delete(self, where: str):
|
||||||
|
"""Delete rows from the table.
|
||||||
|
|
||||||
|
This can be used to delete a single row, many rows, all rows, or
|
||||||
|
sometimes no rows (if your predicate matches nothing).
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
where: str
|
||||||
|
The SQL where clause to use when deleting rows. For example, 'x = 2'
|
||||||
|
or 'x IN (1, 2, 3)'. The filter must not be empty, or it will error.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> import pandas as pd
|
||||||
|
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", data)
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 2 [3.0, 4.0]
|
||||||
|
2 3 [5.0, 6.0]
|
||||||
|
>>> table.delete("x = 2")
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 3 [5.0, 6.0]
|
||||||
|
|
||||||
|
If you have a list of values to delete, you can combine them into a
|
||||||
|
stringified list and use the `IN` operator:
|
||||||
|
|
||||||
|
>>> to_remove = [1, 5]
|
||||||
|
>>> to_remove = ", ".join([str(v) for v in to_remove])
|
||||||
|
>>> to_remove
|
||||||
|
'1, 5'
|
||||||
|
>>> table.delete(f"x IN ({to_remove})")
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 3 [5.0, 6.0]
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class LanceTable(Table):
|
||||||
|
"""
|
||||||
|
A table in a LanceDB database.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -95,7 +257,8 @@ class LanceTable:
|
|||||||
|
|
||||||
def _reset_dataset(self):
|
def _reset_dataset(self):
|
||||||
try:
|
try:
|
||||||
del self.__dict__["_dataset"]
|
if "_dataset" in self.__dict__:
|
||||||
|
del self.__dict__["_dataset"]
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -139,7 +302,6 @@ class LanceTable:
|
|||||||
vector type
|
vector type
|
||||||
0 [1.1, 0.9] vector
|
0 [1.1, 0.9] vector
|
||||||
>>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
|
>>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
|
||||||
2
|
|
||||||
>>> table.version
|
>>> table.version
|
||||||
2
|
2
|
||||||
>>> table.checkout(1)
|
>>> table.checkout(1)
|
||||||
@@ -195,26 +357,7 @@ class LanceTable:
|
|||||||
vector_column_name=VECTOR_COLUMN_NAME,
|
vector_column_name=VECTOR_COLUMN_NAME,
|
||||||
replace: bool = True,
|
replace: bool = True,
|
||||||
):
|
):
|
||||||
"""Create an index on the table.
|
"""Create an index on the table."""
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
metric: str, default "L2"
|
|
||||||
The distance metric to use when creating the index.
|
|
||||||
Valid values are "L2", "cosine", or "dot".
|
|
||||||
L2 is euclidean distance.
|
|
||||||
num_partitions: int
|
|
||||||
The number of IVF partitions to use when creating the index.
|
|
||||||
Default is 256.
|
|
||||||
num_sub_vectors: int
|
|
||||||
The number of PQ sub-vectors to use when creating the index.
|
|
||||||
Default is 96.
|
|
||||||
vector_column_name: str, default "vector"
|
|
||||||
The vector column name to create the index.
|
|
||||||
replace: bool, default True
|
|
||||||
If True, replace the existing index if it exists.
|
|
||||||
If False, raise an error if duplicate index exists.
|
|
||||||
"""
|
|
||||||
self._dataset.create_index(
|
self._dataset.create_index(
|
||||||
column=vector_column_name,
|
column=vector_column_name,
|
||||||
index_type="IVF_PQ",
|
index_type="IVF_PQ",
|
||||||
@@ -258,9 +401,9 @@ class LanceTable:
|
|||||||
self,
|
self,
|
||||||
data: DATA,
|
data: DATA,
|
||||||
mode: str = "append",
|
mode: str = "append",
|
||||||
on_bad_vectors: str = "drop",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> int:
|
):
|
||||||
"""Add data to the table.
|
"""Add data to the table.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
@@ -270,9 +413,9 @@ class LanceTable:
|
|||||||
mode: str
|
mode: str
|
||||||
The mode to use when writing the data. Valid values are
|
The mode to use when writing the data. Valid values are
|
||||||
"append" and "overwrite".
|
"append" and "overwrite".
|
||||||
on_bad_vectors: str
|
on_bad_vectors: str, default "error"
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
One of "raise", "drop", "fill".
|
One of "error", "drop", "fill".
|
||||||
fill_value: float, default 0.
|
fill_value: float, default 0.
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
|
|
||||||
@@ -281,12 +424,12 @@ class LanceTable:
|
|||||||
int
|
int
|
||||||
The number of vectors in the table.
|
The number of vectors in the table.
|
||||||
"""
|
"""
|
||||||
|
# TODO: manage table listing and metadata separately
|
||||||
data = _sanitize_data(
|
data = _sanitize_data(
|
||||||
data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
data, self.schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
)
|
)
|
||||||
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
||||||
self._reset_dataset()
|
self._reset_dataset()
|
||||||
return len(self)
|
|
||||||
|
|
||||||
def search(
|
def search(
|
||||||
self, query: Union[VEC, str], vector_column_name=VECTOR_COLUMN_NAME
|
self, query: Union[VEC, str], vector_column_name=VECTOR_COLUMN_NAME
|
||||||
@@ -326,10 +469,10 @@ class LanceTable:
|
|||||||
cls,
|
cls,
|
||||||
db,
|
db,
|
||||||
name,
|
name,
|
||||||
data,
|
data=None,
|
||||||
schema=None,
|
schema=None,
|
||||||
mode="create",
|
mode="create",
|
||||||
on_bad_vectors: str = "drop",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
@@ -354,72 +497,67 @@ class LanceTable:
|
|||||||
The LanceDB instance to create the table in.
|
The LanceDB instance to create the table in.
|
||||||
name: str
|
name: str
|
||||||
The name of the table to create.
|
The name of the table to create.
|
||||||
data: list-of-dict, dict, pd.DataFrame
|
data: list-of-dict, dict, pd.DataFrame, default None
|
||||||
The data to insert into the table.
|
The data to insert into the table.
|
||||||
|
At least one of `data` or `schema` must be provided.
|
||||||
schema: dict, optional
|
schema: dict, optional
|
||||||
The schema of the table. If not provided, the schema is inferred from the data.
|
The schema of the table. If not provided, the schema is inferred from the data.
|
||||||
|
At least one of `data` or `schema` must be provided.
|
||||||
mode: str, default "create"
|
mode: str, default "create"
|
||||||
The mode to use when writing the data. Valid values are
|
The mode to use when writing the data. Valid values are
|
||||||
"create", "overwrite", and "append".
|
"create", "overwrite", and "append".
|
||||||
on_bad_vectors: str
|
on_bad_vectors: str, default "error"
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
One of "raise", "drop", "fill".
|
One of "error", "drop", "fill".
|
||||||
fill_value: float, default 0.
|
fill_value: float, default 0.
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
"""
|
"""
|
||||||
tbl = LanceTable(db, name)
|
tbl = LanceTable(db, name)
|
||||||
data = _sanitize_data(
|
if data is not None:
|
||||||
data, schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
data = _sanitize_data(
|
||||||
)
|
data, schema, on_bad_vectors=on_bad_vectors, fill_value=fill_value
|
||||||
lance.write_dataset(data, tbl._dataset_uri, mode=mode)
|
)
|
||||||
return tbl
|
else:
|
||||||
|
if schema is None:
|
||||||
|
raise ValueError("Either data or schema must be provided")
|
||||||
|
data = pa.Table.from_pylist([], schema=schema)
|
||||||
|
lance.write_dataset(data, tbl._dataset_uri, schema=schema, mode=mode)
|
||||||
|
return LanceTable(db, name)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def open(cls, db, name):
|
def open(cls, db, name):
|
||||||
tbl = cls(db, name)
|
tbl = cls(db, name)
|
||||||
if tbl._conn.is_managed_remote:
|
fs, path = fs_from_uri(tbl._dataset_uri)
|
||||||
# Not completely sure how to check for remote table existence yet.
|
file_info = fs.get_file_info(path)
|
||||||
return tbl
|
if file_info.type != pa.fs.FileType.Directory:
|
||||||
if not os.path.exists(tbl._dataset_uri):
|
|
||||||
raise FileNotFoundError(
|
raise FileNotFoundError(
|
||||||
f"Table {name} does not exist. Please first call db.create_table({name}, data)"
|
f"Table {name} does not exist. Please first call db.create_table({name}, data)"
|
||||||
)
|
)
|
||||||
|
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
def delete(self, where: str):
|
def delete(self, where: str):
|
||||||
"""Delete rows from the table.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
where: str
|
|
||||||
The SQL where clause to use when deleting rows.
|
|
||||||
|
|
||||||
Examples
|
|
||||||
--------
|
|
||||||
>>> import lancedb
|
|
||||||
>>> import pandas as pd
|
|
||||||
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
|
|
||||||
>>> db = lancedb.connect("./.lancedb")
|
|
||||||
>>> table = db.create_table("my_table", data)
|
|
||||||
>>> table.to_pandas()
|
|
||||||
x vector
|
|
||||||
0 1 [1.0, 2.0]
|
|
||||||
1 2 [3.0, 4.0]
|
|
||||||
2 3 [5.0, 6.0]
|
|
||||||
>>> table.delete("x = 2")
|
|
||||||
>>> table.to_pandas()
|
|
||||||
x vector
|
|
||||||
0 1 [1.0, 2.0]
|
|
||||||
1 3 [5.0, 6.0]
|
|
||||||
"""
|
|
||||||
self._dataset.delete(where)
|
self._dataset.delete(where)
|
||||||
|
|
||||||
|
def _execute_query(self, query: Query) -> pa.Table:
|
||||||
|
ds = self.to_lance()
|
||||||
|
return ds.to_table(
|
||||||
|
columns=query.columns,
|
||||||
|
filter=query.filter,
|
||||||
|
nearest={
|
||||||
|
"column": query.vector_column,
|
||||||
|
"q": query.vector,
|
||||||
|
"k": query.k,
|
||||||
|
"metric": query.metric,
|
||||||
|
"nprobes": query.nprobes,
|
||||||
|
"refine_factor": query.refine_factor,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_schema(
|
def _sanitize_schema(
|
||||||
data: pa.Table,
|
data: pa.Table,
|
||||||
schema: pa.Schema = None,
|
schema: pa.Schema = None,
|
||||||
on_bad_vectors: str = "drop",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> pa.Table:
|
) -> pa.Table:
|
||||||
"""Ensure that the table has the expected schema.
|
"""Ensure that the table has the expected schema.
|
||||||
@@ -431,10 +569,10 @@ def _sanitize_schema(
|
|||||||
schema: pa.Schema; optional
|
schema: pa.Schema; optional
|
||||||
The expected schema. If not provided, this just converts the
|
The expected schema. If not provided, this just converts the
|
||||||
vector column to fixed_size_list(float32) if necessary.
|
vector column to fixed_size_list(float32) if necessary.
|
||||||
on_bad_vectors: str
|
on_bad_vectors: str, default "error"
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
One of "raise", "drop", "fill".
|
One of "error", "drop", "fill".
|
||||||
fill_value: float
|
fill_value: float, default 0.
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
"""
|
"""
|
||||||
if schema is not None:
|
if schema is not None:
|
||||||
@@ -463,7 +601,7 @@ def _sanitize_schema(
|
|||||||
def _sanitize_vector_column(
|
def _sanitize_vector_column(
|
||||||
data: pa.Table,
|
data: pa.Table,
|
||||||
vector_column_name: str,
|
vector_column_name: str,
|
||||||
on_bad_vectors: str = "drop",
|
on_bad_vectors: str = "error",
|
||||||
fill_value: float = 0.0,
|
fill_value: float = 0.0,
|
||||||
) -> pa.Table:
|
) -> pa.Table:
|
||||||
"""
|
"""
|
||||||
@@ -475,10 +613,10 @@ def _sanitize_vector_column(
|
|||||||
The table to sanitize.
|
The table to sanitize.
|
||||||
vector_column_name: str
|
vector_column_name: str
|
||||||
The name of the vector column.
|
The name of the vector column.
|
||||||
on_bad_vectors: str
|
on_bad_vectors: str, default "error"
|
||||||
What to do if any of the vectors are not the same size or contains NaNs.
|
What to do if any of the vectors are not the same size or contains NaNs.
|
||||||
One of "raise", "drop", "fill".
|
One of "error", "drop", "fill".
|
||||||
fill_value: float
|
fill_value: float, default 0.0
|
||||||
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
The value to use when filling vectors. Only used if on_bad_vectors="fill".
|
||||||
"""
|
"""
|
||||||
if vector_column_name not in data.column_names:
|
if vector_column_name not in data.column_names:
|
||||||
@@ -501,7 +639,7 @@ def _sanitize_vector_column(
|
|||||||
data.column_names.index(vector_column_name), vector_column_name, vec_arr
|
data.column_names.index(vector_column_name), vector_column_name, vec_arr
|
||||||
)
|
)
|
||||||
|
|
||||||
has_nans = pc.any(vec_arr.values.is_nan()).as_py()
|
has_nans = pc.any(pc.is_nan(vec_arr.values)).as_py()
|
||||||
if has_nans:
|
if has_nans:
|
||||||
data = _sanitize_nans(
|
data = _sanitize_nans(
|
||||||
data, fill_value, on_bad_vectors, vec_arr, vector_column_name
|
data, fill_value, on_bad_vectors, vec_arr, vector_column_name
|
||||||
@@ -524,7 +662,7 @@ def ensure_fixed_size_list_of_f32(vec_arr):
|
|||||||
|
|
||||||
def _sanitize_jagged(data, fill_value, on_bad_vectors, vec_arr, vector_column_name):
|
def _sanitize_jagged(data, fill_value, on_bad_vectors, vec_arr, vector_column_name):
|
||||||
"""Sanitize jagged vectors."""
|
"""Sanitize jagged vectors."""
|
||||||
if on_bad_vectors == "raise":
|
if on_bad_vectors == "error":
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"Vector column {vector_column_name} has variable length vectors "
|
f"Vector column {vector_column_name} has variable length vectors "
|
||||||
"Set on_bad_vectors='drop' to remove them, or "
|
"Set on_bad_vectors='drop' to remove them, or "
|
||||||
@@ -538,7 +676,7 @@ def _sanitize_jagged(data, fill_value, on_bad_vectors, vec_arr, vector_column_na
|
|||||||
if on_bad_vectors == "fill":
|
if on_bad_vectors == "fill":
|
||||||
if fill_value is None:
|
if fill_value is None:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"`fill_value` must not be None if `on_bad_vectors` is 'fill'"
|
"`fill_value` must not be None if `on_bad_vectors` is 'fill'"
|
||||||
)
|
)
|
||||||
fill_arr = pa.scalar([float(fill_value)] * ndims)
|
fill_arr = pa.scalar([float(fill_value)] * ndims)
|
||||||
vec_arr = pc.if_else(correct_ndims, vec_arr, fill_arr)
|
vec_arr = pc.if_else(correct_ndims, vec_arr, fill_arr)
|
||||||
@@ -552,7 +690,7 @@ def _sanitize_jagged(data, fill_value, on_bad_vectors, vec_arr, vector_column_na
|
|||||||
|
|
||||||
def _sanitize_nans(data, fill_value, on_bad_vectors, vec_arr, vector_column_name):
|
def _sanitize_nans(data, fill_value, on_bad_vectors, vec_arr, vector_column_name):
|
||||||
"""Sanitize NaNs in vectors"""
|
"""Sanitize NaNs in vectors"""
|
||||||
if on_bad_vectors == "raise":
|
if on_bad_vectors == "error":
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"Vector column {vector_column_name} has NaNs. "
|
f"Vector column {vector_column_name} has NaNs. "
|
||||||
"Set on_bad_vectors='drop' to remove them, or "
|
"Set on_bad_vectors='drop' to remove them, or "
|
||||||
@@ -561,10 +699,10 @@ def _sanitize_nans(data, fill_value, on_bad_vectors, vec_arr, vector_column_name
|
|||||||
elif on_bad_vectors == "fill":
|
elif on_bad_vectors == "fill":
|
||||||
if fill_value is None:
|
if fill_value is None:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f"`fill_value` must not be None if `on_bad_vectors` is 'fill'"
|
"`fill_value` must not be None if `on_bad_vectors` is 'fill'"
|
||||||
)
|
)
|
||||||
fill_value = float(fill_value)
|
fill_value = float(fill_value)
|
||||||
values = pc.if_else(vec_arr.values.is_nan(), fill_value, vec_arr.values)
|
values = pc.if_else(pc.is_nan(vec_arr.values), fill_value, vec_arr.values)
|
||||||
ndims = len(vec_arr[0])
|
ndims = len(vec_arr[0])
|
||||||
vec_arr = pa.FixedSizeListArray.from_arrays(values, ndims)
|
vec_arr = pa.FixedSizeListArray.from_arrays(values, ndims)
|
||||||
data = data.set_column(
|
data = data.set_column(
|
||||||
|
|||||||
@@ -11,9 +11,12 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from urllib.parse import ParseResult, urlparse
|
import os
|
||||||
|
from typing import Tuple
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from pyarrow import fs
|
import pyarrow as pa
|
||||||
|
import pyarrow.fs as pa_fs
|
||||||
|
|
||||||
|
|
||||||
def get_uri_scheme(uri: str) -> str:
|
def get_uri_scheme(uri: str) -> str:
|
||||||
@@ -61,3 +64,15 @@ def get_uri_location(uri: str) -> str:
|
|||||||
return parsed.path
|
return parsed.path
|
||||||
else:
|
else:
|
||||||
return parsed.netloc + parsed.path
|
return parsed.netloc + parsed.path
|
||||||
|
|
||||||
|
|
||||||
|
def fs_from_uri(uri: str) -> Tuple[pa_fs.FileSystem, str]:
|
||||||
|
"""
|
||||||
|
Get a PyArrow FileSystem from a URI, handling extra environment variables.
|
||||||
|
"""
|
||||||
|
if get_uri_scheme(uri) == "s3":
|
||||||
|
fs = pa_fs.S3FileSystem(endpoint_override=os.environ.get("AWS_ENDPOINT"))
|
||||||
|
path = get_uri_location(uri)
|
||||||
|
return fs, path
|
||||||
|
|
||||||
|
return pa_fs.FileSystem.from_uri(uri)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "lancedb"
|
name = "lancedb"
|
||||||
version = "0.1.9"
|
version = "0.1.14"
|
||||||
dependencies = ["pylance~=0.5.0", "ratelimiter", "retry", "tqdm", "aiohttp", "pydantic", "attr"]
|
dependencies = ["pylance~=0.5.8", "ratelimiter", "retry", "tqdm", "aiohttp", "pydantic", "attr", "semver"]
|
||||||
description = "lancedb"
|
description = "lancedb"
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
||||||
@@ -52,3 +52,6 @@ requires = [
|
|||||||
"wheel",
|
"wheel",
|
||||||
]
|
]
|
||||||
build-backend = "setuptools.build_meta"
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.isort]
|
||||||
|
profile = "black"
|
||||||
|
|||||||
@@ -13,6 +13,7 @@
|
|||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import lancedb
|
import lancedb
|
||||||
@@ -75,6 +76,32 @@ def test_ingest_pd(tmp_path):
|
|||||||
assert db.open_table("test").name == db["test"].name
|
assert db.open_table("test").name == db["test"].name
|
||||||
|
|
||||||
|
|
||||||
|
def test_ingest_record_batch_iterator(tmp_path):
|
||||||
|
def batch_reader():
|
||||||
|
for i in range(5):
|
||||||
|
yield pa.RecordBatch.from_arrays(
|
||||||
|
[
|
||||||
|
pa.array([[3.1, 4.1], [5.9, 26.5]]),
|
||||||
|
pa.array(["foo", "bar"]),
|
||||||
|
pa.array([10.0, 20.0]),
|
||||||
|
],
|
||||||
|
["vector", "item", "price"],
|
||||||
|
)
|
||||||
|
|
||||||
|
db = lancedb.connect(tmp_path)
|
||||||
|
tbl = db.create_table(
|
||||||
|
"test",
|
||||||
|
batch_reader(),
|
||||||
|
schema=pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vector", pa.list_(pa.float32())),
|
||||||
|
pa.field("item", pa.utf8()),
|
||||||
|
pa.field("price", pa.float32()),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_create_mode(tmp_path):
|
def test_create_mode(tmp_path):
|
||||||
db = lancedb.connect(tmp_path)
|
db = lancedb.connect(tmp_path)
|
||||||
data = pd.DataFrame(
|
data = pd.DataFrame(
|
||||||
@@ -131,6 +158,9 @@ def test_empty_or_nonexistent_table(tmp_path):
|
|||||||
with pytest.raises(Exception):
|
with pytest.raises(Exception):
|
||||||
db.open_table("does_not_exist")
|
db.open_table("does_not_exist")
|
||||||
|
|
||||||
|
schema = pa.schema([pa.field("a", pa.int32())])
|
||||||
|
db.create_table("test", schema=schema)
|
||||||
|
|
||||||
|
|
||||||
def test_replace_index(tmp_path):
|
def test_replace_index(tmp_path):
|
||||||
db = lancedb.connect(uri=tmp_path)
|
db = lancedb.connect(uri=tmp_path)
|
||||||
|
|||||||
165
python/tests/test_pydantic.py
Normal file
165
python/tests/test_pydantic.py
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
import pydantic
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from lancedb.pydantic import PYDANTIC_VERSION, pydantic_to_schema, vector
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(
|
||||||
|
sys.version_info < (3, 9),
|
||||||
|
reason="using native type alias requires python3.9 or higher",
|
||||||
|
)
|
||||||
|
def test_pydantic_to_arrow():
|
||||||
|
class StructModel(pydantic.BaseModel):
|
||||||
|
a: str
|
||||||
|
b: Optional[float]
|
||||||
|
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
id: int
|
||||||
|
s: str
|
||||||
|
vec: list[float]
|
||||||
|
li: List[int]
|
||||||
|
opt: Optional[str] = None
|
||||||
|
st: StructModel
|
||||||
|
# d: dict
|
||||||
|
|
||||||
|
m = TestModel(
|
||||||
|
id=1, s="hello", vec=[1.0, 2.0, 3.0], li=[2, 3, 4], st=StructModel(a="a", b=1.0)
|
||||||
|
)
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
|
||||||
|
expect_schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("id", pa.int64(), False),
|
||||||
|
pa.field("s", pa.utf8(), False),
|
||||||
|
pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
pa.field("opt", pa.utf8(), True),
|
||||||
|
pa.field(
|
||||||
|
"st",
|
||||||
|
pa.struct(
|
||||||
|
[pa.field("a", pa.utf8(), False), pa.field("b", pa.float64(), True)]
|
||||||
|
),
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
assert schema == expect_schema
|
||||||
|
|
||||||
|
|
||||||
|
def test_pydantic_to_arrow_py38():
|
||||||
|
class StructModel(pydantic.BaseModel):
|
||||||
|
a: str
|
||||||
|
b: Optional[float]
|
||||||
|
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
id: int
|
||||||
|
s: str
|
||||||
|
vec: List[float]
|
||||||
|
li: List[int]
|
||||||
|
opt: Optional[str] = None
|
||||||
|
st: StructModel
|
||||||
|
# d: dict
|
||||||
|
|
||||||
|
m = TestModel(
|
||||||
|
id=1, s="hello", vec=[1.0, 2.0, 3.0], li=[2, 3, 4], st=StructModel(a="a", b=1.0)
|
||||||
|
)
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
|
||||||
|
expect_schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("id", pa.int64(), False),
|
||||||
|
pa.field("s", pa.utf8(), False),
|
||||||
|
pa.field("vec", pa.list_(pa.float64()), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
pa.field("opt", pa.utf8(), True),
|
||||||
|
pa.field(
|
||||||
|
"st",
|
||||||
|
pa.struct(
|
||||||
|
[pa.field("a", pa.utf8(), False), pa.field("b", pa.float64(), True)]
|
||||||
|
),
|
||||||
|
False,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
assert schema == expect_schema
|
||||||
|
|
||||||
|
|
||||||
|
def test_fixed_size_list_field():
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
vec: vector(16)
|
||||||
|
li: List[int]
|
||||||
|
|
||||||
|
data = TestModel(vec=list(range(16)), li=[1, 2, 3])
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
assert json.loads(data.model_dump_json()) == {
|
||||||
|
"vec": list(range(16)),
|
||||||
|
"li": [1, 2, 3],
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
assert data.dict() == {
|
||||||
|
"vec": list(range(16)),
|
||||||
|
"li": [1, 2, 3],
|
||||||
|
}
|
||||||
|
|
||||||
|
schema = pydantic_to_schema(TestModel)
|
||||||
|
assert schema == pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vec", pa.list_(pa.float32(), 16), False),
|
||||||
|
pa.field("li", pa.list_(pa.int64()), False),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
if PYDANTIC_VERSION >= (2,):
|
||||||
|
json_schema = TestModel.model_json_schema()
|
||||||
|
else:
|
||||||
|
json_schema = TestModel.schema()
|
||||||
|
|
||||||
|
assert json_schema == {
|
||||||
|
"properties": {
|
||||||
|
"vec": {
|
||||||
|
"items": {"type": "number"},
|
||||||
|
"maxItems": 16,
|
||||||
|
"minItems": 16,
|
||||||
|
"title": "Vec",
|
||||||
|
"type": "array",
|
||||||
|
},
|
||||||
|
"li": {"items": {"type": "integer"}, "title": "Li", "type": "array"},
|
||||||
|
},
|
||||||
|
"required": ["vec", "li"],
|
||||||
|
"title": "TestModel",
|
||||||
|
"type": "object",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_fixed_size_list_validation():
|
||||||
|
class TestModel(pydantic.BaseModel):
|
||||||
|
vec: vector(8)
|
||||||
|
|
||||||
|
with pytest.raises(pydantic.ValidationError):
|
||||||
|
TestModel(vec=range(9))
|
||||||
|
|
||||||
|
with pytest.raises(pydantic.ValidationError):
|
||||||
|
TestModel(vec=range(7))
|
||||||
|
|
||||||
|
TestModel(vec=range(8))
|
||||||
@@ -20,18 +20,33 @@ import pyarrow as pa
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from lancedb.db import LanceDBConnection
|
from lancedb.db import LanceDBConnection
|
||||||
from lancedb.query import LanceQueryBuilder
|
from lancedb.query import LanceQueryBuilder, Query
|
||||||
from lancedb.table import LanceTable
|
from lancedb.table import LanceTable
|
||||||
|
|
||||||
|
|
||||||
class MockTable:
|
class MockTable:
|
||||||
def __init__(self, tmp_path):
|
def __init__(self, tmp_path):
|
||||||
self.uri = tmp_path
|
self.uri = tmp_path
|
||||||
self._conn = LanceDBConnection("/tmp/lance/")
|
self._conn = LanceDBConnection(self.uri)
|
||||||
|
|
||||||
def to_lance(self):
|
def to_lance(self):
|
||||||
return lance.dataset(self.uri)
|
return lance.dataset(self.uri)
|
||||||
|
|
||||||
|
def _execute_query(self, query):
|
||||||
|
ds = self.to_lance()
|
||||||
|
return ds.to_table(
|
||||||
|
columns=query.columns,
|
||||||
|
filter=query.filter,
|
||||||
|
nearest={
|
||||||
|
"column": query.vector_column,
|
||||||
|
"q": query.vector,
|
||||||
|
"k": query.k,
|
||||||
|
"metric": query.metric,
|
||||||
|
"nprobes": query.nprobes,
|
||||||
|
"refine_factor": query.refine_factor,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def table(tmp_path) -> MockTable:
|
def table(tmp_path) -> MockTable:
|
||||||
@@ -94,20 +109,18 @@ def test_query_builder_with_different_vector_column():
|
|||||||
)
|
)
|
||||||
ds = mock.Mock()
|
ds = mock.Mock()
|
||||||
table.to_lance.return_value = ds
|
table.to_lance.return_value = ds
|
||||||
table._conn = mock.MagicMock()
|
|
||||||
table._conn.is_managed_remote = False
|
|
||||||
builder.to_arrow()
|
builder.to_arrow()
|
||||||
ds.to_table.assert_called_once_with(
|
table._execute_query.assert_called_once_with(
|
||||||
columns=["b"],
|
Query(
|
||||||
filter="b < 10",
|
vector=query,
|
||||||
nearest={
|
filter="b < 10",
|
||||||
"column": vector_column_name,
|
k=2,
|
||||||
"q": query,
|
metric="cosine",
|
||||||
"k": 2,
|
columns=["b"],
|
||||||
"metric": "cosine",
|
nprobes=20,
|
||||||
"nprobes": 20,
|
refine_factor=None,
|
||||||
"refine_factor": None,
|
vector_column="foo_vector",
|
||||||
},
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
from lancedb.db import LanceDBConnection
|
import lancedb
|
||||||
from lancedb.remote.client import VectorQuery, VectorQueryResult
|
from lancedb.remote.client import VectorQuery, VectorQueryResult
|
||||||
|
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ class FakeLanceDBClient:
|
|||||||
|
|
||||||
|
|
||||||
def test_remote_db():
|
def test_remote_db():
|
||||||
conn = LanceDBConnection("lancedb+http://client-will-be-injected")
|
conn = lancedb.connect("db://client-will-be-injected", api_key="fake")
|
||||||
setattr(conn, "_client", FakeLanceDBClient())
|
setattr(conn, "_client", FakeLanceDBClient())
|
||||||
|
|
||||||
table = conn["test"]
|
table = conn["test"]
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ import numpy as np
|
|||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
|
from lance.vector import vec_to_table
|
||||||
|
|
||||||
from lancedb.db import LanceDBConnection
|
from lancedb.db import LanceDBConnection
|
||||||
from lancedb.table import LanceTable
|
from lancedb.table import LanceTable
|
||||||
@@ -89,7 +90,31 @@ def test_create_table(db):
|
|||||||
assert expected == tbl
|
assert expected == tbl
|
||||||
|
|
||||||
|
|
||||||
|
def test_empty_table(db):
|
||||||
|
schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vector", pa.list_(pa.float32(), 2)),
|
||||||
|
pa.field("item", pa.string()),
|
||||||
|
pa.field("price", pa.float32()),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
tbl = LanceTable.create(db, "test", schema=schema)
|
||||||
|
data = [
|
||||||
|
{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
|
||||||
|
]
|
||||||
|
tbl.add(data=data)
|
||||||
|
|
||||||
|
|
||||||
def test_add(db):
|
def test_add(db):
|
||||||
|
schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vector", pa.list_(pa.float32(), 2)),
|
||||||
|
pa.field("item", pa.string()),
|
||||||
|
pa.field("price", pa.float64()),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
table = LanceTable.create(
|
table = LanceTable.create(
|
||||||
db,
|
db,
|
||||||
"test",
|
"test",
|
||||||
@@ -98,12 +123,24 @@ def test_add(db):
|
|||||||
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
_add(table, schema)
|
||||||
|
|
||||||
|
table = LanceTable.create(db, "test2", schema=schema)
|
||||||
|
table.add(
|
||||||
|
data=[
|
||||||
|
{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
_add(table, schema)
|
||||||
|
|
||||||
|
|
||||||
|
def _add(table, schema):
|
||||||
# table = LanceTable(db, "test")
|
# table = LanceTable(db, "test")
|
||||||
assert len(table) == 2
|
assert len(table) == 2
|
||||||
|
|
||||||
count = table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
|
table.add([{"vector": [6.3, 100.5], "item": "new", "price": 30.0}])
|
||||||
assert count == 3
|
assert len(table) == 3
|
||||||
|
|
||||||
expected = pa.Table.from_arrays(
|
expected = pa.Table.from_arrays(
|
||||||
[
|
[
|
||||||
@@ -113,13 +150,7 @@ def test_add(db):
|
|||||||
pa.array(["foo", "bar", "new"]),
|
pa.array(["foo", "bar", "new"]),
|
||||||
pa.array([10.0, 20.0, 30.0]),
|
pa.array([10.0, 20.0, 30.0]),
|
||||||
],
|
],
|
||||||
schema=pa.schema(
|
schema=schema,
|
||||||
[
|
|
||||||
pa.field("vector", pa.list_(pa.float32(), 2)),
|
|
||||||
pa.field("item", pa.string()),
|
|
||||||
pa.field("price", pa.float64()),
|
|
||||||
]
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
assert expected == table.to_arrow()
|
assert expected == table.to_arrow()
|
||||||
|
|
||||||
@@ -181,7 +212,21 @@ def test_create_index_method():
|
|||||||
|
|
||||||
|
|
||||||
def test_add_with_nans(db):
|
def test_add_with_nans(db):
|
||||||
# By default we drop bad input vectors
|
# by default we raise an error on bad input vectors
|
||||||
|
bad_data = [
|
||||||
|
{"vector": [np.nan], "item": "bar", "price": 20.0},
|
||||||
|
{"vector": [5], "item": "bar", "price": 20.0},
|
||||||
|
{"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
|
||||||
|
{"vector": [np.nan, 5.0], "item": "bar", "price": 20.0},
|
||||||
|
]
|
||||||
|
for row in bad_data:
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
LanceTable.create(
|
||||||
|
db,
|
||||||
|
"error_test",
|
||||||
|
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0}, row],
|
||||||
|
)
|
||||||
|
|
||||||
table = LanceTable.create(
|
table = LanceTable.create(
|
||||||
db,
|
db,
|
||||||
"drop_test",
|
"drop_test",
|
||||||
@@ -191,6 +236,7 @@ def test_add_with_nans(db):
|
|||||||
{"vector": [5], "item": "bar", "price": 20.0},
|
{"vector": [5], "item": "bar", "price": 20.0},
|
||||||
{"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
|
{"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
|
||||||
],
|
],
|
||||||
|
on_bad_vectors="drop",
|
||||||
)
|
)
|
||||||
assert len(table) == 1
|
assert len(table) == 1
|
||||||
|
|
||||||
@@ -210,18 +256,3 @@ def test_add_with_nans(db):
|
|||||||
arrow_tbl = table.to_lance().to_table(filter="item == 'bar'")
|
arrow_tbl = table.to_lance().to_table(filter="item == 'bar'")
|
||||||
v = arrow_tbl["vector"].to_pylist()[0]
|
v = arrow_tbl["vector"].to_pylist()[0]
|
||||||
assert np.allclose(v, np.array([0.0, 0.0]))
|
assert np.allclose(v, np.array([0.0, 0.0]))
|
||||||
|
|
||||||
bad_data = [
|
|
||||||
{"vector": [np.nan], "item": "bar", "price": 20.0},
|
|
||||||
{"vector": [5], "item": "bar", "price": 20.0},
|
|
||||||
{"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
|
|
||||||
{"vector": [np.nan, 5.0], "item": "bar", "price": 20.0},
|
|
||||||
]
|
|
||||||
for row in bad_data:
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
LanceTable.create(
|
|
||||||
db,
|
|
||||||
"raise_test",
|
|
||||||
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0}, row],
|
|
||||||
on_bad_vectors="raise",
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb-node"
|
name = "vectordb-node"
|
||||||
version = "0.1.10"
|
version = "0.1.15"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
@@ -15,7 +15,11 @@ arrow-ipc = { workspace = true }
|
|||||||
arrow-schema = { workspace = true }
|
arrow-schema = { workspace = true }
|
||||||
once_cell = "1"
|
once_cell = "1"
|
||||||
futures = "0.3"
|
futures = "0.3"
|
||||||
|
half = { workspace = true }
|
||||||
lance = { workspace = true }
|
lance = { workspace = true }
|
||||||
vectordb = { path = "../../vectordb" }
|
vectordb = { path = "../../vectordb" }
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
||||||
|
object_store = { workspace = true, features = ["aws"] }
|
||||||
|
async-trait = "0"
|
||||||
|
env_logger = "0"
|
||||||
|
|||||||
@@ -13,7 +13,6 @@
|
|||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use std::io::Cursor;
|
use std::io::Cursor;
|
||||||
use std::ops::Deref;
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use arrow_array::cast::as_list_array;
|
use arrow_array::cast::as_list_array;
|
||||||
@@ -25,10 +24,13 @@ use lance::arrow::{FixedSizeListArrayExt, RecordBatchExt};
|
|||||||
pub(crate) fn convert_record_batch(record_batch: RecordBatch) -> RecordBatch {
|
pub(crate) fn convert_record_batch(record_batch: RecordBatch) -> RecordBatch {
|
||||||
let column = record_batch
|
let column = record_batch
|
||||||
.column_by_name("vector")
|
.column_by_name("vector")
|
||||||
|
.cloned()
|
||||||
.expect("vector column is missing");
|
.expect("vector column is missing");
|
||||||
let arr = as_list_array(column.deref());
|
// TODO: we should just consume the underlaying js buffer in the future instead of this arrow around a bunch of times
|
||||||
|
let arr = as_list_array(column.as_ref());
|
||||||
let list_size = arr.values().len() / record_batch.num_rows();
|
let list_size = arr.values().len() / record_batch.num_rows();
|
||||||
let r = FixedSizeListArray::try_new(arr.values(), list_size as i32).unwrap();
|
let r =
|
||||||
|
FixedSizeListArray::try_new_from_values(arr.values().to_owned(), list_size as i32).unwrap();
|
||||||
|
|
||||||
let schema = Arc::new(Schema::new(vec![Field::new(
|
let schema = Arc::new(Schema::new(vec![Field::new(
|
||||||
"vector",
|
"vector",
|
||||||
|
|||||||
@@ -17,19 +17,23 @@ use std::convert::TryFrom;
|
|||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
use arrow_array::{Float32Array, RecordBatchIterator, RecordBatchReader};
|
use arrow_array::{Float32Array, RecordBatchIterator};
|
||||||
use arrow_ipc::writer::FileWriter;
|
use arrow_ipc::writer::FileWriter;
|
||||||
|
use async_trait::async_trait;
|
||||||
use futures::{TryFutureExt, TryStreamExt};
|
use futures::{TryFutureExt, TryStreamExt};
|
||||||
use lance::dataset::{WriteMode, WriteParams};
|
use lance::dataset::{WriteMode, WriteParams};
|
||||||
use lance::index::vector::MetricType;
|
use lance::index::vector::MetricType;
|
||||||
|
use lance::io::object_store::ObjectStoreParams;
|
||||||
use neon::prelude::*;
|
use neon::prelude::*;
|
||||||
use neon::types::buffer::TypedArray;
|
use neon::types::buffer::TypedArray;
|
||||||
|
use object_store::aws::{AwsCredential, AwsCredentialProvider};
|
||||||
|
use object_store::CredentialProvider;
|
||||||
use once_cell::sync::OnceCell;
|
use once_cell::sync::OnceCell;
|
||||||
use tokio::runtime::Runtime;
|
use tokio::runtime::Runtime;
|
||||||
|
|
||||||
use vectordb::database::Database;
|
use vectordb::database::Database;
|
||||||
use vectordb::error::Error;
|
use vectordb::error::Error;
|
||||||
use vectordb::table::Table;
|
use vectordb::table::{ReadParams, Table};
|
||||||
|
|
||||||
use crate::arrow::arrow_buffer_to_record_batch;
|
use crate::arrow::arrow_buffer_to_record_batch;
|
||||||
|
|
||||||
@@ -49,8 +53,38 @@ struct JsTable {
|
|||||||
|
|
||||||
impl Finalize for JsTable {}
|
impl Finalize for JsTable {}
|
||||||
|
|
||||||
|
// TODO: object_store didn't export this type so I copied it.
|
||||||
|
// Make a requiest to object_store to export this type
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct StaticCredentialProvider<T> {
|
||||||
|
credential: Arc<T>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> StaticCredentialProvider<T> {
|
||||||
|
pub fn new(credential: T) -> Self {
|
||||||
|
Self {
|
||||||
|
credential: Arc::new(credential),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<T> CredentialProvider for StaticCredentialProvider<T>
|
||||||
|
where
|
||||||
|
T: std::fmt::Debug + Send + Sync,
|
||||||
|
{
|
||||||
|
type Credential = T;
|
||||||
|
|
||||||
|
async fn get_credential(&self) -> object_store::Result<Arc<T>> {
|
||||||
|
Ok(Arc::clone(&self.credential))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
|
fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
|
||||||
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
|
static RUNTIME: OnceCell<Runtime> = OnceCell::new();
|
||||||
|
static LOG: OnceCell<()> = OnceCell::new();
|
||||||
|
|
||||||
|
LOG.get_or_init(|| env_logger::init());
|
||||||
|
|
||||||
RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
|
RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
|
||||||
}
|
}
|
||||||
@@ -97,19 +131,67 @@ fn database_table_names(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
Ok(promise)
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_aws_creds<T>(
|
||||||
|
cx: &mut FunctionContext,
|
||||||
|
arg_starting_location: i32,
|
||||||
|
) -> Result<Option<AwsCredentialProvider>, NeonResult<T>> {
|
||||||
|
let secret_key_id = cx
|
||||||
|
.argument_opt(arg_starting_location)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
let secret_key = cx
|
||||||
|
.argument_opt(arg_starting_location + 1)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
let temp_token = cx
|
||||||
|
.argument_opt(arg_starting_location + 2)
|
||||||
|
.map(|arg| arg.downcast_or_throw::<JsString, FunctionContext>(cx).ok())
|
||||||
|
.flatten()
|
||||||
|
.map(|v| v.value(cx));
|
||||||
|
|
||||||
|
match (secret_key_id, secret_key, temp_token) {
|
||||||
|
(Some(key_id), Some(key), optional_token) => Ok(Some(Arc::new(
|
||||||
|
StaticCredentialProvider::new(AwsCredential {
|
||||||
|
key_id: key_id,
|
||||||
|
secret_key: key,
|
||||||
|
token: optional_token,
|
||||||
|
}),
|
||||||
|
))),
|
||||||
|
(None, None, None) => Ok(None),
|
||||||
|
_ => Err(cx.throw_error("Invalid credentials configuration")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let db = cx
|
let db = cx
|
||||||
.this()
|
.this()
|
||||||
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
||||||
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 1) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = ReadParams {
|
||||||
|
store_options: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
..ReadParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
let channel = cx.channel();
|
let channel = cx.channel();
|
||||||
let database = db.database.clone();
|
let database = db.database.clone();
|
||||||
|
|
||||||
let (deferred, promise) = cx.promise();
|
let (deferred, promise) = cx.promise();
|
||||||
rt.spawn(async move {
|
rt.spawn(async move {
|
||||||
let table_rst = database.open_table(&table_name).await;
|
let table_rst = database.open_table_with_params(&table_name, ¶ms).await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let table = Arc::new(Mutex::new(
|
let table = Arc::new(Mutex::new(
|
||||||
@@ -241,8 +323,6 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
"create" => WriteMode::Create,
|
"create" => WriteMode::Create,
|
||||||
_ => return cx.throw_error("Table::create only supports 'overwrite' and 'create' modes"),
|
_ => return cx.throw_error("Table::create only supports 'overwrite' and 'create' modes"),
|
||||||
};
|
};
|
||||||
let mut params = WriteParams::default();
|
|
||||||
params.mode = mode;
|
|
||||||
|
|
||||||
let rt = runtime(&mut cx)?;
|
let rt = runtime(&mut cx)?;
|
||||||
let channel = cx.channel();
|
let channel = cx.channel();
|
||||||
@@ -250,11 +330,22 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let (deferred, promise) = cx.promise();
|
let (deferred, promise) = cx.promise();
|
||||||
let database = db.database.clone();
|
let database = db.database.clone();
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 3) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = WriteParams {
|
||||||
|
store_params: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
mode: mode,
|
||||||
|
..WriteParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
rt.block_on(async move {
|
rt.block_on(async move {
|
||||||
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let batch_reader = RecordBatchIterator::new(batches.into_iter().map(Ok), schema);
|
||||||
batches.into_iter().map(Ok),
|
|
||||||
schema,
|
|
||||||
));
|
|
||||||
let table_rst = database
|
let table_rst = database
|
||||||
.create_table(&table_name, batch_reader, Some(params))
|
.create_table(&table_name, batch_reader, Some(params))
|
||||||
.await;
|
.await;
|
||||||
@@ -289,16 +380,27 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let table = js_table.table.clone();
|
let table = js_table.table.clone();
|
||||||
let write_mode = write_mode_map.get(write_mode.as_str()).cloned();
|
let write_mode = write_mode_map.get(write_mode.as_str()).cloned();
|
||||||
|
|
||||||
|
let aws_creds = match get_aws_creds(&mut cx, 2) {
|
||||||
|
Ok(creds) => creds,
|
||||||
|
Err(err) => return err,
|
||||||
|
};
|
||||||
|
|
||||||
|
let params = WriteParams {
|
||||||
|
store_params: Some(ObjectStoreParams {
|
||||||
|
aws_credentials: aws_creds,
|
||||||
|
..ObjectStoreParams::default()
|
||||||
|
}),
|
||||||
|
mode: write_mode.unwrap_or(WriteMode::Append),
|
||||||
|
..WriteParams::default()
|
||||||
|
};
|
||||||
|
|
||||||
rt.block_on(async move {
|
rt.block_on(async move {
|
||||||
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let batch_reader = RecordBatchIterator::new(batches.into_iter().map(Ok), schema);
|
||||||
batches.into_iter().map(Ok),
|
let add_result = table.lock().unwrap().add(batch_reader, Some(params)).await;
|
||||||
schema,
|
|
||||||
));
|
|
||||||
let add_result = table.lock().unwrap().add(batch_reader, write_mode).await;
|
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let added = add_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
let _added = add_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
||||||
Ok(cx.number(added as f64))
|
Ok(cx.boolean(true))
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
Ok(promise)
|
Ok(promise)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb"
|
name = "vectordb"
|
||||||
version = "0.1.10"
|
version = "0.1.15"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
@@ -13,6 +13,7 @@ arrow-data = { workspace = true }
|
|||||||
arrow-schema = { workspace = true }
|
arrow-schema = { workspace = true }
|
||||||
object_store = { workspace = true }
|
object_store = { workspace = true }
|
||||||
snafu = "0.7.4"
|
snafu = "0.7.4"
|
||||||
|
half = { workspace = true }
|
||||||
lance = { workspace = true }
|
lance = { workspace = true }
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
|
|
||||||
|
|||||||
@@ -20,13 +20,16 @@ use lance::dataset::WriteParams;
|
|||||||
use lance::io::object_store::ObjectStore;
|
use lance::io::object_store::ObjectStore;
|
||||||
use snafu::prelude::*;
|
use snafu::prelude::*;
|
||||||
|
|
||||||
use crate::error::{CreateDirSnafu, Result};
|
use crate::error::{CreateDirSnafu, InvalidTableNameSnafu, Result};
|
||||||
use crate::table::{OpenTableParams, Table};
|
use crate::table::{ReadParams, Table};
|
||||||
|
|
||||||
|
pub const LANCE_FILE_EXTENSION: &str = "lance";
|
||||||
|
|
||||||
pub struct Database {
|
pub struct Database {
|
||||||
object_store: ObjectStore,
|
object_store: ObjectStore,
|
||||||
|
|
||||||
pub(crate) uri: String,
|
pub(crate) uri: String,
|
||||||
|
pub(crate) base_path: object_store::path::Path,
|
||||||
}
|
}
|
||||||
|
|
||||||
const LANCE_EXTENSION: &str = "lance";
|
const LANCE_EXTENSION: &str = "lance";
|
||||||
@@ -43,12 +46,13 @@ impl Database {
|
|||||||
///
|
///
|
||||||
/// * A [Database] object.
|
/// * A [Database] object.
|
||||||
pub async fn connect(uri: &str) -> Result<Database> {
|
pub async fn connect(uri: &str) -> Result<Database> {
|
||||||
let (object_store, _) = ObjectStore::from_uri(uri).await?;
|
let (object_store, base_path) = ObjectStore::from_uri(uri).await?;
|
||||||
if object_store.is_local() {
|
if object_store.is_local() {
|
||||||
Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
|
Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
|
||||||
}
|
}
|
||||||
Ok(Database {
|
Ok(Database {
|
||||||
uri: uri.to_string(),
|
uri: uri.to_string(),
|
||||||
|
base_path,
|
||||||
object_store,
|
object_store,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -57,7 +61,7 @@ impl Database {
|
|||||||
fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
|
fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
|
||||||
let path = Path::new(path);
|
let path = Path::new(path);
|
||||||
if !path.try_exists()? {
|
if !path.try_exists()? {
|
||||||
create_dir_all(&path)?;
|
create_dir_all(path)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -70,23 +74,18 @@ impl Database {
|
|||||||
pub async fn table_names(&self) -> Result<Vec<String>> {
|
pub async fn table_names(&self) -> Result<Vec<String>> {
|
||||||
let f = self
|
let f = self
|
||||||
.object_store
|
.object_store
|
||||||
.read_dir(self.uri.as_str())
|
.read_dir(self.base_path.clone())
|
||||||
.await?
|
.await?
|
||||||
.iter()
|
.iter()
|
||||||
.map(|fname| Path::new(fname))
|
.map(Path::new)
|
||||||
.filter(|path| {
|
.filter(|path| {
|
||||||
let is_lance = path
|
let is_lance = path
|
||||||
.extension()
|
.extension()
|
||||||
.map(|e| e.to_str().map(|e| e == LANCE_EXTENSION))
|
.and_then(|e| e.to_str())
|
||||||
.flatten();
|
.map(|e| e == LANCE_EXTENSION);
|
||||||
is_lance.unwrap_or(false)
|
is_lance.unwrap_or(false)
|
||||||
})
|
})
|
||||||
.map(|p| {
|
.filter_map(|p| p.file_stem().and_then(|s| s.to_str().map(String::from)))
|
||||||
p.file_stem()
|
|
||||||
.map(|s| s.to_str().map(|s| String::from(s)))
|
|
||||||
.flatten()
|
|
||||||
})
|
|
||||||
.flatten()
|
|
||||||
.collect();
|
.collect();
|
||||||
Ok(f)
|
Ok(f)
|
||||||
}
|
}
|
||||||
@@ -100,10 +99,11 @@ impl Database {
|
|||||||
pub async fn create_table(
|
pub async fn create_table(
|
||||||
&self,
|
&self,
|
||||||
name: &str,
|
name: &str,
|
||||||
batches: Box<dyn RecordBatchReader>,
|
batches: impl RecordBatchReader + Send + 'static,
|
||||||
params: Option<WriteParams>,
|
params: Option<WriteParams>,
|
||||||
) -> Result<Table> {
|
) -> Result<Table> {
|
||||||
Table::create(&self.uri, name, batches, params).await
|
let table_uri = self.table_uri(name)?;
|
||||||
|
Table::create(&table_uri, name, batches, params).await
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Open a table in the database.
|
/// Open a table in the database.
|
||||||
@@ -115,7 +115,7 @@ impl Database {
|
|||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_table(&self, name: &str) -> Result<Table> {
|
pub async fn open_table(&self, name: &str) -> Result<Table> {
|
||||||
self.open_table_with_params(name, OpenTableParams::default())
|
self.open_table_with_params(name, &ReadParams::default())
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -128,12 +128,9 @@ impl Database {
|
|||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_table_with_params(
|
pub async fn open_table_with_params(&self, name: &str, params: &ReadParams) -> Result<Table> {
|
||||||
&self,
|
let table_uri = self.table_uri(name)?;
|
||||||
name: &str,
|
Table::open_with_params(&table_uri, name, params).await
|
||||||
params: OpenTableParams,
|
|
||||||
) -> Result<Table> {
|
|
||||||
Table::open_with_params(&self.uri, name, params).await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Drop a table in the database.
|
/// Drop a table in the database.
|
||||||
@@ -141,10 +138,23 @@ impl Database {
|
|||||||
/// # Arguments
|
/// # Arguments
|
||||||
/// * `name` - The name of the table.
|
/// * `name` - The name of the table.
|
||||||
pub async fn drop_table(&self, name: &str) -> Result<()> {
|
pub async fn drop_table(&self, name: &str) -> Result<()> {
|
||||||
let dir_name = format!("{}/{}.{}", self.uri, name, LANCE_EXTENSION);
|
let dir_name = format!("{}.{}", name, LANCE_EXTENSION);
|
||||||
self.object_store.remove_dir_all(dir_name).await?;
|
let full_path = self.base_path.child(dir_name.clone());
|
||||||
|
self.object_store.remove_dir_all(full_path).await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the URI of a table in the database.
|
||||||
|
fn table_uri(&self, name: &str) -> Result<String> {
|
||||||
|
let path = Path::new(&self.uri);
|
||||||
|
let table_uri = path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
||||||
|
|
||||||
|
let uri = table_uri
|
||||||
|
.as_path()
|
||||||
|
.to_str()
|
||||||
|
.context(InvalidTableNameSnafu { name })?;
|
||||||
|
Ok(uri.to_string())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -35,6 +35,12 @@ pub struct IvfPQIndexBuilder {
|
|||||||
|
|
||||||
impl IvfPQIndexBuilder {
|
impl IvfPQIndexBuilder {
|
||||||
pub fn new() -> IvfPQIndexBuilder {
|
pub fn new() -> IvfPQIndexBuilder {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for IvfPQIndexBuilder {
|
||||||
|
fn default() -> Self {
|
||||||
IvfPQIndexBuilder {
|
IvfPQIndexBuilder {
|
||||||
column: None,
|
column: None,
|
||||||
index_name: None,
|
index_name: None,
|
||||||
|
|||||||
@@ -173,10 +173,8 @@ mod tests {
|
|||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_setters_getters() {
|
async fn test_setters_getters() {
|
||||||
let mut batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let ds = Dataset::write(&mut batches, "memory://foo", None)
|
let ds = Dataset::write(batches, "memory://foo", None).await.unwrap();
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
||||||
let query = Query::new(Arc::new(ds), vector.clone());
|
let query = Query::new(Arc::new(ds), vector.clone());
|
||||||
@@ -202,10 +200,8 @@ mod tests {
|
|||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_execute() {
|
async fn test_execute() {
|
||||||
let mut batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let ds = Dataset::write(&mut batches, "memory://foo", None)
|
let ds = Dataset::write(batches, "memory://foo", None).await.unwrap();
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let vector = Float32Array::from_iter_values([0.1; 128]);
|
let vector = Float32Array::from_iter_values([0.1; 128]);
|
||||||
let query = Query::new(Arc::new(ds), vector.clone());
|
let query = Query::new(Arc::new(ds), vector.clone());
|
||||||
@@ -213,7 +209,7 @@ mod tests {
|
|||||||
assert_eq!(result.is_ok(), true);
|
assert_eq!(result.is_ok(), true);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_test_batches() -> Box<dyn RecordBatchReader> {
|
fn make_test_batches() -> impl RecordBatchReader + Send + 'static {
|
||||||
let dim: usize = 128;
|
let dim: usize = 128;
|
||||||
let schema = Arc::new(ArrowSchema::new(vec![
|
let schema = Arc::new(ArrowSchema::new(vec![
|
||||||
ArrowField::new("key", DataType::Int32, false),
|
ArrowField::new("key", DataType::Int32, false),
|
||||||
@@ -227,11 +223,11 @@ mod tests {
|
|||||||
),
|
),
|
||||||
ArrowField::new("uri", DataType::Utf8, true),
|
ArrowField::new("uri", DataType::Utf8, true),
|
||||||
]));
|
]));
|
||||||
Box::new(RecordBatchIterator::new(
|
RecordBatchIterator::new(
|
||||||
vec![RecordBatch::new_empty(schema.clone())]
|
vec![RecordBatch::new_empty(schema.clone())]
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(Ok),
|
.map(Ok),
|
||||||
schema,
|
schema,
|
||||||
))
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,21 +12,22 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use std::path::Path;
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use arrow_array::{Float32Array, RecordBatchReader};
|
use arrow_array::{Float32Array, RecordBatchReader};
|
||||||
use lance::dataset::{Dataset, ReadParams, WriteParams};
|
use arrow_schema::SchemaRef;
|
||||||
|
use lance::dataset::{Dataset, WriteParams};
|
||||||
use lance::index::IndexType;
|
use lance::index::IndexType;
|
||||||
use snafu::prelude::*;
|
use std::path::Path;
|
||||||
|
|
||||||
use crate::error::{Error, InvalidTableNameSnafu, Result};
|
use crate::error::{Error, Result};
|
||||||
use crate::index::vector::VectorIndexBuilder;
|
use crate::index::vector::VectorIndexBuilder;
|
||||||
use crate::WriteMode;
|
|
||||||
use crate::query::Query;
|
use crate::query::Query;
|
||||||
|
use crate::WriteMode;
|
||||||
|
|
||||||
|
pub use lance::dataset::ReadParams;
|
||||||
|
|
||||||
pub const VECTOR_COLUMN_NAME: &str = "vector";
|
pub const VECTOR_COLUMN_NAME: &str = "vector";
|
||||||
pub const LANCE_FILE_EXTENSION: &str = "lance";
|
|
||||||
|
|
||||||
/// A table in a LanceDB database.
|
/// A table in a LanceDB database.
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -42,24 +43,25 @@ impl std::fmt::Display for Table {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct OpenTableParams {
|
|
||||||
pub open_table_params: ReadParams,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Table {
|
impl Table {
|
||||||
/// Opens an existing Table
|
/// Opens an existing Table
|
||||||
///
|
///
|
||||||
/// # Arguments
|
/// # Arguments
|
||||||
///
|
///
|
||||||
/// * `base_path` - The base path where the table is located
|
/// * `uri` - The uri to a [Table]
|
||||||
/// * `name` The Table name
|
/// * `name` - The table name
|
||||||
///
|
///
|
||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open(base_uri: &str, name: &str) -> Result<Self> {
|
pub async fn open(uri: &str) -> Result<Self> {
|
||||||
Self::open_with_params(base_uri, name, OpenTableParams::default()).await
|
let name = Self::get_table_name(uri)?;
|
||||||
|
Self::open_with_params(uri, &name, &ReadParams::default()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Open an Table with a given name.
|
||||||
|
pub async fn open_with_name(uri: &str, name: &str) -> Result<Self> {
|
||||||
|
Self::open_with_params(uri, name, &ReadParams::default()).await
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Opens an existing Table
|
/// Opens an existing Table
|
||||||
@@ -68,25 +70,13 @@ impl Table {
|
|||||||
///
|
///
|
||||||
/// * `base_path` - The base path where the table is located
|
/// * `base_path` - The base path where the table is located
|
||||||
/// * `name` The Table name
|
/// * `name` The Table name
|
||||||
/// * `params` The [OpenTableParams] to use when opening the table
|
/// * `params` The [ReadParams] to use when opening the table
|
||||||
///
|
///
|
||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_with_params(
|
pub async fn open_with_params(uri: &str, name: &str, params: &ReadParams) -> Result<Self> {
|
||||||
base_uri: &str,
|
let dataset = Dataset::open_with_params(uri, params)
|
||||||
name: &str,
|
|
||||||
params: OpenTableParams,
|
|
||||||
) -> Result<Self> {
|
|
||||||
let path = Path::new(base_uri);
|
|
||||||
|
|
||||||
let table_uri = path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
|
||||||
let uri = table_uri
|
|
||||||
.as_path()
|
|
||||||
.to_str()
|
|
||||||
.context(InvalidTableNameSnafu { name })?;
|
|
||||||
|
|
||||||
let dataset = Dataset::open_with_params(uri, ¶ms.open_table_params)
|
|
||||||
.await
|
.await
|
||||||
.map_err(|e| match e {
|
.map_err(|e| match e {
|
||||||
lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
|
lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
|
||||||
@@ -103,31 +93,73 @@ impl Table {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Checkout a specific version of this [`Table`]
|
||||||
|
///
|
||||||
|
pub async fn checkout(uri: &str, version: u64) -> Result<Self> {
|
||||||
|
let name = Self::get_table_name(uri)?;
|
||||||
|
Self::checkout_with_params(uri, &name, version, &ReadParams::default()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn checkout_with_name(uri: &str, name: &str, version: u64) -> Result<Self> {
|
||||||
|
Self::checkout_with_params(uri, name, version, &ReadParams::default()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn checkout_with_params(
|
||||||
|
uri: &str,
|
||||||
|
name: &str,
|
||||||
|
version: u64,
|
||||||
|
params: &ReadParams,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let dataset = Dataset::checkout_with_params(uri, version, params)
|
||||||
|
.await
|
||||||
|
.map_err(|e| match e {
|
||||||
|
lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
|
||||||
|
name: name.to_string(),
|
||||||
|
},
|
||||||
|
e => Error::Lance {
|
||||||
|
message: e.to_string(),
|
||||||
|
},
|
||||||
|
})?;
|
||||||
|
Ok(Table {
|
||||||
|
name: name.to_string(),
|
||||||
|
uri: uri.to_string(),
|
||||||
|
dataset: Arc::new(dataset),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_table_name(uri: &str) -> Result<String> {
|
||||||
|
let path = Path::new(uri);
|
||||||
|
let name = path
|
||||||
|
.file_stem()
|
||||||
|
.ok_or(Error::TableNotFound {
|
||||||
|
name: uri.to_string(),
|
||||||
|
})?
|
||||||
|
.to_str()
|
||||||
|
.ok_or(Error::InvalidTableName {
|
||||||
|
name: uri.to_string(),
|
||||||
|
})?;
|
||||||
|
Ok(name.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
/// Creates a new Table
|
/// Creates a new Table
|
||||||
///
|
///
|
||||||
/// # Arguments
|
/// # Arguments
|
||||||
///
|
///
|
||||||
/// * `base_path` - The base path where the table is located
|
/// * `uri` - The URI to the table.
|
||||||
/// * `name` The Table name
|
/// * `name` The Table name
|
||||||
/// * `batches` RecordBatch to be saved in the database
|
/// * `batches` RecordBatch to be saved in the database.
|
||||||
|
/// * `params` - Write parameters.
|
||||||
///
|
///
|
||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn create(
|
pub async fn create(
|
||||||
base_uri: &str,
|
uri: &str,
|
||||||
name: &str,
|
name: &str,
|
||||||
mut batches: Box<dyn RecordBatchReader>,
|
batches: impl RecordBatchReader + Send + 'static,
|
||||||
params: Option<WriteParams>,
|
params: Option<WriteParams>,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let base_path = Path::new(base_uri);
|
let dataset = Dataset::write(batches, uri, params)
|
||||||
let table_uri = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
|
||||||
let uri = table_uri
|
|
||||||
.as_path()
|
|
||||||
.to_str()
|
|
||||||
.context(InvalidTableNameSnafu { name })?
|
|
||||||
.to_string();
|
|
||||||
let dataset = Dataset::write(&mut batches, &uri, params)
|
|
||||||
.await
|
.await
|
||||||
.map_err(|e| match e {
|
.map_err(|e| match e {
|
||||||
lance::Error::DatasetAlreadyExists { .. } => Error::TableAlreadyExists {
|
lance::Error::DatasetAlreadyExists { .. } => Error::TableAlreadyExists {
|
||||||
@@ -139,11 +171,21 @@ impl Table {
|
|||||||
})?;
|
})?;
|
||||||
Ok(Table {
|
Ok(Table {
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
uri,
|
uri: uri.to_string(),
|
||||||
dataset: Arc::new(dataset),
|
dataset: Arc::new(dataset),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Schema of this Table.
|
||||||
|
pub fn schema(&self) -> SchemaRef {
|
||||||
|
Arc::new(self.dataset.schema().into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Version of this Table
|
||||||
|
pub fn version(&self) -> u64 {
|
||||||
|
self.dataset.version().version
|
||||||
|
}
|
||||||
|
|
||||||
/// Create index on the table.
|
/// Create index on the table.
|
||||||
pub async fn create_index(&mut self, index_builder: &impl VectorIndexBuilder) -> Result<()> {
|
pub async fn create_index(&mut self, index_builder: &impl VectorIndexBuilder) -> Result<()> {
|
||||||
use lance::index::DatasetIndexExt;
|
use lance::index::DatasetIndexExt;
|
||||||
@@ -176,14 +218,16 @@ impl Table {
|
|||||||
/// * The number of rows added
|
/// * The number of rows added
|
||||||
pub async fn add(
|
pub async fn add(
|
||||||
&mut self,
|
&mut self,
|
||||||
mut batches: Box<dyn RecordBatchReader>,
|
batches: impl RecordBatchReader + Send + 'static,
|
||||||
write_mode: Option<WriteMode>,
|
params: Option<WriteParams>,
|
||||||
) -> Result<usize> {
|
) -> Result<()> {
|
||||||
let mut params = WriteParams::default();
|
let params = params.unwrap_or(WriteParams {
|
||||||
params.mode = write_mode.unwrap_or(WriteMode::Append);
|
mode: WriteMode::Append,
|
||||||
|
..WriteParams::default()
|
||||||
|
});
|
||||||
|
|
||||||
self.dataset = Arc::new(Dataset::write(&mut batches, &self.uri, Some(params)).await?);
|
self.dataset = Arc::new(Dataset::write(batches, &self.uri, Some(params)).await?);
|
||||||
Ok(batches.count())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates a new Query object that can be executed.
|
/// Creates a new Query object that can be executed.
|
||||||
@@ -207,12 +251,12 @@ impl Table {
|
|||||||
/// Merge new data into this table.
|
/// Merge new data into this table.
|
||||||
pub async fn merge(
|
pub async fn merge(
|
||||||
&mut self,
|
&mut self,
|
||||||
mut batches: Box<dyn RecordBatchReader>,
|
batches: impl RecordBatchReader + Send + 'static,
|
||||||
left_on: &str,
|
left_on: &str,
|
||||||
right_on: &str,
|
right_on: &str,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let mut dataset = self.dataset.as_ref().clone();
|
let mut dataset = self.dataset.as_ref().clone();
|
||||||
dataset.merge(&mut batches, left_on, right_on).await?;
|
dataset.merge(batches, left_on, right_on).await?;
|
||||||
self.dataset = Arc::new(dataset);
|
self.dataset = Arc::new(dataset);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -251,14 +295,13 @@ mod tests {
|
|||||||
async fn test_open() {
|
async fn test_open() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let dataset_path = tmp_dir.path().join("test.lance");
|
let dataset_path = tmp_dir.path().join("test.lance");
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
|
||||||
|
|
||||||
let mut batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
|
Dataset::write(batches, dataset_path.to_str().unwrap(), None)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let table = Table::open(uri, "test").await.unwrap();
|
let table = Table::open(dataset_path.to_str().unwrap()).await.unwrap();
|
||||||
|
|
||||||
assert_eq!(table.name, "test")
|
assert_eq!(table.name, "test")
|
||||||
}
|
}
|
||||||
@@ -267,11 +310,12 @@ mod tests {
|
|||||||
async fn test_open_not_found() {
|
async fn test_open_not_found() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
let table = Table::open(uri, "test").await;
|
let table = Table::open(uri).await;
|
||||||
assert!(matches!(table.unwrap_err(), Error::TableNotFound { .. }));
|
assert!(matches!(table.unwrap_err(), Error::TableNotFound { .. }));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[cfg(not(windows))]
|
||||||
fn test_object_store_path() {
|
fn test_object_store_path() {
|
||||||
use std::path::Path as StdPath;
|
use std::path::Path as StdPath;
|
||||||
let p = StdPath::new("s3://bucket/path/to/file");
|
let p = StdPath::new("s3://bucket/path/to/file");
|
||||||
@@ -284,11 +328,11 @@ mod tests {
|
|||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let _ = batches.schema().clone();
|
let _ = batches.schema().clone();
|
||||||
Table::create(&uri, "test", batches, None).await.unwrap();
|
Table::create(&uri, "test", batches, None).await.unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let result = Table::create(&uri, "test", batches, None).await;
|
let result = Table::create(&uri, "test", batches, None).await;
|
||||||
assert!(matches!(
|
assert!(matches!(
|
||||||
result.unwrap_err(),
|
result.unwrap_err(),
|
||||||
@@ -301,12 +345,12 @@ mod tests {
|
|||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let schema = batches.schema().clone();
|
let schema = batches.schema().clone();
|
||||||
let mut table = Table::create(&uri, "test", batches, None).await.unwrap();
|
let mut table = Table::create(&uri, "test", batches, None).await.unwrap();
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 10);
|
assert_eq!(table.count_rows().await.unwrap(), 10);
|
||||||
|
|
||||||
let new_batches: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let new_batches = RecordBatchIterator::new(
|
||||||
vec![RecordBatch::try_new(
|
vec![RecordBatch::try_new(
|
||||||
schema.clone(),
|
schema.clone(),
|
||||||
vec![Arc::new(Int32Array::from_iter_values(100..110))],
|
vec![Arc::new(Int32Array::from_iter_values(100..110))],
|
||||||
@@ -315,7 +359,7 @@ mod tests {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(Ok),
|
.map(Ok),
|
||||||
schema.clone(),
|
schema.clone(),
|
||||||
));
|
);
|
||||||
|
|
||||||
table.add(new_batches, None).await.unwrap();
|
table.add(new_batches, None).await.unwrap();
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 20);
|
assert_eq!(table.count_rows().await.unwrap(), 20);
|
||||||
@@ -327,12 +371,12 @@ mod tests {
|
|||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
let schema = batches.schema().clone();
|
let schema = batches.schema().clone();
|
||||||
let mut table = Table::create(uri, "test", batches, None).await.unwrap();
|
let mut table = Table::create(uri, "test", batches, None).await.unwrap();
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 10);
|
assert_eq!(table.count_rows().await.unwrap(), 10);
|
||||||
|
|
||||||
let new_batches: Box<dyn RecordBatchReader> = Box::new(RecordBatchIterator::new(
|
let new_batches = RecordBatchIterator::new(
|
||||||
vec![RecordBatch::try_new(
|
vec![RecordBatch::try_new(
|
||||||
schema.clone(),
|
schema.clone(),
|
||||||
vec![Arc::new(Int32Array::from_iter_values(100..110))],
|
vec![Arc::new(Int32Array::from_iter_values(100..110))],
|
||||||
@@ -341,12 +385,14 @@ mod tests {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(Ok),
|
.map(Ok),
|
||||||
schema.clone(),
|
schema.clone(),
|
||||||
));
|
);
|
||||||
|
|
||||||
table
|
let param: WriteParams = WriteParams {
|
||||||
.add(new_batches, Some(WriteMode::Overwrite))
|
mode: WriteMode::Overwrite,
|
||||||
.await
|
..Default::default()
|
||||||
.unwrap();
|
};
|
||||||
|
|
||||||
|
table.add(new_batches, Some(param)).await.unwrap();
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 10);
|
assert_eq!(table.count_rows().await.unwrap(), 10);
|
||||||
assert_eq!(table.name, "test");
|
assert_eq!(table.name, "test");
|
||||||
}
|
}
|
||||||
@@ -355,21 +401,21 @@ mod tests {
|
|||||||
async fn test_search() {
|
async fn test_search() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let dataset_path = tmp_dir.path().join("test.lance");
|
let dataset_path = tmp_dir.path().join("test.lance");
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = dataset_path.to_str().unwrap();
|
||||||
|
|
||||||
let mut batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
|
Dataset::write(batches, dataset_path.to_str().unwrap(), None)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let table = Table::open(uri, "test").await.unwrap();
|
let table = Table::open(uri).await.unwrap();
|
||||||
|
|
||||||
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
||||||
let query = table.search(vector.clone());
|
let query = table.search(vector.clone());
|
||||||
assert_eq!(vector, query.query_vector);
|
assert_eq!(vector, query.query_vector);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default, Debug)]
|
||||||
struct NoOpCacheWrapper {
|
struct NoOpCacheWrapper {
|
||||||
called: AtomicBool,
|
called: AtomicBool,
|
||||||
}
|
}
|
||||||
@@ -394,10 +440,10 @@ mod tests {
|
|||||||
async fn test_open_table_options() {
|
async fn test_open_table_options() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let dataset_path = tmp_dir.path().join("test.lance");
|
let dataset_path = tmp_dir.path().join("test.lance");
|
||||||
let uri = tmp_dir.path().to_str().unwrap();
|
let uri = dataset_path.to_str().unwrap();
|
||||||
|
|
||||||
let mut batches: Box<dyn RecordBatchReader> = make_test_batches();
|
let batches = make_test_batches();
|
||||||
Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
|
Dataset::write(batches, dataset_path.to_str().unwrap(), None)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -405,27 +451,24 @@ mod tests {
|
|||||||
|
|
||||||
let mut object_store_params = ObjectStoreParams::default();
|
let mut object_store_params = ObjectStoreParams::default();
|
||||||
object_store_params.object_store_wrapper = Some(wrapper.clone());
|
object_store_params.object_store_wrapper = Some(wrapper.clone());
|
||||||
let param = OpenTableParams {
|
let param = ReadParams {
|
||||||
open_table_params: ReadParams {
|
store_options: Some(object_store_params),
|
||||||
store_options: Some(object_store_params),
|
..Default::default()
|
||||||
..ReadParams::default()
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
assert!(!wrapper.called());
|
assert!(!wrapper.called());
|
||||||
let _ = Table::open_with_params(uri, "test", param).await.unwrap();
|
let _ = Table::open_with_params(uri, "test", ¶m).await.unwrap();
|
||||||
assert!(wrapper.called());
|
assert!(wrapper.called());
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_test_batches() -> Box<dyn RecordBatchReader> {
|
fn make_test_batches() -> impl RecordBatchReader + Send + Sync + 'static {
|
||||||
let schema = Arc::new(Schema::new(vec![Field::new("i", DataType::Int32, false)]));
|
let schema = Arc::new(Schema::new(vec![Field::new("i", DataType::Int32, false)]));
|
||||||
Box::new(RecordBatchIterator::new(
|
RecordBatchIterator::new(
|
||||||
vec![RecordBatch::try_new(
|
vec![RecordBatch::try_new(
|
||||||
schema.clone(),
|
schema.clone(),
|
||||||
vec![Arc::new(Int32Array::from_iter_values(0..10))],
|
vec![Arc::new(Int32Array::from_iter_values(0..10))],
|
||||||
)],
|
)],
|
||||||
schema,
|
schema,
|
||||||
))
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -465,9 +508,7 @@ mod tests {
|
|||||||
schema,
|
schema,
|
||||||
);
|
);
|
||||||
|
|
||||||
let reader: Box<dyn RecordBatchReader + Send> = Box::new(batches);
|
let mut table = Table::create(uri, "test", batches, None).await.unwrap();
|
||||||
let mut table = Table::create(uri, "test", reader, None).await.unwrap();
|
|
||||||
|
|
||||||
let mut i = IvfPQIndexBuilder::new();
|
let mut i = IvfPQIndexBuilder::new();
|
||||||
|
|
||||||
let index_builder = i
|
let index_builder = i
|
||||||
|
|||||||
Reference in New Issue
Block a user