mirror of
https://github.com/lancedb/lancedb.git
synced 2025-12-23 13:29:57 +00:00
Compare commits
70 Commits
v0.1.3
...
python-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0e4c52b8a6 | ||
|
|
c8bebf4776 | ||
|
|
c14ad91df0 | ||
|
|
ad48242ffb | ||
|
|
1a9a392e20 | ||
|
|
b489edc576 | ||
|
|
8708fde3ef | ||
|
|
cc7e54298b | ||
|
|
d1e8a97a2a | ||
|
|
01dadb0862 | ||
|
|
0724d41c4b | ||
|
|
cbb56e25ab | ||
|
|
78de8f5782 | ||
|
|
a6544c2a31 | ||
|
|
39ed70896a | ||
|
|
ae672df1b7 | ||
|
|
15c3f42387 | ||
|
|
f65d85efcc | ||
|
|
6b5c046c3b | ||
|
|
d00f4e51d0 | ||
|
|
fbc44d4243 | ||
|
|
b53eee42ce | ||
|
|
7e0d6088ca | ||
|
|
5210f40a33 | ||
|
|
5ec4a5d730 | ||
|
|
e4f64fca7b | ||
|
|
4744640bd2 | ||
|
|
094b5e643c | ||
|
|
a318778d2a | ||
|
|
9b83ce3d2a | ||
|
|
7bad676f30 | ||
|
|
0e981e782b | ||
|
|
e18cdfc7cf | ||
|
|
fed33a51d5 | ||
|
|
a56b65db84 | ||
|
|
f21caebeda | ||
|
|
12da77a9f7 | ||
|
|
131b2dc57b | ||
|
|
3798f56a9b | ||
|
|
50cdb16b45 | ||
|
|
d803482588 | ||
|
|
f37994b72a | ||
|
|
2418de0a3c | ||
|
|
d0c47e3838 | ||
|
|
41cca31f48 | ||
|
|
b621009d39 | ||
|
|
6a9cde22de | ||
|
|
bfa90b35ee | ||
|
|
12ec29f55b | ||
|
|
cdd08ef35c | ||
|
|
adcb2a1387 | ||
|
|
9d52a32668 | ||
|
|
11b2e63eea | ||
|
|
daedf1396b | ||
|
|
8af5f19cc1 | ||
|
|
fbd0bc7740 | ||
|
|
f765a453cf | ||
|
|
45b3a14f26 | ||
|
|
9965b4564d | ||
|
|
0719e4b3fb | ||
|
|
091fb9b665 | ||
|
|
03013a4434 | ||
|
|
3e14b357e7 | ||
|
|
99cbda8b07 | ||
|
|
e50b642d80 | ||
|
|
6d8cf52e01 | ||
|
|
53f3882d6e | ||
|
|
2b26775ed1 | ||
|
|
306ada5cb8 | ||
|
|
d3aa8bfbc5 |
12
.bumpversion.cfg
Normal file
12
.bumpversion.cfg
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[bumpversion]
|
||||||
|
current_version = 0.1.9
|
||||||
|
commit = True
|
||||||
|
message = Bump version: {current_version} → {new_version}
|
||||||
|
tag = True
|
||||||
|
tag_name = v{new_version}
|
||||||
|
|
||||||
|
[bumpversion:file:node/package.json]
|
||||||
|
|
||||||
|
[bumpversion:file:rust/ffi/node/Cargo.toml]
|
||||||
|
|
||||||
|
[bumpversion:file:rust/vectordb/Cargo.toml]
|
||||||
29
.github/workflows/cargo-publish.yml
vendored
Normal file
29
.github/workflows/cargo-publish.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
name: Cargo Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ published ]
|
||||||
|
|
||||||
|
env:
|
||||||
|
# This env var is used by Swatinem/rust-cache@v2 for the cache
|
||||||
|
# key, so we set it to make sure it is always consistent.
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
timeout-minutes: 30
|
||||||
|
# Only runs on tags that matches the make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: rust
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y protobuf-compiler libssl-dev
|
||||||
|
- name: Publish the package
|
||||||
|
run: |
|
||||||
|
cargo publish -p vectordb --all-features --token ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||||
55
.github/workflows/make-release-commit.yml
vendored
Normal file
55
.github/workflows/make-release-commit.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
name: Create release commit
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
dry_run:
|
||||||
|
description: 'Dry run (create the local commit/tags but do not push it)'
|
||||||
|
required: true
|
||||||
|
default: "false"
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- "true"
|
||||||
|
- "false"
|
||||||
|
part:
|
||||||
|
description: 'What kind of release is this?'
|
||||||
|
required: true
|
||||||
|
default: 'patch'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- patch
|
||||||
|
- minor
|
||||||
|
- major
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump-version:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out main
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
persist-credentials: false
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- name: Set git configs for bumpversion
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
git config user.name 'Lance Release'
|
||||||
|
git config user.email 'lance-dev@lancedb.com'
|
||||||
|
- name: Set up Python 3.10
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Bump version, create tag and commit
|
||||||
|
run: |
|
||||||
|
pip install bump2version
|
||||||
|
bumpversion --verbose ${{ inputs.part }}
|
||||||
|
- name: Push new version and tag
|
||||||
|
if: ${{ inputs.dry_run }} == "false"
|
||||||
|
uses: ad-m/github-push-action@master
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
|
branch: main
|
||||||
|
tags: true
|
||||||
|
|
||||||
31
.github/workflows/pypi-publish.yml
vendored
Normal file
31
.github/workflows/pypi-publish.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
name: PyPI Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ published ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only runs on tags that matches the python-make-release action
|
||||||
|
if: startsWith(github.ref, 'refs/tags/python-v')
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: python
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
- name: Build distribution
|
||||||
|
run: |
|
||||||
|
ls -la
|
||||||
|
pip install wheel setuptools --upgrade
|
||||||
|
python setup.py sdist bdist_wheel
|
||||||
|
- name: Publish
|
||||||
|
uses: pypa/gh-action-pypi-publish@v1.8.5
|
||||||
|
with:
|
||||||
|
password: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
|
||||||
|
packages-dir: python/dist
|
||||||
56
.github/workflows/python-make-release-commit.yml
vendored
Normal file
56
.github/workflows/python-make-release-commit.yml
vendored
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
name: Python - Create release commit
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
dry_run:
|
||||||
|
description: 'Dry run (create the local commit/tags but do not push it)'
|
||||||
|
required: true
|
||||||
|
default: "false"
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- "true"
|
||||||
|
- "false"
|
||||||
|
part:
|
||||||
|
description: 'What kind of release is this?'
|
||||||
|
required: true
|
||||||
|
default: 'patch'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- patch
|
||||||
|
- minor
|
||||||
|
- major
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump-version:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out main
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
persist-credentials: false
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- name: Set git configs for bumpversion
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
git config user.name 'Lance Release'
|
||||||
|
git config user.email 'lance-dev@lancedb.com'
|
||||||
|
- name: Set up Python 3.10
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Bump version, create tag and commit
|
||||||
|
working-directory: python
|
||||||
|
run: |
|
||||||
|
pip install bump2version
|
||||||
|
bumpversion --verbose ${{ inputs.part }}
|
||||||
|
- name: Push new version and tag
|
||||||
|
if: ${{ inputs.dry_run }} == "false"
|
||||||
|
uses: ad-m/github-push-action@master
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
|
||||||
|
branch: main
|
||||||
|
tags: true
|
||||||
|
|
||||||
10
.github/workflows/python.yml
vendored
10
.github/workflows/python.yml
vendored
@@ -32,9 +32,15 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .
|
||||||
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
||||||
pip install pytest
|
pip install pytest pytest-mock black isort
|
||||||
|
- name: Black
|
||||||
|
run: black --check --diff --no-color --quiet .
|
||||||
|
- name: isort
|
||||||
|
run: isort --check --diff --quiet .
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: pytest -x -v --durations=30 tests
|
run: pytest -x -v --durations=30 tests
|
||||||
|
- name: doctest
|
||||||
|
run: pytest --doctest-modules lancedb
|
||||||
mac:
|
mac:
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
runs-on: "macos-12"
|
runs-on: "macos-12"
|
||||||
@@ -55,6 +61,6 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
pip install -e .
|
pip install -e .
|
||||||
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
|
||||||
pip install pytest
|
pip install pytest pytest-mock
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: pytest -x -v --durations=30 tests
|
run: pytest -x -v --durations=30 tests
|
||||||
67
.github/workflows/rust.yml
vendored
Normal file
67
.github/workflows/rust.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
name: Rust
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- rust/**
|
||||||
|
- .github/workflows/rust.yml
|
||||||
|
|
||||||
|
env:
|
||||||
|
# This env var is used by Swatinem/rust-cache@v2 for the cache
|
||||||
|
# key, so we set it to make sure it is always consistent.
|
||||||
|
CARGO_TERM_COLOR: always
|
||||||
|
# Disable full debug symbol generation to speed up CI build and keep memory down
|
||||||
|
# "1" means line tables only, which is useful for panic tracebacks.
|
||||||
|
RUSTFLAGS: "-C debuginfo=1"
|
||||||
|
RUST_BACKTRACE: "1"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
linux:
|
||||||
|
timeout-minutes: 30
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: rust
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: rust
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y protobuf-compiler libssl-dev
|
||||||
|
- name: Build
|
||||||
|
run: cargo build --all-features
|
||||||
|
- name: Run tests
|
||||||
|
run: cargo test --all-features
|
||||||
|
macos:
|
||||||
|
runs-on: macos-12
|
||||||
|
timeout-minutes: 30
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: rust
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
lfs: true
|
||||||
|
- name: CPU features
|
||||||
|
run: sysctl -a | grep cpu
|
||||||
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
with:
|
||||||
|
workspaces: rust
|
||||||
|
- name: Install dependencies
|
||||||
|
run: brew install protobuf
|
||||||
|
- name: Build
|
||||||
|
run: cargo build --all-features
|
||||||
|
- name: Run tests
|
||||||
|
run: cargo test --all-features
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -15,7 +15,7 @@ site
|
|||||||
python/build
|
python/build
|
||||||
python/dist
|
python/dist
|
||||||
|
|
||||||
notebooks/.ipynb_checkpoints
|
**/.ipynb_checkpoints
|
||||||
|
|
||||||
**/.hypothesis
|
**/.hypothesis
|
||||||
|
|
||||||
|
|||||||
244
Cargo.lock
generated
244
Cargo.lock
generated
@@ -2,12 +2,6 @@
|
|||||||
# It is not intended for manual editing.
|
# It is not intended for manual editing.
|
||||||
version = 3
|
version = 3
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "accelerate-src"
|
|
||||||
version = "0.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "415ed64958754dbe991900f3940677e6a7eefb4d7367afd70d642677b0c7d19d"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "adler"
|
name = "adler"
|
||||||
version = "1.0.2"
|
version = "1.0.2"
|
||||||
@@ -68,9 +62,9 @@ checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow"
|
name = "arrow"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1aea9fcb25bbb70f7f922f95b99ca29c1013dab47f6df61a6f24861842dd7f2e"
|
checksum = "6619cab21a0cdd8c9b9f1d9e09bfaa9b1974e5ef809a6566aef0b998caf38ace"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow-arith",
|
"arrow-arith",
|
||||||
@@ -90,9 +84,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-arith"
|
name = "arrow-arith"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8d967b42f7b12c91fd78acd396b20c2973b184c8866846674abbb00c963e93ab"
|
checksum = "e0dc95485623a76e00929bda8caa40c1f838190952365c4f43a7b9ae86d03e94"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -105,9 +99,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-array"
|
name = "arrow-array"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3190f208ee7aa0f3596fa0098d42911dec5e123ca88c002a08b24877ad14c71e"
|
checksum = "3267847f53d3042473cfd2c769afd8d74a6d7d201fc3a34f5cb84c0282ef47a7"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -122,9 +116,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-buffer"
|
name = "arrow-buffer"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5d33c733c5b6c44a0fc526f29c09546e04eb56772a7a21e48e602f368be381f6"
|
checksum = "c5f66553e66e120ac4b21570368ee9ebf35ff3f5399f872b0667699e145678f5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"half",
|
"half",
|
||||||
"num",
|
"num",
|
||||||
@@ -132,9 +126,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-cast"
|
name = "arrow-cast"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "abd349520b6a1ed4924ae2afc9d23330a3044319e4ec3d5b124c09e4d440ae87"
|
checksum = "65e6f3579dbf0d97c683d451b2550062b0f0e62a3169bf74238b5f59f44ad6d8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -149,9 +143,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-csv"
|
name = "arrow-csv"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c80af3c3e290a2a7e1cc518f1471dff331878cb4af9a5b088bf030b89debf649"
|
checksum = "373579c4c1a8f5307d3125b7a89c700fcf8caf85821c77eb4baab3855ae0aba5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -168,9 +162,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-data"
|
name = "arrow-data"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b1c8361947aaa96d331da9df3f7a08bdd8ab805a449994c97f5c4d24c4b7e2cf"
|
checksum = "61bc8df9912cca6642665fdf989d6fa0de2570f18a7f709bcf59d29de96d2097"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
@@ -180,9 +174,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-ipc"
|
name = "arrow-ipc"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9a46ee000b9fbd1e8db6e8b26acb8c760838512b39d8c9f9d73892cb55351d50"
|
checksum = "0105dcf5f91daa7182d87b713ee0b32b3bfc88e0c48e7dc3e9d6f1277a07d1ae"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -190,13 +184,14 @@ dependencies = [
|
|||||||
"arrow-data",
|
"arrow-data",
|
||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
"flatbuffers",
|
"flatbuffers",
|
||||||
|
"zstd",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-json"
|
name = "arrow-json"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4bf2366607be867ced681ad7f272371a5cf1fc2941328eef7b4fee14565166fb"
|
checksum = "e73134fb5b5ec8770f8cbb214c2c487b2d350081e403ca4eeeb6f8f5e19846ac"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -214,9 +209,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-ord"
|
name = "arrow-ord"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "304069901c867200e21ec868ae7521165875470ef2f1f6d58f979a443d63997e"
|
checksum = "89f25bc66e18d4c2aa1fe2f9bb03e2269da60e636213210385ae41a107f9965a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -229,9 +224,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-row"
|
name = "arrow-row"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0d57fe8ceef3392fdd493269d8a2d589de17bafce151aacbffbddac7a57f441a"
|
checksum = "1095ff85ea4f5ff02d17b30b089de31b51a50be01c6b674f0a0509ab771232f1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
@@ -244,15 +239,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-schema"
|
name = "arrow-schema"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a16b88a93ac8350f0200b1cd336a1f887315925b8dd7aa145a37b8bdbd8497a4"
|
checksum = "25187bbef474151a2e4ddec67b9e34bda5cbfba292dc571392fa3a1f71ff5a82"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-select"
|
name = "arrow-select"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "98e8a4d6ca37d5212439b24caad4d80743fcbb706706200dd174bb98e68fe9d8"
|
checksum = "fd0d4ee884aec3aa05e41478e3cd312bf609de9babb5d187a43fb45931da4da4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -263,9 +258,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arrow-string"
|
name = "arrow-string"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cbb594efa397eb6a546f42b1f8df3d242ea84dbfda5232e06035dc2b2e2c8459"
|
checksum = "d6d71c3ffe4c07e66ce8fdc6aed5b00e0e60c5144911879b10546f5b72d8fa1c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
@@ -273,7 +268,7 @@ dependencies = [
|
|||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
"arrow-select",
|
"arrow-select",
|
||||||
"regex",
|
"regex",
|
||||||
"regex-syntax 0.6.29",
|
"regex-syntax",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -654,6 +649,12 @@ version = "3.12.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8"
|
checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bytemuck"
|
||||||
|
version = "1.13.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "byteorder"
|
name = "byteorder"
|
||||||
version = "1.4.3"
|
version = "1.4.3"
|
||||||
@@ -676,26 +677,6 @@ dependencies = [
|
|||||||
"either",
|
"either",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cblas"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3de46dff748ed7e891bc46faae117f48d2a7911041c6630aed3c61a3fe12326f"
|
|
||||||
dependencies = [
|
|
||||||
"cblas-sys",
|
|
||||||
"libc",
|
|
||||||
"num-complex",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cblas-sys"
|
|
||||||
version = "0.1.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b6feecd82cce51b0204cf063f0041d69f24ce83f680d87514b004248e7b0fa65"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cc"
|
name = "cc"
|
||||||
version = "1.0.79"
|
version = "1.0.79"
|
||||||
@@ -922,9 +903,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion"
|
name = "datafusion"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a8a7d4b334f4512ff2fdbce87f511f570ae895af1ac7c729e77c12583253b22a"
|
checksum = "9992c267436551d40b52d65289b144712e7b0ebdc62c8c859fd1574e5f73efbb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow",
|
"arrow",
|
||||||
@@ -949,7 +930,7 @@ dependencies = [
|
|||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"object_store",
|
"object_store 0.5.6",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
"parquet",
|
"parquet",
|
||||||
"percent-encoding",
|
"percent-encoding",
|
||||||
@@ -967,31 +948,31 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-common"
|
name = "datafusion-common"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "80abfcb1dbc6390f952f21de9069e6177ad6318fcae5fbceabb50666d96533dd"
|
checksum = "c3be97f7a7c720cdbb71e9eeabf814fa6ad8102b9022390f6cac74d3b4af6392"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow",
|
"arrow",
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"chrono",
|
"chrono",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"object_store",
|
"object_store 0.5.6",
|
||||||
"parquet",
|
"parquet",
|
||||||
"sqlparser",
|
"sqlparser",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-execution"
|
name = "datafusion-execution"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "df2524f1b4b58319895b112809d2a59e54fa662d0e46330a455f22882c2cb7b9"
|
checksum = "c77c4b14b809b0e4c5bb101b6834504f06cdbb0d3c643400c61d0d844b33264e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"dashmap",
|
"dashmap",
|
||||||
"datafusion-common",
|
"datafusion-common",
|
||||||
"datafusion-expr",
|
"datafusion-expr",
|
||||||
"hashbrown 0.13.2",
|
"hashbrown 0.13.2",
|
||||||
"log",
|
"log",
|
||||||
"object_store",
|
"object_store 0.5.6",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
"rand",
|
"rand",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
@@ -1000,21 +981,24 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-expr"
|
name = "datafusion-expr"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "af8040b7a75b04685f4db0a1b11ffa93cd163c1bc13751df3f5cf76baabaf5a1"
|
checksum = "e6ec7409bd45cf4fae6395d7d1024c8a97e543cadc88363e405d2aad5330e5e7"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow",
|
"arrow",
|
||||||
"datafusion-common",
|
"datafusion-common",
|
||||||
|
"lazy_static",
|
||||||
"sqlparser",
|
"sqlparser",
|
||||||
|
"strum",
|
||||||
|
"strum_macros",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-optimizer"
|
name = "datafusion-optimizer"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "74ceae25accc0f640a4238283f55f3a9fd181d55398703a4330fb2c46261e6a2"
|
checksum = "64b537c93f87989c212db92a448a0f5eb4f0995e27199bb7687ae94f8b64a7a8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow",
|
"arrow",
|
||||||
"async-trait",
|
"async-trait",
|
||||||
@@ -1025,14 +1009,14 @@ dependencies = [
|
|||||||
"hashbrown 0.13.2",
|
"hashbrown 0.13.2",
|
||||||
"itertools",
|
"itertools",
|
||||||
"log",
|
"log",
|
||||||
"regex-syntax 0.6.29",
|
"regex-syntax",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-physical-expr"
|
name = "datafusion-physical-expr"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "df4cf228b312f2758cb78e93fe3d2dc602345028efdf7cfa5b338cb370d0a347"
|
checksum = "f60ee3f53340fdef36ee54d9e12d446ae2718b1d0196ac581f791d34808ec876"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow",
|
"arrow",
|
||||||
@@ -1052,14 +1036,15 @@ dependencies = [
|
|||||||
"paste",
|
"paste",
|
||||||
"petgraph",
|
"petgraph",
|
||||||
"rand",
|
"rand",
|
||||||
|
"regex",
|
||||||
"uuid",
|
"uuid",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-row"
|
name = "datafusion-row"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b52b486fb3d81bb132e400304be01af5aba0ad6737e3518045bb98944991fe32"
|
checksum = "d58fc64058aa3bcb00077a0d19474a0d584d31dec8c7ac3406868f485f659af9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow",
|
"arrow",
|
||||||
"datafusion-common",
|
"datafusion-common",
|
||||||
@@ -1069,9 +1054,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "datafusion-sql"
|
name = "datafusion-sql"
|
||||||
version = "23.0.0"
|
version = "26.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "773e985c182e41cfd68f7a7b483ab6bfb68beaac241c348cd4b1bf9f9d61b762"
|
checksum = "1531f0314151a34bf6c0a83c7261525688b7c729876f53e7896b8f4ca8f57d07"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow",
|
"arrow",
|
||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
@@ -1482,6 +1467,12 @@ version = "1.0.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
|
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "humantime"
|
||||||
|
version = "2.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hyper"
|
name = "hyper"
|
||||||
version = "0.14.26"
|
version = "0.14.26"
|
||||||
@@ -1645,18 +1636,19 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lance"
|
name = "lance"
|
||||||
version = "0.4.12"
|
version = "0.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fc96cf89139af6f439a0e28ccd04ddf81be795b79fda3105b7a8952fadeb778e"
|
checksum = "84dfe2a2af3e7b079a4743e303617c6ac19f43d212b7d6def8873305266f2bcd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"accelerate-src",
|
|
||||||
"arrow",
|
"arrow",
|
||||||
"arrow-arith",
|
"arrow-arith",
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-buffer",
|
"arrow-buffer",
|
||||||
"arrow-cast",
|
"arrow-cast",
|
||||||
"arrow-data",
|
"arrow-data",
|
||||||
|
"arrow-ipc",
|
||||||
"arrow-ord",
|
"arrow-ord",
|
||||||
|
"arrow-row",
|
||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
"arrow-select",
|
"arrow-select",
|
||||||
"async-recursion",
|
"async-recursion",
|
||||||
@@ -1665,15 +1657,15 @@ dependencies = [
|
|||||||
"aws-credential-types",
|
"aws-credential-types",
|
||||||
"byteorder",
|
"byteorder",
|
||||||
"bytes",
|
"bytes",
|
||||||
"cblas",
|
|
||||||
"chrono",
|
"chrono",
|
||||||
|
"dashmap",
|
||||||
"datafusion",
|
"datafusion",
|
||||||
"futures",
|
"futures",
|
||||||
"lapack",
|
"log",
|
||||||
"lru_time_cache",
|
"lru_time_cache",
|
||||||
"num-traits",
|
"num-traits",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"object_store",
|
"object_store 0.6.1",
|
||||||
"openblas-src",
|
"openblas-src",
|
||||||
"ordered-float 3.7.0",
|
"ordered-float 3.7.0",
|
||||||
"path-absolutize",
|
"path-absolutize",
|
||||||
@@ -1683,7 +1675,9 @@ dependencies = [
|
|||||||
"prost-types",
|
"prost-types",
|
||||||
"rand",
|
"rand",
|
||||||
"reqwest",
|
"reqwest",
|
||||||
|
"roaring",
|
||||||
"shellexpand",
|
"shellexpand",
|
||||||
|
"snafu",
|
||||||
"sqlparser-lance",
|
"sqlparser-lance",
|
||||||
"tokio",
|
"tokio",
|
||||||
"url",
|
"url",
|
||||||
@@ -1691,26 +1685,6 @@ dependencies = [
|
|||||||
"vcpkg",
|
"vcpkg",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lapack"
|
|
||||||
version = "0.19.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ad676a6b4df7e76a9fd80a0c50c619a3948d6105b62a0ab135f064d99c51d207"
|
|
||||||
dependencies = [
|
|
||||||
"lapack-sys",
|
|
||||||
"libc",
|
|
||||||
"num-complex",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lapack-sys"
|
|
||||||
version = "0.14.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "447f56c85fb410a7a3d36701b2153c1018b1d2b908c5fbaf01c1b04fac33bcbe"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
@@ -2055,13 +2029,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "ec9cd6ca25e796a49fa242876d1c4de36a24a6da5258e9f0bc062dbf5e81c53b"
|
checksum = "ec9cd6ca25e796a49fa242876d1c4de36a24a6da5258e9f0bc062dbf5e81c53b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-trait",
|
"async-trait",
|
||||||
"aws-config",
|
"bytes",
|
||||||
"aws-credential-types",
|
"chrono",
|
||||||
"aws-types",
|
"futures",
|
||||||
|
"itertools",
|
||||||
|
"parking_lot",
|
||||||
|
"percent-encoding",
|
||||||
|
"snafu",
|
||||||
|
"tokio",
|
||||||
|
"tracing",
|
||||||
|
"url",
|
||||||
|
"walkdir",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "object_store"
|
||||||
|
version = "0.6.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "27c776db4f332b571958444982ff641d2531417a326ca368995073b639205d58"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
"base64 0.21.0",
|
"base64 0.21.0",
|
||||||
"bytes",
|
"bytes",
|
||||||
"chrono",
|
"chrono",
|
||||||
"futures",
|
"futures",
|
||||||
|
"humantime",
|
||||||
|
"hyper",
|
||||||
"itertools",
|
"itertools",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
"percent-encoding",
|
"percent-encoding",
|
||||||
@@ -2210,9 +2203,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "parquet"
|
name = "parquet"
|
||||||
version = "37.0.0"
|
version = "40.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b5022d98333271f4ca3e87bab760498e61726bf5a6ca919123c80517e20ded29"
|
checksum = "d6a656fcc17e641657c955742c689732684e096f790ff30865d9f8dcc39f7c4a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ahash",
|
"ahash",
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
@@ -2232,6 +2225,7 @@ dependencies = [
|
|||||||
"lz4",
|
"lz4",
|
||||||
"num",
|
"num",
|
||||||
"num-bigint",
|
"num-bigint",
|
||||||
|
"object_store 0.5.6",
|
||||||
"paste",
|
"paste",
|
||||||
"seq-macro",
|
"seq-macro",
|
||||||
"snap",
|
"snap",
|
||||||
@@ -2537,15 +2531,9 @@ checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
"regex-syntax 0.7.1",
|
"regex-syntax",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex-syntax"
|
|
||||||
version = "0.6.29"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.7.1"
|
version = "0.7.1"
|
||||||
@@ -2596,6 +2584,12 @@ dependencies = [
|
|||||||
"winreg",
|
"winreg",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "retain_mut"
|
||||||
|
version = "0.1.7"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8c31b5c4033f8fdde8700e4657be2c497e7288f01515be52168c631e2e4d4086"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ring"
|
name = "ring"
|
||||||
version = "0.16.20"
|
version = "0.16.20"
|
||||||
@@ -2611,6 +2605,17 @@ dependencies = [
|
|||||||
"winapi",
|
"winapi",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "roaring"
|
||||||
|
version = "0.10.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ef0fb5e826a8bde011ecae6a8539dd333884335c57ff0f003fbe27c25bbe8f71"
|
||||||
|
dependencies = [
|
||||||
|
"bytemuck",
|
||||||
|
"byteorder",
|
||||||
|
"retain_mut",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustc_version"
|
name = "rustc_version"
|
||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
@@ -2899,9 +2904,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sqlparser"
|
name = "sqlparser"
|
||||||
version = "0.33.0"
|
version = "0.34.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "355dc4d4b6207ca8a3434fc587db0a8016130a574dbcdbfb93d7f7b5bc5b211a"
|
checksum = "37d3706eefb17039056234df6b566b0014f303f867f2656108334a55b8096f59"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"log",
|
"log",
|
||||||
"sqlparser_derive",
|
"sqlparser_derive",
|
||||||
@@ -2938,6 +2943,9 @@ name = "strum"
|
|||||||
version = "0.24.1"
|
version = "0.24.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f"
|
checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f"
|
||||||
|
dependencies = [
|
||||||
|
"strum_macros",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "strum_macros"
|
name = "strum_macros"
|
||||||
@@ -3356,20 +3364,22 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "vectordb"
|
name = "vectordb"
|
||||||
version = "0.0.1"
|
version = "0.1.8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-data",
|
"arrow-data",
|
||||||
"arrow-schema",
|
"arrow-schema",
|
||||||
"lance",
|
"lance",
|
||||||
|
"object_store 0.5.6",
|
||||||
"rand",
|
"rand",
|
||||||
|
"snafu",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
"tokio",
|
"tokio",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "vectordb-node"
|
name = "vectordb-node"
|
||||||
version = "0.1.0"
|
version = "0.1.8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrow-array",
|
"arrow-array",
|
||||||
"arrow-ipc",
|
"arrow-ipc",
|
||||||
|
|||||||
10
README.md
10
README.md
@@ -10,6 +10,10 @@
|
|||||||
<a href="https://discord.gg/zMM32dvNtd">Discord</a> •
|
<a href="https://discord.gg/zMM32dvNtd">Discord</a> •
|
||||||
<a href="https://twitter.com/lancedb">Twitter</a>
|
<a href="https://twitter.com/lancedb">Twitter</a>
|
||||||
|
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<img max-width="750px" alt="LanceDB Multimodal Search" src="https://github.com/lancedb/lancedb/assets/917119/09c5afc5-7816-4687-bae4-f2ca194426ec">
|
||||||
|
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -23,13 +27,15 @@ The key features of LanceDB include:
|
|||||||
|
|
||||||
* Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).
|
* Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).
|
||||||
|
|
||||||
|
* Support for vector similarity search, full-text search and SQL.
|
||||||
|
|
||||||
* Native Python and Javascript/Typescript support.
|
* Native Python and Javascript/Typescript support.
|
||||||
|
|
||||||
* Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.
|
* Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.
|
||||||
|
|
||||||
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
||||||
|
|
||||||
LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/eto-ai/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
|
LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
@@ -69,4 +75,4 @@ result = table.search([100, 100]).limit(2).to_df()
|
|||||||
|
|
||||||
## Blogs, Tutorials & Videos
|
## Blogs, Tutorials & Videos
|
||||||
* 📈 <a href="https://blog.eto.ai/benchmarking-random-access-in-lance-ed690757a826">2000x better performance with Lance over Parquet</a>
|
* 📈 <a href="https://blog.eto.ai/benchmarking-random-access-in-lance-ed690757a826">2000x better performance with Lance over Parquet</a>
|
||||||
* 🤖 <a href="https://github.com/lancedb/lancedb/blob/main/notebooks/youtube_transcript_search.ipynb">Build a question and answer bot with LanceDB</a>
|
* 🤖 <a href="https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb">Build a question and answer bot with LanceDB</a>
|
||||||
|
|||||||
@@ -1,29 +1,45 @@
|
|||||||
site_name: LanceDB Documentation
|
site_name: LanceDB Docs
|
||||||
|
repo_url: https://github.com/lancedb/lancedb
|
||||||
|
repo_name: lancedb/lancedb
|
||||||
docs_dir: src
|
docs_dir: src
|
||||||
|
|
||||||
theme:
|
theme:
|
||||||
name: "material"
|
name: "material"
|
||||||
|
logo: assets/logo.png
|
||||||
|
favicon: assets/logo.png
|
||||||
features:
|
features:
|
||||||
- content.code.copy
|
- content.code.copy
|
||||||
|
- content.tabs.link
|
||||||
|
icon:
|
||||||
|
repo: fontawesome/brands/github
|
||||||
|
custom_dir: overrides
|
||||||
|
|
||||||
plugins:
|
plugins:
|
||||||
- search
|
- search
|
||||||
|
- autorefs
|
||||||
- mkdocstrings:
|
- mkdocstrings:
|
||||||
handlers:
|
handlers:
|
||||||
python:
|
python:
|
||||||
paths: [../python]
|
paths: [../python]
|
||||||
|
selection:
|
||||||
|
docstring_style: numpy
|
||||||
|
rendering:
|
||||||
|
heading_level: 4
|
||||||
|
show_source: false
|
||||||
|
show_symbol_type_in_heading: true
|
||||||
|
show_signature_annotations: true
|
||||||
|
show_root_heading: true
|
||||||
|
members_order: source
|
||||||
|
import:
|
||||||
|
# for cross references
|
||||||
|
- https://arrow.apache.org/docs/objects.inv
|
||||||
|
- https://pandas.pydata.org/docs/objects.inv
|
||||||
- mkdocs-jupyter
|
- mkdocs-jupyter
|
||||||
|
|
||||||
nav:
|
|
||||||
- Home: index.md
|
|
||||||
- Basics: basic.md
|
|
||||||
- Embeddings: embedding.md
|
|
||||||
- Indexing: ann_indexes.md
|
|
||||||
- Full-text search: fts.md
|
|
||||||
- Integrations: integrations.md
|
|
||||||
- Python API: python.md
|
|
||||||
|
|
||||||
markdown_extensions:
|
markdown_extensions:
|
||||||
|
- admonition
|
||||||
|
- pymdownx.superfences
|
||||||
|
- pymdownx.details
|
||||||
- pymdownx.highlight:
|
- pymdownx.highlight:
|
||||||
anchor_linenums: true
|
anchor_linenums: true
|
||||||
line_spans: __span
|
line_spans: __span
|
||||||
@@ -31,3 +47,29 @@ markdown_extensions:
|
|||||||
- pymdownx.inlinehilite
|
- pymdownx.inlinehilite
|
||||||
- pymdownx.snippets
|
- pymdownx.snippets
|
||||||
- pymdownx.superfences
|
- pymdownx.superfences
|
||||||
|
- pymdownx.tabbed:
|
||||||
|
alternate_style: true
|
||||||
|
|
||||||
|
nav:
|
||||||
|
- Home: index.md
|
||||||
|
- Basics: basic.md
|
||||||
|
- Embeddings: embedding.md
|
||||||
|
- Python full-text search: fts.md
|
||||||
|
- Python integrations: integrations.md
|
||||||
|
- Python examples:
|
||||||
|
- YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
|
||||||
|
- Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
|
||||||
|
- Multimodal search using CLIP: notebooks/multimodal_search.ipynb
|
||||||
|
- Serverless QA Bot with S3 and Lambda: examples/serverless_lancedb_with_s3_and_lambda.md
|
||||||
|
- Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
|
||||||
|
- Javascript examples:
|
||||||
|
- YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
|
||||||
|
- References:
|
||||||
|
- Vector Search: search.md
|
||||||
|
- Indexing: ann_indexes.md
|
||||||
|
- API references:
|
||||||
|
- Python API: python/python.md
|
||||||
|
- Javascript API: javascript/modules.md
|
||||||
|
|
||||||
|
extra_css:
|
||||||
|
- styles/global.css
|
||||||
|
|||||||
176
docs/overrides/partials/header.html
Normal file
176
docs/overrides/partials/header.html
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
<!--
|
||||||
|
Copyright (c) 2016-2023 Martin Donath <martin.donath@squidfunk.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to
|
||||||
|
deal in the Software without restriction, including without limitation the
|
||||||
|
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||||
|
sell copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||||
|
IN THE SOFTWARE.
|
||||||
|
-->
|
||||||
|
|
||||||
|
{% set class = "md-header" %}
|
||||||
|
{% if "navigation.tabs.sticky" in features %}
|
||||||
|
{% set class = class ~ " md-header--shadow md-header--lifted" %}
|
||||||
|
{% elif "navigation.tabs" not in features %}
|
||||||
|
{% set class = class ~ " md-header--shadow" %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Header -->
|
||||||
|
<header class="{{ class }}" data-md-component="header">
|
||||||
|
<nav
|
||||||
|
class="md-header__inner md-grid"
|
||||||
|
aria-label="{{ lang.t('header') }}"
|
||||||
|
>
|
||||||
|
|
||||||
|
<!-- Link to home -->
|
||||||
|
<a
|
||||||
|
href="{{ config.extra.homepage | d(nav.homepage.url, true) | url }}"
|
||||||
|
title="{{ config.site_name | e }}"
|
||||||
|
class="md-header__button md-logo"
|
||||||
|
aria-label="{{ config.site_name }}"
|
||||||
|
data-md-component="logo"
|
||||||
|
>
|
||||||
|
{% include "partials/logo.html" %}
|
||||||
|
</a>
|
||||||
|
|
||||||
|
<!-- Button to open drawer -->
|
||||||
|
<label class="md-header__button md-icon" for="__drawer">
|
||||||
|
{% include ".icons/material/menu" ~ ".svg" %}
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<!-- Header title -->
|
||||||
|
<div class="md-header__title" style="width: auto !important;" data-md-component="header-title">
|
||||||
|
<div class="md-header__ellipsis">
|
||||||
|
<div class="md-header__topic">
|
||||||
|
<span class="md-ellipsis">
|
||||||
|
{{ config.site_name }}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div class="md-header__topic" data-md-component="header-topic">
|
||||||
|
<span class="md-ellipsis">
|
||||||
|
{% if page.meta and page.meta.title %}
|
||||||
|
{{ page.meta.title }}
|
||||||
|
{% else %}
|
||||||
|
{{ page.title }}
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Color palette -->
|
||||||
|
{% if config.theme.palette %}
|
||||||
|
{% if not config.theme.palette is mapping %}
|
||||||
|
<form class="md-header__option" data-md-component="palette">
|
||||||
|
{% for option in config.theme.palette %}
|
||||||
|
{% set scheme = option.scheme | d("default", true) %}
|
||||||
|
{% set primary = option.primary | d("indigo", true) %}
|
||||||
|
{% set accent = option.accent | d("indigo", true) %}
|
||||||
|
<input
|
||||||
|
class="md-option"
|
||||||
|
data-md-color-media="{{ option.media }}"
|
||||||
|
data-md-color-scheme="{{ scheme | replace(' ', '-') }}"
|
||||||
|
data-md-color-primary="{{ primary | replace(' ', '-') }}"
|
||||||
|
data-md-color-accent="{{ accent | replace(' ', '-') }}"
|
||||||
|
{% if option.toggle %}
|
||||||
|
aria-label="{{ option.toggle.name }}"
|
||||||
|
{% else %}
|
||||||
|
aria-hidden="true"
|
||||||
|
{% endif %}
|
||||||
|
type="radio"
|
||||||
|
name="__palette"
|
||||||
|
id="__palette_{{ loop.index }}"
|
||||||
|
/>
|
||||||
|
{% if option.toggle %}
|
||||||
|
<label
|
||||||
|
class="md-header__button md-icon"
|
||||||
|
title="{{ option.toggle.name }}"
|
||||||
|
for="__palette_{{ loop.index0 or loop.length }}"
|
||||||
|
hidden
|
||||||
|
>
|
||||||
|
{% include ".icons/" ~ option.toggle.icon ~ ".svg" %}
|
||||||
|
</label>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</form>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Site language selector -->
|
||||||
|
{% if config.extra.alternate %}
|
||||||
|
<div class="md-header__option">
|
||||||
|
<div class="md-select">
|
||||||
|
{% set icon = config.theme.icon.alternate or "material/translate" %}
|
||||||
|
<button
|
||||||
|
class="md-header__button md-icon"
|
||||||
|
aria-label="{{ lang.t('select.language') }}"
|
||||||
|
>
|
||||||
|
{% include ".icons/" ~ icon ~ ".svg" %}
|
||||||
|
</button>
|
||||||
|
<div class="md-select__inner">
|
||||||
|
<ul class="md-select__list">
|
||||||
|
{% for alt in config.extra.alternate %}
|
||||||
|
<li class="md-select__item">
|
||||||
|
<a
|
||||||
|
href="{{ alt.link | url }}"
|
||||||
|
hreflang="{{ alt.lang }}"
|
||||||
|
class="md-select__link"
|
||||||
|
>
|
||||||
|
{{ alt.name }}
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Button to open search modal -->
|
||||||
|
{% if "material/search" in config.plugins %}
|
||||||
|
<label class="md-header__button md-icon" for="__search">
|
||||||
|
{% include ".icons/material/magnify.svg" %}
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<!-- Search interface -->
|
||||||
|
{% include "partials/search.html" %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<div style="margin-left: 10px; margin-right: 5px;">
|
||||||
|
<a href="https://discord.com/invite/zMM32dvNtd" target="_blank" rel="noopener noreferrer">
|
||||||
|
<svg fill="#FFFFFF" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 50 50" width="25px" height="25px"><path d="M 41.625 10.769531 C 37.644531 7.566406 31.347656 7.023438 31.078125 7.003906 C 30.660156 6.96875 30.261719 7.203125 30.089844 7.589844 C 30.074219 7.613281 29.9375 7.929688 29.785156 8.421875 C 32.417969 8.867188 35.652344 9.761719 38.578125 11.578125 C 39.046875 11.867188 39.191406 12.484375 38.902344 12.953125 C 38.710938 13.261719 38.386719 13.429688 38.050781 13.429688 C 37.871094 13.429688 37.6875 13.378906 37.523438 13.277344 C 32.492188 10.15625 26.210938 10 25 10 C 23.789063 10 17.503906 10.15625 12.476563 13.277344 C 12.007813 13.570313 11.390625 13.425781 11.101563 12.957031 C 10.808594 12.484375 10.953125 11.871094 11.421875 11.578125 C 14.347656 9.765625 17.582031 8.867188 20.214844 8.425781 C 20.0625 7.929688 19.925781 7.617188 19.914063 7.589844 C 19.738281 7.203125 19.34375 6.960938 18.921875 7.003906 C 18.652344 7.023438 12.355469 7.566406 8.320313 10.8125 C 6.214844 12.761719 2 24.152344 2 34 C 2 34.175781 2.046875 34.34375 2.132813 34.496094 C 5.039063 39.605469 12.972656 40.941406 14.78125 41 C 14.789063 41 14.800781 41 14.8125 41 C 15.132813 41 15.433594 40.847656 15.621094 40.589844 L 17.449219 38.074219 C 12.515625 36.800781 9.996094 34.636719 9.851563 34.507813 C 9.4375 34.144531 9.398438 33.511719 9.765625 33.097656 C 10.128906 32.683594 10.761719 32.644531 11.175781 33.007813 C 11.234375 33.0625 15.875 37 25 37 C 34.140625 37 38.78125 33.046875 38.828125 33.007813 C 39.242188 32.648438 39.871094 32.683594 40.238281 33.101563 C 40.601563 33.515625 40.5625 34.144531 40.148438 34.507813 C 40.003906 34.636719 37.484375 36.800781 32.550781 38.074219 L 34.378906 40.589844 C 34.566406 40.847656 34.867188 41 35.1875 41 C 35.199219 41 35.210938 41 35.21875 41 C 37.027344 40.941406 44.960938 39.605469 47.867188 34.496094 C 47.953125 34.34375 48 34.175781 48 34 C 48 24.152344 43.785156 12.761719 41.625 10.769531 Z M 18.5 30 C 16.566406 30 15 
28.210938 15 26 C 15 23.789063 16.566406 22 18.5 22 C 20.433594 22 22 23.789063 22 26 C 22 28.210938 20.433594 30 18.5 30 Z M 31.5 30 C 29.566406 30 28 28.210938 28 26 C 28 23.789063 29.566406 22 31.5 22 C 33.433594 22 35 23.789063 35 26 C 35 28.210938 33.433594 30 31.5 30 Z"/></svg>
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
<div style="margin-left: 5px; margin-right: 5px;">
|
||||||
|
<a href="https://twitter.com/lancedb" target="_blank" rel="noopener noreferrer">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0,0,256,256" width="25px" height="25px" fill-rule="nonzero"><g fill-opacity="0" fill="#ffffff" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><path d="M0,256v-256h256v256z" id="bgRectangle"></path></g><g fill="#ffffff" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal"><g transform="scale(4,4)"><path d="M57,17.114c-1.32,1.973 -2.991,3.707 -4.916,5.097c0.018,0.423 0.028,0.847 0.028,1.274c0,13.013 -9.902,28.018 -28.016,28.018c-5.562,0 -12.81,-1.948 -15.095,-4.423c0.772,0.092 1.556,0.138 2.35,0.138c4.615,0 8.861,-1.575 12.23,-4.216c-4.309,-0.079 -7.946,-2.928 -9.199,-6.84c1.96,0.308 4.447,-0.17 4.447,-0.17c0,0 -7.7,-1.322 -7.899,-9.779c2.226,1.291 4.46,1.231 4.46,1.231c0,0 -4.441,-2.734 -4.379,-8.195c0.037,-3.221 1.331,-4.953 1.331,-4.953c8.414,10.361 20.298,10.29 20.298,10.29c0,0 -0.255,-1.471 -0.255,-2.243c0,-5.437 4.408,-9.847 9.847,-9.847c2.832,0 5.391,1.196 7.187,3.111c2.245,-0.443 4.353,-1.263 6.255,-2.391c-0.859,3.44 -4.329,5.448 -4.329,5.448c0,0 2.969,-0.329 5.655,-1.55z"></path></g></g></svg>
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Repository information -->
|
||||||
|
{% if config.repo_url %}
|
||||||
|
<div class="md-header__source" style="margin-left: -5px !important;">
|
||||||
|
{% include "partials/source.html" %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<!-- Navigation tabs (sticky) -->
|
||||||
|
{% if "navigation.tabs.sticky" in features %}
|
||||||
|
{% if "navigation.tabs" in features %}
|
||||||
|
{% include "partials/tabs.html" %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</header>
|
||||||
@@ -12,29 +12,43 @@ In the future we will look to automatically create and configure the ANN index.
|
|||||||
|
|
||||||
## Creating an ANN Index
|
## Creating an ANN Index
|
||||||
|
|
||||||
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
=== "Python"
|
||||||
|
Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import lancedb
|
import lancedb
|
||||||
import numpy as np
|
import numpy as np
|
||||||
uri = "~/.lancedb"
|
uri = "data/sample-lancedb"
|
||||||
db = lancedb.connect(uri)
|
db = lancedb.connect(uri)
|
||||||
|
|
||||||
# Create 10,000 sample vectors
|
# Create 10,000 sample vectors
|
||||||
data = [{"vector": row, "item": f"item {i}"}
|
data = [{"vector": row, "item": f"item {i}"}
|
||||||
for i, row in enumerate(np.random.random((10_000, 768)).astype('float32'))]
|
for i, row in enumerate(np.random.random((10_000, 768)).astype('float32'))]
|
||||||
|
|
||||||
# Add the vectors to a table
|
# Add the vectors to a table
|
||||||
tbl = db.create_table("my_vectors", data=data)
|
tbl = db.create_table("my_vectors", data=data)
|
||||||
|
|
||||||
# Create and train the index - you need to have enough data in the table for an effective training step
|
# Create and train the index - you need to have enough data in the table for an effective training step
|
||||||
tbl.create_index(num_partitions=256, num_sub_vectors=96)
|
tbl.create_index(num_partitions=256, num_sub_vectors=96)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const vectordb = require('vectordb')
|
||||||
|
const db = await vectordb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
let data = []
|
||||||
|
for (let i = 0; i < 10_000; i++) {
|
||||||
|
data.push({vector: Array(1536).fill(i), id: `${i}`, content: "", longId: `${i}`},)
|
||||||
|
}
|
||||||
|
const table = await db.createTable('vectors', data)
|
||||||
|
await table.create_index({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
|
||||||
|
```
|
||||||
|
|
||||||
Since `create_index` has a training step, it can take a few minutes to finish for large tables. You can control the index
|
Since `create_index` has a training step, it can take a few minutes to finish for large tables. You can control the index
|
||||||
creation by providing the following parameters:
|
creation by providing the following parameters:
|
||||||
|
|
||||||
- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support cosine distance.
|
- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support "cosine" distance.
|
||||||
- **num_partitions** (default: 256): The number of partitions of the index. The number of partitions should be configured so each partition has 3-5K vectors. For example, a table
|
- **num_partitions** (default: 256): The number of partitions of the index. The number of partitions should be configured so each partition has 3-5K vectors. For example, a table
|
||||||
with ~1M vectors should use 256 partitions. You can specify arbitrary number of partitions but powers of 2 is most conventional.
|
with ~1M vectors should use 256 partitions. You can specify arbitrary number of partitions but powers of 2 is most conventional.
|
||||||
A higher number leads to faster queries, but it makes index generation slower.
|
A higher number leads to faster queries, but it makes index generation slower.
|
||||||
@@ -53,22 +67,32 @@ There are a couple of parameters that can be used to fine-tune the search:
|
|||||||
e.g., for 1M vectors divided up into 256 partitions, nprobes should be set to ~20-40.<br/>
|
e.g., for 1M vectors divided up into 256 partitions, nprobes should be set to ~20-40.<br/>
|
||||||
Note: nprobes is only applicable if an ANN index is present. If specified on a table without an ANN index, it is ignored.
|
Note: nprobes is only applicable if an ANN index is present. If specified on a table without an ANN index, it is ignored.
|
||||||
- **refine_factor** (default: None): Refine the results by reading extra elements and re-ranking them in memory.<br/>
|
- **refine_factor** (default: None): Refine the results by reading extra elements and re-ranking them in memory.<br/>
|
||||||
A higher number makes search more accurate but also slower. If you find the recall is less than idea, try refine_factor=10 to start.<br/>
|
A higher number makes search more accurate but also slower. If you find the recall is less than ideal, try refine_factor=10 to start.<br/>
|
||||||
e.g., for 1M vectors divided into 256 partitions, if you're looking for top 20, then refine_factor=200 reranks the whole partition.<br/>
|
e.g., for 1M vectors divided into 256 partitions, if you're looking for top 20, then refine_factor=200 reranks the whole partition.<br/>
|
||||||
Note: refine_factor is only applicable if an ANN index is present. If specified on a table without an ANN index, it is ignored.
|
Note: refine_factor is only applicable if an ANN index is present. If specified on a table without an ANN index, it is ignored.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
```python
|
```python
|
||||||
tbl.search(np.random.random((768))) \
|
tbl.search(np.random.random((768))) \
|
||||||
.limit(2) \
|
.limit(2) \
|
||||||
.nprobes(20) \
|
.nprobes(20) \
|
||||||
.refine_factor(10) \
|
.refine_factor(10) \
|
||||||
.to_df()
|
.to_df()
|
||||||
|
|
||||||
vector item score
|
vector item score
|
||||||
0 [0.44949695, 0.8444449, 0.06281311, 0.23338133... item 1141 103.575333
|
0 [0.44949695, 0.8444449, 0.06281311, 0.23338133... item 1141 103.575333
|
||||||
1 [0.48587373, 0.269207, 0.15095535, 0.65531915,... item 3953 108.393867
|
1 [0.48587373, 0.269207, 0.15095535, 0.65531915,... item 3953 108.393867
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const results = await table
|
||||||
|
.search(Array(768).fill(1.2))
|
||||||
|
.limit(2)
|
||||||
|
.nprobes(20)
|
||||||
|
.refineFactor(10)
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
The search will return the data requested in addition to the score of each item.
|
The search will return the data requested in addition to the score of each item.
|
||||||
|
|
||||||
@@ -78,18 +102,36 @@ The search will return the data requested in addition to the score of each item.
|
|||||||
|
|
||||||
You can further filter the elements returned by a search using a where clause.
|
You can further filter the elements returned by a search using a where clause.
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
tbl.search(np.random.random((768))).where("item != 'item 1141'").to_df()
|
```python
|
||||||
```
|
tbl.search(np.random.random((768))).where("item != 'item 1141'").to_df()
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const results = await table
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.where("item != 'item 1141'")
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
### Projections (select clause)
|
### Projections (select clause)
|
||||||
|
|
||||||
You can select the columns returned by the query using a select clause.
|
You can select the columns returned by the query using a select clause.
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
tbl.search(np.random.random((768))).select(["vector"]).to_df()
|
```python
|
||||||
vector score
|
tbl.search(np.random.random((768))).select(["vector"]).to_df()
|
||||||
0 [0.30928212, 0.022668175, 0.1756372, 0.4911822... 93.971092
|
vector score
|
||||||
1 [0.2525465, 0.01723831, 0.261568, 0.002007689,... 95.173485
|
0 [0.30928212, 0.022668175, 0.1756372, 0.4911822... 93.971092
|
||||||
...
|
1 [0.2525465, 0.01723831, 0.261568, 0.002007689,... 95.173485
|
||||||
```
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const results = await table
|
||||||
|
.search(Array(1536).fill(1.2))
|
||||||
|
.select(["id"])
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|||||||
BIN
docs/src/assets/lancedb_embedded_explanation.png
Normal file
BIN
docs/src/assets/lancedb_embedded_explanation.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 190 KiB |
BIN
docs/src/assets/lancedb_local_data_explanation.png
Normal file
BIN
docs/src/assets/lancedb_local_data_explanation.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 101 KiB |
BIN
docs/src/assets/logo.png
Normal file
BIN
docs/src/assets/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.7 KiB |
@@ -1,74 +1,142 @@
|
|||||||
# Basic LanceDB Functionality
|
# Basic LanceDB Functionality
|
||||||
|
|
||||||
|
We'll cover the basics of using LanceDB on your local machine in this section.
|
||||||
|
|
||||||
|
??? info "LanceDB runs embedded on your backend application, so there is no need to run a separate server."
|
||||||
|
|
||||||
|
<img src="../assets/lancedb_embedded_explanation.png" width="650px" />
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
```shell
|
||||||
|
pip install lancedb
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```shell
|
||||||
|
npm install vectordb
|
||||||
|
```
|
||||||
|
|
||||||
## How to connect to a database
|
## How to connect to a database
|
||||||
|
|
||||||
In local mode, LanceDB stores data in a directory on your local machine. To connect to a local database, you can use the following code:
|
=== "Python"
|
||||||
```python
|
```python
|
||||||
import lancedb
|
import lancedb
|
||||||
uri = "~/.lancedb"
|
uri = "~/.lancedb"
|
||||||
db = lancedb.connect(uri)
|
db = lancedb.connect(uri)
|
||||||
```
|
```
|
||||||
|
|
||||||
LanceDB will create the directory if it doesn't exist (including parent directories).
|
LanceDB will create the directory if it doesn't exist (including parent directories).
|
||||||
|
|
||||||
If you need a reminder of the uri, use the `db.uri` property.
|
If you need a reminder of the uri, use the `db.uri` property.
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const lancedb = require("vectordb");
|
||||||
|
|
||||||
|
const uri = "~/.lancedb";
|
||||||
|
const db = await lancedb.connect(uri);
|
||||||
|
```
|
||||||
|
|
||||||
|
LanceDB will create the directory if it doesn't exist (including parent directories).
|
||||||
|
|
||||||
|
If you need a reminder of the uri, you can call `db.uri()`.
|
||||||
|
|
||||||
## How to create a table
|
## How to create a table
|
||||||
|
|
||||||
To create a table, you can use the following code:
|
=== "Python"
|
||||||
```python
|
```python
|
||||||
tbl = db.create_table("my_table",
|
tbl = db.create_table("my_table",
|
||||||
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
||||||
```
|
```
|
||||||
|
|
||||||
Under the hood, LanceDB is converting the input data into an Apache Arrow table
|
If the table already exists, LanceDB will raise an error by default.
|
||||||
and persisting it to disk in [Lance format](https://github.com/lancedb/lance).
|
If you want to overwrite the table, you can pass in `mode="overwrite"`
|
||||||
|
to the `create_table` method.
|
||||||
|
|
||||||
If the table already exists, LanceDB will raise an error by default.
|
You can also pass in a pandas DataFrame directly:
|
||||||
If you want to overwrite the table, you can pass in `mode="overwrite"`
|
```python
|
||||||
to the `create_table` method.
|
import pandas as pd
|
||||||
|
df = pd.DataFrame([{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
||||||
|
tbl = db.create_table("table_from_df", data=df)
|
||||||
|
```
|
||||||
|
|
||||||
You can also pass in a pandas DataFrame directly:
|
=== "Javascript"
|
||||||
```python
|
```javascript
|
||||||
import pandas as pd
|
const tb = await db.createTable("my_table",
|
||||||
df = pd.DataFrame([{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
||||||
tbl = db.create_table("table_from_df", data=df)
|
```
|
||||||
```
|
|
||||||
|
!!! warning
|
||||||
|
|
||||||
|
If the table already exists, LanceDB will raise an error by default.
|
||||||
|
If you want to overwrite the table, you can pass in `mode="overwrite"`
|
||||||
|
to the `createTable` function.
|
||||||
|
|
||||||
|
??? info "Under the hood, LanceDB is converting the input data into an Apache Arrow table and persisting it to disk in [Lance format](https://www.github.com/lancedb/lance)."
|
||||||
|
|
||||||
## How to open an existing table
|
## How to open an existing table
|
||||||
|
|
||||||
Once created, you can open a table using the following code:
|
Once created, you can open a table using the following code:
|
||||||
```python
|
|
||||||
tbl = db.open_table("my_table")
|
|
||||||
```
|
|
||||||
|
|
||||||
If you forget the name of your table, you can always get a listing of all table names:
|
=== "Python"
|
||||||
|
```python
|
||||||
|
tbl = db.open_table("my_table")
|
||||||
|
```
|
||||||
|
|
||||||
```python
|
If you forget the name of your table, you can always get a listing of all table names:
|
||||||
db.table_names()
|
|
||||||
```
|
```python
|
||||||
|
print(db.table_names())
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const tbl = await db.openTable("my_table");
|
||||||
|
```
|
||||||
|
|
||||||
|
If you forget the name of your table, you can always get a listing of all table names:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
console.log(db.tableNames());
|
||||||
|
```
|
||||||
|
|
||||||
## How to add data to a table
|
## How to add data to a table
|
||||||
|
|
||||||
After a table has been created, you can always add more data to it using
|
After a table has been created, you can always add more data to it using
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
df = pd.DataFrame([{"vector": [1.3, 1.4], "item": "fizz", "price": 100.0},
|
```python
|
||||||
{"vector": [9.5, 56.2], "item": "buzz", "price": 200.0}])
|
df = pd.DataFrame([{"vector": [1.3, 1.4], "item": "fizz", "price": 100.0},
|
||||||
tbl.add(df)
|
{"vector": [9.5, 56.2], "item": "buzz", "price": 200.0}])
|
||||||
```
|
tbl.add(df)
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
await tbl.add([{vector: [1.3, 1.4], item: "fizz", price: 100.0},
|
||||||
|
{vector: [9.5, 56.2], item: "buzz", price: 200.0}])
|
||||||
|
```
|
||||||
|
|
||||||
## How to search for (approximate) nearest neighbors
|
## How to search for (approximate) nearest neighbors
|
||||||
|
|
||||||
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
Once you've embedded the query, you can find its nearest neighbors using the following code:
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
tbl.search([100, 100]).limit(2).to_df()
|
```python
|
||||||
```
|
tbl.search([100, 100]).limit(2).to_df()
|
||||||
|
```
|
||||||
|
|
||||||
This returns a pandas DataFrame with the results.
|
This returns a pandas DataFrame with the results.
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const query = await tbl.search([100, 100]).limit(2).execute();
|
||||||
|
```
|
||||||
|
|
||||||
## What's next
|
## What's next
|
||||||
|
|
||||||
|
|||||||
@@ -25,55 +25,88 @@ def embed_func(batch):
|
|||||||
return [model.encode(sentence) for sentence in batch]
|
return [model.encode(sentence) for sentence in batch]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Please note that currently HuggingFace is only supported in the Python SDK.
|
||||||
|
|
||||||
### OpenAI example
|
### OpenAI example
|
||||||
|
|
||||||
You can also use an external API like OpenAI to generate embeddings
|
You can also use an external API like OpenAI to generate embeddings
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
import openai
|
```python
|
||||||
import os
|
import openai
|
||||||
|
import os
|
||||||
|
|
||||||
# Configuring the environment variable OPENAI_API_KEY
|
# Configuring the environment variable OPENAI_API_KEY
|
||||||
if "OPENAI_API_KEY" not in os.environ:
|
if "OPENAI_API_KEY" not in os.environ:
|
||||||
# OR set the key here as a variable
|
# OR set the key here as a variable
|
||||||
openai.api_key = "sk-..."
|
openai.api_key = "sk-..."
|
||||||
|
|
||||||
# verify that the API key is working
|
# verify that the API key is working
|
||||||
assert len(openai.Model.list()["data"]) > 0
|
assert len(openai.Model.list()["data"]) > 0
|
||||||
|
|
||||||
def embed_func(c):
|
def embed_func(c):
|
||||||
rs = openai.Embedding.create(input=c, engine="text-embedding-ada-002")
|
rs = openai.Embedding.create(input=c, engine="text-embedding-ada-002")
|
||||||
return [record["embedding"] for record in rs["data"]]
|
return [record["embedding"] for record in rs["data"]]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const lancedb = require("vectordb");
|
||||||
|
|
||||||
|
// You need to provide an OpenAI API key
|
||||||
|
const apiKey = "sk-..."
|
||||||
|
// The embedding function will create embeddings for the 'text' column
|
||||||
|
const embedding = new lancedb.OpenAIEmbeddingFunction('text', apiKey)
|
||||||
|
```
|
||||||
|
|
||||||
## Applying an embedding function
|
## Applying an embedding function
|
||||||
|
|
||||||
Using an embedding function, you can apply it to raw data
|
=== "Python"
|
||||||
to generate embeddings for each row.
|
Using an embedding function, you can apply it to raw data
|
||||||
|
to generate embeddings for each row.
|
||||||
|
|
||||||
Say if you have a pandas DataFrame with a `text` column that you want to be embedded,
|
Say if you have a pandas DataFrame with a `text` column that you want to be embedded,
|
||||||
you can use the [with_embeddings](https://lancedb.github.io/lancedb/python/#lancedb.embeddings.with_embeddings)
|
you can use the [with_embeddings](https://lancedb.github.io/lancedb/python/#lancedb.embeddings.with_embeddings)
|
||||||
function to generate embeddings and create a combined pyarrow table:
|
function to generate embeddings and create a combined pyarrow table:
|
||||||
|
|
||||||
```python
|
|
||||||
import pandas as pd
|
|
||||||
from lancedb.embeddings import with_embeddings
|
|
||||||
|
|
||||||
df = pd.DataFrame([{"text": "pepperoni"},
|
```python
|
||||||
{"text": "pineapple"}])
|
import pandas as pd
|
||||||
data = with_embeddings(embed_func, df)
|
from lancedb.embeddings import with_embeddings
|
||||||
|
|
||||||
# The output is used to create / append to a table
|
df = pd.DataFrame([{"text": "pepperoni"},
|
||||||
# db.create_table("my_table", data=data)
|
{"text": "pineapple"}])
|
||||||
```
|
data = with_embeddings(embed_func, df)
|
||||||
|
|
||||||
If your data is in a different column, you can specify the `column` kwarg to `with_embeddings`.
|
# The output is used to create / append to a table
|
||||||
|
# db.create_table("my_table", data=data)
|
||||||
|
```
|
||||||
|
|
||||||
By default, LanceDB calls the function with batches of 1000 rows. This can be configured
|
If your data is in a different column, you can specify the `column` kwarg to `with_embeddings`.
|
||||||
using the `batch_size` parameter to `with_embeddings`.
|
|
||||||
|
By default, LanceDB calls the function with batches of 1000 rows. This can be configured
|
||||||
|
using the `batch_size` parameter to `with_embeddings`.
|
||||||
|
|
||||||
|
LanceDB automatically wraps the function with retry and rate-limit logic to ensure the OpenAI
|
||||||
|
API call is reliable.
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
Using an embedding function, you can apply it to raw data
|
||||||
|
to generate embeddings for each row.
|
||||||
|
|
||||||
|
You can just pass the embedding function created previously and LanceDB will automatically generate
|
||||||
|
embeddings for your data.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const db = await lancedb.connect("/tmp/lancedb");
|
||||||
|
const data = [
|
||||||
|
{ text: 'pepperoni' },
|
||||||
|
{ text: 'pineapple' }
|
||||||
|
]
|
||||||
|
|
||||||
|
const table = await db.createTable('vectors', data, embedding)
|
||||||
|
```
|
||||||
|
|
||||||
LanceDB automatically wraps the function with retry and rate-limit logic to ensure the OpenAI
|
|
||||||
API call is reliable.
|
|
||||||
|
|
||||||
## Searching with an embedding function
|
## Searching with an embedding function
|
||||||
|
|
||||||
@@ -81,13 +114,25 @@ At inference time, you also need the same embedding function to embed your query
|
|||||||
It's important that you use the same model / function otherwise the embedding vectors don't
|
It's important that you use the same model / function otherwise the embedding vectors don't
|
||||||
belong in the same latent space and your results will be nonsensical.
|
belong in the same latent space and your results will be nonsensical.
|
||||||
|
|
||||||
```python
|
=== "Python"
|
||||||
query = "What's the best pizza topping?"
|
```python
|
||||||
query_vector = embed_func([query])[0]
|
query = "What's the best pizza topping?"
|
||||||
tbl.search(query_vector).limit(10).to_df()
|
query_vector = embed_func([query])[0]
|
||||||
```
|
tbl.search(query_vector).limit(10).to_df()
|
||||||
|
```
|
||||||
|
|
||||||
|
The above snippet returns a pandas DataFrame with the 10 closest vectors to the query.
|
||||||
|
|
||||||
|
=== "Javascript"
|
||||||
|
```javascript
|
||||||
|
const results = await table
|
||||||
|
.search("What's the best pizza topping?")
|
||||||
|
.limit(10)
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
|
The above snippet returns an array of records with the 10 closest vectors to the query.
|
||||||
|
|
||||||
The above snippet returns a pandas DataFrame with the 10 closest vectors to the query.
|
|
||||||
|
|
||||||
## Roadmap
|
## Roadmap
|
||||||
|
|
||||||
|
|||||||
@@ -4,4 +4,4 @@
|
|||||||
|
|
||||||
<img id="splash" width="400" alt="langchain" src="https://user-images.githubusercontent.com/917119/236580868-61a246a9-e587-4c2b-8ae5-6fe5f7b7e81e.png">
|
<img id="splash" width="400" alt="langchain" src="https://user-images.githubusercontent.com/917119/236580868-61a246a9-e587-4c2b-8ae5-6fe5f7b7e81e.png">
|
||||||
|
|
||||||
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/notebooks/code_qa_bot.ipynb)
|
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/code_qa_bot.ipynb)
|
||||||
120
docs/src/examples/modal_langchain.py
Normal file
120
docs/src/examples/modal_langchain.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
import pickle
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import zipfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from langchain.chains import RetrievalQA
|
||||||
|
from langchain.document_loaders import UnstructuredHTMLLoader
|
||||||
|
from langchain.embeddings import OpenAIEmbeddings
|
||||||
|
from langchain.llms import OpenAI
|
||||||
|
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
||||||
|
from langchain.vectorstores import LanceDB
|
||||||
|
from modal import Image, Secret, Stub, web_endpoint
|
||||||
|
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
lancedb_image = Image.debian_slim().pip_install(
|
||||||
|
"lancedb", "langchain", "openai", "pandas", "tiktoken", "unstructured", "tabulate"
|
||||||
|
)
|
||||||
|
|
||||||
|
stub = Stub(
|
||||||
|
name="example-langchain-lancedb",
|
||||||
|
image=lancedb_image,
|
||||||
|
secrets=[Secret.from_name("my-openai-secret")],
|
||||||
|
)
|
||||||
|
|
||||||
|
docsearch = None
|
||||||
|
docs_path = Path("docs.pkl")
|
||||||
|
db_path = Path("lancedb")
|
||||||
|
|
||||||
|
|
||||||
|
def get_document_title(document):
|
||||||
|
m = str(document.metadata["source"])
|
||||||
|
title = re.findall("pandas.documentation(.*).html", m)
|
||||||
|
if title[0] is not None:
|
||||||
|
return title[0]
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def download_docs():
|
||||||
|
pandas_docs = requests.get(
|
||||||
|
"https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip"
|
||||||
|
)
|
||||||
|
with open(Path("pandas.documentation.zip"), "wb") as f:
|
||||||
|
f.write(pandas_docs.content)
|
||||||
|
|
||||||
|
file = zipfile.ZipFile(Path("pandas.documentation.zip"))
|
||||||
|
file.extractall(path=Path("pandas_docs"))
|
||||||
|
|
||||||
|
|
||||||
|
def store_docs():
|
||||||
|
docs = []
|
||||||
|
|
||||||
|
if not docs_path.exists():
|
||||||
|
for p in Path("pandas_docs/pandas.documentation").rglob("*.html"):
|
||||||
|
if p.is_dir():
|
||||||
|
continue
|
||||||
|
loader = UnstructuredHTMLLoader(p)
|
||||||
|
raw_document = loader.load()
|
||||||
|
|
||||||
|
m = {}
|
||||||
|
m["title"] = get_document_title(raw_document[0])
|
||||||
|
m["version"] = "2.0rc0"
|
||||||
|
raw_document[0].metadata = raw_document[0].metadata | m
|
||||||
|
raw_document[0].metadata["source"] = str(raw_document[0].metadata["source"])
|
||||||
|
docs = docs + raw_document
|
||||||
|
|
||||||
|
with docs_path.open("wb") as fh:
|
||||||
|
pickle.dump(docs, fh)
|
||||||
|
else:
|
||||||
|
with docs_path.open("rb") as fh:
|
||||||
|
docs = pickle.load(fh)
|
||||||
|
|
||||||
|
return docs
|
||||||
|
|
||||||
|
|
||||||
|
def qanda_langchain(query):
|
||||||
|
download_docs()
|
||||||
|
docs = store_docs()
|
||||||
|
|
||||||
|
text_splitter = RecursiveCharacterTextSplitter(
|
||||||
|
chunk_size=1000,
|
||||||
|
chunk_overlap=200,
|
||||||
|
)
|
||||||
|
documents = text_splitter.split_documents(docs)
|
||||||
|
embeddings = OpenAIEmbeddings()
|
||||||
|
|
||||||
|
db = lancedb.connect(db_path)
|
||||||
|
table = db.create_table(
|
||||||
|
"pandas_docs",
|
||||||
|
data=[
|
||||||
|
{
|
||||||
|
"vector": embeddings.embed_query("Hello World"),
|
||||||
|
"text": "Hello World",
|
||||||
|
"id": "1",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
mode="overwrite",
|
||||||
|
)
|
||||||
|
docsearch = LanceDB.from_documents(documents, embeddings, connection=table)
|
||||||
|
qa = RetrievalQA.from_chain_type(
|
||||||
|
llm=OpenAI(), chain_type="stuff", retriever=docsearch.as_retriever()
|
||||||
|
)
|
||||||
|
return qa.run(query)
|
||||||
|
|
||||||
|
|
||||||
|
@stub.function()
|
||||||
|
@web_endpoint(method="GET")
|
||||||
|
def web(query: str):
|
||||||
|
answer = qanda_langchain(query)
|
||||||
|
return {
|
||||||
|
"answer": answer,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@stub.function()
|
||||||
|
def cli(query: str):
|
||||||
|
answer = qanda_langchain(query)
|
||||||
|
print(answer)
|
||||||
7
docs/src/examples/multimodal_search.md
Normal file
7
docs/src/examples/multimodal_search.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Image multimodal search
|
||||||
|
|
||||||
|
## Search through an image dataset using natural language, full text and SQL
|
||||||
|
|
||||||
|
<img id="splash" width="400" alt="multimodal search" src="https://github.com/lancedb/lancedb/assets/917119/993a7c9f-be01-449d-942e-1ce1d4ed63af">
|
||||||
|
|
||||||
|
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/multimodal_search.ipynb)
|
||||||
@@ -1,99 +0,0 @@
|
|||||||
# YouTube transcript QA bot with NodeJS
|
|
||||||
|
|
||||||
## use LanceDB's Javascript API and OpenAI to build a QA bot for YouTube transcripts
|
|
||||||
|
|
||||||
<img id="splash" width="400" alt="nodejs" src="https://github.com/lancedb/lancedb/assets/917119/3a140e75-bf8e-438a-a1e4-af14a72bcf98">
|
|
||||||
|
|
||||||
This Q&A bot will allow you to search through youtube transcripts using natural language! We'll introduce how you can use LanceDB's Javascript API to store and manage your data easily.
|
|
||||||
|
|
||||||
For this example we're using a HuggingFace dataset that contains YouTube transcriptions: `jamescalam/youtube-transcriptions`, to make it easier, we've converted it to a LanceDB `db` already, which you can download and put in a working directory:
|
|
||||||
|
|
||||||
```wget -c https://eto-public.s3.us-west-2.amazonaws.com/lancedb_demo.tar.gz -O - | tar -xz -C .```
|
|
||||||
|
|
||||||
Now, we'll create a simple app that can:
|
|
||||||
1. Take a text based query and search for contexts in our corpus, using embeddings generated from the OpenAI Embedding API.
|
|
||||||
2. Create a prompt with the contexts, and call the OpenAI Completion API to answer the text based query.
|
|
||||||
|
|
||||||
Dependencies and setup of OpenAI API:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const lancedb = require("vectordb");
|
|
||||||
const { Configuration, OpenAIApi } = require("openai");
|
|
||||||
|
|
||||||
const configuration = new Configuration({
|
|
||||||
apiKey: process.env.OPENAI_API_KEY,
|
|
||||||
});
|
|
||||||
const openai = new OpenAIApi(configuration);
|
|
||||||
```
|
|
||||||
|
|
||||||
First, let's set our question and the context amount. The context amount will be used to query similar documents in our corpus.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const QUESTION = "who was the 12th person on the moon and when did they land?";
|
|
||||||
const CONTEXT_AMOUNT = 3;
|
|
||||||
```
|
|
||||||
|
|
||||||
Now, let's generate an embedding from this question:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const embeddingResponse = await openai.createEmbedding({
|
|
||||||
model: "text-embedding-ada-002",
|
|
||||||
input: QUESTION,
|
|
||||||
});
|
|
||||||
|
|
||||||
const embedding = embeddingResponse.data["data"][0]["embedding"];
|
|
||||||
```
|
|
||||||
|
|
||||||
Once we have the embedding, we can connect to LanceDB (using the database we downloaded earlier), and search through the chatbot table.
|
|
||||||
We'll extract 3 similar documents found.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const db = await lancedb.connect('./lancedb');
|
|
||||||
const tbl = await db.openTable('chatbot');
|
|
||||||
const query = tbl.search(embedding);
|
|
||||||
query.limit = CONTEXT_AMOUNT;
|
|
||||||
const context = await query.execute();
|
|
||||||
```
|
|
||||||
|
|
||||||
Let's combine the context together so we can pass it into our prompt:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
for (let i = 1; i < context.length; i++) {
|
|
||||||
context[0]["text"] += " " + context[i]["text"];
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Lastly, let's construct the prompt. You could play around with this to create more accurate/better prompts to yield results.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const prompt = "Answer the question based on the context below.\n\n" +
|
|
||||||
"Context:\n" +
|
|
||||||
`${context[0]["text"]}\n` +
|
|
||||||
`\n\nQuestion: ${QUESTION}\nAnswer:`;
|
|
||||||
```
|
|
||||||
|
|
||||||
We pass the prompt, along with the context, to the completion API.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const completion = await openai.createCompletion({
|
|
||||||
model: "text-davinci-003",
|
|
||||||
prompt,
|
|
||||||
temperature: 0,
|
|
||||||
max_tokens: 400,
|
|
||||||
top_p: 1,
|
|
||||||
frequency_penalty: 0,
|
|
||||||
presence_penalty: 0,
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
And that's it!
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
console.log(completion.data.choices[0].text);
|
|
||||||
```
|
|
||||||
|
|
||||||
The response is (which is non deterministic):
|
|
||||||
|
|
||||||
```
|
|
||||||
The 12th person on the moon was Harrison Schmitt and he landed on December 11, 1972.
|
|
||||||
```
|
|
||||||
166
docs/src/examples/serverless_qa_bot_with_modal_and_langchain.md
Normal file
166
docs/src/examples/serverless_qa_bot_with_modal_and_langchain.md
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
# Serverless QA Bot with Modal and LangChain
|
||||||
|
|
||||||
|
## use LanceDB's LangChain integration with Modal to run a serverless app
|
||||||
|
|
||||||
|
<img id="splash" width="400" alt="modal" src="https://github.com/lancedb/lancedb/assets/917119/7d80a40f-60d7-48a6-972f-dab05000eccf">
|
||||||
|
|
||||||
|
We're going to build a QA bot for your documentation using LanceDB's LangChain integration and use Modal for deployment.
|
||||||
|
|
||||||
|
Modal is an end-to-end compute platform for model inference, batch jobs, task queues, web apps and more. It's a great way to deploy your LanceDB models and apps.
|
||||||
|
|
||||||
|
To get started, ensure that you have created an account and logged into [Modal](https://modal.com/). To follow along, the full source code is available on Github [here](https://github.com/lancedb/lancedb/blob/main/docs/src/examples/modal_langchain.py).
|
||||||
|
|
||||||
|
### Setting up Modal
|
||||||
|
|
||||||
|
We'll start by specifying our dependencies and creating a new Modal `Stub`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
lancedb_image = Image.debian_slim().pip_install(
|
||||||
|
"lancedb",
|
||||||
|
"langchain",
|
||||||
|
"openai",
|
||||||
|
"pandas",
|
||||||
|
"tiktoken",
|
||||||
|
"unstructured",
|
||||||
|
"tabulate"
|
||||||
|
)
|
||||||
|
|
||||||
|
stub = Stub(
|
||||||
|
name="example-langchain-lancedb",
|
||||||
|
image=lancedb_image,
|
||||||
|
secrets=[Secret.from_name("my-openai-secret")],
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
We're using Modal's Secrets injection to secure our OpenAI key. To set your own, you can access the Modal UI and enter your key.
|
||||||
|
|
||||||
|
### Setting up caches for LanceDB and LangChain
|
||||||
|
|
||||||
|
Next, we can setup some globals to cache our LanceDB database, as well as our LangChain docsource:
|
||||||
|
|
||||||
|
```python
|
||||||
|
docsearch = None
|
||||||
|
docs_path = Path("docs.pkl")
|
||||||
|
db_path = Path("lancedb")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Downloading our dataset
|
||||||
|
|
||||||
|
We're going use a pregenerated dataset, which stores HTML files of the Pandas 2.0 documentation.
|
||||||
|
You could switch this out for your own dataset.
|
||||||
|
|
||||||
|
```python
|
||||||
|
def download_docs():
|
||||||
|
pandas_docs = requests.get("https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip")
|
||||||
|
with open(Path("pandas.documentation.zip"), "wb") as f:
|
||||||
|
f.write(pandas_docs.content)
|
||||||
|
|
||||||
|
file = zipfile.ZipFile(Path("pandas.documentation.zip"))
|
||||||
|
file.extractall(path=Path("pandas_docs"))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pre-processing the dataset and generating metadata
|
||||||
|
|
||||||
|
Once we've downloaded it, we want to parse and pre-process them using LangChain, and then vectorize them and store it in LanceDB.
|
||||||
|
Let's first create a function that uses LangChains `UnstructuredHTMLLoader` to parse them.
|
||||||
|
We can then add our own metadata to it and store it alongside the data, we'll later be able to use this for filtering metadata.
|
||||||
|
|
||||||
|
```python
|
||||||
|
def store_docs():
|
||||||
|
docs = []
|
||||||
|
|
||||||
|
if not docs_path.exists():
|
||||||
|
for p in Path("pandas_docs/pandas.documentation").rglob("*.html"):
|
||||||
|
if p.is_dir():
|
||||||
|
continue
|
||||||
|
loader = UnstructuredHTMLLoader(p)
|
||||||
|
raw_document = loader.load()
|
||||||
|
|
||||||
|
m = {}
|
||||||
|
m["title"] = get_document_title(raw_document[0])
|
||||||
|
m["version"] = "2.0rc0"
|
||||||
|
raw_document[0].metadata = raw_document[0].metadata | m
|
||||||
|
raw_document[0].metadata["source"] = str(raw_document[0].metadata["source"])
|
||||||
|
docs = docs + raw_document
|
||||||
|
|
||||||
|
with docs_path.open("wb") as fh:
|
||||||
|
pickle.dump(docs, fh)
|
||||||
|
else:
|
||||||
|
with docs_path.open("rb") as fh:
|
||||||
|
docs = pickle.load(fh)
|
||||||
|
|
||||||
|
return docs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Simple LangChain chain for a QA bot
|
||||||
|
|
||||||
|
Now we can create a simple LangChain chain for our QA bot. We'll use the `RecursiveCharacterTextSplitter` to split our documents into chunks, and then use the `OpenAIEmbeddings` to vectorize them.
|
||||||
|
|
||||||
|
Lastly, we'll create a LanceDB table and store the vectorized documents in it, then create a `RetrievalQA` model from the chain and return it.
|
||||||
|
|
||||||
|
```python
|
||||||
|
def qanda_langchain(query):
|
||||||
|
download_docs()
|
||||||
|
docs = store_docs()
|
||||||
|
|
||||||
|
text_splitter = RecursiveCharacterTextSplitter(
|
||||||
|
chunk_size=1000,
|
||||||
|
chunk_overlap=200,
|
||||||
|
)
|
||||||
|
documents = text_splitter.split_documents(docs)
|
||||||
|
embeddings = OpenAIEmbeddings()
|
||||||
|
|
||||||
|
db = lancedb.connect(db_path)
|
||||||
|
table = db.create_table("pandas_docs", data=[
|
||||||
|
{"vector": embeddings.embed_query("Hello World"), "text": "Hello World", "id": "1"}
|
||||||
|
], mode="overwrite")
|
||||||
|
docsearch = LanceDB.from_documents(documents, embeddings, connection=table)
|
||||||
|
qa = RetrievalQA.from_chain_type(llm=OpenAI(), chain_type="stuff", retriever=docsearch.as_retriever())
|
||||||
|
return qa.run(query)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating our Modal entry points
|
||||||
|
|
||||||
|
Now we can create our Modal entry points for our CLI and web endpoint:
|
||||||
|
|
||||||
|
```python
|
||||||
|
@stub.function()
|
||||||
|
@web_endpoint(method="GET")
|
||||||
|
def web(query: str):
|
||||||
|
answer = qanda_langchain(query)
|
||||||
|
return {
|
||||||
|
"answer": answer,
|
||||||
|
}
|
||||||
|
|
||||||
|
@stub.function()
|
||||||
|
def cli(query: str):
|
||||||
|
answer = qanda_langchain(query)
|
||||||
|
print(answer)
|
||||||
|
```
|
||||||
|
|
||||||
|
# Testing it out!
|
||||||
|
|
||||||
|
Testing the CLI:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
modal run modal_langchain.py --query "What are the major differences in pandas 2.0?"
|
||||||
|
```
|
||||||
|
|
||||||
|
Testing the web endpoint:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
modal serve modal_langchain.py
|
||||||
|
```
|
||||||
|
|
||||||
|
In the CLI, Modal will provide you a web endpoint. Copy this endpoint URI for the next step.
|
||||||
|
Once this is served, then we can hit it with `curl`.
|
||||||
|
|
||||||
|
Note, the first time this runs, it will take a few minutes to download the dataset and vectorize it.
|
||||||
|
An actual production example would pre-cache/load the dataset and vectorized documents prior to serving requests.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl --get --data-urlencode "query=What are the major differences in pandas 2.0?" https://your-modal-endpoint-app.modal.run
|
||||||
|
|
||||||
|
{"answer":" The major differences in pandas 2.0 include the ability to use any numpy numeric dtype in a Index, installing optional dependencies with pip extras, and enhancements, bug fixes, and performance improvements."}
|
||||||
|
```
|
||||||
|
|
||||||
@@ -4,4 +4,4 @@
|
|||||||
|
|
||||||
<img id="splash" width="400" alt="youtube transcript search" src="https://user-images.githubusercontent.com/917119/236965568-def7394d-171c-45f2-939d-8edfeaadd88c.png">
|
<img id="splash" width="400" alt="youtube transcript search" src="https://user-images.githubusercontent.com/917119/236965568-def7394d-171c-45f2-939d-8edfeaadd88c.png">
|
||||||
|
|
||||||
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/notebooks/youtube_transcript_search.ipynb)
|
This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb)
|
||||||
139
docs/src/examples/youtube_transcript_bot_with_nodejs.md
Normal file
139
docs/src/examples/youtube_transcript_bot_with_nodejs.md
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
# YouTube transcript QA bot with NodeJS
|
||||||
|
|
||||||
|
## use LanceDB's Javascript API and OpenAI to build a QA bot for YouTube transcripts
|
||||||
|
|
||||||
|
<img id="splash" width="400" alt="nodejs" src="https://github.com/lancedb/lancedb/assets/917119/3a140e75-bf8e-438a-a1e4-af14a72bcf98">
|
||||||
|
|
||||||
|
This Q&A bot will allow you to search through youtube transcripts using natural language! We'll introduce how to use LanceDB's Javascript API to store and manage your data easily.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install vectordb
|
||||||
|
```
|
||||||
|
|
||||||
|
## Download the data
|
||||||
|
|
||||||
|
For this example, we're using a sample of a HuggingFace dataset that contains YouTube transcriptions: `jamescalam/youtube-transcriptions`. Download and extract this file under the `data` folder:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
wget -c https://eto-public.s3.us-west-2.amazonaws.com/datasets/youtube_transcript/youtube-transcriptions_sample.jsonl
|
||||||
|
```
|
||||||
|
|
||||||
|
## Prepare Context
|
||||||
|
|
||||||
|
Each item in the dataset contains just a short chunk of text. We'll need to merge a bunch of these chunks together on a rolling basis. For this demo, we'll look back 20 records to create a more complete context for each sentence.
|
||||||
|
|
||||||
|
First, we need to read and parse the input file.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const lines = (await fs.readFile(INPUT_FILE_NAME, 'utf-8'))
|
||||||
|
.toString()
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.length > 0)
|
||||||
|
.map(line => JSON.parse(line))
|
||||||
|
|
||||||
|
const data = contextualize(lines, 20, 'video_id')
|
||||||
|
```
|
||||||
|
|
||||||
|
The contextualize function groups the transcripts by video_id and then creates the expanded context for each item.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
function contextualize (rows, contextSize, groupColumn) {
|
||||||
|
const grouped = []
|
||||||
|
rows.forEach(row => {
|
||||||
|
if (!grouped[row[groupColumn]]) {
|
||||||
|
grouped[row[groupColumn]] = []
|
||||||
|
}
|
||||||
|
grouped[row[groupColumn]].push(row)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = []
|
||||||
|
Object.keys(grouped).forEach(key => {
|
||||||
|
for (let i = 0; i < grouped[key].length; i++) {
|
||||||
|
const start = i - contextSize > 0 ? i - contextSize : 0
|
||||||
|
grouped[key][i].context = grouped[key].slice(start, i + 1).map(r => r.text).join(' ')
|
||||||
|
}
|
||||||
|
data.push(...grouped[key])
|
||||||
|
})
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create the LanceDB Table
|
||||||
|
|
||||||
|
To load our data into LanceDB, we need to create embedding (vectors) for each item. For this example, we will use the OpenAI embedding functions, which have a native integration with LanceDB.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
|
||||||
|
const apiKey = process.env.OPENAI_API_KEY
|
||||||
|
// The embedding function will create embeddings for the 'context' column
|
||||||
|
const embedFunction = new lancedb.OpenAIEmbeddingFunction('context', apiKey)
|
||||||
|
// Connects to LanceDB
|
||||||
|
const db = await lancedb.connect('data/youtube-lancedb')
|
||||||
|
const tbl = await db.createTable('vectors', data, embedFunction)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create and answer the prompt
|
||||||
|
|
||||||
|
We will accept questions in natural language and use our corpus stored in LanceDB to answer them. First, we need to set up the OpenAI client:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const configuration = new Configuration({ apiKey })
|
||||||
|
const openai = new OpenAIApi(configuration)
|
||||||
|
```
|
||||||
|
|
||||||
|
Then we can prompt questions and use LanceDB to retrieve the three most relevant transcripts for this prompt.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const query = await rl.question('Prompt: ')
|
||||||
|
const results = await tbl
|
||||||
|
.search(query)
|
||||||
|
.select(['title', 'text', 'context'])
|
||||||
|
.limit(3)
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
|
The query and the transcripts' context are appended together in a single prompt:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
function createPrompt (query, context) {
|
||||||
|
let prompt =
|
||||||
|
'Answer the question based on the context below.\n\n' +
|
||||||
|
'Context:\n'
|
||||||
|
|
||||||
|
// need to make sure our prompt is not larger than max size
|
||||||
|
prompt = prompt + context.map(c => c.context).join('\n\n---\n\n').substring(0, 3750)
|
||||||
|
prompt = prompt + `\n\nQuestion: ${query}\nAnswer:`
|
||||||
|
return prompt
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
We can now use the OpenAI Completion API to process our custom prompt and give us an answer.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const response = await openai.createCompletion({
|
||||||
|
model: 'text-davinci-003',
|
||||||
|
prompt: createPrompt(query, results),
|
||||||
|
max_tokens: 400,
|
||||||
|
temperature: 0,
|
||||||
|
top_p: 1,
|
||||||
|
frequency_penalty: 0,
|
||||||
|
presence_penalty: 0
|
||||||
|
})
|
||||||
|
console.log(response.data.choices[0].text)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Let's put it all together now
|
||||||
|
|
||||||
|
Now we can provide queries and have them answered based on your local LanceDB data.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
Prompt: who was the 12th person on the moon and when did they land?
|
||||||
|
The 12th person on the moon was Harrison Schmitt and he landed on December 11, 1972.
|
||||||
|
Prompt: Which training method should I use for sentence transformers when I only have pairs of related sentences?
|
||||||
|
NLI with multiple negative ranking loss.
|
||||||
|
```
|
||||||
|
|
||||||
|
## That's a wrap
|
||||||
|
|
||||||
|
In this example, you learned how to use LanceDB to store and query embedding representations of your local data. The complete example code is on [GitHub](https://github.com/lancedb/lancedb/tree/main/node/examples), and you can also download the LanceDB dataset using [this link](https://eto-public.s3.us-west-2.amazonaws.com/datasets/youtube_transcript/youtube-lancedb.zip).
|
||||||
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
# Welcome to LanceDB's Documentation
|
# Welcome to LanceDB's Documentation
|
||||||
|
|
||||||
LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.
|
LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.
|
||||||
|
|
||||||
The key features of LanceDB include:
|
The key features of LanceDB include:
|
||||||
|
|
||||||
@@ -8,38 +8,59 @@ The key features of LanceDB include:
|
|||||||
|
|
||||||
* Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).
|
* Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).
|
||||||
|
|
||||||
* Native Python and Javascript/Typescript support (coming soon).
|
* Support for vector similarity search, full-text search and SQL.
|
||||||
|
|
||||||
|
* Native Python and Javascript/Typescript support.
|
||||||
|
|
||||||
* Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.
|
* Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.
|
||||||
|
|
||||||
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lancedb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
|
||||||
|
|
||||||
LanceDB's core is written in Rust 🦀 and is built using Lance, an open-source columnar format designed for performant ML workloads.
|
LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
## Installation
|
=== "Python"
|
||||||
|
```shell
|
||||||
|
pip install lancedb
|
||||||
|
```
|
||||||
|
|
||||||
```shell
|
```python
|
||||||
pip install lancedb
|
import lancedb
|
||||||
```
|
|
||||||
|
|
||||||
## Quickstart
|
uri = "/tmp/lancedb"
|
||||||
|
db = lancedb.connect(uri)
|
||||||
|
table = db.create_table("my_table",
|
||||||
|
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
||||||
|
result = table.search([100, 100]).limit(2).to_df()
|
||||||
|
```
|
||||||
|
|
||||||
```python
|
=== "Javascript"
|
||||||
import lancedb
|
```shell
|
||||||
|
npm install vectordb
|
||||||
|
```
|
||||||
|
|
||||||
db = lancedb.connect(".")
|
```javascript
|
||||||
table = db.create_table("my_table",
|
const lancedb = require("vectordb");
|
||||||
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
|
||||||
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
|
||||||
result = table.search([100, 100]).limit(2).to_df()
|
|
||||||
```
|
|
||||||
|
|
||||||
## Complete Demos
|
const uri = "/tmp/lancedb";
|
||||||
|
const db = await lancedb.connect(uri);
|
||||||
|
const table = await db.createTable("my_table",
|
||||||
|
[{ id: 1, vector: [3.1, 4.1], item: "foo", price: 10.0 },
|
||||||
|
{ id: 2, vector: [5.9, 26.5], item: "bar", price: 20.0 }])
|
||||||
|
const results = await table.search([100, 100]).limit(2).execute();
|
||||||
|
```
|
||||||
|
|
||||||
We will be adding completed demo apps built using LanceDB.
|
## Complete Demos (Python)
|
||||||
- [YouTube Transcript Search](../notebooks/youtube_transcript_search.ipynb)
|
- [YouTube Transcript Search](notebooks/youtube_transcript_search.ipynb)
|
||||||
|
- [Documentation QA Bot using LangChain](notebooks/code_qa_bot.ipynb)
|
||||||
|
- [Multimodal search using CLIP](notebooks/multimodal_search.ipynb)
|
||||||
|
- [Serverless QA Bot with S3 and Lambda](examples/serverless_lancedb_with_s3_and_lambda.md)
|
||||||
|
- [Serverless QA Bot with Modal](examples/serverless_qa_bot_with_modal_and_langchain.md)
|
||||||
|
|
||||||
|
## Complete Demos (JavaScript)
|
||||||
|
- [YouTube Transcript Search](examples/youtube_transcript_bot_with_nodejs.md)
|
||||||
|
|
||||||
## Documentation Quick Links
|
## Documentation Quick Links
|
||||||
* [`Basic Operations`](basic.md) - basic functionality of LanceDB.
|
* [`Basic Operations`](basic.md) - basic functionality of LanceDB.
|
||||||
@@ -47,4 +68,5 @@ We will be adding completed demo apps built using LanceDB.
|
|||||||
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
* [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
|
||||||
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
|
||||||
* [`Ecosystem Integrations`](integrations.md) - integrating LanceDB with python data tooling ecosystem.
|
* [`Ecosystem Integrations`](integrations.md) - integrating LanceDB with python data tooling ecosystem.
|
||||||
* [`API Reference`](python.md) - detailed documentation for the LanceDB Python SDK.
|
* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
|
||||||
|
* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB JavaScript SDK.
|
||||||
|
|||||||
@@ -24,12 +24,9 @@ data = pd.DataFrame({
|
|||||||
"price": [10.0, 20.0]
|
"price": [10.0, 20.0]
|
||||||
})
|
})
|
||||||
table = db.create_table("pd_table", data=data)
|
table = db.create_table("pd_table", data=data)
|
||||||
|
|
||||||
# Optionally, create a IVF_PQ index
|
|
||||||
table.create_index(num_partitions=256, num_sub_vectors=96)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
You will find detailed instructions of creating dataset and index in [Basic Operations](basic.md) and [Indexing](indexing.md)
|
You will find detailed instructions of creating dataset and index in [Basic Operations](basic.md) and [Indexing](ann_indexes.md)
|
||||||
sections.
|
sections.
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
1
docs/src/javascript/.nojekyll
Normal file
1
docs/src/javascript/.nojekyll
Normal file
@@ -0,0 +1 @@
|
|||||||
|
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
|
||||||
51
docs/src/javascript/README.md
Normal file
51
docs/src/javascript/README.md
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
vectordb / [Exports](modules.md)
|
||||||
|
|
||||||
|
# LanceDB
|
||||||
|
|
||||||
|
A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb).
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install vectordb
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const lancedb = require('vectordb');
|
||||||
|
const db = await lancedb.connect('<PATH_TO_LANCEDB_DATASET>');
|
||||||
|
const table = await db.openTable('my_table');
|
||||||
|
const results = await table.search([0.1, 0.3]).limit(20).execute();
|
||||||
|
console.log(results);
|
||||||
|
```
|
||||||
|
|
||||||
|
The [examples](./examples) folder contains complete examples.
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
The LanceDB javascript is built with npm:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run tsc
|
||||||
|
```
|
||||||
|
|
||||||
|
Run the tests with
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm test
|
||||||
|
```
|
||||||
|
|
||||||
|
To run the linter and have it automatically fix all errors
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run lint -- --fix
|
||||||
|
```
|
||||||
|
|
||||||
|
To build documentation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npx typedoc --plugin typedoc-plugin-markdown --out ../docs/src/javascript src/index.ts
|
||||||
|
```
|
||||||
211
docs/src/javascript/classes/Connection.md
Normal file
211
docs/src/javascript/classes/Connection.md
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / Connection
|
||||||
|
|
||||||
|
# Class: Connection
|
||||||
|
|
||||||
|
A connection to a LanceDB database.
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Constructors
|
||||||
|
|
||||||
|
- [constructor](Connection.md#constructor)
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [\_db](Connection.md#_db)
|
||||||
|
- [\_uri](Connection.md#_uri)
|
||||||
|
|
||||||
|
### Accessors
|
||||||
|
|
||||||
|
- [uri](Connection.md#uri)
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
- [createTable](Connection.md#createtable)
|
||||||
|
- [createTableArrow](Connection.md#createtablearrow)
|
||||||
|
- [openTable](Connection.md#opentable)
|
||||||
|
- [tableNames](Connection.md#tablenames)
|
||||||
|
|
||||||
|
## Constructors
|
||||||
|
|
||||||
|
### constructor
|
||||||
|
|
||||||
|
• **new Connection**(`db`, `uri`)
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `db` | `any` |
|
||||||
|
| `uri` | `string` |
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:46](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L46)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### \_db
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_db**: `any`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:44](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L44)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_uri
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_uri**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:43](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L43)
|
||||||
|
|
||||||
|
## Accessors
|
||||||
|
|
||||||
|
### uri
|
||||||
|
|
||||||
|
• `get` **uri**(): `string`
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:51](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L51)
|
||||||
|
|
||||||
|
## Methods
|
||||||
|
|
||||||
|
### createTable
|
||||||
|
|
||||||
|
▸ **createTable**(`name`, `data`): `Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
Creates a new Table and initialize it with new data.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `name` | `string` | The name of the table. |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the Table |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:91](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L91)
|
||||||
|
|
||||||
|
▸ **createTable**<`T`\>(`name`, `data`, `embeddings`): `Promise`<[`Table`](Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
Creates a new Table and initialize it with new data.
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `name` | `string` | The name of the table. |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the Table |
|
||||||
|
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use on this Table |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:99](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L99)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### createTableArrow
|
||||||
|
|
||||||
|
▸ **createTableArrow**(`name`, `table`): `Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `name` | `string` |
|
||||||
|
| `table` | `Table`<`any`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:109](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L109)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### openTable
|
||||||
|
|
||||||
|
▸ **openTable**(`name`): `Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
Open a table in the database.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `name` | `string` | The name of the table. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](Table.md)<`number`[]\>\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:67](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L67)
|
||||||
|
|
||||||
|
▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
Open a table in the database.
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `name` | `string` | The name of the table. |
|
||||||
|
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use on this Table |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Table`](Table.md)<`T`\>\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:74](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L74)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### tableNames
|
||||||
|
|
||||||
|
▸ **tableNames**(): `Promise`<`string`[]\>
|
||||||
|
|
||||||
|
Get the names of all tables in the database.
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`string`[]\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:58](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L58)
|
||||||
105
docs/src/javascript/classes/OpenAIEmbeddingFunction.md
Normal file
105
docs/src/javascript/classes/OpenAIEmbeddingFunction.md
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / OpenAIEmbeddingFunction
|
||||||
|
|
||||||
|
# Class: OpenAIEmbeddingFunction
|
||||||
|
|
||||||
|
An embedding function that automatically creates vector representation for a given column.
|
||||||
|
|
||||||
|
## Implements
|
||||||
|
|
||||||
|
- [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`string`\>
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Constructors
|
||||||
|
|
||||||
|
- [constructor](OpenAIEmbeddingFunction.md#constructor)
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [\_modelName](OpenAIEmbeddingFunction.md#_modelname)
|
||||||
|
- [\_openai](OpenAIEmbeddingFunction.md#_openai)
|
||||||
|
- [sourceColumn](OpenAIEmbeddingFunction.md#sourcecolumn)
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
- [embed](OpenAIEmbeddingFunction.md#embed)
|
||||||
|
|
||||||
|
## Constructors
|
||||||
|
|
||||||
|
### constructor
|
||||||
|
|
||||||
|
• **new OpenAIEmbeddingFunction**(`sourceColumn`, `openAIKey`, `modelName?`)
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Default value |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `sourceColumn` | `string` | `undefined` |
|
||||||
|
| `openAIKey` | `string` | `undefined` |
|
||||||
|
| `modelName` | `string` | `'text-embedding-ada-002'` |
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L21)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### \_modelName
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_modelName**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L19)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_openai
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_openai**: `any`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L18)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### sourceColumn
|
||||||
|
|
||||||
|
• **sourceColumn**: `string`
|
||||||
|
|
||||||
|
The name of the column that will be used as input for the Embedding Function.
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
[EmbeddingFunction](../interfaces/EmbeddingFunction.md).[sourceColumn](../interfaces/EmbeddingFunction.md#sourcecolumn)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L50)
|
||||||
|
|
||||||
|
## Methods
|
||||||
|
|
||||||
|
### embed
|
||||||
|
|
||||||
|
▸ **embed**(`data`): `Promise`<`number`[][]\>
|
||||||
|
|
||||||
|
Creates a vector representation for the given values.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `data` | `string`[] |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`number`[][]\>
|
||||||
|
|
||||||
|
#### Implementation of
|
||||||
|
|
||||||
|
[EmbeddingFunction](../interfaces/EmbeddingFunction.md).[embed](../interfaces/EmbeddingFunction.md#embed)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L38)
|
||||||
299
docs/src/javascript/classes/Query.md
Normal file
299
docs/src/javascript/classes/Query.md
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / Query
|
||||||
|
|
||||||
|
# Class: Query<T\>
|
||||||
|
|
||||||
|
A builder for nearest neighbor queries for LanceDB.
|
||||||
|
|
||||||
|
## Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `number`[] |
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Constructors
|
||||||
|
|
||||||
|
- [constructor](Query.md#constructor)
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [\_columns](Query.md#_columns)
|
||||||
|
- [\_embeddings](Query.md#_embeddings)
|
||||||
|
- [\_filter](Query.md#_filter)
|
||||||
|
- [\_limit](Query.md#_limit)
|
||||||
|
- [\_metricType](Query.md#_metrictype)
|
||||||
|
- [\_nprobes](Query.md#_nprobes)
|
||||||
|
- [\_query](Query.md#_query)
|
||||||
|
- [\_queryVector](Query.md#_queryvector)
|
||||||
|
- [\_refineFactor](Query.md#_refinefactor)
|
||||||
|
- [\_tbl](Query.md#_tbl)
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
- [execute](Query.md#execute)
|
||||||
|
- [filter](Query.md#filter)
|
||||||
|
- [limit](Query.md#limit)
|
||||||
|
- [metricType](Query.md#metrictype)
|
||||||
|
- [nprobes](Query.md#nprobes)
|
||||||
|
- [refineFactor](Query.md#refinefactor)
|
||||||
|
|
||||||
|
## Constructors
|
||||||
|
|
||||||
|
### constructor
|
||||||
|
|
||||||
|
• **new Query**<`T`\>(`tbl`, `query`, `embeddings?`)
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `number`[] |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `tbl` | `any` |
|
||||||
|
| `query` | `T` |
|
||||||
|
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:241](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L241)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### \_columns
|
||||||
|
|
||||||
|
• `Private` `Optional` `Readonly` **\_columns**: `string`[]
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:236](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L236)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_embeddings
|
||||||
|
|
||||||
|
• `Private` `Optional` `Readonly` **\_embeddings**: [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:239](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L239)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_filter
|
||||||
|
|
||||||
|
• `Private` `Optional` **\_filter**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:237](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L237)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_limit
|
||||||
|
|
||||||
|
• `Private` **\_limit**: `number`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:233](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L233)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_metricType
|
||||||
|
|
||||||
|
• `Private` `Optional` **\_metricType**: [`MetricType`](../enums/MetricType.md)
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:238](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L238)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_nprobes
|
||||||
|
|
||||||
|
• `Private` **\_nprobes**: `number`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:235](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L235)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_query
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_query**: `T`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:231](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L231)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_queryVector
|
||||||
|
|
||||||
|
• `Private` `Optional` **\_queryVector**: `number`[]
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:232](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L232)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_refineFactor
|
||||||
|
|
||||||
|
• `Private` `Optional` **\_refineFactor**: `number`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:234](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L234)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_tbl
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_tbl**: `any`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:230](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L230)
|
||||||
|
|
||||||
|
## Methods
|
||||||
|
|
||||||
|
### execute
|
||||||
|
|
||||||
|
▸ **execute**<`T`\>(): `Promise`<`T`[]\>
|
||||||
|
|
||||||
|
Execute the query and return the results as an Array of Objects
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `Record`<`string`, `unknown`\> |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`T`[]\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:301](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L301)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### filter
|
||||||
|
|
||||||
|
▸ **filter**(`value`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
A filter statement to be applied to this query.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `value` | `string` | A filter in the same format used by a sql WHERE clause. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:284](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L284)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### limit
|
||||||
|
|
||||||
|
▸ **limit**(`value`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
Sets the number of results that will be returned
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `value` | `number` | number of results |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:257](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L257)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### metricType
|
||||||
|
|
||||||
|
▸ **metricType**(`value`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
The MetricType used for this Query.
|
||||||
|
|
||||||
|
**`See`**
|
||||||
|
|
||||||
|
MetricType for the different options
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `value` | [`MetricType`](../enums/MetricType.md) | The metric to use. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:293](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L293)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### nprobes
|
||||||
|
|
||||||
|
▸ **nprobes**(`value`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
The number of probes used. A higher number makes search more accurate but also slower.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `value` | `number` | The number of probes used. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:275](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L275)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### refineFactor
|
||||||
|
|
||||||
|
▸ **refineFactor**(`value`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
Refine the results by reading extra elements and re-ranking them in memory.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `value` | `number` | refine factor to use in this query. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:266](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L266)
|
||||||
215
docs/src/javascript/classes/Table.md
Normal file
215
docs/src/javascript/classes/Table.md
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / Table
|
||||||
|
|
||||||
|
# Class: Table<T\>
|
||||||
|
|
||||||
|
## Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `number`[] |
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Constructors
|
||||||
|
|
||||||
|
- [constructor](Table.md#constructor)
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [\_embeddings](Table.md#_embeddings)
|
||||||
|
- [\_name](Table.md#_name)
|
||||||
|
- [\_tbl](Table.md#_tbl)
|
||||||
|
|
||||||
|
### Accessors
|
||||||
|
|
||||||
|
- [name](Table.md#name)
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
- [add](Table.md#add)
|
||||||
|
- [create\_index](Table.md#create_index)
|
||||||
|
- [overwrite](Table.md#overwrite)
|
||||||
|
- [search](Table.md#search)
|
||||||
|
|
||||||
|
## Constructors
|
||||||
|
|
||||||
|
### constructor
|
||||||
|
|
||||||
|
• **new Table**<`T`\>(`tbl`, `name`)
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `number`[] |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `tbl` | `any` |
|
||||||
|
| `name` | `string` |
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:121](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L121)
|
||||||
|
|
||||||
|
• **new Table**<`T`\>(`tbl`, `name`, `embeddings`)
|
||||||
|
|
||||||
|
#### Type parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `T` | `number`[] |
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `tbl` | `any` | |
|
||||||
|
| `name` | `string` | |
|
||||||
|
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:127](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L127)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### \_embeddings
|
||||||
|
|
||||||
|
• `Private` `Optional` `Readonly` **\_embeddings**: [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:119](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L119)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_name
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_name**: `string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:118](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L118)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### \_tbl
|
||||||
|
|
||||||
|
• `Private` `Readonly` **\_tbl**: `any`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:117](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L117)
|
||||||
|
|
||||||
|
## Accessors
|
||||||
|
|
||||||
|
### name
|
||||||
|
|
||||||
|
• `get` **name**(): `string`
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`string`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:134](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L134)
|
||||||
|
|
||||||
|
## Methods
|
||||||
|
|
||||||
|
### add
|
||||||
|
|
||||||
|
▸ **add**(`data`): `Promise`<`number`\>
|
||||||
|
|
||||||
|
Insert records into this Table.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] | Records to be inserted into the Table |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`number`\>
|
||||||
|
|
||||||
|
The number of rows added to the table
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:152](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L152)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### create\_index
|
||||||
|
|
||||||
|
▸ **create_index**(`indexParams`): `Promise`<`any`\>
|
||||||
|
|
||||||
|
Create an ANN index on this Table vector index.
|
||||||
|
|
||||||
|
**`See`**
|
||||||
|
|
||||||
|
VectorIndexParams.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index, |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`any`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:171](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L171)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### overwrite
|
||||||
|
|
||||||
|
▸ **overwrite**(`data`): `Promise`<`number`\>
|
||||||
|
|
||||||
|
Insert records into this Table, replacing its contents.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `data` | `Record`<`string`, `unknown`\>[] | Records to be inserted into the Table |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<`number`\>
|
||||||
|
|
||||||
|
The number of rows added to the table
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:162](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L162)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### search
|
||||||
|
|
||||||
|
▸ **search**(`query`): [`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
Creates a search query to find the nearest neighbors of the given search term
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `query` | `T` | The query search term |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Query`](Query.md)<`T`\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:142](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L142)
|
||||||
36
docs/src/javascript/enums/MetricType.md
Normal file
36
docs/src/javascript/enums/MetricType.md
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / MetricType
|
||||||
|
|
||||||
|
# Enumeration: MetricType
|
||||||
|
|
||||||
|
Distance metrics type.
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Enumeration Members
|
||||||
|
|
||||||
|
- [Cosine](MetricType.md#cosine)
|
||||||
|
- [L2](MetricType.md#l2)
|
||||||
|
|
||||||
|
## Enumeration Members
|
||||||
|
|
||||||
|
### Cosine
|
||||||
|
|
||||||
|
• **Cosine** = ``"cosine"``
|
||||||
|
|
||||||
|
Cosine distance
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:341](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L341)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### L2
|
||||||
|
|
||||||
|
• **L2** = ``"l2"``
|
||||||
|
|
||||||
|
Euclidean distance
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:336](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L336)
|
||||||
30
docs/src/javascript/enums/WriteMode.md
Normal file
30
docs/src/javascript/enums/WriteMode.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / WriteMode
|
||||||
|
|
||||||
|
# Enumeration: WriteMode
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Enumeration Members
|
||||||
|
|
||||||
|
- [Append](WriteMode.md#append)
|
||||||
|
- [Overwrite](WriteMode.md#overwrite)
|
||||||
|
|
||||||
|
## Enumeration Members
|
||||||
|
|
||||||
|
### Append
|
||||||
|
|
||||||
|
• **Append** = ``"append"``
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:326](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L326)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### Overwrite
|
||||||
|
|
||||||
|
• **Overwrite** = ``"overwrite"``
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:325](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L325)
|
||||||
60
docs/src/javascript/interfaces/EmbeddingFunction.md
Normal file
60
docs/src/javascript/interfaces/EmbeddingFunction.md
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
[vectordb](../README.md) / [Exports](../modules.md) / EmbeddingFunction
|
||||||
|
|
||||||
|
# Interface: EmbeddingFunction<T\>
|
||||||
|
|
||||||
|
An embedding function that automatically creates vector representation for a given column.
|
||||||
|
|
||||||
|
## Type parameters
|
||||||
|
|
||||||
|
| Name |
|
||||||
|
| :------ |
|
||||||
|
| `T` |
|
||||||
|
|
||||||
|
## Implemented by
|
||||||
|
|
||||||
|
- [`OpenAIEmbeddingFunction`](../classes/OpenAIEmbeddingFunction.md)
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
- [embed](EmbeddingFunction.md#embed)
|
||||||
|
- [sourceColumn](EmbeddingFunction.md#sourcecolumn)
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
|
||||||
|
### embed
|
||||||
|
|
||||||
|
• **embed**: (`data`: `T`[]) => `Promise`<`number`[][]\>
|
||||||
|
|
||||||
|
#### Type declaration
|
||||||
|
|
||||||
|
▸ (`data`): `Promise`<`number`[][]\>
|
||||||
|
|
||||||
|
Creates a vector representation for the given values.
|
||||||
|
|
||||||
|
##### Parameters
|
||||||
|
|
||||||
|
| Name | Type |
|
||||||
|
| :------ | :------ |
|
||||||
|
| `data` | `T`[] |
|
||||||
|
|
||||||
|
##### Returns
|
||||||
|
|
||||||
|
`Promise`<`number`[][]\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/embedding_function.ts#L27)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
### sourceColumn
|
||||||
|
|
||||||
|
• **sourceColumn**: `string`
|
||||||
|
|
||||||
|
The name of the column that will be used as input for the Embedding Function.
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/embedding_function.ts#L22)
|
||||||
61
docs/src/javascript/modules.md
Normal file
61
docs/src/javascript/modules.md
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
[vectordb](README.md) / Exports
|
||||||
|
|
||||||
|
# vectordb
|
||||||
|
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
### Enumerations
|
||||||
|
|
||||||
|
- [MetricType](enums/MetricType.md)
|
||||||
|
- [WriteMode](enums/WriteMode.md)
|
||||||
|
|
||||||
|
### Classes
|
||||||
|
|
||||||
|
- [Connection](classes/Connection.md)
|
||||||
|
- [OpenAIEmbeddingFunction](classes/OpenAIEmbeddingFunction.md)
|
||||||
|
- [Query](classes/Query.md)
|
||||||
|
- [Table](classes/Table.md)
|
||||||
|
|
||||||
|
### Interfaces
|
||||||
|
|
||||||
|
- [EmbeddingFunction](interfaces/EmbeddingFunction.md)
|
||||||
|
|
||||||
|
### Type Aliases
|
||||||
|
|
||||||
|
- [VectorIndexParams](modules.md#vectorindexparams)
|
||||||
|
|
||||||
|
### Functions
|
||||||
|
|
||||||
|
- [connect](modules.md#connect)
|
||||||
|
|
||||||
|
## Type Aliases
|
||||||
|
|
||||||
|
### VectorIndexParams
|
||||||
|
|
||||||
|
Ƭ **VectorIndexParams**: `IvfPQIndexConfig`
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:224](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L224)
|
||||||
|
|
||||||
|
## Functions
|
||||||
|
|
||||||
|
### connect
|
||||||
|
|
||||||
|
▸ **connect**(`uri`): `Promise`<[`Connection`](classes/Connection.md)\>
|
||||||
|
|
||||||
|
Connect to a LanceDB instance at the given URI
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
| Name | Type | Description |
|
||||||
|
| :------ | :------ | :------ |
|
||||||
|
| `uri` | `string` | The uri of the database. |
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
`Promise`<[`Connection`](classes/Connection.md)\>
|
||||||
|
|
||||||
|
#### Defined in
|
||||||
|
|
||||||
|
[index.ts:34](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L34)
|
||||||
@@ -72,6 +72,8 @@
|
|||||||
"import lancedb\n",
|
"import lancedb\n",
|
||||||
"import re\n",
|
"import re\n",
|
||||||
"import pickle\n",
|
"import pickle\n",
|
||||||
|
"import requests\n",
|
||||||
|
"import zipfile\n",
|
||||||
"from pathlib import Path\n",
|
"from pathlib import Path\n",
|
||||||
"\n",
|
"\n",
|
||||||
"from langchain.document_loaders import UnstructuredHTMLLoader\n",
|
"from langchain.document_loaders import UnstructuredHTMLLoader\n",
|
||||||
@@ -85,10 +87,25 @@
|
|||||||
{
|
{
|
||||||
"attachments": {},
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "6ccf9b2b",
|
"id": "56cc6d50",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"source": [
|
"source": [
|
||||||
"You can download the Pandas documentation from https://pandas.pydata.org/docs/. To make sure we're not littering our repo with docs, we won't include it in the LanceDB repo, so download this and store it locally first."
|
"To make this easier, we've downloaded Pandas documentation and stored the raw HTML files for you to download. We'll download them and then use LangChain's HTML document readers to parse them and store them in LanceDB as a vector store, along with relevant metadata."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "7da77e75",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"pandas_docs = requests.get(\"https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip\")\n",
|
||||||
|
"with open('/tmp/pandas.documentation.zip', 'wb') as f:\n",
|
||||||
|
" f.write(pandas_docs.content)\n",
|
||||||
|
"\n",
|
||||||
|
"file = zipfile.ZipFile(\"/tmp/pandas.documentation.zip\")\n",
|
||||||
|
"file.extractall(path=\"/tmp/pandas_docs\")"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -137,7 +154,8 @@
|
|||||||
"docs = []\n",
|
"docs = []\n",
|
||||||
"\n",
|
"\n",
|
||||||
"if not docs_path.exists():\n",
|
"if not docs_path.exists():\n",
|
||||||
" for p in Path(\"./pandas.documentation\").rglob(\"*.html\"):\n",
|
" for p in Path(\"/tmp/pandas_docs/pandas.documentation\").rglob(\"*.html\"):\n",
|
||||||
|
" print(p)\n",
|
||||||
" if p.is_dir():\n",
|
" if p.is_dir():\n",
|
||||||
" continue\n",
|
" continue\n",
|
||||||
" loader = UnstructuredHTMLLoader(p)\n",
|
" loader = UnstructuredHTMLLoader(p)\n",
|
||||||
109
docs/src/notebooks/diffusiondb/datagen.py
Executable file
109
docs/src/notebooks/diffusiondb/datagen.py
Executable file
@@ -0,0 +1,109 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Dataset hf://poloclub/diffusiondb
|
||||||
|
"""
|
||||||
|
|
||||||
|
import io
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from multiprocessing import Pool
|
||||||
|
|
||||||
|
import lance
|
||||||
|
import pyarrow as pa
|
||||||
|
from datasets import load_dataset
|
||||||
|
from PIL import Image
|
||||||
|
from transformers import CLIPModel, CLIPProcessor, CLIPTokenizerFast
|
||||||
|
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
MODEL_ID = "openai/clip-vit-base-patch32"
|
||||||
|
|
||||||
|
device = "cuda"
|
||||||
|
|
||||||
|
tokenizer = CLIPTokenizerFast.from_pretrained(MODEL_ID)
|
||||||
|
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32").to(device)
|
||||||
|
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
||||||
|
|
||||||
|
schema = pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("prompt", pa.string()),
|
||||||
|
pa.field("seed", pa.uint32()),
|
||||||
|
pa.field("step", pa.uint16()),
|
||||||
|
pa.field("cfg", pa.float32()),
|
||||||
|
pa.field("sampler", pa.string()),
|
||||||
|
pa.field("width", pa.uint16()),
|
||||||
|
pa.field("height", pa.uint16()),
|
||||||
|
pa.field("timestamp", pa.timestamp("s")),
|
||||||
|
pa.field("image_nsfw", pa.float32()),
|
||||||
|
pa.field("prompt_nsfw", pa.float32()),
|
||||||
|
pa.field("vector", pa.list_(pa.float32(), 512)),
|
||||||
|
pa.field("image", pa.binary()),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def pil_to_bytes(img) -> list[bytes]:
|
||||||
|
buf = io.BytesIO()
|
||||||
|
img.save(buf, format="PNG")
|
||||||
|
return buf.getvalue()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_clip_embeddings(batch) -> pa.RecordBatch:
|
||||||
|
image = processor(text=None, images=batch["image"], return_tensors="pt")[
|
||||||
|
"pixel_values"
|
||||||
|
].to(device)
|
||||||
|
img_emb = model.get_image_features(image)
|
||||||
|
batch["vector"] = img_emb.cpu().tolist()
|
||||||
|
|
||||||
|
with Pool() as p:
|
||||||
|
batch["image_bytes"] = p.map(pil_to_bytes, batch["image"])
|
||||||
|
return batch
|
||||||
|
|
||||||
|
|
||||||
|
def datagen(args):
|
||||||
|
"""Generate DiffusionDB dataset, and use CLIP model to generate image embeddings."""
|
||||||
|
dataset = load_dataset("poloclub/diffusiondb", args.subset)
|
||||||
|
data = []
|
||||||
|
for b in dataset.map(
|
||||||
|
generate_clip_embeddings, batched=True, batch_size=256, remove_columns=["image"]
|
||||||
|
)["train"]:
|
||||||
|
b["image"] = b["image_bytes"]
|
||||||
|
del b["image_bytes"]
|
||||||
|
data.append(b)
|
||||||
|
tbl = pa.Table.from_pylist(data, schema=schema)
|
||||||
|
return tbl
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = ArgumentParser()
|
||||||
|
parser.add_argument(
|
||||||
|
"-o", "--output", metavar="DIR", help="Output lance directory", required=True
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-s",
|
||||||
|
"--subset",
|
||||||
|
choices=["2m_all", "2m_first_10k", "2m_first_100k"],
|
||||||
|
default="2m_first_10k",
|
||||||
|
help="subset of the hg dataset",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
batches = datagen(args)
|
||||||
|
lance.write_dataset(batches, args.output)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
9
docs/src/notebooks/diffusiondb/requirements.txt
Normal file
9
docs/src/notebooks/diffusiondb/requirements.txt
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
datasets
|
||||||
|
Pillow
|
||||||
|
lancedb
|
||||||
|
isort
|
||||||
|
black
|
||||||
|
transformers
|
||||||
|
--index-url https://download.pytorch.org/whl/cu118
|
||||||
|
torch
|
||||||
|
torchvision
|
||||||
269
docs/src/notebooks/multimodal_search.ipynb
Normal file
269
docs/src/notebooks/multimodal_search.ipynb
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"\n",
|
||||||
|
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.1.2\u001b[0m\n",
|
||||||
|
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n",
|
||||||
|
"\n",
|
||||||
|
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.1.2\u001b[0m\n",
|
||||||
|
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"!pip install --quiet -U lancedb\n",
|
||||||
|
"!pip install --quiet gradio transformers torch torchvision"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import io\n",
|
||||||
|
"import PIL\n",
|
||||||
|
"import duckdb\n",
|
||||||
|
"import lancedb"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## First run setup: Download data and pre-process"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 30,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"<lance.dataset.LanceDataset at 0x3045db590>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 30,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"# remove null prompts\n",
|
||||||
|
"import lance\n",
|
||||||
|
"import pyarrow.compute as pc\n",
|
||||||
|
"\n",
|
||||||
|
"# download s3://eto-public/datasets/diffusiondb/small_10k.lance to this uri\n",
|
||||||
|
"data = lance.dataset(\"~/datasets/rawdata.lance\").to_table()\n",
|
||||||
|
"\n",
|
||||||
|
"# First data processing and full-text-search index\n",
|
||||||
|
"db = lancedb.connect(\"~/datasets/demo\")\n",
|
||||||
|
"tbl = db.create_table(\"diffusiondb\", data.filter(~pc.field(\"prompt\").is_null()))\n",
|
||||||
|
"tbl = tbl.create_fts_index([\"prompt\"])"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## Create / Open LanceDB Table"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"db = lancedb.connect(\"~/datasets/demo\")\n",
|
||||||
|
"tbl = db.open_table(\"diffusiondb\")"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## Create CLIP embedding function for the text"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 3,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from transformers import CLIPModel, CLIPProcessor, CLIPTokenizerFast\n",
|
||||||
|
"\n",
|
||||||
|
"MODEL_ID = \"openai/clip-vit-base-patch32\"\n",
|
||||||
|
"\n",
|
||||||
|
"tokenizer = CLIPTokenizerFast.from_pretrained(MODEL_ID)\n",
|
||||||
|
"model = CLIPModel.from_pretrained(MODEL_ID)\n",
|
||||||
|
"processor = CLIPProcessor.from_pretrained(MODEL_ID)\n",
|
||||||
|
"\n",
|
||||||
|
"def embed_func(query):\n",
|
||||||
|
" inputs = tokenizer([query], padding=True, return_tensors=\"pt\")\n",
|
||||||
|
" text_features = model.get_text_features(**inputs)\n",
|
||||||
|
" return text_features.detach().numpy()[0]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## Search functions for Gradio"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 4,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"def find_image_vectors(query):\n",
|
||||||
|
" emb = embed_func(query)\n",
|
||||||
|
" code = (\n",
|
||||||
|
" \"import lancedb\\n\"\n",
|
||||||
|
" \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
|
||||||
|
" \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
|
||||||
|
" f\"embedding = embed_func('{query}')\\n\"\n",
|
||||||
|
" \"tbl.search(embedding).limit(9).to_df()\"\n",
|
||||||
|
" )\n",
|
||||||
|
" return (_extract(tbl.search(emb).limit(9).to_df()), code)\n",
|
||||||
|
"\n",
|
||||||
|
"def find_image_keywords(query):\n",
|
||||||
|
" code = (\n",
|
||||||
|
" \"import lancedb\\n\"\n",
|
||||||
|
" \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
|
||||||
|
" \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
|
||||||
|
" f\"tbl.search('{query}').limit(9).to_df()\"\n",
|
||||||
|
" )\n",
|
||||||
|
" return (_extract(tbl.search(query).limit(9).to_df()), code)\n",
|
||||||
|
"\n",
|
||||||
|
"def find_image_sql(query):\n",
|
||||||
|
" code = (\n",
|
||||||
|
" \"import lancedb\\n\"\n",
|
||||||
|
" \"import duckdb\\n\"\n",
|
||||||
|
" \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
|
||||||
|
" \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
|
||||||
|
" \"diffusiondb = tbl.to_lance()\\n\"\n",
|
||||||
|
" f\"duckdb.sql('{query}').to_df()\"\n",
|
||||||
|
" ) \n",
|
||||||
|
" diffusiondb = tbl.to_lance()\n",
|
||||||
|
" return (_extract(duckdb.sql(query).to_df()), code)\n",
|
||||||
|
"\n",
|
||||||
|
"def _extract(df):\n",
|
||||||
|
" image_col = \"image\"\n",
|
||||||
|
" return [(PIL.Image.open(io.BytesIO(row[image_col])), row[\"prompt\"]) for _, row in df.iterrows()]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"## Setup Gradio interface"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 28,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"Running on local URL: http://127.0.0.1:7881\n",
|
||||||
|
"\n",
|
||||||
|
"To create a public link, set `share=True` in `launch()`.\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/html": [
|
||||||
|
"<div><iframe src=\"http://127.0.0.1:7881/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
||||||
|
],
|
||||||
|
"text/plain": [
|
||||||
|
"<IPython.core.display.HTML object>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "display_data"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": []
|
||||||
|
},
|
||||||
|
"execution_count": 28,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"import gradio as gr\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"with gr.Blocks() as demo:\n",
|
||||||
|
" with gr.Row():\n",
|
||||||
|
" with gr.Tab(\"Embeddings\"):\n",
|
||||||
|
" vector_query = gr.Textbox(value=\"portraits of a person\", show_label=False)\n",
|
||||||
|
" b1 = gr.Button(\"Submit\")\n",
|
||||||
|
" with gr.Tab(\"Keywords\"):\n",
|
||||||
|
" keyword_query = gr.Textbox(value=\"ninja turtle\", show_label=False)\n",
|
||||||
|
" b2 = gr.Button(\"Submit\")\n",
|
||||||
|
" with gr.Tab(\"SQL\"):\n",
|
||||||
|
" sql_query = gr.Textbox(value=\"SELECT * from diffusiondb WHERE image_nsfw >= 2 LIMIT 9\", show_label=False)\n",
|
||||||
|
" b3 = gr.Button(\"Submit\")\n",
|
||||||
|
" with gr.Row():\n",
|
||||||
|
" code = gr.Code(label=\"Code\", language=\"python\")\n",
|
||||||
|
" with gr.Row():\n",
|
||||||
|
" gallery = gr.Gallery(\n",
|
||||||
|
" label=\"Found images\", show_label=False, elem_id=\"gallery\"\n",
|
||||||
|
" ).style(columns=[3], rows=[3], object_fit=\"contain\", height=\"auto\") \n",
|
||||||
|
" \n",
|
||||||
|
" b1.click(find_image_vectors, inputs=vector_query, outputs=[gallery, code])\n",
|
||||||
|
" b2.click(find_image_keywords, inputs=keyword_query, outputs=[gallery, code])\n",
|
||||||
|
" b3.click(find_image_sql, inputs=sql_query, outputs=[gallery, code])\n",
|
||||||
|
" \n",
|
||||||
|
"demo.launch()"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3 (ipykernel)",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.11.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 1
|
||||||
|
}
|
||||||
@@ -1,11 +1,12 @@
|
|||||||
{
|
{
|
||||||
"cells": [
|
"cells": [
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "42bf01fb",
|
"id": "42bf01fb",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"source": [
|
"source": [
|
||||||
"# We're going to build question and answer bot\n",
|
"# Youtube Transcript Search QA Bot\n",
|
||||||
"\n",
|
"\n",
|
||||||
"This Q&A bot will allow you to search through youtube transcripts using natural language! By going through this notebook, we'll introduce how you can use LanceDB to store and manage your data easily."
|
"This Q&A bot will allow you to search through youtube transcripts using natural language! By going through this notebook, we'll introduce how you can use LanceDB to store and manage your data easily."
|
||||||
]
|
]
|
||||||
@@ -35,6 +36,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "22e570f4",
|
"id": "22e570f4",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -87,6 +89,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "5ac2b6a3",
|
"id": "5ac2b6a3",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -181,6 +184,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "3044e0b0",
|
"id": "3044e0b0",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -209,6 +213,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "db586267",
|
"id": "db586267",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -229,6 +234,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "2106b5bb",
|
"id": "2106b5bb",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -338,6 +344,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "53e4bff1",
|
"id": "53e4bff1",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -371,6 +378,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "8ef34fca",
|
"id": "8ef34fca",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -459,6 +467,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "23afc2f9",
|
"id": "23afc2f9",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -541,6 +550,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "28705959",
|
"id": "28705959",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -571,6 +581,7 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"id": "559a095b",
|
"id": "559a095b",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
# LanceDB Python API Reference
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```shell
|
|
||||||
pip install lancedb
|
|
||||||
```
|
|
||||||
|
|
||||||
## ::: lancedb
|
|
||||||
## ::: lancedb.db
|
|
||||||
## ::: lancedb.table
|
|
||||||
## ::: lancedb.query
|
|
||||||
## ::: lancedb.embeddings
|
|
||||||
## ::: lancedb.context
|
|
||||||
43
docs/src/python/python.md
Normal file
43
docs/src/python/python.md
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# LanceDB Python API Reference
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pip install lancedb
|
||||||
|
```
|
||||||
|
|
||||||
|
## Connection
|
||||||
|
|
||||||
|
::: lancedb.connect
|
||||||
|
|
||||||
|
::: lancedb.LanceDBConnection
|
||||||
|
|
||||||
|
## Table
|
||||||
|
|
||||||
|
::: lancedb.table.LanceTable
|
||||||
|
|
||||||
|
## Querying
|
||||||
|
|
||||||
|
::: lancedb.query.LanceQueryBuilder
|
||||||
|
|
||||||
|
::: lancedb.query.LanceFtsQueryBuilder
|
||||||
|
|
||||||
|
## Embeddings
|
||||||
|
|
||||||
|
::: lancedb.embeddings.with_embeddings
|
||||||
|
|
||||||
|
::: lancedb.embeddings.EmbeddingFunction
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
::: lancedb.context.contextualize
|
||||||
|
|
||||||
|
::: lancedb.context.Contextualizer
|
||||||
|
|
||||||
|
## Full text search
|
||||||
|
|
||||||
|
::: lancedb.fts.create_index
|
||||||
|
|
||||||
|
::: lancedb.fts.populate_index
|
||||||
|
|
||||||
|
::: lancedb.fts.search_index
|
||||||
85
docs/src/search.md
Normal file
85
docs/src/search.md
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
# Vector Search
|
||||||
|
|
||||||
|
`Vector Search` finds the nearest vectors from the database.
|
||||||
|
In a recommendation system or search engine, you can find similar products from
|
||||||
|
the one you searched.
|
||||||
|
In LLM and other AI applications,
|
||||||
|
each data point can be [presented by the embeddings generated from some models](embedding.md),
|
||||||
|
it returns the most relevant features.
|
||||||
|
|
||||||
|
A search in high-dimensional vector space, is to find `K-Nearest-Neighbors (KNN)` of the query vector.
|
||||||
|
|
||||||
|
## Metric
|
||||||
|
|
||||||
|
In LanceDB, a `Metric` is the way to describe the distance between a pair of vectors.
|
||||||
|
Currently, we support the following metrics:
|
||||||
|
|
||||||
|
| Metric | Description |
|
||||||
|
| ----------- | ------------------------------------ |
|
||||||
|
| `L2` | [Euclidean / L2 distance](https://en.wikipedia.org/wiki/Euclidean_distance) |
|
||||||
|
| `Cosine` | [Cosine Similarity](https://en.wikipedia.org/wiki/Cosine_similarity)|
|
||||||
|
|
||||||
|
|
||||||
|
## Search
|
||||||
|
|
||||||
|
### Flat Search
|
||||||
|
|
||||||
|
|
||||||
|
If there is no [vector index is created](ann_indexes.md), LanceDB will just brute-force scan
|
||||||
|
the vector column and compute the distance.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
```python
|
||||||
|
import lancedb
|
||||||
|
db = lancedb.connect("data/sample-lancedb")
|
||||||
|
|
||||||
|
tbl = db.open_table("my_vectors")
|
||||||
|
|
||||||
|
df = tbl.search(np.random.random((768)))
|
||||||
|
.limit(10)
|
||||||
|
.to_df()
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const vectordb = require('vectordb')
|
||||||
|
const db = await vectordb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
tbl = db.open_table("my_vectors")
|
||||||
|
|
||||||
|
const results = await tbl.search(Array(768))
|
||||||
|
.limit(20)
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
|
By default, `l2` will be used as `Metric` type. You can customize the metric type
|
||||||
|
as well.
|
||||||
|
|
||||||
|
=== "Python"
|
||||||
|
|
||||||
|
```python
|
||||||
|
df = tbl.search(np.random.random((768)))
|
||||||
|
.metric("cosine")
|
||||||
|
.limit(10)
|
||||||
|
.to_df()
|
||||||
|
```
|
||||||
|
|
||||||
|
=== "JavaScript"
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const vectordb = require('vectordb')
|
||||||
|
const db = await vectordb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
tbl = db.open_table("my_vectors")
|
||||||
|
|
||||||
|
const results = await tbl.search(Array(768))
|
||||||
|
.metric("cosine")
|
||||||
|
.limit(20)
|
||||||
|
.execute()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Search with Vector Index.
|
||||||
|
|
||||||
|
See [ANN Index](ann_indexes.md) for more details.
|
||||||
6
docs/src/styles/global.css
Normal file
6
docs/src/styles/global.css
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
:root {
|
||||||
|
--md-primary-fg-color: #625eff;
|
||||||
|
--md-primary-fg-color--dark: #4338ca;
|
||||||
|
--md-text-font: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
|
||||||
|
--md-code-font: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||||
|
}
|
||||||
64
node/CHANGELOG.md
Normal file
64
node/CHANGELOG.md
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.1.5] - 2023-06-00
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Support for macOS X86
|
||||||
|
|
||||||
|
## [0.1.4] - 2023-06-03
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Select / Project query API
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Deprecated created_index in favor of createIndex
|
||||||
|
|
||||||
|
## [0.1.3] - 2023-06-01
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Support S3 and Google Cloud Storage
|
||||||
|
- Embedding functions support
|
||||||
|
- OpenAI embedding function
|
||||||
|
|
||||||
|
## [0.1.2] - 2023-05-27
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Append records API
|
||||||
|
- Extra query params to to nodejs client
|
||||||
|
- Create_index API
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- bugfix: string columns should be converted to Utf8Array (#94)
|
||||||
|
|
||||||
|
## [0.1.1] - 2023-05-16
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- create_table API
|
||||||
|
- limit parameter for queries
|
||||||
|
- Typescript / JavaScript examples
|
||||||
|
- Linux support
|
||||||
|
|
||||||
|
## [0.1.0] - 2023-05-16
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- Initial JavaScript / Node.js library for LanceDB
|
||||||
|
- Read-only api to query LanceDB datasets
|
||||||
|
- Supports macOS arm only
|
||||||
|
|
||||||
|
## [pre-0.1.0]
|
||||||
|
|
||||||
|
- Various prototypes / test builds
|
||||||
|
|
||||||
@@ -41,3 +41,9 @@ To run the linter and have it automatically fix all errors
|
|||||||
```bash
|
```bash
|
||||||
npm run lint -- --fix
|
npm run lint -- --fix
|
||||||
```
|
```
|
||||||
|
|
||||||
|
To build documentation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npx typedoc --plugin typedoc-plugin-markdown --out ../docs/src/javascript src/index.ts
|
||||||
|
```
|
||||||
41
node/examples/js-openai/index.js
Normal file
41
node/examples/js-openai/index.js
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
async function example () {
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
// You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
|
||||||
|
const apiKey = process.env.OPENAI_API_KEY
|
||||||
|
// The embedding function will create embeddings for the 'text' column(text in this case)
|
||||||
|
const embedding = new lancedb.OpenAIEmbeddingFunction('text', apiKey)
|
||||||
|
|
||||||
|
const db = await lancedb.connect('data/sample-lancedb')
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ id: 1, text: 'Black T-Shirt', price: 10 },
|
||||||
|
{ id: 2, text: 'Leather Jacket', price: 50 }
|
||||||
|
]
|
||||||
|
|
||||||
|
const table = await db.createTable('vectors', data, embedding)
|
||||||
|
console.log(await db.tableNames())
|
||||||
|
|
||||||
|
const results = await table
|
||||||
|
.search('keeps me warm')
|
||||||
|
.limit(1)
|
||||||
|
.execute()
|
||||||
|
console.log(results[0].text)
|
||||||
|
}
|
||||||
|
|
||||||
|
example().then(_ => { console.log('All done!') })
|
||||||
15
node/examples/js-openai/package.json
Normal file
15
node/examples/js-openai/package.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"name": "vectordb-example-js-openai",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "Lance Devs",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"vectordb": "file:../..",
|
||||||
|
"openai": "^3.2.1"
|
||||||
|
}
|
||||||
|
}
|
||||||
122
node/examples/js-youtube-transcripts/index.js
Normal file
122
node/examples/js-youtube-transcripts/index.js
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const lancedb = require('vectordb')
|
||||||
|
const fs = require('fs/promises')
|
||||||
|
const readline = require('readline/promises')
|
||||||
|
const { stdin: input, stdout: output } = require('process')
|
||||||
|
const { Configuration, OpenAIApi } = require('openai')
|
||||||
|
|
||||||
|
// Download file from XYZ
|
||||||
|
const INPUT_FILE_NAME = 'data/youtube-transcriptions_sample.jsonl';
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
// You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
|
||||||
|
const apiKey = process.env.OPENAI_API_KEY
|
||||||
|
// The embedding function will create embeddings for the 'context' column
|
||||||
|
const embedFunction = new lancedb.OpenAIEmbeddingFunction('context', apiKey)
|
||||||
|
|
||||||
|
// Connects to LanceDB
|
||||||
|
const db = await lancedb.connect('data/youtube-lancedb')
|
||||||
|
|
||||||
|
// Open the vectors table or create one if it does not exist
|
||||||
|
let tbl
|
||||||
|
if ((await db.tableNames()).includes('vectors')) {
|
||||||
|
tbl = await db.openTable('vectors', embedFunction)
|
||||||
|
} else {
|
||||||
|
tbl = await createEmbeddingsTable(db, embedFunction)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use OpenAI Completion API to generate and answer based on the context that LanceDB provides
|
||||||
|
const configuration = new Configuration({ apiKey })
|
||||||
|
const openai = new OpenAIApi(configuration)
|
||||||
|
const rl = readline.createInterface({ input, output })
|
||||||
|
try {
|
||||||
|
while (true) {
|
||||||
|
const query = await rl.question('Prompt: ')
|
||||||
|
const results = await tbl
|
||||||
|
.search(query)
|
||||||
|
.select(['title', 'text', 'context'])
|
||||||
|
.limit(3)
|
||||||
|
.execute()
|
||||||
|
|
||||||
|
// console.table(results)
|
||||||
|
|
||||||
|
const response = await openai.createCompletion({
|
||||||
|
model: 'text-davinci-003',
|
||||||
|
prompt: createPrompt(query, results),
|
||||||
|
max_tokens: 400,
|
||||||
|
temperature: 0,
|
||||||
|
top_p: 1,
|
||||||
|
frequency_penalty: 0,
|
||||||
|
presence_penalty: 0
|
||||||
|
})
|
||||||
|
console.log(response.data.choices[0].text)
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.log('Error: ', err)
|
||||||
|
} finally {
|
||||||
|
rl.close()
|
||||||
|
}
|
||||||
|
process.exit(1)
|
||||||
|
})()
|
||||||
|
|
||||||
|
async function createEmbeddingsTable (db, embedFunction) {
|
||||||
|
console.log(`Creating embeddings from ${INPUT_FILE_NAME}`)
|
||||||
|
// read the input file into a JSON array, skipping empty lines
|
||||||
|
const lines = (await fs.readFile(INPUT_FILE_NAME, 'utf-8'))
|
||||||
|
.toString()
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.length > 0)
|
||||||
|
.map(line => JSON.parse(line))
|
||||||
|
|
||||||
|
const data = contextualize(lines, 20, 'video_id')
|
||||||
|
return await db.createTable('vectors', data, embedFunction)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Each transcript has a small text column, we include previous transcripts in order to
|
||||||
|
// have more context information when creating embeddings
|
||||||
|
function contextualize (rows, contextSize, groupColumn) {
|
||||||
|
const grouped = []
|
||||||
|
rows.forEach(row => {
|
||||||
|
if (!grouped[row[groupColumn]]) {
|
||||||
|
grouped[row[groupColumn]] = []
|
||||||
|
}
|
||||||
|
grouped[row[groupColumn]].push(row)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = []
|
||||||
|
Object.keys(grouped).forEach(key => {
|
||||||
|
for (let i = 0; i < grouped[key].length; i++) {
|
||||||
|
const start = i - contextSize > 0 ? i - contextSize : 0
|
||||||
|
grouped[key][i].context = grouped[key].slice(start, i + 1).map(r => r.text).join(' ')
|
||||||
|
}
|
||||||
|
data.push(...grouped[key])
|
||||||
|
})
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a prompt by aggregating all relevant contexts
|
||||||
|
function createPrompt (query, context) {
|
||||||
|
let prompt =
|
||||||
|
'Answer the question based on the context below.\n\n' +
|
||||||
|
'Context:\n'
|
||||||
|
|
||||||
|
// need to make sure our prompt is not larger than max size
|
||||||
|
prompt = prompt + context.map(c => c.context).join('\n\n---\n\n').substring(0, 3750)
|
||||||
|
prompt = prompt + `\n\nQuestion: ${query}\nAnswer:`
|
||||||
|
return prompt
|
||||||
|
}
|
||||||
15
node/examples/js-youtube-transcripts/package.json
Normal file
15
node/examples/js-youtube-transcripts/package.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"name": "vectordb-example-js-openai",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"author": "Lance Devs",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"vectordb": "file:../..",
|
||||||
|
"openai": "^3.2.1"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,6 +9,6 @@
|
|||||||
"author": "Lance Devs",
|
"author": "Lance Devs",
|
||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"vectordb": "^0.1.0"
|
"vectordb": "file:../.."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,6 +17,6 @@
|
|||||||
"typescript": "*"
|
"typescript": "*"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"vectordb": "^0.1.0"
|
"vectordb": "file:../.."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
import lancedb
|
|
||||||
|
|
||||||
uri = "sample-lancedb"
|
|
||||||
db = lancedb.connect(uri)
|
|
||||||
table = db.create_table("my_table",
|
|
||||||
data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
|
||||||
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
|
|
||||||
|
|
||||||
879
node/package-lock.json
generated
879
node/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "vectordb",
|
"name": "vectordb",
|
||||||
"version": "0.1.1",
|
"version": "0.1.9",
|
||||||
"description": " Serverless, low-latency vector database for AI applications",
|
"description": " Serverless, low-latency vector database for AI applications",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -9,7 +9,8 @@
|
|||||||
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json-render-diagnostics",
|
"build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json-render-diagnostics",
|
||||||
"build-release": "npm run build -- --release",
|
"build-release": "npm run build -- --release",
|
||||||
"test": "mocha -recursive dist/test",
|
"test": "mocha -recursive dist/test",
|
||||||
"lint": "eslint src --ext .js,.ts"
|
"lint": "eslint src --ext .js,.ts",
|
||||||
|
"clean": "rm -rf node_modules *.node dist/"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -27,19 +28,24 @@
|
|||||||
"@types/chai": "^4.3.4",
|
"@types/chai": "^4.3.4",
|
||||||
"@types/mocha": "^10.0.1",
|
"@types/mocha": "^10.0.1",
|
||||||
"@types/node": "^18.16.2",
|
"@types/node": "^18.16.2",
|
||||||
|
"@types/sinon": "^10.0.15",
|
||||||
"@types/temp": "^0.9.1",
|
"@types/temp": "^0.9.1",
|
||||||
"@typescript-eslint/eslint-plugin": "^5.59.1",
|
"@typescript-eslint/eslint-plugin": "^5.59.1",
|
||||||
"cargo-cp-artifact": "^0.1",
|
"cargo-cp-artifact": "^0.1",
|
||||||
"chai": "^4.3.7",
|
"chai": "^4.3.7",
|
||||||
"eslint": "^8.39.0",
|
"eslint": "^8.39.0",
|
||||||
"eslint-config-standard-with-typescript": "^34.0.1",
|
"eslint-config-standard-with-typescript": "^34.0.1",
|
||||||
"eslint-plugin-import": "^2.27.5",
|
"eslint-plugin-import": "^2.26.0",
|
||||||
"eslint-plugin-n": "^15.7.0",
|
"eslint-plugin-n": "^15.7.0",
|
||||||
"eslint-plugin-promise": "^6.1.1",
|
"eslint-plugin-promise": "^6.1.1",
|
||||||
"mocha": "^10.2.0",
|
"mocha": "^10.2.0",
|
||||||
|
"openai": "^3.2.1",
|
||||||
|
"sinon": "^15.1.0",
|
||||||
"temp": "^0.9.4",
|
"temp": "^0.9.4",
|
||||||
"ts-node": "^10.9.1",
|
"ts-node": "^10.9.1",
|
||||||
"ts-node-dev": "^2.0.0",
|
"ts-node-dev": "^2.0.0",
|
||||||
|
"typedoc": "^0.24.7",
|
||||||
|
"typedoc-plugin-markdown": "^3.15.3",
|
||||||
"typescript": "*"
|
"typescript": "*"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
|||||||
@@ -15,15 +15,16 @@
|
|||||||
import {
|
import {
|
||||||
Field,
|
Field,
|
||||||
Float32,
|
Float32,
|
||||||
List,
|
List, type ListBuilder,
|
||||||
makeBuilder,
|
makeBuilder,
|
||||||
RecordBatchFileWriter,
|
RecordBatchFileWriter,
|
||||||
Table, Utf8,
|
Table, Utf8,
|
||||||
type Vector,
|
type Vector,
|
||||||
vectorFromArray
|
vectorFromArray
|
||||||
} from 'apache-arrow'
|
} from 'apache-arrow'
|
||||||
|
import { type EmbeddingFunction } from './index'
|
||||||
|
|
||||||
export function convertToTable (data: Array<Record<string, unknown>>): Table {
|
export async function convertToTable<T> (data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table> {
|
||||||
if (data.length === 0) {
|
if (data.length === 0) {
|
||||||
throw new Error('At least one record needs to be provided')
|
throw new Error('At least one record needs to be provided')
|
||||||
}
|
}
|
||||||
@@ -33,11 +34,7 @@ export function convertToTable (data: Array<Record<string, unknown>>): Table {
|
|||||||
|
|
||||||
for (const columnsKey of columns) {
|
for (const columnsKey of columns) {
|
||||||
if (columnsKey === 'vector') {
|
if (columnsKey === 'vector') {
|
||||||
const children = new Field<Float32>('item', new Float32())
|
const listBuilder = newVectorListBuilder()
|
||||||
const list = new List(children)
|
|
||||||
const listBuilder = makeBuilder({
|
|
||||||
type: list
|
|
||||||
})
|
|
||||||
const vectorSize = (data[0].vector as any[]).length
|
const vectorSize = (data[0].vector as any[]).length
|
||||||
for (const datum of data) {
|
for (const datum of data) {
|
||||||
if ((datum[columnsKey] as any[]).length !== vectorSize) {
|
if ((datum[columnsKey] as any[]).length !== vectorSize) {
|
||||||
@@ -52,6 +49,14 @@ export function convertToTable (data: Array<Record<string, unknown>>): Table {
|
|||||||
for (const datum of data) {
|
for (const datum of data) {
|
||||||
values.push(datum[columnsKey])
|
values.push(datum[columnsKey])
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (columnsKey === embeddings?.sourceColumn) {
|
||||||
|
const vectors = await embeddings.embed(values as T[])
|
||||||
|
const listBuilder = newVectorListBuilder()
|
||||||
|
vectors.map(v => listBuilder.append(v))
|
||||||
|
records.vector = listBuilder.finish().toVector()
|
||||||
|
}
|
||||||
|
|
||||||
if (typeof values[0] === 'string') {
|
if (typeof values[0] === 'string') {
|
||||||
// `vectorFromArray` converts strings into dictionary vectors, forcing it back to a string column
|
// `vectorFromArray` converts strings into dictionary vectors, forcing it back to a string column
|
||||||
records[columnsKey] = vectorFromArray(values, new Utf8())
|
records[columnsKey] = vectorFromArray(values, new Utf8())
|
||||||
@@ -64,8 +69,17 @@ export function convertToTable (data: Array<Record<string, unknown>>): Table {
|
|||||||
return new Table(records)
|
return new Table(records)
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fromRecordsToBuffer (data: Array<Record<string, unknown>>): Promise<Buffer> {
|
// Creates a new Arrow ListBuilder that stores a Vector column
|
||||||
const table = convertToTable(data)
|
function newVectorListBuilder (): ListBuilder<Float32, any> {
|
||||||
|
const children = new Field<Float32>('item', new Float32())
|
||||||
|
const list = new List(children)
|
||||||
|
return makeBuilder({
|
||||||
|
type: list
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fromRecordsToBuffer<T> (data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Buffer> {
|
||||||
|
const table = await convertToTable(data, embeddings)
|
||||||
const writer = RecordBatchFileWriter.writeAll(table)
|
const writer = RecordBatchFileWriter.writeAll(table)
|
||||||
return Buffer.from(await writer.toUint8Array())
|
return Buffer.from(await writer.toUint8Array())
|
||||||
}
|
}
|
||||||
|
|||||||
28
node/src/embedding/embedding_function.ts
Normal file
28
node/src/embedding/embedding_function.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An embedding function that automatically creates vector representation for a given column.
|
||||||
|
*/
|
||||||
|
export interface EmbeddingFunction<T> {
|
||||||
|
/**
|
||||||
|
* The name of the column that will be used as input for the Embedding Function.
|
||||||
|
*/
|
||||||
|
sourceColumn: string
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a vector representation for the given values.
|
||||||
|
*/
|
||||||
|
embed: (data: T[]) => Promise<number[][]>
|
||||||
|
}
|
||||||
51
node/src/embedding/openai.ts
Normal file
51
node/src/embedding/openai.ts
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import { type EmbeddingFunction } from '../index'
|
||||||
|
|
||||||
|
export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
|
||||||
|
private readonly _openai: any
|
||||||
|
private readonly _modelName: string
|
||||||
|
|
||||||
|
constructor (sourceColumn: string, openAIKey: string, modelName: string = 'text-embedding-ada-002') {
|
||||||
|
let openai
|
||||||
|
try {
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
|
openai = require('openai')
|
||||||
|
} catch {
|
||||||
|
throw new Error('please install openai using npm install openai')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.sourceColumn = sourceColumn
|
||||||
|
const configuration = new openai.Configuration({
|
||||||
|
apiKey: openAIKey
|
||||||
|
})
|
||||||
|
this._openai = new openai.OpenAIApi(configuration)
|
||||||
|
this._modelName = modelName
|
||||||
|
}
|
||||||
|
|
||||||
|
async embed (data: string[]): Promise<number[][]> {
|
||||||
|
const response = await this._openai.createEmbedding({
|
||||||
|
model: this._modelName,
|
||||||
|
input: data
|
||||||
|
})
|
||||||
|
const embeddings: number[][] = []
|
||||||
|
for (let i = 0; i < response.data.data.length; i++) {
|
||||||
|
embeddings.push(response.data.data[i].embedding as number[])
|
||||||
|
}
|
||||||
|
return embeddings
|
||||||
|
}
|
||||||
|
|
||||||
|
sourceColumn: string
|
||||||
|
}
|
||||||
@@ -19,16 +19,21 @@ import {
|
|||||||
Vector
|
Vector
|
||||||
} from 'apache-arrow'
|
} from 'apache-arrow'
|
||||||
import { fromRecordsToBuffer } from './arrow'
|
import { fromRecordsToBuffer } from './arrow'
|
||||||
|
import type { EmbeddingFunction } from './embedding/embedding_function'
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
const { databaseNew, databaseTableNames, databaseOpenTable, tableCreate, tableSearch, tableAdd, tableCreateVectorIndex } = require('../native.js')
|
const { databaseNew, databaseTableNames, databaseOpenTable, databaseDropTable, tableCreate, tableSearch, tableAdd, tableCreateVectorIndex, tableCountRows, tableDelete } = require('../native.js')
|
||||||
|
|
||||||
|
export type { EmbeddingFunction }
|
||||||
|
export { OpenAIEmbeddingFunction } from './embedding/openai'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Connect to a LanceDB instance at the given URI
|
* Connect to a LanceDB instance at the given URI
|
||||||
* @param uri The uri of the database.
|
* @param uri The uri of the database.
|
||||||
*/
|
*/
|
||||||
export async function connect (uri: string): Promise<Connection> {
|
export async function connect (uri: string): Promise<Connection> {
|
||||||
return new Connection(uri)
|
const db = await databaseNew(uri)
|
||||||
|
return new Connection(db, uri)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -38,9 +43,9 @@ export class Connection {
|
|||||||
private readonly _uri: string
|
private readonly _uri: string
|
||||||
private readonly _db: any
|
private readonly _db: any
|
||||||
|
|
||||||
constructor (uri: string) {
|
constructor (db: any, uri: string) {
|
||||||
this._uri = uri
|
this._uri = uri
|
||||||
this._db = databaseNew(uri)
|
this._db = db
|
||||||
}
|
}
|
||||||
|
|
||||||
get uri (): string {
|
get uri (): string {
|
||||||
@@ -55,17 +60,50 @@ export class Connection {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Open a table in the database.
|
* Open a table in the database.
|
||||||
* @param name The name of the table.
|
*
|
||||||
*/
|
* @param name The name of the table.
|
||||||
async openTable (name: string): Promise<Table> {
|
*/
|
||||||
|
async openTable (name: string): Promise<Table>
|
||||||
|
/**
|
||||||
|
* Open a table in the database.
|
||||||
|
*
|
||||||
|
* @param name The name of the table.
|
||||||
|
* @param embeddings An embedding function to use on this Table
|
||||||
|
*/
|
||||||
|
async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
const tbl = await databaseOpenTable.call(this._db, name)
|
const tbl = await databaseOpenTable.call(this._db, name)
|
||||||
return new Table(tbl, name)
|
if (embeddings !== undefined) {
|
||||||
|
return new Table(tbl, name, embeddings)
|
||||||
|
} else {
|
||||||
|
return new Table(tbl, name)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table> {
|
/**
|
||||||
await tableCreate.call(this._db, name, await fromRecordsToBuffer(data))
|
* Creates a new Table and initialize it with new data.
|
||||||
return await this.openTable(name)
|
*
|
||||||
|
* @param name The name of the table.
|
||||||
|
* @param data Non-empty Array of Records to be inserted into the Table
|
||||||
|
*/
|
||||||
|
|
||||||
|
async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table>
|
||||||
|
/**
|
||||||
|
* Creates a new Table and initialize it with new data.
|
||||||
|
*
|
||||||
|
* @param name The name of the table.
|
||||||
|
* @param data Non-empty Array of Records to be inserted into the Table
|
||||||
|
* @param embeddings An embedding function to use on this Table
|
||||||
|
*/
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
|
||||||
|
async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
|
||||||
|
const tbl = await tableCreate.call(this._db, name, await fromRecordsToBuffer(data, embeddings))
|
||||||
|
if (embeddings !== undefined) {
|
||||||
|
return new Table(tbl, name, embeddings)
|
||||||
|
} else {
|
||||||
|
return new Table(tbl, name)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async createTableArrow (name: string, table: ArrowTable): Promise<Table> {
|
async createTableArrow (name: string, table: ArrowTable): Promise<Table> {
|
||||||
@@ -73,18 +111,32 @@ export class Connection {
|
|||||||
await tableCreate.call(this._db, name, Buffer.from(await writer.toUint8Array()))
|
await tableCreate.call(this._db, name, Buffer.from(await writer.toUint8Array()))
|
||||||
return await this.openTable(name)
|
return await this.openTable(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop an existing table.
|
||||||
|
* @param name The name of the table to drop.
|
||||||
|
*/
|
||||||
|
async dropTable (name: string): Promise<void> {
|
||||||
|
await databaseDropTable.call(this._db, name)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
export class Table<T = number[]> {
|
||||||
* A table in a LanceDB database.
|
|
||||||
*/
|
|
||||||
export class Table {
|
|
||||||
private readonly _tbl: any
|
private readonly _tbl: any
|
||||||
private readonly _name: string
|
private readonly _name: string
|
||||||
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
|
||||||
constructor (tbl: any, name: string) {
|
constructor (tbl: any, name: string)
|
||||||
|
/**
|
||||||
|
* @param tbl
|
||||||
|
* @param name
|
||||||
|
* @param embeddings An embedding function to use when interacting with this table
|
||||||
|
*/
|
||||||
|
constructor (tbl: any, name: string, embeddings: EmbeddingFunction<T>)
|
||||||
|
constructor (tbl: any, name: string, embeddings?: EmbeddingFunction<T>) {
|
||||||
this._tbl = tbl
|
this._tbl = tbl
|
||||||
this._name = name
|
this._name = name
|
||||||
|
this._embeddings = embeddings
|
||||||
}
|
}
|
||||||
|
|
||||||
get name (): string {
|
get name (): string {
|
||||||
@@ -92,11 +144,11 @@ export class Table {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a search query to find the nearest neighbors of the given query vector.
|
* Creates a search query to find the nearest neighbors of the given search term
|
||||||
* @param queryVector The query vector.
|
* @param query The query search term
|
||||||
*/
|
*/
|
||||||
search (queryVector: number[]): Query {
|
search (query: T): Query<T> {
|
||||||
return new Query(this._tbl, queryVector)
|
return new Query(this._tbl, query, this._embeddings)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -106,7 +158,7 @@ export class Table {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
async add (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data), WriteMode.Append.toString())
|
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString())
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -116,12 +168,40 @@ export class Table {
|
|||||||
* @return The number of rows added to the table
|
* @return The number of rows added to the table
|
||||||
*/
|
*/
|
||||||
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
|
||||||
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data), WriteMode.Overwrite.toString())
|
return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
|
||||||
}
|
}
|
||||||
|
|
||||||
async create_index (indexParams: VectorIndexParams): Promise<any> {
|
/**
|
||||||
|
* Create an ANN index on this Table vector index.
|
||||||
|
*
|
||||||
|
* @param indexParams The parameters of this Index, @see VectorIndexParams.
|
||||||
|
*/
|
||||||
|
async createIndex (indexParams: VectorIndexParams): Promise<any> {
|
||||||
return tableCreateVectorIndex.call(this._tbl, indexParams)
|
return tableCreateVectorIndex.call(this._tbl, indexParams)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @deprecated Use [Table.createIndex]
|
||||||
|
*/
|
||||||
|
async create_index (indexParams: VectorIndexParams): Promise<any> {
|
||||||
|
return await this.createIndex(indexParams)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the number of rows in this table.
|
||||||
|
*/
|
||||||
|
async countRows (): Promise<number> {
|
||||||
|
return tableCountRows.call(this._tbl)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete rows from this table.
|
||||||
|
*
|
||||||
|
* @param filter The filter to be applied to this table.
|
||||||
|
*/
|
||||||
|
async delete (filter: string): Promise<void> {
|
||||||
|
return tableDelete.call(this._tbl, filter)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
interface IvfPQIndexConfig {
|
interface IvfPQIndexConfig {
|
||||||
@@ -177,32 +257,35 @@ export type VectorIndexParams = IvfPQIndexConfig
|
|||||||
/**
|
/**
|
||||||
* A builder for nearest neighbor queries for LanceDB.
|
* A builder for nearest neighbor queries for LanceDB.
|
||||||
*/
|
*/
|
||||||
export class Query {
|
export class Query<T = number[]> {
|
||||||
private readonly _tbl: any
|
private readonly _tbl: any
|
||||||
private readonly _queryVector: number[]
|
private readonly _query: T
|
||||||
|
private _queryVector?: number[]
|
||||||
private _limit: number
|
private _limit: number
|
||||||
private _refineFactor?: number
|
private _refineFactor?: number
|
||||||
private _nprobes: number
|
private _nprobes: number
|
||||||
private readonly _columns?: string[]
|
private _select?: string[]
|
||||||
private _filter?: string
|
private _filter?: string
|
||||||
private _metricType?: MetricType
|
private _metricType?: MetricType
|
||||||
|
private readonly _embeddings?: EmbeddingFunction<T>
|
||||||
|
|
||||||
constructor (tbl: any, queryVector: number[]) {
|
constructor (tbl: any, query: T, embeddings?: EmbeddingFunction<T>) {
|
||||||
this._tbl = tbl
|
this._tbl = tbl
|
||||||
this._queryVector = queryVector
|
this._query = query
|
||||||
this._limit = 10
|
this._limit = 10
|
||||||
this._nprobes = 20
|
this._nprobes = 20
|
||||||
this._refineFactor = undefined
|
this._refineFactor = undefined
|
||||||
this._columns = undefined
|
this._select = undefined
|
||||||
this._filter = undefined
|
this._filter = undefined
|
||||||
this._metricType = undefined
|
this._metricType = undefined
|
||||||
|
this._embeddings = embeddings
|
||||||
}
|
}
|
||||||
|
|
||||||
/***
|
/***
|
||||||
* Sets the number of results that will be returned
|
* Sets the number of results that will be returned
|
||||||
* @param value number of results
|
* @param value number of results
|
||||||
*/
|
*/
|
||||||
limit (value: number): Query {
|
limit (value: number): Query<T> {
|
||||||
this._limit = value
|
this._limit = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
@@ -211,7 +294,7 @@ export class Query {
|
|||||||
* Refine the results by reading extra elements and re-ranking them in memory.
|
* Refine the results by reading extra elements and re-ranking them in memory.
|
||||||
* @param value refine factor to use in this query.
|
* @param value refine factor to use in this query.
|
||||||
*/
|
*/
|
||||||
refineFactor (value: number): Query {
|
refineFactor (value: number): Query<T> {
|
||||||
this._refineFactor = value
|
this._refineFactor = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
@@ -220,7 +303,7 @@ export class Query {
|
|||||||
* The number of probes used. A higher number makes search more accurate but also slower.
|
* The number of probes used. A higher number makes search more accurate but also slower.
|
||||||
* @param value The number of probes used.
|
* @param value The number of probes used.
|
||||||
*/
|
*/
|
||||||
nprobes (value: number): Query {
|
nprobes (value: number): Query<T> {
|
||||||
this._nprobes = value
|
this._nprobes = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
@@ -229,16 +312,27 @@ export class Query {
|
|||||||
* A filter statement to be applied to this query.
|
* A filter statement to be applied to this query.
|
||||||
* @param value A filter in the same format used by a sql WHERE clause.
|
* @param value A filter in the same format used by a sql WHERE clause.
|
||||||
*/
|
*/
|
||||||
filter (value: string): Query {
|
filter (value: string): Query<T> {
|
||||||
this._filter = value
|
this._filter = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
where = this.filter
|
||||||
|
|
||||||
|
/** Return only the specified columns.
|
||||||
|
*
|
||||||
|
* @param value Only select the specified columns. If not specified, all columns will be returned.
|
||||||
|
*/
|
||||||
|
select (value: string[]): Query<T> {
|
||||||
|
this._select = value
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The MetricType used for this Query.
|
* The MetricType used for this Query.
|
||||||
* @param value The metric to the. @see MetricType for the different options
|
* @param value The metric to the. @see MetricType for the different options
|
||||||
*/
|
*/
|
||||||
metricType (value: MetricType): Query {
|
metricType (value: MetricType): Query<T> {
|
||||||
this._metricType = value
|
this._metricType = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
@@ -247,6 +341,12 @@ export class Query {
|
|||||||
* Execute the query and return the results as an Array of Objects
|
* Execute the query and return the results as an Array of Objects
|
||||||
*/
|
*/
|
||||||
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
async execute<T = Record<string, unknown>> (): Promise<T[]> {
|
||||||
|
if (this._embeddings !== undefined) {
|
||||||
|
this._queryVector = (await this._embeddings.embed([this._query]))[0]
|
||||||
|
} else {
|
||||||
|
this._queryVector = this._query as number[]
|
||||||
|
}
|
||||||
|
|
||||||
const buffer = await tableSearch.call(this._tbl, this)
|
const buffer = await tableSearch.call(this._tbl, this)
|
||||||
const data = tableFromIPC(buffer)
|
const data = tableFromIPC(buffer)
|
||||||
return data.toArray().map((entry: Record<string, unknown>) => {
|
return data.toArray().map((entry: Record<string, unknown>) => {
|
||||||
|
|||||||
50
node/src/test/embedding/openai.ts
Normal file
50
node/src/test/embedding/openai.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
import { describe } from 'mocha'
|
||||||
|
import { assert } from 'chai'
|
||||||
|
|
||||||
|
import { OpenAIEmbeddingFunction } from '../../embedding/openai'
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
|
const { OpenAIApi } = require('openai')
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||||
|
const { stub } = require('sinon')
|
||||||
|
|
||||||
|
describe('OpenAPIEmbeddings', function () {
|
||||||
|
const stubValue = {
|
||||||
|
data: {
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
embedding: Array(1536).fill(1.0)
|
||||||
|
},
|
||||||
|
{
|
||||||
|
embedding: Array(1536).fill(2.0)
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('#embed', function () {
|
||||||
|
it('should create vector embeddings', async function () {
|
||||||
|
const openAIStub = stub(OpenAIApi.prototype, 'createEmbedding').returns(stubValue)
|
||||||
|
const f = new OpenAIEmbeddingFunction('text', 'sk-key')
|
||||||
|
const vectors = await f.embed(['abc', 'def'])
|
||||||
|
assert.isTrue(openAIStub.calledOnce)
|
||||||
|
assert.equal(vectors.length, 2)
|
||||||
|
assert.deepEqual(vectors[0], stubValue.data.data[0].embedding)
|
||||||
|
assert.deepEqual(vectors[1], stubValue.data.data[1].embedding)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
52
node/src/test/io.ts
Normal file
52
node/src/test/io.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
// Copyright 2023 Lance Developers.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
// IO tests
|
||||||
|
|
||||||
|
import { describe } from 'mocha'
|
||||||
|
import { assert } from 'chai'
|
||||||
|
|
||||||
|
import * as lancedb from '../index'
|
||||||
|
|
||||||
|
describe('LanceDB S3 client', function () {
|
||||||
|
if (process.env.TEST_S3_BASE_URL != null) {
|
||||||
|
const baseUri = process.env.TEST_S3_BASE_URL
|
||||||
|
it('should have a valid url', async function () {
|
||||||
|
const uri = `${baseUri}/valid_url`
|
||||||
|
const table = await createTestDB(uri, 2, 20)
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
assert.equal(con.uri, uri)
|
||||||
|
|
||||||
|
const results = await table.search([0.1, 0.3]).limit(5).execute()
|
||||||
|
assert.equal(results.length, 5)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
describe.skip('Skip S3 test', function () {})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
async function createTestDB (uri: string, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
|
||||||
|
const data = []
|
||||||
|
for (let i = 0; i < numRows; i++) {
|
||||||
|
const vector = []
|
||||||
|
for (let j = 0; j < numDimensions; j++) {
|
||||||
|
vector.push(i + (j * 0.1))
|
||||||
|
}
|
||||||
|
data.push({ id: i + 1, name: `name_${i}`, price: i + 10, is_active: (i % 2 === 0), vector })
|
||||||
|
}
|
||||||
|
|
||||||
|
return await con.createTable('vectors', data)
|
||||||
|
}
|
||||||
@@ -17,7 +17,7 @@ import { assert } from 'chai'
|
|||||||
import { track } from 'temp'
|
import { track } from 'temp'
|
||||||
|
|
||||||
import * as lancedb from '../index'
|
import * as lancedb from '../index'
|
||||||
import { MetricType, Query } from '../index'
|
import { type EmbeddingFunction, MetricType, Query } from '../index'
|
||||||
|
|
||||||
describe('LanceDB client', function () {
|
describe('LanceDB client', function () {
|
||||||
describe('when creating a connection to lancedb', function () {
|
describe('when creating a connection to lancedb', function () {
|
||||||
@@ -64,13 +64,36 @@ describe('LanceDB client', function () {
|
|||||||
assert.equal(results[0].id, 1)
|
assert.equal(results[0].id, 1)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('uses a filter', async function () {
|
it('uses a filter / where clause', async function () {
|
||||||
|
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
|
||||||
|
const assertResults = (results: Array<Record<string, unknown>>) => {
|
||||||
|
assert.equal(results.length, 1)
|
||||||
|
assert.equal(results[0].id, 2)
|
||||||
|
}
|
||||||
|
|
||||||
const uri = await createTestDB()
|
const uri = await createTestDB()
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(uri)
|
||||||
const table = await con.openTable('vectors')
|
const table = await con.openTable('vectors')
|
||||||
const results = await table.search([0.1, 0.1]).filter('id == 2').execute()
|
let results = await table.search([0.1, 0.1]).filter('id == 2').execute()
|
||||||
assert.equal(results.length, 1)
|
assertResults(results)
|
||||||
assert.equal(results[0].id, 2)
|
results = await table.search([0.1, 0.1]).where('id == 2').execute()
|
||||||
|
assertResults(results)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('select only a subset of columns', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
const table = await con.openTable('vectors')
|
||||||
|
const results = await table.search([0.1, 0.1]).select(['is_active']).execute()
|
||||||
|
assert.equal(results.length, 2)
|
||||||
|
// vector and score are always returned
|
||||||
|
assert.isDefined(results[0].vector)
|
||||||
|
assert.isDefined(results[0].score)
|
||||||
|
assert.isDefined(results[0].is_active)
|
||||||
|
|
||||||
|
assert.isUndefined(results[0].id)
|
||||||
|
assert.isUndefined(results[0].name)
|
||||||
|
assert.isUndefined(results[0].price)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -87,9 +110,7 @@ describe('LanceDB client', function () {
|
|||||||
const tableName = `vectors_${Math.floor(Math.random() * 100)}`
|
const tableName = `vectors_${Math.floor(Math.random() * 100)}`
|
||||||
const table = await con.createTable(tableName, data)
|
const table = await con.createTable(tableName, data)
|
||||||
assert.equal(table.name, tableName)
|
assert.equal(table.name, tableName)
|
||||||
|
assert.equal(await table.countRows(), 2)
|
||||||
const results = await table.search([0.1, 0.3]).execute()
|
|
||||||
assert.equal(results.length, 2)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('appends records to an existing table ', async function () {
|
it('appends records to an existing table ', async function () {
|
||||||
@@ -102,16 +123,14 @@ describe('LanceDB client', function () {
|
|||||||
]
|
]
|
||||||
|
|
||||||
const table = await con.createTable('vectors', data)
|
const table = await con.createTable('vectors', data)
|
||||||
const results = await table.search([0.1, 0.3]).execute()
|
assert.equal(await table.countRows(), 2)
|
||||||
assert.equal(results.length, 2)
|
|
||||||
|
|
||||||
const dataAdd = [
|
const dataAdd = [
|
||||||
{ id: 3, vector: [2.1, 2.2], price: 10, name: 'c' },
|
{ id: 3, vector: [2.1, 2.2], price: 10, name: 'c' },
|
||||||
{ id: 4, vector: [3.1, 3.2], price: 50, name: 'd' }
|
{ id: 4, vector: [3.1, 3.2], price: 50, name: 'd' }
|
||||||
]
|
]
|
||||||
await table.add(dataAdd)
|
await table.add(dataAdd)
|
||||||
const resultsAdd = await table.search([0.1, 0.3]).execute()
|
assert.equal(await table.countRows(), 4)
|
||||||
assert.equal(resultsAdd.length, 4)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('overwrite all records in a table', async function () {
|
it('overwrite all records in a table', async function () {
|
||||||
@@ -119,16 +138,25 @@ describe('LanceDB client', function () {
|
|||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(uri)
|
||||||
|
|
||||||
const table = await con.openTable('vectors')
|
const table = await con.openTable('vectors')
|
||||||
const results = await table.search([0.1, 0.3]).execute()
|
assert.equal(await table.countRows(), 2)
|
||||||
assert.equal(results.length, 2)
|
|
||||||
|
|
||||||
const dataOver = [
|
const dataOver = [
|
||||||
{ vector: [2.1, 2.2], price: 10, name: 'foo' },
|
{ vector: [2.1, 2.2], price: 10, name: 'foo' },
|
||||||
{ vector: [3.1, 3.2], price: 50, name: 'bar' }
|
{ vector: [3.1, 3.2], price: 50, name: 'bar' }
|
||||||
]
|
]
|
||||||
await table.overwrite(dataOver)
|
await table.overwrite(dataOver)
|
||||||
const resultsAdd = await table.search([0.1, 0.3]).execute()
|
assert.equal(await table.countRows(), 2)
|
||||||
assert.equal(resultsAdd.length, 2)
|
})
|
||||||
|
|
||||||
|
it('can delete records from a table', async function () {
|
||||||
|
const uri = await createTestDB()
|
||||||
|
const con = await lancedb.connect(uri)
|
||||||
|
|
||||||
|
const table = await con.openTable('vectors')
|
||||||
|
assert.equal(await table.countRows(), 2)
|
||||||
|
|
||||||
|
await table.delete('price = 10')
|
||||||
|
assert.equal(await table.countRows(), 1)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -137,9 +165,42 @@ describe('LanceDB client', function () {
|
|||||||
const uri = await createTestDB(32, 300)
|
const uri = await createTestDB(32, 300)
|
||||||
const con = await lancedb.connect(uri)
|
const con = await lancedb.connect(uri)
|
||||||
const table = await con.openTable('vectors')
|
const table = await con.openTable('vectors')
|
||||||
await table.create_index({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2 })
|
await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2 })
|
||||||
}).timeout(10_000) // Timeout is high partially because GH macos runner is pretty slow
|
}).timeout(10_000) // Timeout is high partially because GH macos runner is pretty slow
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('when using a custom embedding function', function () {
|
||||||
|
class TextEmbedding implements EmbeddingFunction<string> {
|
||||||
|
sourceColumn: string
|
||||||
|
|
||||||
|
constructor (targetColumn: string) {
|
||||||
|
this.sourceColumn = targetColumn
|
||||||
|
}
|
||||||
|
|
||||||
|
_embedding_map = new Map<string, number[]>([
|
||||||
|
['foo', [2.1, 2.2]],
|
||||||
|
['bar', [3.1, 3.2]]
|
||||||
|
])
|
||||||
|
|
||||||
|
async embed (data: string[]): Promise<number[][]> {
|
||||||
|
return data.map(datum => this._embedding_map.get(datum) ?? [0.0, 0.0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
it('should encode the original data into embeddings', async function () {
|
||||||
|
const dir = await track().mkdir('lancejs')
|
||||||
|
const con = await lancedb.connect(dir)
|
||||||
|
const embeddings = new TextEmbedding('name')
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ price: 10, name: 'foo' },
|
||||||
|
{ price: 50, name: 'bar' }
|
||||||
|
]
|
||||||
|
const table = await con.createTable('vectors', data, embeddings)
|
||||||
|
const results = await table.search('foo').execute()
|
||||||
|
assert.equal(results.length, 2)
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Query object', function () {
|
describe('Query object', function () {
|
||||||
@@ -148,11 +209,13 @@ describe('Query object', function () {
|
|||||||
.limit(1)
|
.limit(1)
|
||||||
.metricType(MetricType.Cosine)
|
.metricType(MetricType.Cosine)
|
||||||
.refineFactor(100)
|
.refineFactor(100)
|
||||||
|
.select(['a', 'b'])
|
||||||
.nprobes(20) as Record<string, any>
|
.nprobes(20) as Record<string, any>
|
||||||
assert.equal(query._limit, 1)
|
assert.equal(query._limit, 1)
|
||||||
assert.equal(query._metricType, MetricType.Cosine)
|
assert.equal(query._metricType, MetricType.Cosine)
|
||||||
assert.equal(query._refineFactor, 100)
|
assert.equal(query._refineFactor, 100)
|
||||||
assert.equal(query._nprobes, 20)
|
assert.equal(query._nprobes, 20)
|
||||||
|
assert.deepEqual(query._select, ['a', 'b'])
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -172,3 +235,22 @@ async function createTestDB (numDimensions: number = 2, numRows: number = 2): Pr
|
|||||||
await con.createTable('vectors', data)
|
await con.createTable('vectors', data)
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
|
describe('Drop table', function () {
|
||||||
|
it('drop a table', async function () {
|
||||||
|
const dir = await track().mkdir('lancejs')
|
||||||
|
const con = await lancedb.connect(dir)
|
||||||
|
|
||||||
|
const data = [
|
||||||
|
{ price: 10, name: 'foo', vector: [1, 2, 3] },
|
||||||
|
{ price: 50, name: 'bar', vector: [4, 5, 6] }
|
||||||
|
]
|
||||||
|
await con.createTable('t1', data)
|
||||||
|
await con.createTable('t2', data)
|
||||||
|
|
||||||
|
assert.deepEqual(await con.tableNames(), ['t1', 't2'])
|
||||||
|
|
||||||
|
await con.dropTable('t1')
|
||||||
|
assert.deepEqual(await con.tableNames(), ['t2'])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|||||||
8
python/.bumpversion.cfg
Normal file
8
python/.bumpversion.cfg
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
[bumpversion]
|
||||||
|
current_version = 0.1.8
|
||||||
|
commit = True
|
||||||
|
message = [python] Bump version: {current_version} → {new_version}
|
||||||
|
tag = True
|
||||||
|
tag_name = python-v{new_version}
|
||||||
|
|
||||||
|
[bumpversion:file:pyproject.toml]
|
||||||
@@ -22,8 +22,21 @@ def connect(uri: URI) -> LanceDBConnection:
|
|||||||
uri: str or Path
|
uri: str or Path
|
||||||
The uri of the database.
|
The uri of the database.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
For a local directory, provide a path for the database:
|
||||||
|
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("~/.lancedb")
|
||||||
|
|
||||||
|
For object storage, use a URI prefix:
|
||||||
|
|
||||||
|
>>> db = lancedb.connect("s3://my-bucket/lancedb")
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
A connection to a LanceDB database.
|
conn : LanceDBConnection
|
||||||
|
A connection to a LanceDB database.
|
||||||
"""
|
"""
|
||||||
return LanceDBConnection(uri)
|
return LanceDBConnection(uri)
|
||||||
|
|||||||
18
python/lancedb/conftest.py
Normal file
18
python/lancedb/conftest.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import builtins
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# import lancedb so we don't have to in every example
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def doctest_setup(monkeypatch, tmpdir):
|
||||||
|
# disable color for doctests so we don't have to include
|
||||||
|
# escape codes in docstrings
|
||||||
|
monkeypatch.setitem(os.environ, "NO_COLOR", "1")
|
||||||
|
# Explicitly set the column width
|
||||||
|
monkeypatch.setitem(os.environ, "COLUMNS", "80")
|
||||||
|
# Work in a temporary directory
|
||||||
|
monkeypatch.chdir(tmpdir)
|
||||||
@@ -14,20 +14,109 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
|
|
||||||
|
from .exceptions import MissingColumnError, MissingValueError
|
||||||
|
|
||||||
|
|
||||||
def contextualize(raw_df: pd.DataFrame) -> Contextualizer:
|
def contextualize(raw_df: pd.DataFrame) -> Contextualizer:
|
||||||
"""Create a Contextualizer object for the given DataFrame.
|
"""Create a Contextualizer object for the given DataFrame.
|
||||||
Used to create context windows.
|
|
||||||
|
Used to create context windows. Context windows are rolling subsets of text
|
||||||
|
data.
|
||||||
|
|
||||||
|
The input text column should already be separated into rows that will be the
|
||||||
|
unit of the window. So to create a context window over tokens, start with
|
||||||
|
a DataFrame with one token per row. To create a context window over sentences,
|
||||||
|
start with a DataFrame with one sentence per row.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> from lancedb.context import contextualize
|
||||||
|
>>> import pandas as pd
|
||||||
|
>>> data = pd.DataFrame({
|
||||||
|
... 'token': ['The', 'quick', 'brown', 'fox', 'jumped', 'over',
|
||||||
|
... 'the', 'lazy', 'dog', 'I', 'love', 'sandwiches'],
|
||||||
|
... 'document_id': [1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2]
|
||||||
|
... })
|
||||||
|
|
||||||
|
``window`` determines how many rows to include in each window. In our case
|
||||||
|
this how many tokens, but depending on the input data, it could be sentences,
|
||||||
|
paragraphs, messages, etc.
|
||||||
|
|
||||||
|
>>> contextualize(data).window(3).stride(1).text_col('token').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown 1
|
||||||
|
1 quick brown fox 1
|
||||||
|
2 brown fox jumped 1
|
||||||
|
3 fox jumped over 1
|
||||||
|
4 jumped over the 1
|
||||||
|
5 over the lazy 1
|
||||||
|
6 the lazy dog 1
|
||||||
|
7 lazy dog I 1
|
||||||
|
8 dog I love 1
|
||||||
|
9 I love sandwiches 2
|
||||||
|
10 love sandwiches 2
|
||||||
|
>>> contextualize(data).window(7).stride(1).min_window_size(7).text_col('token').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown fox jumped over the 1
|
||||||
|
1 quick brown fox jumped over the lazy 1
|
||||||
|
2 brown fox jumped over the lazy dog 1
|
||||||
|
3 fox jumped over the lazy dog I 1
|
||||||
|
4 jumped over the lazy dog I love 1
|
||||||
|
5 over the lazy dog I love sandwiches 1
|
||||||
|
|
||||||
|
``stride`` determines how many rows to skip between each window start. This can
|
||||||
|
be used to reduce the total number of windows generated.
|
||||||
|
|
||||||
|
>>> contextualize(data).window(4).stride(2).text_col('token').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown fox 1
|
||||||
|
2 brown fox jumped over 1
|
||||||
|
4 jumped over the lazy 1
|
||||||
|
6 the lazy dog I 1
|
||||||
|
8 dog I love sandwiches 1
|
||||||
|
10 love sandwiches 2
|
||||||
|
|
||||||
|
``groupby`` determines how to group the rows. For example, we would like to have
|
||||||
|
context windows that don't cross document boundaries. In this case, we can
|
||||||
|
pass ``document_id`` as the group by.
|
||||||
|
|
||||||
|
>>> contextualize(data).window(4).stride(2).text_col('token').groupby('document_id').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown fox 1
|
||||||
|
2 brown fox jumped over 1
|
||||||
|
4 jumped over the lazy 1
|
||||||
|
6 the lazy dog 1
|
||||||
|
9 I love sandwiches 2
|
||||||
|
|
||||||
|
``min_window_size`` determines the minimum size of the context windows that are generated
|
||||||
|
This can be used to trim the last few context windows which have size less than
|
||||||
|
``min_window_size``. By default context windows of size 1 are skipped.
|
||||||
|
|
||||||
|
>>> contextualize(data).window(6).stride(3).text_col('token').groupby('document_id').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown fox jumped over 1
|
||||||
|
3 fox jumped over the lazy dog 1
|
||||||
|
6 the lazy dog 1
|
||||||
|
9 I love sandwiches 2
|
||||||
|
|
||||||
|
>>> contextualize(data).window(6).stride(3).min_window_size(4).text_col('token').groupby('document_id').to_df()
|
||||||
|
token document_id
|
||||||
|
0 The quick brown fox jumped over 1
|
||||||
|
3 fox jumped over the lazy dog 1
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return Contextualizer(raw_df)
|
return Contextualizer(raw_df)
|
||||||
|
|
||||||
|
|
||||||
class Contextualizer:
|
class Contextualizer:
|
||||||
|
"""Create context windows from a DataFrame. See [lancedb.context.contextualize][]."""
|
||||||
|
|
||||||
def __init__(self, raw_df):
|
def __init__(self, raw_df):
|
||||||
self._text_col = None
|
self._text_col = None
|
||||||
self._groupby = None
|
self._groupby = None
|
||||||
self._stride = None
|
self._stride = None
|
||||||
self._window = None
|
self._window = None
|
||||||
|
self._min_window_size = 2
|
||||||
self._raw_df = raw_df
|
self._raw_df = raw_df
|
||||||
|
|
||||||
def window(self, window: int) -> Contextualizer:
|
def window(self, window: int) -> Contextualizer:
|
||||||
@@ -75,17 +164,50 @@ class Contextualizer:
|
|||||||
self._text_col = text_col
|
self._text_col = text_col
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
def min_window_size(self, min_window_size: int) -> Contextualizer:
|
||||||
|
"""Set the (optional) min_window_size size for the context window.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
min_window_size: int
|
||||||
|
The min_window_size.
|
||||||
|
"""
|
||||||
|
self._min_window_size = min_window_size
|
||||||
|
return self
|
||||||
|
|
||||||
def to_df(self) -> pd.DataFrame:
|
def to_df(self) -> pd.DataFrame:
|
||||||
"""Create the context windows and return a DataFrame."""
|
"""Create the context windows and return a DataFrame."""
|
||||||
|
|
||||||
|
if self._text_col not in self._raw_df.columns.tolist():
|
||||||
|
raise MissingColumnError(self._text_col)
|
||||||
|
|
||||||
|
if self._window is None or self._window < 1:
|
||||||
|
raise MissingValueError(
|
||||||
|
"The value of window is None or less than 1. Specify the "
|
||||||
|
"window size (number of rows to include in each window)"
|
||||||
|
)
|
||||||
|
|
||||||
|
if self._stride is None or self._stride < 1:
|
||||||
|
raise MissingValueError(
|
||||||
|
"The value of stride is None or less than 1. Specify the "
|
||||||
|
"stride (number of rows to skip between each window)"
|
||||||
|
)
|
||||||
|
|
||||||
def process_group(grp):
|
def process_group(grp):
|
||||||
# For each group, create the text rolling window
|
# For each group, create the text rolling window
|
||||||
|
# with values of size >= min_window_size
|
||||||
text = grp[self._text_col].values
|
text = grp[self._text_col].values
|
||||||
contexts = grp.iloc[: -self._window : self._stride, :].copy()
|
contexts = grp.iloc[:: self._stride, :].copy()
|
||||||
contexts[self._text_col] = [
|
windows = [
|
||||||
" ".join(text[start_i : start_i + self._window])
|
" ".join(text[start_i : min(start_i + self._window, len(grp))])
|
||||||
for start_i in range(0, len(grp) - self._window, self._stride)
|
for start_i in range(0, len(grp), self._stride)
|
||||||
|
if start_i + self._window <= len(grp)
|
||||||
|
or len(grp) - start_i >= self._min_window_size
|
||||||
]
|
]
|
||||||
|
# if last few rows dropped
|
||||||
|
if len(windows) < len(contexts):
|
||||||
|
contexts = contexts.iloc[: len(windows)]
|
||||||
|
contexts[self._text_col] = windows
|
||||||
return contexts
|
return contexts
|
||||||
|
|
||||||
if self._groupby is None:
|
if self._groupby is None:
|
||||||
|
|||||||
@@ -13,22 +13,59 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
from pyarrow import fs
|
||||||
|
|
||||||
from .common import DATA, URI
|
from .common import DATA, URI
|
||||||
from .table import LanceTable
|
from .table import LanceTable
|
||||||
from .util import get_uri_scheme
|
from .util import get_uri_location, get_uri_scheme
|
||||||
|
|
||||||
|
|
||||||
class LanceDBConnection:
|
class LanceDBConnection:
|
||||||
"""
|
"""
|
||||||
A connection to a LanceDB database.
|
A connection to a LanceDB database.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri: str or Path
|
||||||
|
The root uri of the database.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2},
|
||||||
|
... {"vector": [0.5, 1.3], "b": 4}])
|
||||||
|
LanceTable(my_table)
|
||||||
|
>>> db.create_table("another_table", data=[{"vector": [0.4, 0.4], "b": 6}])
|
||||||
|
LanceTable(another_table)
|
||||||
|
>>> sorted(db.table_names())
|
||||||
|
['another_table', 'my_table']
|
||||||
|
>>> len(db)
|
||||||
|
2
|
||||||
|
>>> db["my_table"]
|
||||||
|
LanceTable(my_table)
|
||||||
|
>>> "my_table" in db
|
||||||
|
True
|
||||||
|
>>> db.drop_table("my_table")
|
||||||
|
>>> db.drop_table("another_table")
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, uri: URI):
|
def __init__(self, uri: URI):
|
||||||
is_local = isinstance(uri, Path) or get_uri_scheme(uri) == "file"
|
if not isinstance(uri, Path):
|
||||||
|
scheme = get_uri_scheme(uri)
|
||||||
|
is_local = isinstance(uri, Path) or scheme == "file"
|
||||||
|
# managed lancedb remote uses schema like lancedb+[http|grpc|...]://
|
||||||
|
self._is_managed_remote = not is_local and scheme.startswith("lancedb")
|
||||||
|
if self._is_managed_remote:
|
||||||
|
if len(scheme.split("+")) != 2:
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid LanceDB URI: {uri}, expected uri to have scheme like lancedb+<flavor>://..."
|
||||||
|
)
|
||||||
if is_local:
|
if is_local:
|
||||||
if isinstance(uri, str):
|
if isinstance(uri, str):
|
||||||
uri = Path(uri)
|
uri = Path(uri)
|
||||||
@@ -36,22 +73,75 @@ class LanceDBConnection:
|
|||||||
Path(uri).mkdir(parents=True, exist_ok=True)
|
Path(uri).mkdir(parents=True, exist_ok=True)
|
||||||
self._uri = str(uri)
|
self._uri = str(uri)
|
||||||
|
|
||||||
|
self._entered = False
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def uri(self) -> str:
|
def uri(self) -> str:
|
||||||
return self._uri
|
return self._uri
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def is_managed_remote(self) -> bool:
|
||||||
|
return self._is_managed_remote
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def remote_flavor(self) -> str:
|
||||||
|
if not self.is_managed_remote:
|
||||||
|
raise ValueError(
|
||||||
|
"Not a managed remote LanceDB, there should be no server flavor"
|
||||||
|
)
|
||||||
|
return get_uri_scheme(self.uri).split("+")[1]
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def _client(self) -> "lancedb.remote.LanceDBClient":
|
||||||
|
if not self.is_managed_remote:
|
||||||
|
raise ValueError("Not a managed remote LanceDB, there should be no client")
|
||||||
|
|
||||||
|
# don't import unless we are really using remote
|
||||||
|
from lancedb.remote.client import RestfulLanceDBClient
|
||||||
|
|
||||||
|
if self.remote_flavor == "http":
|
||||||
|
return RestfulLanceDBClient(self._uri)
|
||||||
|
|
||||||
|
raise ValueError("Unsupported remote flavor: " + self.remote_flavor)
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
if self._entered:
|
||||||
|
raise ValueError("Cannot re-enter the same LanceDBConnection twice")
|
||||||
|
self._entered = True
|
||||||
|
await self._client.close()
|
||||||
|
|
||||||
|
async def __aenter__(self) -> LanceDBConnection:
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc_type, exc_value, traceback):
|
||||||
|
await self.close()
|
||||||
|
|
||||||
def table_names(self) -> list[str]:
    """Get the names of all tables in the database.

    Returns
    -------
    list of str
        A list of table names.

    Raises
    ------
    NotImplementedError
        If pyarrow does not understand the database URI scheme.
    """
    try:
        # Resolve a pyarrow filesystem for the database root; ArrowInvalid
        # means the URI scheme is not supported by pyarrow.
        filesystem, _ = fs.FileSystem.from_uri(self.uri)
    except pa.ArrowInvalid as e:
        raise NotImplementedError("Unsupported scheme: " + self.uri) from e

    try:
        paths = filesystem.get_file_info(
            fs.FileSelector(get_uri_location(self.uri))
        )
    except FileNotFoundError:
        # It is ok if the directory does not exist since it will be created
        paths = []
    # A table is any ".lance" entry under the database root.
    return [
        os.path.splitext(file_info.base_name)[0]
        for file_info in paths
        if file_info.extension == "lance"
    ]
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
return len(self.table_names())
|
return len(self.table_names())
|
||||||
@@ -91,7 +181,73 @@ class LanceDBConnection:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
A LanceTable object representing the table.
|
LanceTable
|
||||||
|
A reference to the newly created table.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
Can create with list of tuples or dictionaries:
|
||||||
|
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> data = [{"vector": [1.1, 1.2], "lat": 45.5, "long": -122.7},
|
||||||
|
... {"vector": [0.2, 1.8], "lat": 40.1, "long": -74.1}]
|
||||||
|
>>> db.create_table("my_table", data)
|
||||||
|
LanceTable(my_table)
|
||||||
|
>>> db["my_table"].head()
|
||||||
|
pyarrow.Table
|
||||||
|
vector: fixed_size_list<item: float>[2]
|
||||||
|
child 0, item: float
|
||||||
|
lat: double
|
||||||
|
long: double
|
||||||
|
----
|
||||||
|
vector: [[[1.1,1.2],[0.2,1.8]]]
|
||||||
|
lat: [[45.5,40.1]]
|
||||||
|
long: [[-122.7,-74.1]]
|
||||||
|
|
||||||
|
You can also pass a pandas DataFrame:
|
||||||
|
|
||||||
|
>>> import pandas as pd
|
||||||
|
>>> data = pd.DataFrame({
|
||||||
|
... "vector": [[1.1, 1.2], [0.2, 1.8]],
|
||||||
|
... "lat": [45.5, 40.1],
|
||||||
|
... "long": [-122.7, -74.1]
|
||||||
|
... })
|
||||||
|
>>> db.create_table("table2", data)
|
||||||
|
LanceTable(table2)
|
||||||
|
>>> db["table2"].head()
|
||||||
|
pyarrow.Table
|
||||||
|
vector: fixed_size_list<item: float>[2]
|
||||||
|
child 0, item: float
|
||||||
|
lat: double
|
||||||
|
long: double
|
||||||
|
----
|
||||||
|
vector: [[[1.1,1.2],[0.2,1.8]]]
|
||||||
|
lat: [[45.5,40.1]]
|
||||||
|
long: [[-122.7,-74.1]]
|
||||||
|
|
||||||
|
Data is converted to Arrow before being written to disk. For maximum
|
||||||
|
control over how data is saved, either provide the PyArrow schema to
|
||||||
|
convert to or else provide a PyArrow table directly.
|
||||||
|
|
||||||
|
>>> custom_schema = pa.schema([
|
||||||
|
... pa.field("vector", pa.list_(pa.float32(), 2)),
|
||||||
|
... pa.field("lat", pa.float32()),
|
||||||
|
... pa.field("long", pa.float32())
|
||||||
|
... ])
|
||||||
|
>>> db.create_table("table3", data, schema = custom_schema)
|
||||||
|
LanceTable(table3)
|
||||||
|
>>> db["table3"].head()
|
||||||
|
pyarrow.Table
|
||||||
|
vector: fixed_size_list<item: float>[2]
|
||||||
|
child 0, item: float
|
||||||
|
lat: float
|
||||||
|
long: float
|
||||||
|
----
|
||||||
|
vector: [[[1.1,1.2],[0.2,1.8]]]
|
||||||
|
lat: [[45.5,40.1]]
|
||||||
|
long: [[-122.7,-74.1]]
|
||||||
"""
|
"""
|
||||||
if data is not None:
|
if data is not None:
|
||||||
tbl = LanceTable.create(self, name, data, schema, mode=mode)
|
tbl = LanceTable.create(self, name, data, schema, mode=mode)
|
||||||
@@ -112,3 +268,15 @@ class LanceDBConnection:
|
|||||||
A LanceTable object representing the table.
|
A LanceTable object representing the table.
|
||||||
"""
|
"""
|
||||||
return LanceTable(self, name)
|
return LanceTable(self, name)
|
||||||
|
|
||||||
|
def drop_table(self, name: str):
    """Drop a table from the database.

    Parameters
    ----------
    name: str
        The name of the table.
    """
    # Use the `fs` module imported at the top of the file (consistent with
    # `table_names`); `pa.fs` only works if pyarrow.fs was already imported.
    filesystem, path = fs.FileSystem.from_uri(self.uri)
    table_path = os.path.join(path, name + ".lance")
    filesystem.delete_dir(table_path)
||||||
|
|||||||
@@ -29,7 +29,31 @@ def with_embeddings(
|
|||||||
wrap_api: bool = True,
|
wrap_api: bool = True,
|
||||||
show_progress: bool = False,
|
show_progress: bool = False,
|
||||||
batch_size: int = 1000,
|
batch_size: int = 1000,
|
||||||
):
|
) -> pa.Table:
|
||||||
|
"""Add a vector column to a table using the given embedding function.
|
||||||
|
|
||||||
|
The new columns will be called "vector".
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
func : Callable
|
||||||
|
A function that takes a list of strings and returns a list of vectors.
|
||||||
|
data : pa.Table or pd.DataFrame
|
||||||
|
The data to add an embedding column to.
|
||||||
|
column : str, default "text"
|
||||||
|
The name of the column to use as input to the embedding function.
|
||||||
|
wrap_api : bool, default True
|
||||||
|
Whether to wrap the embedding function in a retry and rate limiter.
|
||||||
|
show_progress : bool, default False
|
||||||
|
Whether to show a progress bar.
|
||||||
|
batch_size : int, default 1000
|
||||||
|
The number of row values to pass to each call of the embedding function.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pa.Table
|
||||||
|
The input table with a new column called "vector" containing the embeddings.
|
||||||
|
"""
|
||||||
func = EmbeddingFunction(func)
|
func = EmbeddingFunction(func)
|
||||||
if wrap_api:
|
if wrap_api:
|
||||||
func = func.retry().rate_limit()
|
func = func.retry().rate_limit()
|
||||||
|
|||||||
22
python/lancedb/exceptions.py
Normal file
22
python/lancedb/exceptions.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
"""Custom exception handling"""
|
||||||
|
|
||||||
|
|
||||||
|
class MissingValueError(ValueError):
    """Exception raised when a required value is missing."""
||||||
|
class MissingColumnError(KeyError):
    """
    Exception raised when a column name specified is not in
    the DataFrame object.
    """

    def __init__(self, column_name):
        # Populate KeyError.args so repr()/pickling behave like a normal
        # exception; keep the attribute for the custom message below.
        super().__init__(column_name)
        self.column_name = column_name

    def __str__(self):
        # KeyError.__str__ would repr() the args; return a friendlier message.
        return (
            f"Error: Column '{self.column_name}' does not exist in the DataFrame object"
        )
@@ -68,6 +68,11 @@ def populate_index(index: tantivy.Index, table: LanceTable, fields: List[str]) -
|
|||||||
The table to index
|
The table to index
|
||||||
fields : List[str]
|
fields : List[str]
|
||||||
List of fields to index
|
List of fields to index
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
int
|
||||||
|
The number of rows indexed
|
||||||
"""
|
"""
|
||||||
# first check the fields exist and are string or large string type
|
# first check the fields exist and are string or large string type
|
||||||
for name in fields:
|
for name in fields:
|
||||||
@@ -118,6 +123,8 @@ def search_index(
|
|||||||
query = index.parse_query(query)
|
query = index.parse_query(query)
|
||||||
# get top results
|
# get top results
|
||||||
results = searcher.search(query, limit)
|
results = searcher.search(query, limit)
|
||||||
|
if results.count == 0:
|
||||||
|
return tuple(), tuple()
|
||||||
return tuple(
|
return tuple(
|
||||||
zip(
|
zip(
|
||||||
*[
|
*[
|
||||||
|
|||||||
@@ -12,6 +12,9 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from typing import Awaitable, Literal
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
@@ -22,6 +25,24 @@ from .common import VECTOR_COLUMN_NAME
|
|||||||
class LanceQueryBuilder:
|
class LanceQueryBuilder:
|
||||||
"""
|
"""
|
||||||
A builder for nearest neighbor queries for LanceDB.
|
A builder for nearest neighbor queries for LanceDB.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> data = [{"vector": [1.1, 1.2], "b": 2},
|
||||||
|
... {"vector": [0.5, 1.3], "b": 4},
|
||||||
|
... {"vector": [0.4, 0.4], "b": 6},
|
||||||
|
... {"vector": [0.4, 0.4], "b": 10}]
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", data=data)
|
||||||
|
>>> (table.search([0.4, 0.4])
|
||||||
|
... .metric("cosine")
|
||||||
|
... .where("b < 10")
|
||||||
|
... .select(["b"])
|
||||||
|
... .limit(2)
|
||||||
|
... .to_df())
|
||||||
|
b vector score
|
||||||
|
0 6 [0.4, 0.4] 0.0
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, table: "lancedb.table.LanceTable", query: np.ndarray):
|
def __init__(self, table: "lancedb.table.LanceTable", query: np.ndarray):
|
||||||
@@ -44,7 +65,8 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._limit = limit
|
self._limit = limit
|
||||||
return self
|
return self
|
||||||
@@ -59,7 +81,8 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._columns = columns
|
self._columns = columns
|
||||||
return self
|
return self
|
||||||
@@ -74,22 +97,24 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._where = where
|
self._where = where
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def metric(self, metric: str) -> LanceQueryBuilder:
|
def metric(self, metric: Literal["L2", "cosine"]) -> LanceQueryBuilder:
|
||||||
"""Set the distance metric to use.
|
"""Set the distance metric to use.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
metric: str
|
metric: "L2" or "cosine"
|
||||||
The distance metric to use. By default "l2" is used.
|
The distance metric to use. By default "L2" is used.
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._metric = metric
|
self._metric = metric
|
||||||
return self
|
return self
|
||||||
@@ -97,6 +122,12 @@ class LanceQueryBuilder:
|
|||||||
def nprobes(self, nprobes: int) -> LanceQueryBuilder:
|
def nprobes(self, nprobes: int) -> LanceQueryBuilder:
|
||||||
"""Set the number of probes to use.
|
"""Set the number of probes to use.
|
||||||
|
|
||||||
|
Higher values will yield better recall (more likely to find vectors if
|
||||||
|
they exist) at the expense of latency.
|
||||||
|
|
||||||
|
See discussion in [Querying an ANN Index][../querying-an-ann-index] for
|
||||||
|
tuning advice.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
nprobes: int
|
nprobes: int
|
||||||
@@ -104,13 +135,20 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._nprobes = nprobes
|
self._nprobes = nprobes
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def refine_factor(self, refine_factor: int) -> LanceQueryBuilder:
|
def refine_factor(self, refine_factor: int) -> LanceQueryBuilder:
|
||||||
"""Set the refine factor to use.
|
"""Set the refine factor to use, increasing the number of vectors sampled.
|
||||||
|
|
||||||
|
As an example, a refine factor of 2 will sample 2x as many vectors as
|
||||||
|
requested, re-ranks them, and returns the top half most relevant results.
|
||||||
|
|
||||||
|
See discussion in [Querying an ANN Index][querying-an-ann-index] for
|
||||||
|
tuning advice.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
@@ -119,7 +157,8 @@ class LanceQueryBuilder:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The LanceQueryBuilder object.
|
LanceQueryBuilder
|
||||||
|
The LanceQueryBuilder object.
|
||||||
"""
|
"""
|
||||||
self._refine_factor = refine_factor
|
self._refine_factor = refine_factor
|
||||||
return self
|
return self
|
||||||
@@ -131,8 +170,28 @@ class LanceQueryBuilder:
|
|||||||
and also the "score" column which is the distance between the query
|
and also the "score" column which is the distance between the query
|
||||||
vector and the returned vector.
|
vector and the returned vector.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
return self.to_arrow().to_pandas()
|
||||||
|
|
||||||
|
def to_arrow(self) -> pa.Table:
|
||||||
|
"""
|
||||||
|
Execute the query and return the results as a arrow Table.
|
||||||
|
In addition to the selected columns, LanceDB also returns a vector
|
||||||
|
and also the "score" column which is the distance between the query
|
||||||
|
vector and the returned vector.
|
||||||
|
"""
|
||||||
|
if self._table._conn.is_managed_remote:
|
||||||
|
try:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
loop = asyncio.get_event_loop()
|
||||||
|
result = self._table._conn._client.query(
|
||||||
|
self._table.name, self.to_remote_query()
|
||||||
|
)
|
||||||
|
return loop.run_until_complete(result).to_arrow()
|
||||||
|
|
||||||
ds = self._table.to_lance()
|
ds = self._table.to_lance()
|
||||||
tbl = ds.to_table(
|
return ds.to_table(
|
||||||
columns=self._columns,
|
columns=self._columns,
|
||||||
filter=self._where,
|
filter=self._where,
|
||||||
nearest={
|
nearest={
|
||||||
@@ -144,7 +203,20 @@ class LanceQueryBuilder:
|
|||||||
"refine_factor": self._refine_factor,
|
"refine_factor": self._refine_factor,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
return tbl.to_pandas()
|
|
||||||
|
def to_remote_query(self) -> "VectorQuery":
|
||||||
|
# don't import unless we are connecting to remote
|
||||||
|
from lancedb.remote.client import VectorQuery
|
||||||
|
|
||||||
|
return VectorQuery(
|
||||||
|
vector=self._query.tolist(),
|
||||||
|
filter=self._where,
|
||||||
|
k=self._limit,
|
||||||
|
_metric=self._metric,
|
||||||
|
columns=self._columns,
|
||||||
|
nprobes=self._nprobes,
|
||||||
|
refine_factor=self._refine_factor,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
class LanceFtsQueryBuilder(LanceQueryBuilder):
|
||||||
@@ -164,6 +236,8 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
|
|||||||
index = tantivy.Index.open(index_path)
|
index = tantivy.Index.open(index_path)
|
||||||
# get the scores and doc ids
|
# get the scores and doc ids
|
||||||
row_ids, scores = search_index(index, self._query, self._limit)
|
row_ids, scores = search_index(index, self._query, self._limit)
|
||||||
|
if len(row_ids) == 0:
|
||||||
|
return pd.DataFrame()
|
||||||
scores = pa.array(scores)
|
scores = pa.array(scores)
|
||||||
output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
|
output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
|
||||||
output_tbl = output_tbl.append_column("score", scores)
|
output_tbl = output_tbl.append_column("score", scores)
|
||||||
|
|||||||
61
python/lancedb/remote/__init__.py
Normal file
61
python/lancedb/remote/__init__.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import abc
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
import attr
|
||||||
|
import pandas as pd
|
||||||
|
import pyarrow as pa
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
__all__ = ["LanceDBClient", "VectorQuery", "VectorQueryResult"]
|
||||||
|
|
||||||
|
|
||||||
|
class VectorQuery(BaseModel):
    """Serializable payload describing a vector search request sent to a
    remote LanceDB server."""

    # vector to search for
    vector: List[float]

    # sql filter to refine the query with
    filter: Optional[str] = None

    # top k results to return
    k: int

    # distance metric
    # NOTE(review): pydantic treats underscore-prefixed names as private
    # attributes, so this is not part of the serialized payload — confirm
    # that is intended.
    _metric: str = "L2"

    # which columns to return in the results
    columns: Optional[List[str]] = None

    # optional query parameters for tuning the results,
    # e.g. `{"nprobes": "10", "refine_factor": "10"}`
    nprobes: int = 10

    refine_factor: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
|
@attr.define
class VectorQueryResult:
    """Result of a vector query against a remote LanceDB server."""

    # for now the response is deserialized directly into an Arrow table
    tbl: pa.Table

    def to_arrow(self) -> pa.Table:
        """Return the result as a pyarrow Table."""
        return self.tbl
||||||
|
|
||||||
|
|
||||||
|
class LanceDBClient(abc.ABC):
    """Abstract interface for clients that talk to a remote LanceDB server."""

    @abc.abstractmethod
    def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
        """Query the LanceDB server for the given table and query."""
||||||
79
python/lancedb/remote/client.py
Normal file
79
python/lancedb/remote/client.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import attr
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from lancedb.remote import VectorQuery, VectorQueryResult
|
||||||
|
from lancedb.remote.errors import LanceDBClientError
|
||||||
|
|
||||||
|
|
||||||
|
def _check_not_closed(f):
|
||||||
|
@functools.wraps(f)
|
||||||
|
def wrapped(self, *args, **kwargs):
|
||||||
|
if self.closed:
|
||||||
|
raise ValueError("Connection is closed")
|
||||||
|
return f(self, *args, **kwargs)
|
||||||
|
|
||||||
|
return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
@attr.define(slots=False)
class RestfulLanceDBClient:
    """RESTful client for a managed remote LanceDB (``lancedb+http://...``)."""

    url: str
    closed: bool = attr.field(default=False, init=False)

    @functools.cached_property
    def session(self) -> aiohttp.ClientSession:
        """Lazily build the HTTP session from the ``lancedb+<flavor>://`` url."""
        parsed = urllib.parse.urlparse(self.url)
        scheme = parsed.scheme
        # Require the full lancedb+<flavor> form up front so a malformed
        # scheme raises ValueError here rather than IndexError on the split.
        if not scheme.startswith("lancedb") or "+" not in scheme:
            raise ValueError(
                f"Invalid scheme: {scheme}, must be like lancedb+<flavor>://"
            )
        flavor = scheme.split("+")[1]
        # NOTE(review): if the url has no explicit port, parsed.port is None
        # and this produces "...:None" — confirm callers always pass a port.
        url = f"{flavor}://{parsed.hostname}:{parsed.port}"
        return aiohttp.ClientSession(url)

    async def close(self):
        """Close the underlying HTTP session and mark this client closed."""
        await self.session.close()
        self.closed = True

    @_check_not_closed
    async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
        """POST the vector query and decode the Arrow IPC file response.

        Raises
        ------
        LanceDBClientError
            If the server responds with a non-200 status.
        """
        async with self.session.post(
            f"/table/{table_name}/", json=query.dict(exclude_none=True)
        ) as resp:
            resp: aiohttp.ClientResponse = resp
            if 400 <= resp.status < 500:
                raise LanceDBClientError(
                    f"Bad Request: {resp.status}, error: {await resp.text()}"
                )
            if 500 <= resp.status < 600:
                raise LanceDBClientError(
                    f"Internal Server Error: {resp.status}, error: {await resp.text()}"
                )
            if resp.status != 200:
                raise LanceDBClientError(
                    f"Unknown Error: {resp.status}, error: {await resp.text()}"
                )

            resp_body = await resp.read()
            # The server returns the results serialized as an Arrow IPC file.
            with pa.ipc.open_file(pa.BufferReader(resp_body)) as reader:
                tbl = reader.read_all()
            return VectorQueryResult(tbl)
||||||
16
python/lancedb/remote/errors.py
Normal file
16
python/lancedb/remote/errors.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
class LanceDBClientError(RuntimeError):
|
||||||
|
pass
|
||||||
@@ -14,7 +14,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
|
||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from typing import List, Union
|
from typing import List, Union
|
||||||
|
|
||||||
@@ -27,7 +26,6 @@ from lance.vector import vec_to_table
|
|||||||
|
|
||||||
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
from .common import DATA, VEC, VECTOR_COLUMN_NAME
|
||||||
from .query import LanceFtsQueryBuilder, LanceQueryBuilder
|
from .query import LanceFtsQueryBuilder, LanceQueryBuilder
|
||||||
from .util import get_uri_scheme
|
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_data(data, schema):
|
def _sanitize_data(data, schema):
|
||||||
@@ -47,6 +45,40 @@ def _sanitize_data(data, schema):
|
|||||||
class LanceTable:
|
class LanceTable:
|
||||||
"""
|
"""
|
||||||
A table in a LanceDB database.
|
A table in a LanceDB database.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
|
||||||
|
Create using [LanceDBConnection.create_table][lancedb.LanceDBConnection.create_table]
|
||||||
|
(more examples in that method's documentation).
|
||||||
|
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2}])
|
||||||
|
>>> table.head()
|
||||||
|
pyarrow.Table
|
||||||
|
vector: fixed_size_list<item: float>[2]
|
||||||
|
child 0, item: float
|
||||||
|
b: int64
|
||||||
|
----
|
||||||
|
vector: [[[1.1,1.2]]]
|
||||||
|
b: [[2]]
|
||||||
|
|
||||||
|
Can append new data with [LanceTable.add][lancedb.table.LanceTable.add].
|
||||||
|
|
||||||
|
>>> table.add([{"vector": [0.5, 1.3], "b": 4}])
|
||||||
|
2
|
||||||
|
|
||||||
|
Can query the table with [LanceTable.search][lancedb.table.LanceTable.search].
|
||||||
|
|
||||||
|
>>> table.search([0.4, 0.4]).select(["b"]).to_df()
|
||||||
|
b vector score
|
||||||
|
0 4 [0.5, 1.3] 0.82
|
||||||
|
1 2 [1.1, 1.2] 1.13
|
||||||
|
|
||||||
|
Search queries are much faster when an index is created. See
|
||||||
|
[LanceTable.create_index][lancedb.table.LanceTable.create_index].
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -64,7 +96,12 @@ class LanceTable:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def schema(self) -> pa.Schema:
|
def schema(self) -> pa.Schema:
|
||||||
"""Return the schema of the table."""
|
"""Return the schema of the table.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pa.Schema
|
||||||
|
A PyArrow schema object."""
|
||||||
return self._dataset.schema
|
return self._dataset.schema
|
||||||
|
|
||||||
def list_versions(self):
|
def list_versions(self):
|
||||||
@@ -72,12 +109,39 @@ class LanceTable:
|
|||||||
return self._dataset.versions()
|
return self._dataset.versions()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self):
|
def version(self) -> int:
|
||||||
"""Get the current version of the table"""
|
"""Get the current version of the table"""
|
||||||
return self._dataset.version
|
return self._dataset.version
|
||||||
|
|
||||||
def checkout(self, version: int):
|
def checkout(self, version: int):
|
||||||
"""Checkout a version of the table"""
|
"""Checkout a version of the table. This is an in-place operation.
|
||||||
|
|
||||||
|
This allows viewing previous versions of the table.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
version : int
|
||||||
|
The version to checkout.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", [{"vector": [1.1, 0.9], "type": "vector"}])
|
||||||
|
>>> table.version
|
||||||
|
1
|
||||||
|
>>> table.to_pandas()
|
||||||
|
vector type
|
||||||
|
0 [1.1, 0.9] vector
|
||||||
|
>>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
|
||||||
|
2
|
||||||
|
>>> table.version
|
||||||
|
2
|
||||||
|
>>> table.checkout(1)
|
||||||
|
>>> table.to_pandas()
|
||||||
|
vector type
|
||||||
|
0 [1.1, 0.9] vector
|
||||||
|
"""
|
||||||
max_ver = max([v["version"] for v in self._dataset.versions()])
|
max_ver = max([v["version"] for v in self._dataset.versions()])
|
||||||
if version < 1 or version > max_ver:
|
if version < 1 or version > max_ver:
|
||||||
raise ValueError(f"Invalid version {version}")
|
raise ValueError(f"Invalid version {version}")
|
||||||
@@ -98,11 +162,20 @@ class LanceTable:
|
|||||||
return self._dataset.head(n)
|
return self._dataset.head(n)
|
||||||
|
|
||||||
def to_pandas(self) -> pd.DataFrame:
|
def to_pandas(self) -> pd.DataFrame:
|
||||||
"""Return the table as a pandas DataFrame."""
|
"""Return the table as a pandas DataFrame.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pd.DataFrame
|
||||||
|
"""
|
||||||
return self.to_arrow().to_pandas()
|
return self.to_arrow().to_pandas()
|
||||||
|
|
||||||
def to_arrow(self) -> pa.Table:
|
def to_arrow(self) -> pa.Table:
|
||||||
"""Return the table as a pyarrow Table."""
|
"""Return the table as a pyarrow Table.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
pa.Table"""
|
||||||
return self._dataset.to_table()
|
return self._dataset.to_table()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -175,7 +248,8 @@ class LanceTable:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
The number of vectors added to the table.
|
int
|
||||||
|
The number of vectors in the table.
|
||||||
"""
|
"""
|
||||||
data = _sanitize_data(data, self.schema)
|
data = _sanitize_data(data, self.schema)
|
||||||
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
lance.write_dataset(data, self._dataset_uri, mode=mode)
|
||||||
@@ -193,10 +267,11 @@ class LanceTable:
|
|||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
A LanceQueryBuilder object representing the query.
|
LanceQueryBuilder
|
||||||
Once executed, the query returns selected columns, the vector,
|
A query builder object representing the query.
|
||||||
and also the "score" column which is the distance between the query
|
Once executed, the query returns selected columns, the vector,
|
||||||
vector and the returned vector.
|
and also the "score" column which is the distance between the query
|
||||||
|
vector and the returned vector.
|
||||||
"""
|
"""
|
||||||
if isinstance(query, str):
|
if isinstance(query, str):
|
||||||
# fts
|
# fts
|
||||||
@@ -217,6 +292,34 @@ class LanceTable:
|
|||||||
lance.write_dataset(data, tbl._dataset_uri, mode=mode)
|
lance.write_dataset(data, tbl._dataset_uri, mode=mode)
|
||||||
return tbl
|
return tbl
|
||||||
|
|
||||||
|
def delete(self, where: str):
|
||||||
|
"""Delete rows from the table.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
where: str
|
||||||
|
The SQL where clause to use when deleting rows.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
--------
|
||||||
|
>>> import lancedb
|
||||||
|
>>> import pandas as pd
|
||||||
|
>>> data = pd.DataFrame({"x": [1, 2, 3], "vector": [[1, 2], [3, 4], [5, 6]]})
|
||||||
|
>>> db = lancedb.connect("./.lancedb")
|
||||||
|
>>> table = db.create_table("my_table", data)
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 2 [3.0, 4.0]
|
||||||
|
2 3 [5.0, 6.0]
|
||||||
|
>>> table.delete("x = 2")
|
||||||
|
>>> table.to_pandas()
|
||||||
|
x vector
|
||||||
|
0 1 [1.0, 2.0]
|
||||||
|
1 3 [5.0, 6.0]
|
||||||
|
"""
|
||||||
|
self._dataset.delete(where)
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_schema(data: pa.Table, schema: pa.Schema = None) -> pa.Table:
|
def _sanitize_schema(data: pa.Table, schema: pa.Schema = None) -> pa.Table:
|
||||||
"""Ensure that the table has the expected schema.
|
"""Ensure that the table has the expected schema.
|
||||||
@@ -253,8 +356,7 @@ def _sanitize_vector_column(data: pa.Table, vector_column_name: str) -> pa.Table
|
|||||||
vector_column_name: str
|
vector_column_name: str
|
||||||
The name of the vector column.
|
The name of the vector column.
|
||||||
"""
|
"""
|
||||||
i = data.column_names.index(vector_column_name)
|
if vector_column_name not in data.column_names:
|
||||||
if i < 0:
|
|
||||||
raise ValueError(f"Missing vector column: {vector_column_name}")
|
raise ValueError(f"Missing vector column: {vector_column_name}")
|
||||||
vec_arr = data[vector_column_name].combine_chunks()
|
vec_arr = data[vector_column_name].combine_chunks()
|
||||||
if pa.types.is_fixed_size_list(vec_arr.type):
|
if pa.types.is_fixed_size_list(vec_arr.type):
|
||||||
@@ -266,4 +368,6 @@ def _sanitize_vector_column(data: pa.Table, vector_column_name: str) -> pa.Table
|
|||||||
values = values.cast(pa.float32())
|
values = values.cast(pa.float32())
|
||||||
list_size = len(values) / len(data)
|
list_size = len(values) / len(data)
|
||||||
vec_arr = pa.FixedSizeListArray.from_arrays(values, list_size)
|
vec_arr = pa.FixedSizeListArray.from_arrays(values, list_size)
|
||||||
return data.set_column(i, vector_column_name, vec_arr)
|
return data.set_column(
|
||||||
|
data.column_names.index(vector_column_name), vector_column_name, vec_arr
|
||||||
|
)
|
||||||
|
|||||||
@@ -41,3 +41,23 @@ def get_uri_scheme(uri: str) -> str:
|
|||||||
# So we add special handling here for schemes that are a single character
|
# So we add special handling here for schemes that are a single character
|
||||||
scheme = "file"
|
scheme = "file"
|
||||||
return scheme
|
return scheme
|
||||||
|
|
||||||
|
|
||||||
|
def get_uri_location(uri: str) -> str:
|
||||||
|
"""
|
||||||
|
Get the location of a URI. If the parameter is not a url, assumes it is just a path
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
uri : str
|
||||||
|
The URI to parse.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
str: Location part of the URL, without scheme
|
||||||
|
"""
|
||||||
|
parsed = urlparse(uri)
|
||||||
|
if not parsed.netloc:
|
||||||
|
return parsed.path
|
||||||
|
else:
|
||||||
|
return parsed.netloc + parsed.path
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "lancedb"
|
name = "lancedb"
|
||||||
version = "0.1.3"
|
version = "0.1.9"
|
||||||
dependencies = ["pylance>=0.4.15", "ratelimiter", "retry", "tqdm"]
|
dependencies = ["pylance~=0.5.0", "ratelimiter", "retry", "tqdm", "aiohttp", "pydantic", "attr"]
|
||||||
description = "lancedb"
|
description = "lancedb"
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
{ name = "LanceDB Devs", email = "dev@lancedb.com" },
|
||||||
@@ -33,11 +33,11 @@ classifiers = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
repository = "https://github.com/eto-ai/lancedb"
|
repository = "https://github.com/lancedb/lancedb"
|
||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
tests = [
|
tests = [
|
||||||
"pytest"
|
"pytest", "pytest-mock", "pytest-asyncio"
|
||||||
]
|
]
|
||||||
dev = [
|
dev = [
|
||||||
"ruff", "pre-commit", "black"
|
"ruff", "pre-commit", "black"
|
||||||
|
|||||||
77
python/tests/test_context.py
Normal file
77
python/tests/test_context.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from lancedb.context import contextualize
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def raw_df() -> pd.DataFrame:
|
||||||
|
return pd.DataFrame(
|
||||||
|
{
|
||||||
|
"token": [
|
||||||
|
"The",
|
||||||
|
"quick",
|
||||||
|
"brown",
|
||||||
|
"fox",
|
||||||
|
"jumped",
|
||||||
|
"over",
|
||||||
|
"the",
|
||||||
|
"lazy",
|
||||||
|
"dog",
|
||||||
|
"I",
|
||||||
|
"love",
|
||||||
|
"sandwiches",
|
||||||
|
],
|
||||||
|
"document_id": [1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_contextualizer(raw_df: pd.DataFrame):
|
||||||
|
result = (
|
||||||
|
contextualize(raw_df)
|
||||||
|
.window(6)
|
||||||
|
.stride(3)
|
||||||
|
.text_col("token")
|
||||||
|
.groupby("document_id")
|
||||||
|
.to_df()["token"]
|
||||||
|
.to_list()
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result == [
|
||||||
|
"The quick brown fox jumped over",
|
||||||
|
"fox jumped over the lazy dog",
|
||||||
|
"the lazy dog",
|
||||||
|
"I love sandwiches",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_contextualizer_with_threshold(raw_df: pd.DataFrame):
|
||||||
|
result = (
|
||||||
|
contextualize(raw_df)
|
||||||
|
.window(6)
|
||||||
|
.stride(3)
|
||||||
|
.text_col("token")
|
||||||
|
.groupby("document_id")
|
||||||
|
.min_window_size(4)
|
||||||
|
.to_df()["token"]
|
||||||
|
.to_list()
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result == [
|
||||||
|
"The quick brown fox jumped over",
|
||||||
|
"fox jumped over the lazy dog",
|
||||||
|
]
|
||||||
@@ -97,3 +97,26 @@ def test_create_mode(tmp_path):
|
|||||||
)
|
)
|
||||||
tbl = db.create_table("test", data=new_data, mode="overwrite")
|
tbl = db.create_table("test", data=new_data, mode="overwrite")
|
||||||
assert tbl.to_pandas().item.tolist() == ["fizz", "buzz"]
|
assert tbl.to_pandas().item.tolist() == ["fizz", "buzz"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete_table(tmp_path):
|
||||||
|
db = lancedb.connect(tmp_path)
|
||||||
|
data = pd.DataFrame(
|
||||||
|
{
|
||||||
|
"vector": [[3.1, 4.1], [5.9, 26.5]],
|
||||||
|
"item": ["foo", "bar"],
|
||||||
|
"price": [10.0, 20.0],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
db.create_table("test", data=data)
|
||||||
|
|
||||||
|
with pytest.raises(Exception):
|
||||||
|
db.create_table("test", data=data)
|
||||||
|
|
||||||
|
assert db.table_names() == ["test"]
|
||||||
|
|
||||||
|
db.drop_table("test")
|
||||||
|
assert db.table_names() == []
|
||||||
|
|
||||||
|
db.create_table("test", data=data)
|
||||||
|
assert db.table_names() == ["test"]
|
||||||
|
|||||||
27
python/tests/test_e2e_remote_db.py
Normal file
27
python/tests/test_e2e_remote_db.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from lancedb import LanceDBConnection
|
||||||
|
|
||||||
|
# TODO: setup integ test mark and script
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skip(reason="Need to set up a local server")
|
||||||
|
def test_against_local_server():
|
||||||
|
conn = LanceDBConnection("lancedb+http://localhost:10024")
|
||||||
|
table = conn.open_table("sift1m_ivf1024_pq16")
|
||||||
|
df = table.search(np.random.rand(128)).to_df()
|
||||||
|
assert len(df) == 10
|
||||||
@@ -14,6 +14,7 @@ import sys
|
|||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
|
|
||||||
from lancedb.embeddings import with_embeddings
|
from lancedb.embeddings import with_embeddings
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -13,13 +13,13 @@
|
|||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
|
|
||||||
import lancedb.fts
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pytest
|
import pytest
|
||||||
import tantivy
|
import tantivy
|
||||||
|
|
||||||
import lancedb as ldb
|
import lancedb as ldb
|
||||||
|
import lancedb.fts
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
@@ -82,3 +82,10 @@ def test_create_index_multiple_columns(tmp_path, table):
|
|||||||
assert len(df) == 10
|
assert len(df) == 10
|
||||||
assert "text" in df.columns
|
assert "text" in df.columns
|
||||||
assert "text2" in df.columns
|
assert "text2" in df.columns
|
||||||
|
|
||||||
|
|
||||||
|
def test_empty_rs(tmp_path, table, mocker):
|
||||||
|
table.create_fts_index(["text", "text2"])
|
||||||
|
mocker.patch("lancedb.fts.search_index", return_value=([], []))
|
||||||
|
df = table.search("puppy").limit(10).to_df()
|
||||||
|
assert len(df) == 0
|
||||||
|
|||||||
51
python/tests/test_io.py
Normal file
51
python/tests/test_io.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import lancedb
|
||||||
|
|
||||||
|
# You need to setup AWS credentials an a base path to run this test. Example
|
||||||
|
# AWS_PROFILE=default TEST_S3_BASE_URL=s3://my_bucket/dataset pytest tests/test_io.py
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(
|
||||||
|
(os.environ.get("TEST_S3_BASE_URL") is None),
|
||||||
|
reason="please setup s3 base url",
|
||||||
|
)
|
||||||
|
def test_s3_io():
|
||||||
|
db = lancedb.connect(os.environ.get("TEST_S3_BASE_URL"))
|
||||||
|
assert db.table_names() == []
|
||||||
|
|
||||||
|
table = db.create_table(
|
||||||
|
"test",
|
||||||
|
data=[
|
||||||
|
{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
|
||||||
|
{"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
rs = table.search([100, 100]).limit(1).to_df()
|
||||||
|
assert len(rs) == 1
|
||||||
|
assert rs["item"].iloc[0] == "bar"
|
||||||
|
|
||||||
|
rs = table.search([100, 100]).where("price < 15").limit(2).to_df()
|
||||||
|
assert len(rs) == 1
|
||||||
|
assert rs["item"].iloc[0] == "foo"
|
||||||
|
|
||||||
|
assert db.table_names() == ["test"]
|
||||||
|
assert "test" in db
|
||||||
|
assert len(db) == 1
|
||||||
|
|
||||||
|
assert db.open_table("test").name == db["test"].name
|
||||||
@@ -17,12 +17,15 @@ import pandas as pd
|
|||||||
import pandas.testing as tm
|
import pandas.testing as tm
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from lancedb.db import LanceDBConnection
|
||||||
from lancedb.query import LanceQueryBuilder
|
from lancedb.query import LanceQueryBuilder
|
||||||
|
|
||||||
|
|
||||||
class MockTable:
|
class MockTable:
|
||||||
def __init__(self, tmp_path):
|
def __init__(self, tmp_path):
|
||||||
self.uri = tmp_path
|
self.uri = tmp_path
|
||||||
|
self._conn = LanceDBConnection("/tmp/lance/")
|
||||||
|
|
||||||
def to_lance(self):
|
def to_lance(self):
|
||||||
return lance.dataset(self.uri)
|
return lance.dataset(self.uri)
|
||||||
@@ -30,23 +33,17 @@ class MockTable:
|
|||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def table(tmp_path) -> MockTable:
|
def table(tmp_path) -> MockTable:
|
||||||
df = pd.DataFrame(
|
df = pa.table(
|
||||||
{
|
{
|
||||||
"vector": [[1, 2], [3, 4]],
|
"vector": pa.array(
|
||||||
"id": [1, 2],
|
[[1, 2], [3, 4]], type=pa.list_(pa.float32(), list_size=2)
|
||||||
"str_field": ["a", "b"],
|
),
|
||||||
"float_field": [1.0, 2.0],
|
"id": pa.array([1, 2]),
|
||||||
|
"str_field": pa.array(["a", "b"]),
|
||||||
|
"float_field": pa.array([1.0, 2.0]),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
schema = pa.schema(
|
lance.write_dataset(df, tmp_path)
|
||||||
[
|
|
||||||
pa.field("vector", pa.list_(pa.float32(), list_size=2)),
|
|
||||||
pa.field("id", pa.int32()),
|
|
||||||
pa.field("str_field", pa.string()),
|
|
||||||
pa.field("float_field", pa.float64()),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
lance.write_dataset(df, tmp_path, schema)
|
|
||||||
return MockTable(tmp_path)
|
return MockTable(tmp_path)
|
||||||
|
|
||||||
|
|
||||||
@@ -65,7 +62,7 @@ def test_query_builder_with_filter(table):
|
|||||||
def test_query_builder_with_metric(table):
|
def test_query_builder_with_metric(table):
|
||||||
query = [4, 8]
|
query = [4, 8]
|
||||||
df_default = LanceQueryBuilder(table, query).to_df()
|
df_default = LanceQueryBuilder(table, query).to_df()
|
||||||
df_l2 = LanceQueryBuilder(table, query).metric("l2").to_df()
|
df_l2 = LanceQueryBuilder(table, query).metric("L2").to_df()
|
||||||
tm.assert_frame_equal(df_default, df_l2)
|
tm.assert_frame_equal(df_default, df_l2)
|
||||||
|
|
||||||
df_cosine = LanceQueryBuilder(table, query).metric("cosine").limit(1).to_df()
|
df_cosine = LanceQueryBuilder(table, query).metric("cosine").limit(1).to_df()
|
||||||
|
|||||||
95
python/tests/test_remote_client.py
Normal file
95
python/tests/test_remote_client.py
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import attr
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import pyarrow as pa
|
||||||
|
import pytest
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
from lancedb.remote.client import RestfulLanceDBClient, VectorQuery
|
||||||
|
|
||||||
|
|
||||||
|
@attr.define
|
||||||
|
class MockLanceDBServer:
|
||||||
|
runner: web.AppRunner = attr.field(init=False)
|
||||||
|
site: web.TCPSite = attr.field(init=False)
|
||||||
|
|
||||||
|
async def query_handler(self, request: web.Request) -> web.Response:
|
||||||
|
table_name = request.match_info["table_name"]
|
||||||
|
assert table_name == "test_table"
|
||||||
|
|
||||||
|
request_json = await request.json()
|
||||||
|
# TODO: do some matching
|
||||||
|
|
||||||
|
vecs = pd.Series([np.random.rand(128) for x in range(10)], name="vector")
|
||||||
|
ids = pd.Series(range(10), name="id")
|
||||||
|
df = pd.DataFrame([vecs, ids]).T
|
||||||
|
|
||||||
|
batch = pa.RecordBatch.from_pandas(
|
||||||
|
df,
|
||||||
|
schema=pa.schema(
|
||||||
|
[
|
||||||
|
pa.field("vector", pa.list_(pa.float32(), 128)),
|
||||||
|
pa.field("id", pa.int64()),
|
||||||
|
]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
sink = pa.BufferOutputStream()
|
||||||
|
with pa.ipc.new_file(sink, batch.schema) as writer:
|
||||||
|
writer.write_batch(batch)
|
||||||
|
|
||||||
|
return web.Response(body=sink.getvalue().to_pybytes())
|
||||||
|
|
||||||
|
async def setup(self):
|
||||||
|
app = web.Application()
|
||||||
|
app.add_routes([web.post("/table/{table_name}", self.query_handler)])
|
||||||
|
self.runner = web.AppRunner(app)
|
||||||
|
await self.runner.setup()
|
||||||
|
self.site = web.TCPSite(self.runner, "localhost", 8111)
|
||||||
|
|
||||||
|
async def start(self):
|
||||||
|
await self.site.start()
|
||||||
|
|
||||||
|
async def stop(self):
|
||||||
|
await self.runner.cleanup()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skip(reason="flaky somehow, fix later")
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_e2e_with_mock_server():
|
||||||
|
mock_server = MockLanceDBServer()
|
||||||
|
await mock_server.setup()
|
||||||
|
await mock_server.start()
|
||||||
|
|
||||||
|
try:
|
||||||
|
client = RestfulLanceDBClient("lancedb+http://localhost:8111")
|
||||||
|
df = (
|
||||||
|
await client.query(
|
||||||
|
"test_table",
|
||||||
|
VectorQuery(
|
||||||
|
vector=np.random.rand(128).tolist(),
|
||||||
|
k=10,
|
||||||
|
_metric="L2",
|
||||||
|
columns=["id", "vector"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
).to_df()
|
||||||
|
|
||||||
|
assert "vector" in df.columns
|
||||||
|
assert "id" in df.columns
|
||||||
|
finally:
|
||||||
|
# make sure we don't leak resources
|
||||||
|
await mock_server.stop()
|
||||||
35
python/tests/test_remote_db.py
Normal file
35
python/tests/test_remote_db.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# Copyright 2023 LanceDB Developers
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import pyarrow as pa
|
||||||
|
|
||||||
|
from lancedb.db import LanceDBConnection
|
||||||
|
from lancedb.remote.client import VectorQuery, VectorQueryResult
|
||||||
|
|
||||||
|
|
||||||
|
class FakeLanceDBClient:
|
||||||
|
async def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def query(self, table_name: str, query: VectorQuery) -> VectorQueryResult:
|
||||||
|
assert table_name == "test"
|
||||||
|
t = pa.schema([]).empty_table()
|
||||||
|
return VectorQueryResult(t)
|
||||||
|
|
||||||
|
|
||||||
|
def test_remote_db():
|
||||||
|
conn = LanceDBConnection("lancedb+http://client-will-be-injected")
|
||||||
|
setattr(conn, "_client", FakeLanceDBClient())
|
||||||
|
|
||||||
|
table = conn["test"]
|
||||||
|
table.search([1.0, 2.0]).to_df()
|
||||||
@@ -11,11 +11,13 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import functools
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pyarrow as pa
|
import pyarrow as pa
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from lancedb.table import LanceTable
|
from lancedb.table import LanceTable
|
||||||
|
|
||||||
|
|
||||||
@@ -23,6 +25,10 @@ class MockDB:
|
|||||||
def __init__(self, uri: Path):
|
def __init__(self, uri: Path):
|
||||||
self.uri = uri
|
self.uri = uri
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def is_managed_remote(self) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def db(tmp_path) -> MockDB:
|
def db(tmp_path) -> MockDB:
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb-node"
|
name = "vectordb-node"
|
||||||
version = "0.1.0"
|
version = "0.1.9"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
@@ -10,12 +10,12 @@ exclude = ["index.node"]
|
|||||||
crate-type = ["cdylib"]
|
crate-type = ["cdylib"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
arrow-array = "37.0"
|
arrow-array = "40.0"
|
||||||
arrow-ipc = "37.0"
|
arrow-ipc = "40.0"
|
||||||
arrow-schema = "37.0"
|
arrow-schema = "40.0"
|
||||||
once_cell = "1"
|
once_cell = "1"
|
||||||
futures = "0.3"
|
futures = "0.3"
|
||||||
lance = "0.4.3"
|
lance = "0.5.0"
|
||||||
vectordb = { path = "../../vectordb" }
|
vectordb = { path = "../../vectordb" }
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ pub(crate) fn table_create_vector_index(mut cx: FunctionContext) -> JsResult<JsP
|
|||||||
let add_result = table
|
let add_result = table
|
||||||
.lock()
|
.lock()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.create_idx(&index_params_builder)
|
.create_index(&index_params_builder)
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
@@ -97,6 +97,7 @@ fn get_index_params_builder(
|
|||||||
let ivf_params = IvfBuildParams {
|
let ivf_params = IvfBuildParams {
|
||||||
num_partitions: np,
|
num_partitions: np,
|
||||||
max_iters,
|
max_iters,
|
||||||
|
centroids: None,
|
||||||
};
|
};
|
||||||
index_builder.ivf_params(ivf_params)
|
index_builder.ivf_params(ivf_params)
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -56,23 +56,46 @@ fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
|
|||||||
RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
|
RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn database_new(mut cx: FunctionContext) -> JsResult<JsBox<JsDatabase>> {
|
fn database_new(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let path = cx.argument::<JsString>(0)?.value(&mut cx);
|
let path = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
let db = JsDatabase {
|
|
||||||
database: Arc::new(Database::connect(path).or_else(|err| cx.throw_error(err.to_string()))?),
|
let rt = runtime(&mut cx)?;
|
||||||
};
|
let channel = cx.channel();
|
||||||
Ok(cx.boxed(db))
|
let (deferred, promise) = cx.promise();
|
||||||
|
|
||||||
|
rt.spawn(async move {
|
||||||
|
let database = Database::connect(&path).await;
|
||||||
|
|
||||||
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
|
let db = JsDatabase {
|
||||||
|
database: Arc::new(database.or_else(|err| cx.throw_error(err.to_string()))?),
|
||||||
|
};
|
||||||
|
Ok(cx.boxed(db))
|
||||||
|
});
|
||||||
|
});
|
||||||
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn database_table_names(mut cx: FunctionContext) -> JsResult<JsArray> {
|
fn database_table_names(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let db = cx
|
let db = cx
|
||||||
.this()
|
.this()
|
||||||
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
||||||
let tables = db
|
|
||||||
.database
|
let rt = runtime(&mut cx)?;
|
||||||
.table_names()
|
let (deferred, promise) = cx.promise();
|
||||||
.or_else(|err| cx.throw_error(err.to_string()))?;
|
let channel = cx.channel();
|
||||||
convert::vec_str_to_array(&tables, &mut cx)
|
let database = db.database.clone();
|
||||||
|
|
||||||
|
rt.spawn(async move {
|
||||||
|
let tables_rst = database.table_names().await;
|
||||||
|
|
||||||
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
|
let tables = tables_rst.or_else(|err| cx.throw_error(err.to_string()))?;
|
||||||
|
let table_names = convert::vec_str_to_array(&tables, &mut cx);
|
||||||
|
table_names
|
||||||
|
});
|
||||||
|
});
|
||||||
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
@@ -87,7 +110,7 @@ fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
|
|
||||||
let (deferred, promise) = cx.promise();
|
let (deferred, promise) = cx.promise();
|
||||||
rt.spawn(async move {
|
rt.spawn(async move {
|
||||||
let table_rst = database.open_table(table_name).await;
|
let table_rst = database.open_table(&table_name).await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let table = Arc::new(Mutex::new(
|
let table = Arc::new(Mutex::new(
|
||||||
@@ -99,6 +122,27 @@ fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
Ok(promise)
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn database_drop_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
|
let db = cx
|
||||||
|
.this()
|
||||||
|
.downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
|
||||||
|
let table_name = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
|
|
||||||
|
let rt = runtime(&mut cx)?;
|
||||||
|
let channel = cx.channel();
|
||||||
|
let database = db.database.clone();
|
||||||
|
|
||||||
|
let (deferred, promise) = cx.promise();
|
||||||
|
rt.spawn(async move {
|
||||||
|
let result = database.drop_table(&table_name).await;
|
||||||
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
|
result.or_else(|err| cx.throw_error(err.to_string()))?;
|
||||||
|
Ok(cx.null())
|
||||||
|
});
|
||||||
|
});
|
||||||
|
Ok(promise)
|
||||||
|
}
|
||||||
|
|
||||||
fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
||||||
let query_obj = cx.argument::<JsObject>(0)?;
|
let query_obj = cx.argument::<JsObject>(0)?;
|
||||||
@@ -106,6 +150,17 @@ fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
let limit = query_obj
|
let limit = query_obj
|
||||||
.get::<JsNumber, _, _>(&mut cx, "_limit")?
|
.get::<JsNumber, _, _>(&mut cx, "_limit")?
|
||||||
.value(&mut cx);
|
.value(&mut cx);
|
||||||
|
let select = query_obj
|
||||||
|
.get_opt::<JsArray, _, _>(&mut cx, "_select")?
|
||||||
|
.map(|arr| {
|
||||||
|
let js_array = arr.deref();
|
||||||
|
let mut projection_vec: Vec<String> = Vec::new();
|
||||||
|
for i in 0..js_array.len(&mut cx) {
|
||||||
|
let entry: Handle<JsString> = js_array.get(&mut cx, i).unwrap();
|
||||||
|
projection_vec.push(entry.value(&mut cx));
|
||||||
|
}
|
||||||
|
projection_vec
|
||||||
|
});
|
||||||
let filter = query_obj
|
let filter = query_obj
|
||||||
.get_opt::<JsString, _, _>(&mut cx, "_filter")?
|
.get_opt::<JsString, _, _>(&mut cx, "_filter")?
|
||||||
.map(|s| s.value(&mut cx));
|
.map(|s| s.value(&mut cx));
|
||||||
@@ -138,7 +193,8 @@ fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
.refine_factor(refine_factor)
|
.refine_factor(refine_factor)
|
||||||
.nprobes(nprobes)
|
.nprobes(nprobes)
|
||||||
.filter(filter)
|
.filter(filter)
|
||||||
.metric_type(metric_type);
|
.metric_type(metric_type)
|
||||||
|
.select(select);
|
||||||
let record_batch_stream = builder.execute();
|
let record_batch_stream = builder.execute();
|
||||||
let results = record_batch_stream
|
let results = record_batch_stream
|
||||||
.and_then(|stream| stream.try_collect::<Vec<_>>().map_err(Error::from))
|
.and_then(|stream| stream.try_collect::<Vec<_>>().map_err(Error::from))
|
||||||
@@ -186,7 +242,7 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
|
|
||||||
rt.block_on(async move {
|
rt.block_on(async move {
|
||||||
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchBuffer::new(batches));
|
let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchBuffer::new(batches));
|
||||||
let table_rst = database.create_table(table_name, batch_reader).await;
|
let table_rst = database.create_table(&table_name, batch_reader).await;
|
||||||
|
|
||||||
deferred.settle_with(&channel, move |mut cx| {
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
let table = Arc::new(Mutex::new(
|
let table = Arc::new(Mutex::new(
|
||||||
@@ -229,14 +285,56 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
|||||||
Ok(promise)
|
Ok(promise)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn table_count_rows(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
|
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
||||||
|
let rt = runtime(&mut cx)?;
|
||||||
|
let channel = cx.channel();
|
||||||
|
|
||||||
|
let (deferred, promise) = cx.promise();
|
||||||
|
let table = js_table.table.clone();
|
||||||
|
|
||||||
|
rt.block_on(async move {
|
||||||
|
let num_rows_result = table.lock().unwrap().count_rows().await;
|
||||||
|
|
||||||
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
|
let num_rows = num_rows_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
||||||
|
Ok(cx.number(num_rows as f64))
|
||||||
|
});
|
||||||
|
});
|
||||||
|
Ok(promise)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn table_delete(mut cx: FunctionContext) -> JsResult<JsPromise> {
|
||||||
|
let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
|
||||||
|
let rt = runtime(&mut cx)?;
|
||||||
|
let channel = cx.channel();
|
||||||
|
|
||||||
|
let (deferred, promise) = cx.promise();
|
||||||
|
let table = js_table.table.clone();
|
||||||
|
|
||||||
|
let predicate = cx.argument::<JsString>(0)?.value(&mut cx);
|
||||||
|
|
||||||
|
let delete_result = rt.block_on(async move { table.lock().unwrap().delete(&predicate).await });
|
||||||
|
|
||||||
|
deferred.settle_with(&channel, move |mut cx| {
|
||||||
|
delete_result.or_else(|err| cx.throw_error(err.to_string()))?;
|
||||||
|
Ok(cx.undefined())
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(promise)
|
||||||
|
}
|
||||||
|
|
||||||
#[neon::main]
|
#[neon::main]
|
||||||
fn main(mut cx: ModuleContext) -> NeonResult<()> {
|
fn main(mut cx: ModuleContext) -> NeonResult<()> {
|
||||||
cx.export_function("databaseNew", database_new)?;
|
cx.export_function("databaseNew", database_new)?;
|
||||||
cx.export_function("databaseTableNames", database_table_names)?;
|
cx.export_function("databaseTableNames", database_table_names)?;
|
||||||
cx.export_function("databaseOpenTable", database_open_table)?;
|
cx.export_function("databaseOpenTable", database_open_table)?;
|
||||||
|
cx.export_function("databaseDropTable", database_drop_table)?;
|
||||||
cx.export_function("tableSearch", table_search)?;
|
cx.export_function("tableSearch", table_search)?;
|
||||||
cx.export_function("tableCreate", table_create)?;
|
cx.export_function("tableCreate", table_create)?;
|
||||||
cx.export_function("tableAdd", table_add)?;
|
cx.export_function("tableAdd", table_add)?;
|
||||||
|
cx.export_function("tableCountRows", table_count_rows)?;
|
||||||
|
cx.export_function("tableDelete", table_delete)?;
|
||||||
cx.export_function(
|
cx.export_function(
|
||||||
"tableCreateVectorIndex",
|
"tableCreateVectorIndex",
|
||||||
index::vector::table_create_vector_index,
|
index::vector::table_create_vector_index,
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "vectordb"
|
name = "vectordb"
|
||||||
version = "0.0.1"
|
version = "0.1.9"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
description = "Serverless, low-latency vector database for AI applications"
|
description = "Serverless, low-latency vector database for AI applications"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
@@ -9,10 +9,12 @@ repository = "https://github.com/lancedb/lancedb"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
arrow-array = "37.0"
|
arrow-array = "40.0"
|
||||||
arrow-data = "37.0"
|
arrow-data = "40.0"
|
||||||
arrow-schema = "37.0"
|
arrow-schema = "40.0"
|
||||||
lance = "0.4.3"
|
object_store = "0.6.1"
|
||||||
|
snafu = "0.7.4"
|
||||||
|
lance = "0.5.0"
|
||||||
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
tokio = { version = "1.23", features = ["rt-multi-thread"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// Copyright 2023 Lance Developers.
|
// Copyright 2023 LanceDB Developers.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
@@ -12,16 +12,20 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use arrow_array::RecordBatchReader;
|
|
||||||
use std::fs::create_dir_all;
|
use std::fs::create_dir_all;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::error::Result;
|
use arrow_array::RecordBatchReader;
|
||||||
|
use lance::io::object_store::ObjectStore;
|
||||||
|
use snafu::prelude::*;
|
||||||
|
|
||||||
|
use crate::error::{CreateDirSnafu, Result};
|
||||||
use crate::table::Table;
|
use crate::table::Table;
|
||||||
|
|
||||||
pub struct Database {
|
pub struct Database {
|
||||||
pub(crate) path: Arc<PathBuf>,
|
object_store: ObjectStore,
|
||||||
|
|
||||||
|
pub(crate) uri: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
const LANCE_EXTENSION: &str = "lance";
|
const LANCE_EXTENSION: &str = "lance";
|
||||||
@@ -37,26 +41,38 @@ impl Database {
|
|||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Database] object.
|
/// * A [Database] object.
|
||||||
pub fn connect<P: AsRef<Path>>(path: P) -> Result<Database> {
|
pub async fn connect(uri: &str) -> Result<Database> {
|
||||||
if !path.as_ref().try_exists()? {
|
let (object_store, _) = ObjectStore::from_uri(uri).await?;
|
||||||
create_dir_all(&path)?;
|
if object_store.is_local() {
|
||||||
|
Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
|
||||||
}
|
}
|
||||||
Ok(Database {
|
Ok(Database {
|
||||||
path: Arc::new(path.as_ref().to_path_buf()),
|
uri: uri.to_string(),
|
||||||
|
object_store,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Try to create a local directory to store the lancedb dataset
|
||||||
|
fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
|
||||||
|
let path = Path::new(path);
|
||||||
|
if !path.try_exists()? {
|
||||||
|
create_dir_all(&path)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
/// Get the names of all tables in the database.
|
/// Get the names of all tables in the database.
|
||||||
///
|
///
|
||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Vec<String>] with all table names.
|
/// * A [Vec<String>] with all table names.
|
||||||
pub fn table_names(&self) -> Result<Vec<String>> {
|
pub async fn table_names(&self) -> Result<Vec<String>> {
|
||||||
let f = self
|
let f = self
|
||||||
.path
|
.object_store
|
||||||
.read_dir()?
|
.read_dir(self.uri.as_str())
|
||||||
.flatten()
|
.await?
|
||||||
.map(|dir_entry| dir_entry.path())
|
.iter()
|
||||||
|
.map(|fname| Path::new(fname))
|
||||||
.filter(|path| {
|
.filter(|path| {
|
||||||
let is_lance = path
|
let is_lance = path
|
||||||
.extension()
|
.extension()
|
||||||
@@ -76,10 +92,10 @@ impl Database {
|
|||||||
|
|
||||||
pub async fn create_table(
|
pub async fn create_table(
|
||||||
&self,
|
&self,
|
||||||
name: String,
|
name: &str,
|
||||||
batches: Box<dyn RecordBatchReader>,
|
batches: Box<dyn RecordBatchReader>,
|
||||||
) -> Result<Table> {
|
) -> Result<Table> {
|
||||||
Table::create(self.path.clone(), name, batches).await
|
Table::create(&self.uri, name, batches).await
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Open a table in the database.
|
/// Open a table in the database.
|
||||||
@@ -90,8 +106,18 @@ impl Database {
|
|||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open_table(&self, name: String) -> Result<Table> {
|
pub async fn open_table(&self, name: &str) -> Result<Table> {
|
||||||
Table::open(self.path.clone(), name).await
|
Table::open(&self.uri, name).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Drop a table in the database.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
/// * `name` - The name of the table.
|
||||||
|
pub async fn drop_table(&self, name: &str) -> Result<()> {
|
||||||
|
let dir_name = format!("{}/{}.{}", self.uri, name, LANCE_EXTENSION);
|
||||||
|
self.object_store.remove_dir_all(dir_name).await?;
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -105,10 +131,10 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_connect() {
|
async fn test_connect() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
let db = Database::connect(&path_buf);
|
let db = Database::connect(uri).await.unwrap();
|
||||||
|
|
||||||
assert_eq!(db.unwrap().path.as_path(), path_buf.as_path())
|
assert_eq!(db.uri, uri);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -118,10 +144,29 @@ mod tests {
|
|||||||
create_dir_all(tmp_dir.path().join("table2.lance")).unwrap();
|
create_dir_all(tmp_dir.path().join("table2.lance")).unwrap();
|
||||||
create_dir_all(tmp_dir.path().join("invalidlance")).unwrap();
|
create_dir_all(tmp_dir.path().join("invalidlance")).unwrap();
|
||||||
|
|
||||||
let db = Database::connect(&tmp_dir.into_path()).unwrap();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
let tables = db.table_names().unwrap();
|
let db = Database::connect(uri).await.unwrap();
|
||||||
|
let tables = db.table_names().await.unwrap();
|
||||||
assert_eq!(tables.len(), 2);
|
assert_eq!(tables.len(), 2);
|
||||||
assert!(tables.contains(&String::from("table1")));
|
assert!(tables.contains(&String::from("table1")));
|
||||||
assert!(tables.contains(&String::from("table2")));
|
assert!(tables.contains(&String::from("table2")));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_connect_s3() {
|
||||||
|
// let db = Database::connect("s3://bucket/path/to/database").await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn drop_table() {
|
||||||
|
let tmp_dir = tempdir().unwrap();
|
||||||
|
create_dir_all(tmp_dir.path().join("table1.lance")).unwrap();
|
||||||
|
|
||||||
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
let db = Database::connect(uri).await.unwrap();
|
||||||
|
db.drop_table("table1").await.unwrap();
|
||||||
|
|
||||||
|
let tables = db.table_names().await.unwrap();
|
||||||
|
assert_eq!(tables.len(), 0);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,32 +12,50 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
#[derive(Debug)]
|
use snafu::Snafu;
|
||||||
pub enum Error {
|
|
||||||
IO(String),
|
|
||||||
Lance(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for Error {
|
#[derive(Debug, Snafu)]
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
#[snafu(visibility(pub(crate)))]
|
||||||
let (catalog, message) = match self {
|
pub enum Error {
|
||||||
Self::IO(s) => ("I/O", s.as_str()),
|
#[snafu(display("LanceDBError: Invalid table name: {name}"))]
|
||||||
Self::Lance(s) => ("Lance", s.as_str()),
|
InvalidTableName { name: String },
|
||||||
};
|
#[snafu(display("LanceDBError: Table '{name}' was not found"))]
|
||||||
write!(f, "LanceDBError({catalog}): {message}")
|
TableNotFound { name: String },
|
||||||
}
|
#[snafu(display("LanceDBError: Table '{name}' already exists"))]
|
||||||
|
TableAlreadyExists { name: String },
|
||||||
|
#[snafu(display("LanceDBError: Unable to created lance dataset at {path}: {source}"))]
|
||||||
|
CreateDir {
|
||||||
|
path: String,
|
||||||
|
source: std::io::Error,
|
||||||
|
},
|
||||||
|
#[snafu(display("LanceDBError: {message}"))]
|
||||||
|
Store { message: String },
|
||||||
|
#[snafu(display("LanceDBError: {message}"))]
|
||||||
|
Lance { message: String },
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type Result<T> = std::result::Result<T, Error>;
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
|
|
||||||
impl From<std::io::Error> for Error {
|
impl From<lance::Error> for Error {
|
||||||
fn from(e: std::io::Error) -> Self {
|
fn from(e: lance::Error) -> Self {
|
||||||
Self::IO(e.to_string())
|
Self::Lance {
|
||||||
|
message: e.to_string(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<lance::Error> for Error {
|
impl From<object_store::Error> for Error {
|
||||||
fn from(e: lance::Error) -> Self {
|
fn from(e: object_store::Error) -> Self {
|
||||||
Self::Lance(e.to_string())
|
Self::Store {
|
||||||
|
message: e.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<object_store::path::Error> for Error {
|
||||||
|
fn from(e: object_store::path::Error) -> Self {
|
||||||
|
Self::Store {
|
||||||
|
message: e.to_string(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ pub trait VectorIndexBuilder {
|
|||||||
fn get_column(&self) -> Option<String>;
|
fn get_column(&self) -> Option<String>;
|
||||||
fn get_index_name(&self) -> Option<String>;
|
fn get_index_name(&self) -> Option<String>;
|
||||||
fn build(&self) -> VectorIndexParams;
|
fn build(&self) -> VectorIndexParams;
|
||||||
|
|
||||||
|
fn get_replace(&self) -> bool;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct IvfPQIndexBuilder {
|
pub struct IvfPQIndexBuilder {
|
||||||
@@ -28,6 +30,7 @@ pub struct IvfPQIndexBuilder {
|
|||||||
metric_type: Option<MetricType>,
|
metric_type: Option<MetricType>,
|
||||||
ivf_params: Option<IvfBuildParams>,
|
ivf_params: Option<IvfBuildParams>,
|
||||||
pq_params: Option<PQBuildParams>,
|
pq_params: Option<PQBuildParams>,
|
||||||
|
replace: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IvfPQIndexBuilder {
|
impl IvfPQIndexBuilder {
|
||||||
@@ -38,6 +41,7 @@ impl IvfPQIndexBuilder {
|
|||||||
metric_type: None,
|
metric_type: None,
|
||||||
ivf_params: None,
|
ivf_params: None,
|
||||||
pq_params: None,
|
pq_params: None,
|
||||||
|
replace: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -67,6 +71,11 @@ impl IvfPQIndexBuilder {
|
|||||||
self.pq_params = Some(pq_params);
|
self.pq_params = Some(pq_params);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace(&mut self, replace: bool) -> &mut IvfPQIndexBuilder {
|
||||||
|
self.replace = replace;
|
||||||
|
self
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl VectorIndexBuilder for IvfPQIndexBuilder {
|
impl VectorIndexBuilder for IvfPQIndexBuilder {
|
||||||
@@ -84,6 +93,10 @@ impl VectorIndexBuilder for IvfPQIndexBuilder {
|
|||||||
|
|
||||||
VectorIndexParams::with_ivf_pq_params(pq_params.metric_type, ivf_params, pq_params)
|
VectorIndexParams::with_ivf_pq_params(pq_params.metric_type, ivf_params, pq_params)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn get_replace(&self) -> bool {
|
||||||
|
self.replace
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -17,3 +17,5 @@ pub mod error;
|
|||||||
pub mod index;
|
pub mod index;
|
||||||
pub mod query;
|
pub mod query;
|
||||||
pub mod table;
|
pub mod table;
|
||||||
|
|
||||||
|
pub use database::Database;
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ pub struct Query {
|
|||||||
pub query_vector: Float32Array,
|
pub query_vector: Float32Array,
|
||||||
pub limit: usize,
|
pub limit: usize,
|
||||||
pub filter: Option<String>,
|
pub filter: Option<String>,
|
||||||
|
pub select: Option<Vec<String>>,
|
||||||
pub nprobes: usize,
|
pub nprobes: usize,
|
||||||
pub refine_factor: Option<u32>,
|
pub refine_factor: Option<u32>,
|
||||||
pub metric_type: Option<MetricType>,
|
pub metric_type: Option<MetricType>,
|
||||||
@@ -54,6 +55,7 @@ impl Query {
|
|||||||
metric_type: None,
|
metric_type: None,
|
||||||
use_index: false,
|
use_index: false,
|
||||||
filter: None,
|
filter: None,
|
||||||
|
select: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -72,6 +74,7 @@ impl Query {
|
|||||||
)?;
|
)?;
|
||||||
scanner.nprobs(self.nprobes);
|
scanner.nprobs(self.nprobes);
|
||||||
scanner.use_index(self.use_index);
|
scanner.use_index(self.use_index);
|
||||||
|
self.select.as_ref().map(|p| scanner.project(p.as_slice()));
|
||||||
self.filter.as_ref().map(|f| scanner.filter(f));
|
self.filter.as_ref().map(|f| scanner.filter(f));
|
||||||
self.refine_factor.map(|rf| scanner.refine(rf));
|
self.refine_factor.map(|rf| scanner.refine(rf));
|
||||||
self.metric_type.map(|mt| scanner.distance_metric(mt));
|
self.metric_type.map(|mt| scanner.distance_metric(mt));
|
||||||
@@ -138,10 +141,23 @@ impl Query {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A filter statement to be applied to this query.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `filter` - value A filter in the same format used by a sql WHERE clause.
|
||||||
pub fn filter(mut self, filter: Option<String>) -> Query {
|
pub fn filter(mut self, filter: Option<String>) -> Query {
|
||||||
self.filter = filter;
|
self.filter = filter;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Return only the specified columns.
|
||||||
|
///
|
||||||
|
/// Only select the specified columns. If not specified, all columns will be returned.
|
||||||
|
pub fn select(mut self, columns: Option<Vec<String>>) -> Query {
|
||||||
|
self.select = columns;
|
||||||
|
self
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@@ -159,7 +175,7 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_setters_getters() {
|
async fn test_setters_getters() {
|
||||||
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
let ds = Dataset::write(&mut batches, ":memory:", None)
|
let ds = Dataset::write(&mut batches, "memory://foo", None)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -188,7 +204,7 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_execute() {
|
async fn test_execute() {
|
||||||
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
let ds = Dataset::write(&mut batches, ":memory:", None)
|
let ds = Dataset::write(&mut batches, "memory://foo", None)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
|
|||||||
@@ -12,28 +12,35 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use arrow_array::{Float32Array, RecordBatchReader};
|
use arrow_array::{Float32Array, RecordBatchReader};
|
||||||
use lance::dataset::{Dataset, WriteMode, WriteParams};
|
use lance::dataset::{Dataset, WriteMode, WriteParams};
|
||||||
use lance::index::IndexType;
|
use lance::index::IndexType;
|
||||||
|
use snafu::prelude::*;
|
||||||
|
|
||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, InvalidTableNameSnafu, Result};
|
||||||
use crate::index::vector::VectorIndexBuilder;
|
use crate::index::vector::VectorIndexBuilder;
|
||||||
use crate::query::Query;
|
use crate::query::Query;
|
||||||
|
|
||||||
pub const VECTOR_COLUMN_NAME: &str = "vector";
|
pub const VECTOR_COLUMN_NAME: &str = "vector";
|
||||||
|
|
||||||
pub const LANCE_FILE_EXTENSION: &str = "lance";
|
pub const LANCE_FILE_EXTENSION: &str = "lance";
|
||||||
|
|
||||||
/// A table in a LanceDB database.
|
/// A table in a LanceDB database.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct Table {
|
pub struct Table {
|
||||||
name: String,
|
name: String,
|
||||||
path: String,
|
uri: String,
|
||||||
dataset: Arc<Dataset>,
|
dataset: Arc<Dataset>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Table {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "Table({})", self.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Table {
|
impl Table {
|
||||||
/// Opens an existing Table
|
/// Opens an existing Table
|
||||||
///
|
///
|
||||||
@@ -45,18 +52,28 @@ impl Table {
|
|||||||
/// # Returns
|
/// # Returns
|
||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn open(base_path: Arc<PathBuf>, name: String) -> Result<Self> {
|
pub async fn open(base_uri: &str, name: &str) -> Result<Self> {
|
||||||
let ds_path = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
let path = Path::new(base_uri);
|
||||||
let ds_uri = ds_path
|
|
||||||
|
let table_uri = path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
||||||
|
let uri = table_uri
|
||||||
|
.as_path()
|
||||||
.to_str()
|
.to_str()
|
||||||
.ok_or(Error::IO(format!("Unable to find table {}", name)))?;
|
.context(InvalidTableNameSnafu { name })?;
|
||||||
let dataset = Dataset::open(ds_uri).await?;
|
|
||||||
let table = Table {
|
let dataset = Dataset::open(&uri).await.map_err(|e| match e {
|
||||||
name,
|
lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
|
||||||
path: ds_uri.to_string(),
|
name: name.to_string(),
|
||||||
|
},
|
||||||
|
e => Error::Lance {
|
||||||
|
message: e.to_string(),
|
||||||
|
},
|
||||||
|
})?;
|
||||||
|
Ok(Table {
|
||||||
|
name: name.to_string(),
|
||||||
|
uri: uri.to_string(),
|
||||||
dataset: Arc::new(dataset),
|
dataset: Arc::new(dataset),
|
||||||
};
|
})
|
||||||
Ok(table)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates a new Table
|
/// Creates a new Table
|
||||||
@@ -71,25 +88,36 @@ impl Table {
|
|||||||
///
|
///
|
||||||
/// * A [Table] object.
|
/// * A [Table] object.
|
||||||
pub async fn create(
|
pub async fn create(
|
||||||
base_path: Arc<PathBuf>,
|
base_uri: &str,
|
||||||
name: String,
|
name: &str,
|
||||||
mut batches: Box<dyn RecordBatchReader>,
|
mut batches: Box<dyn RecordBatchReader>,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let ds_path = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
let base_path = Path::new(base_uri);
|
||||||
let path = ds_path
|
let table_uri = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
|
||||||
|
let uri = table_uri
|
||||||
|
.as_path()
|
||||||
.to_str()
|
.to_str()
|
||||||
.ok_or(Error::IO(format!("Unable to find table {}", name)))?;
|
.context(InvalidTableNameSnafu { name })?
|
||||||
|
.to_string();
|
||||||
let dataset =
|
let dataset = Dataset::write(&mut batches, &uri, Some(WriteParams::default()))
|
||||||
Arc::new(Dataset::write(&mut batches, path, Some(WriteParams::default())).await?);
|
.await
|
||||||
|
.map_err(|e| match e {
|
||||||
|
lance::Error::DatasetAlreadyExists { .. } => Error::TableAlreadyExists {
|
||||||
|
name: name.to_string(),
|
||||||
|
},
|
||||||
|
e => Error::Lance {
|
||||||
|
message: e.to_string(),
|
||||||
|
},
|
||||||
|
})?;
|
||||||
Ok(Table {
|
Ok(Table {
|
||||||
name,
|
name: name.to_string(),
|
||||||
path: path.to_string(),
|
uri,
|
||||||
dataset,
|
dataset: Arc::new(dataset),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn create_idx(&mut self, index_builder: &impl VectorIndexBuilder) -> Result<()> {
|
/// Create index on the table.
|
||||||
|
pub async fn create_index(&mut self, index_builder: &impl VectorIndexBuilder) -> Result<()> {
|
||||||
use lance::index::DatasetIndexExt;
|
use lance::index::DatasetIndexExt;
|
||||||
|
|
||||||
let dataset = self
|
let dataset = self
|
||||||
@@ -102,6 +130,7 @@ impl Table {
|
|||||||
IndexType::Vector,
|
IndexType::Vector,
|
||||||
index_builder.get_index_name(),
|
index_builder.get_index_name(),
|
||||||
&index_builder.build(),
|
&index_builder.build(),
|
||||||
|
index_builder.get_replace(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
self.dataset = Arc::new(dataset);
|
self.dataset = Arc::new(dataset);
|
||||||
@@ -125,8 +154,7 @@ impl Table {
|
|||||||
let mut params = WriteParams::default();
|
let mut params = WriteParams::default();
|
||||||
params.mode = write_mode.unwrap_or(WriteMode::Append);
|
params.mode = write_mode.unwrap_or(WriteMode::Append);
|
||||||
|
|
||||||
self.dataset =
|
self.dataset = Arc::new(Dataset::write(&mut batches, &self.uri, Some(params)).await?);
|
||||||
Arc::new(Dataset::write(&mut batches, self.path.as_str(), Some(params)).await?);
|
|
||||||
Ok(batches.count())
|
Ok(batches.count())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -147,10 +175,33 @@ impl Table {
|
|||||||
pub async fn count_rows(&self) -> Result<usize> {
|
pub async fn count_rows(&self) -> Result<usize> {
|
||||||
Ok(self.dataset.count_rows().await?)
|
Ok(self.dataset.count_rows().await?)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Merge new data into this table.
|
||||||
|
pub async fn merge(
|
||||||
|
&mut self,
|
||||||
|
mut batches: Box<dyn RecordBatchReader>,
|
||||||
|
left_on: &str,
|
||||||
|
right_on: &str,
|
||||||
|
) -> Result<()> {
|
||||||
|
let mut dataset = self.dataset.as_ref().clone();
|
||||||
|
dataset.merge(&mut batches, left_on, right_on).await?;
|
||||||
|
self.dataset = Arc::new(dataset);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete rows from the table
|
||||||
|
pub async fn delete(&mut self, predicate: &str) -> Result<()> {
|
||||||
|
let mut dataset = self.dataset.as_ref().clone();
|
||||||
|
dataset.delete(predicate).await?;
|
||||||
|
self.dataset = Arc::new(dataset);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use arrow_array::{
|
use arrow_array::{
|
||||||
Array, FixedSizeListArray, Float32Array, Int32Array, RecordBatch, RecordBatchReader,
|
Array, FixedSizeListArray, Float32Array, Int32Array, RecordBatch, RecordBatchReader,
|
||||||
};
|
};
|
||||||
@@ -161,53 +212,68 @@ mod tests {
|
|||||||
use lance::index::vector::ivf::IvfBuildParams;
|
use lance::index::vector::ivf::IvfBuildParams;
|
||||||
use lance::index::vector::pq::PQBuildParams;
|
use lance::index::vector::pq::PQBuildParams;
|
||||||
use rand::Rng;
|
use rand::Rng;
|
||||||
use std::sync::Arc;
|
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
|
|
||||||
use crate::error::Result;
|
use super::*;
|
||||||
use crate::index::vector::IvfPQIndexBuilder;
|
use crate::index::vector::IvfPQIndexBuilder;
|
||||||
use crate::table::Table;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_new_table_not_exists() {
|
|
||||||
let tmp_dir = tempdir().unwrap();
|
|
||||||
let path_buf = tmp_dir.into_path();
|
|
||||||
|
|
||||||
let table = Table::open(Arc::new(path_buf), "test".to_string()).await;
|
|
||||||
assert!(table.is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_open() {
|
async fn test_open() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let dataset_path = tmp_dir.path().join("test.lance");
|
||||||
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
Dataset::write(
|
Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
|
||||||
&mut batches,
|
|
||||||
path_buf.join("test.lance").to_str().unwrap(),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let table = Table::open(Arc::new(path_buf), "test".to_string())
|
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
|
let table = Table::open(uri, "test").await.unwrap();
|
||||||
|
|
||||||
assert_eq!(table.name, "test")
|
assert_eq!(table.name, "test")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_open_not_found() {
|
||||||
|
let tmp_dir = tempdir().unwrap();
|
||||||
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
let table = Table::open(uri, "test").await;
|
||||||
|
assert!(matches!(table.unwrap_err(), Error::TableNotFound { .. }));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_object_store_path() {
|
||||||
|
use std::path::Path as StdPath;
|
||||||
|
let p = StdPath::new("s3://bucket/path/to/file");
|
||||||
|
let c = p.join("subfile");
|
||||||
|
assert_eq!(c.to_str().unwrap(), "s3://bucket/path/to/file/subfile");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_create_already_exists() {
|
||||||
|
let tmp_dir = tempdir().unwrap();
|
||||||
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
|
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
|
let _ = batches.schema().clone();
|
||||||
|
Table::create(&uri, "test", batches).await.unwrap();
|
||||||
|
|
||||||
|
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
|
let result = Table::create(&uri, "test", batches).await;
|
||||||
|
assert!(matches!(
|
||||||
|
result.unwrap_err(),
|
||||||
|
Error::TableAlreadyExists { .. }
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_add() {
|
async fn test_add() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
let schema = batches.schema().clone();
|
let schema = batches.schema().clone();
|
||||||
let mut table = Table::create(Arc::new(path_buf), "test".to_string(), batches)
|
let mut table = Table::create(&uri, "test", batches).await.unwrap();
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 10);
|
assert_eq!(table.count_rows().await.unwrap(), 10);
|
||||||
|
|
||||||
let new_batches: Box<dyn RecordBatchReader> =
|
let new_batches: Box<dyn RecordBatchReader> =
|
||||||
@@ -225,13 +291,11 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_add_overwrite() {
|
async fn test_add_overwrite() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
let schema = batches.schema().clone();
|
let schema = batches.schema().clone();
|
||||||
let mut table = Table::create(Arc::new(path_buf), "test".to_string(), batches)
|
let mut table = Table::create(uri, "test", batches).await.unwrap();
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 10);
|
assert_eq!(table.count_rows().await.unwrap(), 10);
|
||||||
|
|
||||||
let new_batches: Box<dyn RecordBatchReader> =
|
let new_batches: Box<dyn RecordBatchReader> =
|
||||||
@@ -252,21 +316,16 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_search() {
|
async fn test_search() {
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let dataset_path = tmp_dir.path().join("test.lance");
|
||||||
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
|
||||||
Dataset::write(
|
Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
|
||||||
&mut batches,
|
|
||||||
path_buf.join("test.lance").to_str().unwrap(),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let table = Table::open(Arc::new(path_buf), "test".to_string())
|
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
|
let table = Table::open(uri, "test").await.unwrap();
|
||||||
|
|
||||||
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
let vector = Float32Array::from_iter_values([0.1, 0.2]);
|
||||||
let query = table.search(vector.clone());
|
let query = table.search(vector.clone());
|
||||||
assert_eq!(vector, query.query_vector);
|
assert_eq!(vector, query.query_vector);
|
||||||
@@ -291,7 +350,7 @@ mod tests {
|
|||||||
use arrow_array::Float32Array;
|
use arrow_array::Float32Array;
|
||||||
|
|
||||||
let tmp_dir = tempdir().unwrap();
|
let tmp_dir = tempdir().unwrap();
|
||||||
let path_buf = tmp_dir.into_path();
|
let uri = tmp_dir.path().to_str().unwrap();
|
||||||
|
|
||||||
let dimension = 16;
|
let dimension = 16;
|
||||||
let schema = Arc::new(ArrowSchema::new(vec![Field::new(
|
let schema = Arc::new(ArrowSchema::new(vec![Field::new(
|
||||||
@@ -318,9 +377,7 @@ mod tests {
|
|||||||
.unwrap()]);
|
.unwrap()]);
|
||||||
|
|
||||||
let reader: Box<dyn RecordBatchReader + Send> = Box::new(batches);
|
let reader: Box<dyn RecordBatchReader + Send> = Box::new(batches);
|
||||||
let mut table = Table::create(Arc::new(path_buf), "test".to_string(), reader)
|
let mut table = Table::create(uri, "test", reader).await.unwrap();
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let mut i = IvfPQIndexBuilder::new();
|
let mut i = IvfPQIndexBuilder::new();
|
||||||
|
|
||||||
@@ -330,7 +387,7 @@ mod tests {
|
|||||||
.ivf_params(IvfBuildParams::new(256))
|
.ivf_params(IvfBuildParams::new(256))
|
||||||
.pq_params(PQBuildParams::default());
|
.pq_params(PQBuildParams::default());
|
||||||
|
|
||||||
table.create_idx(index_builder).await.unwrap();
|
table.create_index(index_builder).await.unwrap();
|
||||||
|
|
||||||
assert_eq!(table.dataset.load_indices().await.unwrap().len(), 1);
|
assert_eq!(table.dataset.load_indices().await.unwrap().len(), 1);
|
||||||
assert_eq!(table.count_rows().await.unwrap(), 512);
|
assert_eq!(table.count_rows().await.unwrap(), 512);
|
||||||
|
|||||||
Reference in New Issue
Block a user