Mirror of https://github.com/lancedb/lancedb.git (synced 2025-12-23 05:19:58 +00:00)

Compare commits: python-v0. ... python-v0. (195 commits)
Commit SHA1s in this range:

a4404e9e18, 077e5bb586, 2ad71bdeca, 7c13615096, f882f5b69a, a68311a893, 846a5cea33, e3dec647b5, c58104cecc, b3b5362632, abe06fee3d, 93a82fd371, 0d379e6ffa, e1388bdfdd, 315a24c2bc, 6dd4cf6038, f97e751b3c, e803a626a1, 9403254442, b2a38ac366, bdb6c09c3b, 2bfdef2624, 7982d5c082, 7ff6ec7fe3, ba1ded933a, b595d8a579, 2a1d6d8abf, 440a466a13, b9afd9c860, a6b6f6a806, ae1548b507, 4e03ee82bc, 46a6846d07, a207213358, 6c321c694a, 5c00b2904c, 14677d7c18, dd22a379b2, 7747c9bcbf, c9d6fc43a6, 581bcfbb88, 3750639b5f, e744d54460, 9d1ce4b5a5, 729ce5e542, de6739e7ec, 495216efdb, a3b45a4d00, c316c2f532, 3966b16b63, 5661cc15ac, 4e7220400f, ae4928fe77, e80a405dee, a53e19e386, c0097c5f0a, c199708e64, 4a47150ae7, f86b20a564, cc81f3e1a5, bc49c4db82, d2eec46f17, 51437bc228, fa53cfcfd2, 374fe0ad95, 35e5b84ba9, 7c12d497b0, dfe4ba8dad, fa1b9ad5bd, 8877eb020d, 01e4291d21, ab3ea76ad1, 728ef8657d, 0b13901a16, 84b110e0ef, e1836e54e3, 4ba5326880, b036a69300, 5b12a47119, 769d483e50, 9ecb11fe5a, 22bd8329f3, a736fad149, 072adc41aa, c6f25ef1f0, 2f0c5baea2, a63dd66d41, d6b3ccb37b, c4f99e82e5, 979a2d3d9d, 7ac5f74c80, ecdee4d2b1, f391ed828a, a99a450f2b, 6fa1f37506, 544382df5e, 784f00ef6d, 96d7446f70, 99ea78fb55, 8eef4cdc28, 0f102f02c3, a33a0670f6, 14c9ff46d1, 1865f7decf, a608621476, 00514999ff, b3b597fef6, bf17144591, 09e110525f, 40f0dbb64d, 3b19e96ae7, 78a17ad54c, a8e6b491e2, cea541ca46, 873ffc1042, 83273ad997, d18d63c69d, c3e865e8d0, a7755cb313, 3490f3456f, 0a1d0693e1, fd330b4b4b, d4e9fc08e0, 3626f2f5e1, e64712cfa5, 3e3118f85c, 592598a333, 5ad21341c9, 6e08caa091, 7e259d8b0f, e84f747464, 998cd43fe6, 4bc7eebe61, 2e3b34e79b, e7574698eb, 801a9e5f6f, 4e5fbe6c99, 1a449fa49e, 6bf742c759, ef3093bc23, 16851389ea, c269524b2f, f6eef14313, 32716adaa3, 5e98b7f4c0, 3f2589c11f, e3b99694d6, 9d42dc349c, 482f1ee1d3, 2f39274a66, 2fc174f532, dba85f4d6f, 555fa26147, e05c0cd87e, 25c17ebf4e, 87b12b57dc, 3dc9b71914, 2622f34d1a, a677a4b651, e6b4f14c1f, 15f8f4d627, 6526d6c3b1, da4d7e3ca7, 8fbadca9aa, 29120219cf, a9897d9d85, acda7a4589, dac0857745, 0a9e1eab75, d999d72c8d, de4720993e, 6c14a307e2, 43747278c8, e5f42a850e, 7920ecf66e, 28e1b70e4b, 52b79d2b1e, c05d45150d, 48ed3bb544, bcfc93cc88, 214d0debf5, f059372137, 3dc1803c07, d0501f65f1, 4703cc6894, 493f9ce467, 5c759505b8, bb6a39727e, d57bed90e5, 648327e90c, 6c7e81ee57, 905e9d4738, 38642e349c, 6879861ea8, 88325e488e

@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.15.0-beta.0"
+current_version = "0.18.2-beta.0"
 parse = """(?x)
 (?P<major>0|[1-9]\\d*)\\.
 (?P<minor>0|[1-9]\\d*)\\.
@@ -87,26 +87,11 @@ glob = "node/package.json"
 replace = "\"@lancedb/vectordb-linux-x64-gnu\": \"{new_version}\""
 search = "\"@lancedb/vectordb-linux-x64-gnu\": \"{current_version}\""
 
-[[tool.bumpversion.files]]
-glob = "node/package.json"
-replace = "\"@lancedb/vectordb-linux-arm64-musl\": \"{new_version}\""
-search = "\"@lancedb/vectordb-linux-arm64-musl\": \"{current_version}\""
-
-[[tool.bumpversion.files]]
-glob = "node/package.json"
-replace = "\"@lancedb/vectordb-linux-x64-musl\": \"{new_version}\""
-search = "\"@lancedb/vectordb-linux-x64-musl\": \"{current_version}\""
-
 [[tool.bumpversion.files]]
 glob = "node/package.json"
 replace = "\"@lancedb/vectordb-win32-x64-msvc\": \"{new_version}\""
 search = "\"@lancedb/vectordb-win32-x64-msvc\": \"{current_version}\""
 
-[[tool.bumpversion.files]]
-glob = "node/package.json"
-replace = "\"@lancedb/vectordb-win32-arm64-msvc\": \"{new_version}\""
-search = "\"@lancedb/vectordb-win32-arm64-msvc\": \"{current_version}\""
-
 # Cargo files
 # ------------
 [[tool.bumpversion.files]]

@@ -34,6 +34,10 @@ rustflags = ["-C", "target-cpu=haswell", "-C", "target-feature=+avx2,+fma,+f16c"
 [target.x86_64-unknown-linux-musl]
 rustflags = ["-C", "target-cpu=haswell", "-C", "target-feature=-crt-static,+avx2,+fma,+f16c"]
 
+[target.aarch64-unknown-linux-musl]
+linker = "aarch64-linux-musl-gcc"
+rustflags = ["-C", "target-feature=-crt-static"]
+
 [target.aarch64-apple-darwin]
 rustflags = ["-C", "target-cpu=apple-m1", "-C", "target-feature=+neon,+fp16,+fhm,+dotprod"]
 
@@ -44,4 +48,4 @@ rustflags = ["-Ctarget-feature=+crt-static"]
 
 # Experimental target for Arm64 Windows
 [target.aarch64-pc-windows-msvc]
 rustflags = ["-Ctarget-feature=+crt-static"]

.github/workflows/build_linux_wheel/action.yml (vendored, 12 changed lines)

@@ -36,8 +36,7 @@ runs:
 args: ${{ inputs.args }}
 before-script-linux: |
 set -e
-yum install -y openssl-devel \
-&& curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-$(uname -m).zip > /tmp/protoc.zip \
+curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-$(uname -m).zip > /tmp/protoc.zip \
 && unzip /tmp/protoc.zip -d /usr/local \
 && rm /tmp/protoc.zip
 - name: Build Arm Manylinux Wheel
@@ -52,12 +51,7 @@ runs:
 args: ${{ inputs.args }}
 before-script-linux: |
 set -e
-apt install -y unzip
-if [ $(uname -m) = "x86_64" ]; then
-PROTOC_ARCH="x86_64"
-else
-PROTOC_ARCH="aarch_64"
-fi
-curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-$PROTOC_ARCH.zip > /tmp/protoc.zip \
+yum install -y clang \
+&& curl -L https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-aarch_64.zip > /tmp/protoc.zip \
 && unzip /tmp/protoc.zip -d /usr/local \
 && rm /tmp/protoc.zip

.github/workflows/license-header-check.yml (vendored, new file, 31 lines)

@@ -0,0 +1,31 @@
+name: Check license headers
+on:
+push:
+branches:
+- main
+pull_request:
+paths:
+- rust/**
+- python/**
+- nodejs/**
+- java/**
+- .github/workflows/license-header-check.yml
+jobs:
+check-licenses:
+runs-on: ubuntu-latest
+steps:
+- name: Check out code
+uses: actions/checkout@v4
+- name: Install license-header-checker
+working-directory: /tmp
+run: |
+curl -s https://raw.githubusercontent.com/lluissm/license-header-checker/master/install.sh | bash
+mv /tmp/bin/license-header-checker /usr/local/bin/
+- name: Check license headers (rust)
+run: license-header-checker -a -v ./rust/license_header.txt ./ rs && [[ -z `git status -s` ]]
+- name: Check license headers (python)
+run: license-header-checker -a -v ./python/license_header.txt python py && [[ -z `git status -s` ]]
+- name: Check license headers (typescript)
+run: license-header-checker -a -v ./nodejs/license_header.txt nodejs ts && [[ -z `git status -s` ]]
+- name: Check license headers (java)
+run: license-header-checker -a -v ./nodejs/license_header.txt java java && [[ -z `git status -s` ]]

.github/workflows/nodejs.yml (vendored, 12 changed lines)

@@ -106,6 +106,18 @@ jobs:
 python ci/mock_openai.py &
 cd nodejs/examples
 npm test
+- name: Check docs
+run: |
+# We run this as part of the job because the binary needs to be built
+# first to export the types of the native code.
+set -e
+npm ci
+npm run docs
+if ! git diff --exit-code; then
+echo "Docs need to be updated"
+echo "Run 'npm run docs', fix any warnings, and commit the changes."
+exit 1
+fi
 macos:
 timeout-minutes: 30
 runs-on: "macos-14"

.github/workflows/npm-publish.yml (vendored, 1080 changed lines)

File diff suppressed because it is too large.

.github/workflows/pypi-publish.yml (vendored, 22 changed lines)

@@ -4,6 +4,10 @@ on:
 push:
 tags:
 - 'python-v*'
+pull_request:
+# This should trigger a dry run (we skip the final publish step)
+paths:
+- .github/workflows/pypi-publish.yml
 
 jobs:
 linux:
@@ -15,15 +19,21 @@ jobs:
 - platform: x86_64
 manylinux: "2_17"
 extra_args: ""
+runner: ubuntu-22.04
 - platform: x86_64
 manylinux: "2_28"
 extra_args: "--features fp16kernels"
+runner: ubuntu-22.04
 - platform: aarch64
-manylinux: "2_24"
+manylinux: "2_17"
 extra_args: ""
-# We don't build fp16 kernels for aarch64, because it uses
-# cross compilation image, which doesn't have a new enough compiler.
-runs-on: "ubuntu-22.04"
+# For successful fat LTO builds, we need a large runner to avoid OOM errors.
+runner: ubuntu-2404-8x-arm64
+- platform: aarch64
+manylinux: "2_28"
+extra_args: "--features fp16kernels"
+runner: ubuntu-2404-8x-arm64
+runs-on: ${{ matrix.config.runner }}
 steps:
 - uses: actions/checkout@v4
 with:
@@ -40,6 +50,7 @@ jobs:
 arm-build: ${{ matrix.config.platform == 'aarch64' }}
 manylinux: ${{ matrix.config.manylinux }}
 - uses: ./.github/workflows/upload_wheel
+if: startsWith(github.ref, 'refs/tags/python-v')
 with:
 pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
 fury_token: ${{ secrets.FURY_TOKEN }}
@@ -69,6 +80,7 @@ jobs:
 python-minor-version: 8
 args: "--release --strip --target ${{ matrix.config.target }} --features fp16kernels"
 - uses: ./.github/workflows/upload_wheel
+if: startsWith(github.ref, 'refs/tags/python-v')
 with:
 pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
 fury_token: ${{ secrets.FURY_TOKEN }}
@@ -90,10 +102,12 @@ jobs:
 args: "--release --strip"
 vcpkg_token: ${{ secrets.VCPKG_GITHUB_PACKAGES }}
 - uses: ./.github/workflows/upload_wheel
+if: startsWith(github.ref, 'refs/tags/python-v')
 with:
 pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
 fury_token: ${{ secrets.FURY_TOKEN }}
 gh-release:
+if: startsWith(github.ref, 'refs/tags/python-v')
 runs-on: ubuntu-latest
 permissions:
 contents: write

.github/workflows/python.yml (vendored, 57 changed lines)

@@ -13,6 +13,11 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
 cancel-in-progress: true
 
+env:
+# Color output for pytest is off by default.
+PYTEST_ADDOPTS: "--color=yes"
+FORCE_COLOR: "1"
+
 jobs:
 lint:
 name: "Lint"
@@ -33,13 +38,14 @@ jobs:
 python-version: "3.12"
 - name: Install ruff
 run: |
-pip install ruff==0.8.4
+pip install ruff==0.9.9
 - name: Format check
 run: ruff format --check .
 - name: Lint
 run: ruff check .
-doctest:
-name: "Doctest"
+type-check:
+name: "Type Check"
 timeout-minutes: 30
 runs-on: "ubuntu-22.04"
 defaults:
@@ -54,7 +60,36 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: "3.11"
+python-version: "3.12"
+- name: Install protobuf compiler
+run: |
+sudo apt update
+sudo apt install -y protobuf-compiler
+pip install toml
+- name: Install dependencies
+run: |
+python ../ci/parse_requirements.py pyproject.toml --extras dev,tests,embeddings > requirements.txt
+pip install -r requirements.txt
+- name: Run pyright
+run: pyright
+
+doctest:
+name: "Doctest"
+timeout-minutes: 30
+runs-on: "ubuntu-24.04"
+defaults:
+run:
+shell: bash
+working-directory: python
+steps:
+- uses: actions/checkout@v4
+with:
+fetch-depth: 0
+lfs: true
+- name: Set up Python
+uses: actions/setup-python@v5
+with:
+python-version: "3.12"
 cache: "pip"
 - name: Install protobuf
 run: |
@@ -75,8 +110,8 @@ jobs:
 timeout-minutes: 30
 strategy:
 matrix:
-python-minor-version: ["9", "11"]
-runs-on: "ubuntu-22.04"
+python-minor-version: ["9", "12"]
+runs-on: "ubuntu-24.04"
 defaults:
 run:
 shell: bash
@@ -101,6 +136,10 @@ jobs:
 - uses: ./.github/workflows/run_tests
 with:
 integration: true
+- name: Test without pylance
+run: |
+pip uninstall -y pylance
+pytest -vv python/tests/test_table.py
 # Make sure wheels are not included in the Rust cache
 - name: Delete wheels
 run: rm -rf target/wheels
@@ -127,7 +166,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: "3.11"
+python-version: "3.12"
 - uses: Swatinem/rust-cache@v2
 with:
 workspaces: python
@@ -157,7 +196,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v5
 with:
-python-version: "3.11"
+python-version: "3.12"
 - uses: Swatinem/rust-cache@v2
 with:
 workspaces: python
@@ -168,7 +207,7 @@ jobs:
 run: rm -rf target/wheels
 pydantic1x:
 timeout-minutes: 30
-runs-on: "ubuntu-22.04"
+runs-on: "ubuntu-24.04"
 defaults:
 run:
 shell: bash

.github/workflows/rust.yml (vendored, 171 changed lines)

@@ -22,6 +22,7 @@ env:
 # "1" means line tables only, which is useful for panic tracebacks.
 RUSTFLAGS: "-C debuginfo=1"
 RUST_BACKTRACE: "1"
+CARGO_INCREMENTAL: 0
 
 jobs:
 lint:
@@ -51,6 +52,33 @@ jobs:
 - name: Run clippy
 run: cargo clippy --workspace --tests --all-features -- -D warnings
 
+build-no-lock:
+runs-on: ubuntu-24.04
+timeout-minutes: 30
+env:
+# Need up-to-date compilers for kernels
+CC: clang
+CXX: clang++
+steps:
+- uses: actions/checkout@v4
+# Building without a lock file often requires the latest Rust version since downstream
+# dependencies may have updated their minimum Rust version.
+- uses: actions-rust-lang/setup-rust-toolchain@v1
+with:
+toolchain: "stable"
+# Remove cargo.lock to force a fresh build
+- name: Remove Cargo.lock
+run: rm -f Cargo.lock
+- uses: rui314/setup-mold@v1
+- uses: Swatinem/rust-cache@v2
+- name: Install dependencies
+run: |
+sudo apt update
+sudo apt install -y protobuf-compiler libssl-dev
+- name: Build all
+run: |
+cargo build --benches --all-features --tests
+
 linux:
 timeout-minutes: 30
 # To build all features, we need more disk space than is available
@@ -75,8 +103,11 @@ jobs:
 workspaces: rust
 - name: Install dependencies
 run: |
-sudo apt update
+# This shaves 2 minutes off this step in CI. This doesn't seem to be
+# necessary in standard runners, but it is in the 4x runners.
+sudo rm /var/lib/man-db/auto-update
 sudo apt install -y protobuf-compiler libssl-dev
+- uses: rui314/setup-mold@v1
 - name: Make Swap
 run: |
 sudo fallocate -l 16G /swapfile
@@ -87,11 +118,11 @@ jobs:
 working-directory: .
 run: docker compose up --detach --wait
 - name: Build
-run: cargo build --all-features
+run: cargo build --all-features --tests --locked --examples
 - name: Run tests
-run: cargo test --all-features
+run: cargo test --all-features --locked
 - name: Run examples
-run: cargo run --example simple
+run: cargo run --example simple --locked
 
 macos:
 timeout-minutes: 30
@@ -115,129 +146,43 @@ jobs:
 workspaces: rust
 - name: Install dependencies
 run: brew install protobuf
-- name: Build
-run: cargo build --all-features
 - name: Run tests
-# Run with everything except the integration tests.
-run: cargo test --features remote,fp16kernels
+run: |
+# Don't run the s3 integration tests since docker isn't available
+# on this image.
+ALL_FEATURES=`cargo metadata --format-version=1 --no-deps \
+| jq -r '.packages[] | .features | keys | .[]' \
+| grep -v s3-test | sort | uniq | paste -s -d "," -`
+cargo test --features $ALL_FEATURES --locked
+
 windows:
 runs-on: windows-2022
+strategy:
+matrix:
+target:
+- x86_64-pc-windows-msvc
+- aarch64-pc-windows-msvc
+defaults:
+run:
+working-directory: rust/lancedb
 steps:
 - uses: actions/checkout@v4
 - uses: Swatinem/rust-cache@v2
 with:
 workspaces: rust
 - name: Install Protoc v21.12
-working-directory: C:\
+run: choco install --no-progress protoc
+- name: Build
 run: |
-New-Item -Path 'C:\protoc' -ItemType Directory
-Set-Location C:\protoc
-Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
-7z x protoc.zip
-Add-Content $env:GITHUB_PATH "C:\protoc\bin"
-shell: powershell
+rustup target add ${{ matrix.target }}
+$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
+cargo build --features remote --tests --locked --target ${{ matrix.target }}
 - name: Run tests
+# Can only run tests when target matches host
+if: ${{ matrix.target == 'x86_64-pc-windows-msvc' }}
 run: |
 $env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
-cargo build
+cargo test --features remote --locked
-cargo test
-
-windows-arm64:
-runs-on: windows-4x-arm
-steps:
-- name: Install Git
-run: |
-Invoke-WebRequest -Uri "https://github.com/git-for-windows/git/releases/download/v2.44.0.windows.1/Git-2.44.0-64-bit.exe" -OutFile "git-installer.exe"
-Start-Process -FilePath "git-installer.exe" -ArgumentList "/VERYSILENT", "/NORESTART" -Wait
-shell: powershell
-- name: Add Git to PATH
-run: |
-Add-Content $env:GITHUB_PATH "C:\Program Files\Git\bin"
-$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
-shell: powershell
-- name: Configure Git symlinks
-run: git config --global core.symlinks true
-- uses: actions/checkout@v4
-- uses: actions/setup-python@v5
-with:
-python-version: "3.13"
-- name: Install Visual Studio Build Tools
-run: |
-Invoke-WebRequest -Uri "https://aka.ms/vs/17/release/vs_buildtools.exe" -OutFile "vs_buildtools.exe"
-Start-Process -FilePath "vs_buildtools.exe" -ArgumentList "--quiet", "--wait", "--norestart", "--nocache", `
-"--installPath", "C:\BuildTools", `
-"--add", "Microsoft.VisualStudio.Component.VC.Tools.ARM64", `
-"--add", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", `
-"--add", "Microsoft.VisualStudio.Component.Windows11SDK.22621", `
-"--add", "Microsoft.VisualStudio.Component.VC.ATL", `
-"--add", "Microsoft.VisualStudio.Component.VC.ATLMFC", `
-"--add", "Microsoft.VisualStudio.Component.VC.Llvm.Clang" -Wait
-shell: powershell
-- name: Add Visual Studio Build Tools to PATH
-run: |
-$vsPath = "C:\BuildTools\VC\Tools\MSVC"
-$latestVersion = (Get-ChildItem $vsPath | Sort-Object {[version]$_.Name} -Descending)[0].Name
-Add-Content $env:GITHUB_PATH "C:\BuildTools\VC\Tools\MSVC\$latestVersion\bin\Hostx64\arm64"
-Add-Content $env:GITHUB_PATH "C:\BuildTools\VC\Tools\MSVC\$latestVersion\bin\Hostx64\x64"
-Add-Content $env:GITHUB_PATH "C:\Program Files (x86)\Windows Kits\10\bin\10.0.22621.0\arm64"
-Add-Content $env:GITHUB_PATH "C:\Program Files (x86)\Windows Kits\10\bin\10.0.22621.0\x64"
-Add-Content $env:GITHUB_PATH "C:\BuildTools\VC\Tools\Llvm\x64\bin"
-
-# Add MSVC runtime libraries to LIB
-$env:LIB = "C:\BuildTools\VC\Tools\MSVC\$latestVersion\lib\arm64;" +
-"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.22621.0\um\arm64;" +
-"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.22621.0\ucrt\arm64"
-Add-Content $env:GITHUB_ENV "LIB=$env:LIB"
-
-# Add INCLUDE paths
-$env:INCLUDE = "C:\BuildTools\VC\Tools\MSVC\$latestVersion\include;" +
-"C:\Program Files (x86)\Windows Kits\10\Include\10.0.22621.0\ucrt;" +
-"C:\Program Files (x86)\Windows Kits\10\Include\10.0.22621.0\um;" +
-"C:\Program Files (x86)\Windows Kits\10\Include\10.0.22621.0\shared"
-Add-Content $env:GITHUB_ENV "INCLUDE=$env:INCLUDE"
-shell: powershell
-- name: Install Rust
-run: |
-Invoke-WebRequest https://win.rustup.rs/x86_64 -OutFile rustup-init.exe
-.\rustup-init.exe -y --default-host aarch64-pc-windows-msvc
-shell: powershell
-- name: Add Rust to PATH
-run: |
-Add-Content $env:GITHUB_PATH "$env:USERPROFILE\.cargo\bin"
-shell: powershell
-- uses: Swatinem/rust-cache@v2
-with:
-workspaces: rust
-- name: Install 7-Zip ARM
-run: |
-New-Item -Path 'C:\7zip' -ItemType Directory
-Invoke-WebRequest https://7-zip.org/a/7z2408-arm64.exe -OutFile C:\7zip\7z-installer.exe
-Start-Process -FilePath C:\7zip\7z-installer.exe -ArgumentList '/S' -Wait
-shell: powershell
-- name: Add 7-Zip to PATH
-run: Add-Content $env:GITHUB_PATH "C:\Program Files\7-Zip"
-shell: powershell
-- name: Install Protoc v21.12
-working-directory: C:\
-run: |
-if (Test-Path 'C:\protoc') {
-Write-Host "Protoc directory exists, skipping installation"
-return
-}
-New-Item -Path 'C:\protoc' -ItemType Directory
-Set-Location C:\protoc
-Invoke-WebRequest https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protoc-21.12-win64.zip -OutFile C:\protoc\protoc.zip
-& 'C:\Program Files\7-Zip\7z.exe' x protoc.zip
-shell: powershell
-- name: Add Protoc to PATH
-run: Add-Content $env:GITHUB_PATH "C:\protoc\bin"
-shell: powershell
-- name: Run tests
-run: |
-$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
-cargo build --target aarch64-pc-windows-msvc
-cargo test --target aarch64-pc-windows-msvc
 
 msrv:
 # Check the minimum supported Rust version

.gitignore (vendored, 3 changed lines)

@@ -9,7 +9,6 @@ venv
 .vscode
 .zed
 rust/target
-rust/Cargo.lock
 
 site
 
@@ -42,5 +41,3 @@ dist
 target
 
 **/sccache.log
-
-Cargo.lock

@@ -1,21 +1,27 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
 rev: v3.2.0
 hooks:
 - id: check-yaml
 - id: end-of-file-fixer
 - id: trailing-whitespace
 - repo: https://github.com/astral-sh/ruff-pre-commit
 # Ruff version.
-rev: v0.2.2
+rev: v0.9.9
 hooks:
 - id: ruff
-- repo: local
-hooks:
-- id: local-biome-check
-name: biome check
-entry: npx @biomejs/biome@1.8.3 check --config-path nodejs/biome.json nodejs/
-language: system
-types: [text]
-files: "nodejs/.*"
-exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*|nodejs/examples/.*
+# - repo: https://github.com/RobertCraigie/pyright-python
+# rev: v1.1.395
+# hooks:
+# - id: pyright
+# args: ["--project", "python"]
+# additional_dependencies: [pyarrow-stubs]
+- repo: local
+hooks:
+- id: local-biome-check
+name: biome check
+entry: npx @biomejs/biome@1.8.3 check --config-path nodejs/biome.json nodejs/
+language: system
+types: [text]
+files: "nodejs/.*"
+exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*|nodejs/examples/.*

Cargo.lock (generated, new file, 8407 lines)

File diff suppressed because it is too large.

Cargo.toml (64 changed lines)

@@ -21,39 +21,57 @@ categories = ["database-implementations"]
 rust-version = "1.78.0"
 
 [workspace.dependencies]
-lance = { "version" = "=0.22.0", "features" = ["dynamodb"] }
-lance-io = "=0.22.0"
-lance-index = "=0.22.0"
-lance-linalg = "=0.22.0"
-lance-table = "=0.22.0"
-lance-testing = "=0.22.0"
-lance-datafusion = "=0.22.0"
-lance-encoding = "=0.22.0"
+lance = { "version" = "=0.25.0", "features" = [
+"dynamodb",
+] }
+lance-io = { version = "=0.25.0" }
+lance-index = { version = "=0.25.0" }
+lance-linalg = { version = "=0.25.0" }
+lance-table = { version = "=0.25.0" }
+lance-testing = { version = "=0.25.0" }
+lance-datafusion = { version = "=0.25.0" }
+lance-encoding = { version = "=0.25.0" }
 # Note that this one does not include pyarrow
-arrow = { version = "53.2", optional = false }
-arrow-array = "53.2"
-arrow-data = "53.2"
-arrow-ipc = "53.2"
-arrow-ord = "53.2"
-arrow-schema = "53.2"
-arrow-arith = "53.2"
-arrow-cast = "53.2"
+arrow = { version = "54.1", optional = false }
+arrow-array = "54.1"
+arrow-data = "54.1"
+arrow-ipc = "54.1"
+arrow-ord = "54.1"
+arrow-schema = "54.1"
+arrow-arith = "54.1"
+arrow-cast = "54.1"
 async-trait = "0"
-chrono = "0.4.35"
-datafusion-common = "44.0"
-datafusion-physical-plan = "44.0"
-env_logger = "0.10"
+datafusion = { version = "45.0", default-features = false }
+datafusion-catalog = "45.0"
+datafusion-common = { version = "45.0", default-features = false }
+datafusion-execution = "45.0"
+datafusion-expr = "45.0"
+datafusion-physical-plan = "45.0"
+env_logger = "0.11"
 half = { "version" = "=2.4.1", default-features = false, features = [
 "num-traits",
 ] }
 futures = "0"
 log = "0.4"
-moka = { version = "0.11", features = ["future"] }
-object_store = "0.10.2"
+moka = { version = "0.12", features = ["future"] }
+object_store = "0.11.0"
 pin-project = "1.0.7"
-snafu = "0.7.4"
+snafu = "0.8"
 url = "2"
 num-traits = "0.2"
 rand = "0.8"
 regex = "1.10"
 lazy_static = "1"
+semver = "1.0.25"
+
+# Temporary pins to work around downstream issues
+# https://github.com/apache/arrow-rs/commit/2fddf85afcd20110ce783ed5b4cdeb82293da30b
+chrono = "=0.4.39"
+# https://github.com/RustCrypto/formats/issues/1684
+base64ct = "=1.6.0"
+
+# Workaround for: https://github.com/eira-fransham/crunchy/issues/13
+crunchy = "=0.2.2"
+
+# Workaround for: https://github.com/Lokathor/bytemuck/issues/306
+bytemuck_derive = ">=1.8.1, <1.9.0"

README.md (12 changed lines)

@@ -1,9 +1,17 @@
+<a href="https://cloud.lancedb.com" target="_blank">
+<img src="https://github.com/user-attachments/assets/92dad0a2-2a37-4ce1-b783-0d1b4f30a00c" alt="LanceDB Cloud Public Beta" width="100%" style="max-width: 100%;">
+</a>
+
 <div align="center">
 <p align="center">
 
-<img width="275" alt="LanceDB Logo" src="https://github.com/lancedb/lancedb/assets/5846846/37d7c7ad-c2fd-4f56-9f16-fffb0d17c73a">
+<picture>
+<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/ac270358-333e-4bea-a132-acefaa94040e">
+<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/b864d814-0d29-4784-8fd9-807297c758c0">
+<img alt="LanceDB Logo" src="https://github.com/user-attachments/assets/b864d814-0d29-4784-8fd9-807297c758c0" width=300>
+</picture>
 
-**Developer-friendly, database for multimodal AI**
+**Search More, Manage Less**
 
 <a href='https://github.com/lancedb/vectordb-recipes/tree/main' target="_blank"><img alt='LanceDB' src='https://img.shields.io/badge/VectorDB_Recipes-100000?style=for-the-badge&logo=LanceDB&logoColor=white&labelColor=645cfb&color=645cfb'/></a>
 <a href='https://lancedb.github.io/lancedb/' target="_blank"><img alt='lancdb' src='https://img.shields.io/badge/DOCS-100000?style=for-the-badge&logo=lancdb&logoColor=white&labelColor=645cfb&color=645cfb'/></a>

@@ -1,21 +0,0 @@
-#!/bin/bash
-set -e
-ARCH=${1:-x86_64}
-
-# We pass down the current user so that when we later mount the local files
-# into the container, the files are accessible by the current user.
-pushd ci/manylinux_node
-docker build \
-  -t lancedb-node-manylinux-$ARCH \
-  --build-arg="ARCH=$ARCH" \
-  --build-arg="DOCKER_USER=$(id -u)" \
-  --progress=plain \
-  .
-popd
-
-# We turn on memory swap to avoid OOM killer
-docker run \
-  -v $(pwd):/io -w /io \
-  --memory-swap=-1 \
-  lancedb-node-manylinux-$ARCH \
-  bash ci/manylinux_node/build_lancedb.sh $ARCH

@@ -1,34 +0,0 @@
-# Builds the macOS artifacts (nodejs binaries).
-# Usage: ./ci/build_macos_artifacts_nodejs.sh [target]
-# Targets supported: x86_64-apple-darwin aarch64-apple-darwin
-set -e
-
-prebuild_rust() {
-  # Building here for the sake of easier debugging.
-  pushd rust/lancedb
-  echo "Building rust library for $1"
-  export RUST_BACKTRACE=1
-  cargo build --release --target $1
-  popd
-}
-
-build_node_binaries() {
-  pushd nodejs
-  echo "Building nodejs library for $1"
-  export RUST_TARGET=$1
-  npm run build-release
-  popd
-}
-
-if [ -n "$1" ]; then
-  targets=$1
-else
-  targets="x86_64-apple-darwin aarch64-apple-darwin"
-fi
-
-echo "Building artifacts for targets: $targets"
-for target in $targets
-do
-  prebuild_rust $target
-  build_node_binaries $target
-done

@@ -1,5 +1,5 @@
 # Many linux dockerfile with Rust, Node, and Lance dependencies installed.
 # This container allows building the node modules native libraries in an
 # environment with a very old glibc, so that we are compatible with a wide
 # range of linux distributions.
 ARG ARCH=x86_64
@@ -9,10 +9,6 @@ FROM quay.io/pypa/manylinux_2_28_${ARCH}
 ARG ARCH=x86_64
 ARG DOCKER_USER=default_user
 
-# Install static openssl
-COPY install_openssl.sh install_openssl.sh
-RUN ./install_openssl.sh ${ARCH} > /dev/null
-
 # Protobuf is also installed as root.
 COPY install_protobuf.sh install_protobuf.sh
 RUN ./install_protobuf.sh ${ARCH}
@@ -21,7 +17,7 @@ ENV DOCKER_USER=${DOCKER_USER}
 # Create a group and user, but only if it doesn't exist
 RUN echo ${ARCH} && id -u ${DOCKER_USER} >/dev/null 2>&1 || adduser --user-group --create-home --uid ${DOCKER_USER} build_user
 
 # We switch to the user to install Rust and Node, since those like to be
 # installed at the user level.
 USER ${DOCKER_USER}
 

@@ -1,19 +0,0 @@
-#!/bin/bash
-# Builds the nodejs module for manylinux. Invoked by ci/build_linux_artifacts_nodejs.sh.
-set -e
-ARCH=${1:-x86_64}
-
-if [ "$ARCH" = "x86_64" ]; then
-  export OPENSSL_LIB_DIR=/usr/local/lib64/
-else
-  export OPENSSL_LIB_DIR=/usr/local/lib/
-fi
-export OPENSSL_STATIC=1
-export OPENSSL_INCLUDE_DIR=/usr/local/include/openssl
-
-#Alpine doesn't have .bashrc
-FILE=$HOME/.bashrc && test -f $FILE && source $FILE
-
-cd nodejs
-npm ci
-npm run build-release

@@ -4,14 +4,6 @@ set -e
 ARCH=${1:-x86_64}
 TARGET_TRIPLE=${2:-x86_64-unknown-linux-gnu}
 
-if [ "$ARCH" = "x86_64" ]; then
-  export OPENSSL_LIB_DIR=/usr/local/lib64/
-else
-  export OPENSSL_LIB_DIR=/usr/local/lib/
-fi
-export OPENSSL_STATIC=1
-export OPENSSL_INCLUDE_DIR=/usr/local/include/openssl
-
 #Alpine doesn't have .bashrc
 FILE=$HOME/.bashrc && test -f $FILE && source $FILE
 

@@ -1,26 +0,0 @@
-#!/bin/bash
-# Builds openssl from source so we can statically link to it
-
-# this is to avoid the error we get with the system installation:
-# /usr/bin/ld: <library>: version node not found for symbol SSLeay@@OPENSSL_1.0.1
-# /usr/bin/ld: failed to set dynamic section sizes: Bad value
-set -e
-
-git clone -b OpenSSL_1_1_1v \
-  --single-branch \
-  https://github.com/openssl/openssl.git
-
-pushd openssl
-
-if [[ $1 == x86_64* ]]; then
-  ARCH=linux-x86_64
-else
-  # gnu target
-  ARCH=linux-aarch64
-fi
-
-./Configure no-shared $ARCH
-
-make
-
-make install

ci/parse_requirements.py (new file, 41 lines)

@@ -0,0 +1,41 @@
+import argparse
+import toml
+
+
+def parse_dependencies(pyproject_path, extras=None):
+    with open(pyproject_path, "r") as file:
+        pyproject = toml.load(file)
+
+    dependencies = pyproject.get("project", {}).get("dependencies", [])
+    for dependency in dependencies:
+        print(dependency)
+
+    optional_dependencies = pyproject.get("project", {}).get(
+        "optional-dependencies", {}
+    )
+
+    if extras:
+        for extra in extras.split(","):
+            for dep in optional_dependencies.get(extra, []):
+                print(dep)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Generate requirements.txt from pyproject.toml"
+    )
+    parser.add_argument("path", type=str, help="Path to pyproject.toml")
+    parser.add_argument(
+        "--extras",
+        type=str,
+        help="Comma-separated list of extras to include",
+        default="",
+    )
+
+    args = parser.parse_args()
+
+    parse_dependencies(args.path, args.extras)
+
+
+if __name__ == "__main__":
+    main()
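
For context, this is how CI drives the helper (see the type-check job in python.yml above). A minimal sketch of the same invocation from the repository root follows; the working directory and the python/pyproject.toml path are assumptions, not something this diff pins down.

```python
# Sketch: regenerate requirements.txt the way the type-check job does.
# Paths are assumptions; adjust them to where pyproject.toml actually lives.
import subprocess
import sys

result = subprocess.run(
    [
        sys.executable,
        "ci/parse_requirements.py",   # the script added in this commit range
        "python/pyproject.toml",      # assumed location of the package's pyproject.toml
        "--extras",
        "dev,tests,embeddings",
    ],
    capture_output=True,
    text=True,
    check=True,
)

# The script prints one requirement per line; collect them into requirements.txt.
with open("requirements.txt", "w") as f:
    f.write(result.stdout)
```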

@@ -4,6 +4,9 @@ repo_url: https://github.com/lancedb/lancedb
 edit_uri: https://github.com/lancedb/lancedb/tree/main/docs/src
 repo_name: lancedb/lancedb
 docs_dir: src
+watch:
+- src
+- ../python/python
 
 theme:
 name: "material"
@@ -63,6 +66,7 @@ plugins:
 - https://arrow.apache.org/docs/objects.inv
 - https://pandas.pydata.org/docs/objects.inv
 - https://lancedb.github.io/lance/objects.inv
+- https://docs.pydantic.dev/latest/objects.inv
 - mkdocs-jupyter
 - render_swagger:
 allow_arbitrary_locations: true
@@ -105,8 +109,8 @@ nav:
 - 📚 Concepts:
 - Vector search: concepts/vector_search.md
 - Indexing:
 - IVFPQ: concepts/index_ivfpq.md
 - HNSW: concepts/index_hnsw.md
 - Storage: concepts/storage.md
 - Data management: concepts/data_management.md
 - 🔨 Guides:
@@ -120,6 +124,9 @@ nav:
 - Overview: hybrid_search/hybrid_search.md
 - Comparing Rerankers: hybrid_search/eval.md
 - Airbnb financial data example: notebooks/hybrid_search.ipynb
+- Late interaction with MultiVector search:
+- Overview: guides/multi-vector.md
+- Example: notebooks/Multivector_on_LanceDB.ipynb
 - RAG:
 - Vanilla RAG: rag/vanilla_rag.md
 - Multi-head RAG: rag/multi_head_rag.md
@@ -130,8 +137,8 @@ nav:
 - Adaptive RAG: rag/adaptive_rag.md
 - SFR RAG: rag/sfr_rag.md
 - Advanced Techniques:
 - HyDE: rag/advanced_techniques/hyde.md
 - FLARE: rag/advanced_techniques/flare.md
 - Reranking:
 - Quickstart: reranking/index.md
 - Cohere Reranker: reranking/cohere.md
@@ -146,7 +153,7 @@ nav:
 - Building Custom Rerankers: reranking/custom_reranker.md
 - Example: notebooks/lancedb_reranking.ipynb
 - Filtering: sql.md
 - Versioning & Reproducibility:
 - sync API: notebooks/reproducibility.ipynb
 - async API: notebooks/reproducibility_async.ipynb
 - Configuring Storage: guides/storage.md
@@ -178,6 +185,7 @@ nav:
 - Imagebind embeddings: embeddings/available_embedding_models/multimodal_embedding_functions/imagebind_embedding.md
 - Jina Embeddings: embeddings/available_embedding_models/multimodal_embedding_functions/jina_multimodal_embedding.md
 - User-defined embedding functions: embeddings/custom_embedding_function.md
+- Variables and secrets: embeddings/variables_and_secrets.md
 - "Example: Multi-lingual semantic search": notebooks/multi_lingual_example.ipynb
 - "Example: MultiModal CLIP Embeddings": notebooks/DisappearingEmbeddingFunction.ipynb
 - 🔌 Integrations:
@@ -228,20 +236,13 @@ nav:
 - 👾 JavaScript (vectordb): javascript/modules.md
 - 👾 JavaScript (lancedb): js/globals.md
 - 🦀 Rust: https://docs.rs/lancedb/latest/lancedb/
-- ☁️ LanceDB Cloud:
-- Overview: cloud/index.md
-- API reference:
-- 🐍 Python: python/saas-python.md
-- 👾 JavaScript: javascript/modules.md
-- REST API: cloud/rest.md
-- FAQs: cloud/cloud_faq.md
 
 - Quick start: basic.md
 - Concepts:
 - Vector search: concepts/vector_search.md
 - Indexing:
 - IVFPQ: concepts/index_ivfpq.md
 - HNSW: concepts/index_hnsw.md
 - Storage: concepts/storage.md
 - Data management: concepts/data_management.md
 - Guides:
@@ -255,6 +256,9 @@ nav:
 - Overview: hybrid_search/hybrid_search.md
 - Comparing Rerankers: hybrid_search/eval.md
 - Airbnb financial data example: notebooks/hybrid_search.ipynb
+- Late interaction with MultiVector search:
+- Overview: guides/multi-vector.md
+- Document search Example: notebooks/Multivector_on_LanceDB.ipynb
 - RAG:
 - Vanilla RAG: rag/vanilla_rag.md
 - Multi-head RAG: rag/multi_head_rag.md
@@ -265,8 +269,8 @@ nav:
 - Adaptive RAG: rag/adaptive_rag.md
 - SFR RAG: rag/sfr_rag.md
 - Advanced Techniques:
 - HyDE: rag/advanced_techniques/hyde.md
 - FLARE: rag/advanced_techniques/flare.md
 - Reranking:
 - Quickstart: reranking/index.md
 - Cohere Reranker: reranking/cohere.md
@@ -280,7 +284,7 @@ nav:
 - Building Custom Rerankers: reranking/custom_reranker.md
 - Example: notebooks/lancedb_reranking.ipynb
 - Filtering: sql.md
 - Versioning & Reproducibility:
 - sync API: notebooks/reproducibility.ipynb
 - async API: notebooks/reproducibility_async.ipynb
 - Configuring Storage: guides/storage.md
@@ -311,6 +315,7 @@ nav:
 - Imagebind embeddings: embeddings/available_embedding_models/multimodal_embedding_functions/imagebind_embedding.md
 - Jina Embeddings: embeddings/available_embedding_models/multimodal_embedding_functions/jina_multimodal_embedding.md
 - User-defined embedding functions: embeddings/custom_embedding_function.md
+- Variables and secrets: embeddings/variables_and_secrets.md
 - "Example: Multi-lingual semantic search": notebooks/multi_lingual_example.ipynb
 - "Example: MultiModal CLIP Embeddings": notebooks/DisappearingEmbeddingFunction.ipynb
 - Integrations:
@@ -349,21 +354,14 @@ nav:
 - 🦀 Rust:
 - Overview: examples/examples_rust.md
 - Studies:
 - studies/overview.md
 - ↗Improve retrievers with hybrid search and reranking: https://blog.lancedb.com/hybrid-search-and-reranking-report/
 - API reference:
 - Overview: api_reference.md
 - Python: python/python.md
 - Javascript (vectordb): javascript/modules.md
 - Javascript (lancedb): js/globals.md
 - Rust: https://docs.rs/lancedb/latest/lancedb/index.html
-- LanceDB Cloud:
-- Overview: cloud/index.md
-- API reference:
-- 🐍 Python: python/saas-python.md
-- 👾 JavaScript: javascript/modules.md
-- REST API: cloud/rest.md
-- FAQs: cloud/cloud_faq.md
 
 extra_css:
 - styles/global.css
@@ -371,6 +369,7 @@ extra_css:
 
 extra_javascript:
 - "extra_js/init_ask_ai_widget.js"
+- "extra_js/reo.js"
 
 extra:
 analytics:
@@ -38,6 +38,13 @@ components:
required: true
schema:
type: string
+ index_name:
+ name: index_name
+ in: path
+ description: name of the index
+ required: true
+ schema:
+ type: string
responses:
invalid_request:
description: Invalid request
@@ -164,7 +171,7 @@ paths:
distance_type:
type: string
description: |
- The distance metric to use for search. L2, Cosine, Dot and Hamming are supported. Default is L2.
+ The distance metric to use for search. l2, Cosine, Dot and Hamming are supported. Default is l2.
bypass_vector_index:
type: boolean
description: |
@@ -443,7 +450,7 @@ paths:
type: string
nullable: false
description: |
- The metric type to use for the index. L2, Cosine, Dot are supported.
+ The metric type to use for the index. l2, Cosine, Dot are supported.
index_type:
type: string
responses:
@@ -485,3 +492,22 @@ paths:
$ref: "#/components/responses/unauthorized"
"404":
$ref: "#/components/responses/not_found"
+ /v1/table/{name}/index/{index_name}/drop/:
+ post:
+ description: Drop an index from the table
+ tags:
+ - Tables
+ summary: Drop an index from the table
+ operationId: dropIndex
+ parameters:
+ - $ref: "#/components/parameters/table_name"
+ - $ref: "#/components/parameters/index_name"
+ responses:
+ "200":
+ description: Index successfully dropped
+ "400":
+ $ref: "#/components/responses/invalid_request"
+ "401":
+ $ref: "#/components/responses/unauthorized"
+ "404":
+ $ref: "#/components/responses/not_found"
@@ -69,7 +69,7 @@ Lance supports `IVF_PQ` index type by default.

The following IVF_PQ parameters can be specified:

- - **distance_type**: The distance metric to use. By default it uses euclidean distance "`L2`".
+ - **distance_type**: The distance metric to use. By default it uses euclidean distance "`l2`".
  We also support "cosine" and "dot" distance as well.
- **num_partitions**: The number of partitions in the index. The default is the square root
  of the number of rows.
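For reference, a minimal sketch of how these parameters are passed to the sync Python API (the values are illustrative, and the table must already contain enough rows to train the index):

```python
import lancedb

db = lancedb.connect("data/sample-lancedb")
tbl = db.open_table("my_table")

# Build an IVF_PQ index; "l2" can be swapped for "cosine" or "dot".
tbl.create_index(metric="l2", num_partitions=256, num_sub_vectors=96)
```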
@@ -3,6 +3,7 @@ import * as vectordb from "vectordb";
// --8<-- [end:import]

(async () => {
+ console.log("ann_indexes.ts: start");
// --8<-- [start:ingest]
const db = await vectordb.connect("data/sample-lancedb");

@@ -49,5 +50,5 @@ import * as vectordb from "vectordb";
.execute();
// --8<-- [end:search3]

- console.log("Ann indexes: done");
+ console.log("ann_indexes.ts: done");
})();
@@ -133,13 +133,22 @@ recommend switching to stable releases.
## Connect to a database

=== "Python"
=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:imports"

--8<-- "python/python/tests/docs/test_basic.py:set_uri"
--8<-- "python/python/tests/docs/test_basic.py:connect"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:imports"

--8<-- "python/python/tests/docs/test_basic.py:set_uri"
--8<-- "python/python/tests/docs/test_basic.py:connect_async"
```

=== "Typescript[^1]"
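For readers without the snippet files at hand, a minimal sketch of the two connection styles (the URI is just an example local path):

```python
import lancedb

uri = "data/sample-lancedb"

# Sync API
db = lancedb.connect(uri)

# Async API -- run inside an async function / event loop
async_db = await lancedb.connect_async(uri)
```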
@@ -183,21 +192,33 @@ table.

=== "Python"

If the table already exists, LanceDB will raise an error by default.
If you want to overwrite the table, you can pass in `mode="overwrite"`
to the `create_table` method.

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_table"
```

You can also pass in a pandas DataFrame directly:

```python
--8<-- "python/python/tests/docs/test_basic.py:create_table_pandas"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_table_async"
```

You can also pass in a pandas DataFrame directly:

```python
--8<-- "python/python/tests/docs/test_basic.py:create_table_async_pandas"
```

=== "Typescript[^1]"
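A sketch of what the referenced create-table snippets boil down to, with hypothetical example data:

```python
# Create a table from a list of dicts; mode="overwrite" replaces an
# existing table with the same name instead of raising an error.
tbl = db.create_table(
    "my_table",
    data=[
        {"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
        {"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
    ],
    mode="overwrite",
)
```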
@@ -247,10 +268,16 @@ similar to a `CREATE TABLE` statement in SQL.

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_empty_table"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_empty_table_async"
```

!!! note "You can define schema in Pydantic"
LanceDB comes with Pydantic support, which allows you to define the schema of your data using Pydantic models. This makes it easy to work with LanceDB tables and data. Learn more about all supported types in [tables guide](./guides/tables.md).
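A sketch of creating an empty table from an explicit Arrow schema (the field names and dimensions are illustrative):

```python
import pyarrow as pa

schema = pa.schema(
    [
        pa.field("vector", pa.list_(pa.float32(), 2)),  # fixed-size vector column
        pa.field("item", pa.string()),
    ]
)
empty_tbl = db.create_table("empty_table", schema=schema)
```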
@@ -281,10 +308,16 @@ Once created, you can open a table as follows:

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:open_table"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:open_table_async"
```

=== "Typescript[^1]"
=== "@lancedb/lancedb"
@@ -310,10 +343,16 @@ If you forget the name of your table, you can always get a listing of all table

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:table_names"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:table_names_async"
```

=== "Typescript[^1]"
=== "@lancedb/lancedb"
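Roughly, the sync calls behind those snippets look like this (a sketch):

```python
print(db.table_names())          # e.g. ["my_table", "empty_table"]
tbl = db.open_table("my_table")  # open an existing table by name
```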
@@ -340,10 +379,16 @@ After a table has been created, you can always add more data to it as follows:

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:add_data"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:add_data_async"
```

=== "Typescript[^1]"
=== "@lancedb/lancedb"
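Continuing the sketch above, adding rows to an existing table with the sync API (the rows must match the table's schema):

```python
tbl.add(
    [
        {"vector": [1.3, 1.4], "item": "fizz", "price": 100.0},
        {"vector": [9.5, 56.2], "item": "buzz", "price": 200.0},
    ]
)
```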
@@ -370,10 +415,16 @@ Once you've embedded the query, you can find its nearest neighbors as follows:

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:vector_search"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:vector_search_async"
```

This returns a pandas DataFrame with the results.
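A sketch of the sync search call (the query vector is illustrative):

```python
# Find the 2 nearest neighbours of a query vector and return a DataFrame.
results = tbl.search([100, 100]).limit(2).to_pandas()
print(results)
```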
@@ -412,10 +463,16 @@ LanceDB allows you to create an ANN index on a table as follows:

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_index"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:create_index_async"
```

=== "Typescript[^1]"
=== "@lancedb/lancedb"
@@ -451,10 +508,16 @@ This can delete any number of rows that match the filter.

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:delete_rows"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:delete_rows_async"
```

=== "Typescript[^1]"
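A sketch of the sync delete call, using the same filter as the legacy Typescript example further below:

```python
# Delete every row whose `item` column equals "fizz".
tbl.delete('item = "fizz"')
```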
@@ -483,7 +546,10 @@ simple or complex as needed. To see what expressions are supported, see the

=== "Python"

=== "Sync API"
Read more: [lancedb.table.Table.delete][]
=== "Async API"
Read more: [lancedb.table.AsyncTable.delete][]

=== "Typescript[^1]"
@@ -505,10 +571,16 @@ Use the `drop_table()` method on the database to remove a table.

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_basic.py:drop_table"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_basic.py:drop_table_async"
```

This permanently removes the table and is not recoverable, unlike deleting rows.
By default, if the table does not exist an exception is raised. To suppress this,
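The sync call is a one-liner (the keyword for suppressing the missing-table error has varied between releases, so it is omitted here):

```python
db.drop_table("my_table")  # permanently removes the table and its data
```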
@@ -543,10 +615,17 @@ You can use the embedding API when working with embedding models. It automatical

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_embeddings_optional.py:imports"

--8<-- "python/python/tests/docs/test_embeddings_optional.py:openai_embeddings"
```

=== "Async API"

Coming soon to the async API.
https://github.com/lancedb/lancedb/issues/1938

=== "Typescript[^1]"
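A sketch of the embedding-API flow the sync snippet demonstrates, here with the OpenAI provider (assumes `OPENAI_API_KEY` is set; table and field names are illustrative):

```python
import lancedb
from lancedb.embeddings import get_registry
from lancedb.pydantic import LanceModel, Vector

func = get_registry().get("openai").create()

class Words(LanceModel):
    text: str = func.SourceField()          # column the model embeds
    vector: Vector(func.ndims()) = func.VectorField()  # auto-filled vectors

db = lancedb.connect("data/sample-lancedb")
table = db.create_table("words", schema=Words, mode="overwrite")
table.add([{"text": "hello world"}, {"text": "goodbye world"}])

# The query string is embedded automatically before searching.
result = table.search("greetings").limit(1).to_pandas()
```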
@@ -107,7 +107,6 @@ const example = async () => {
// --8<-- [start:search]
const query = await tbl.search([100, 100]).limit(2).execute();
// --8<-- [end:search]
- console.log(query);

// --8<-- [start:delete]
await tbl.delete('item = "fizz"');
@@ -119,8 +118,9 @@ const example = async () => {
};

async function main() {
+ console.log("basic_legacy.ts: start");
await example();
- console.log("Basic example: done");
+ console.log("basic_legacy.ts: done");
}

main();
@@ -2,7 +2,7 @@

LanceDB Cloud is a SaaS (software-as-a-service) solution that runs serverless in the cloud, clearly separating storage from compute. It's designed to be highly scalable without breaking the bank. LanceDB Cloud is currently in private beta with general availability coming soon, but you can apply for early access with the private beta release by signing up below.

- [Try out LanceDB Cloud](https://noteforms.com/forms/lancedb-mailing-list-cloud-kty1o5?notionforms=1&utm_source=notionforms){ .md-button .md-button--primary }
+ [Try out LanceDB Cloud (Public Beta)](https://cloud.lancedb.com){ .md-button .md-button--primary }

## Architecture
@@ -59,7 +59,7 @@ Then the greedy search routine operates as follows:

There are three key parameters to set when constructing an HNSW index:

- * `metric`: Use an `L2` euclidean distance metric. We also support `dot` and `cosine` distance.
+ * `metric`: Use an `l2` euclidean distance metric. We also support `dot` and `cosine` distance.
* `m`: The number of neighbors to select for each vector in the HNSW graph.
* `ef_construction`: The number of candidates to evaluate during the construction of the HNSW graph.
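A sketch of building an HNSW-based index from the sync Python API; the keyword names (`index_type`, `m`, `ef_construction`) are assumptions based on recent releases, so check the API reference for your version:

```python
tbl.create_index(
    metric="l2",               # or "dot" / "cosine"
    index_type="IVF_HNSW_SQ",  # HNSW graph over IVF partitions (assumed name)
    m=20,                      # neighbors per node in the HNSW graph
    ef_construction=300,       # candidates evaluated while building the graph
)
```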
@@ -47,7 +47,7 @@ We can combine the above concepts to understand how to build and query an IVF-PQ

There are three key parameters to set when constructing an IVF-PQ index:

- * `metric`: Use an `L2` euclidean distance metric. We also support `dot` and `cosine` distance.
+ * `metric`: Use an `l2` euclidean distance metric. We also support `dot` and `cosine` distance.
* `num_partitions`: The number of partitions in the IVF portion of the index.
* `num_sub_vectors`: The number of sub-vectors that will be created during Product Quantization (PQ).
@@ -56,7 +56,7 @@ In Python, the index can be created as follows:
```python
# Create and train the index for a 1536-dimensional vector
# Make sure you have enough data in the table for an effective training step
- tbl.create_index(metric="L2", num_partitions=256, num_sub_vectors=96)
+ tbl.create_index(metric="l2", num_partitions=256, num_sub_vectors=96)
```
!!! note
`num_partitions`=256 and `num_sub_vectors`=96 does not work for every dataset. Those values need to be adjusted for your particular dataset.
@@ -55,6 +55,14 @@ Let's implement `SentenceTransformerEmbeddings` class. All you need to do is imp

This is a stripped down version of our implementation of `SentenceTransformerEmbeddings` that removes certain optimizations and default settings.

+ !!! danger "Use sensitive keys to prevent leaking secrets"
+ To prevent leaking secrets, such as API keys, you should add any sensitive
+ parameters of an embedding function to the output of the
+ [sensitive_keys()][lancedb.embeddings.base.EmbeddingFunction.sensitive_keys] /
+ [getSensitiveKeys()](../../js/namespaces/embedding/classes/EmbeddingFunction/#getsensitivekeys)
+ method. This prevents users from accidentally instantiating the embedding
+ function with hard-coded secrets.

Now you can use this embedding function to create your table schema and that's it! You can then ingest data and run queries without manually vectorizing the inputs.

=== "Python"
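A minimal sketch of how a custom embedding function might declare a sensitive parameter (the class name, registry key, and dimensions are illustrative):

```python
from lancedb.embeddings import register, TextEmbeddingFunction

@register("my-embedder")
class MyEmbeddings(TextEmbeddingFunction):
    api_key: str  # secret -- must never be written to table metadata

    def sensitive_keys(self):
        # Hard-coded values for these keys are rejected; callers must use
        # environment variables or registry variables instead.
        return ["api_key"]

    def ndims(self):
        return 384

    def generate_embeddings(self, texts):
        ...  # call the real embedding model here
```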
@@ -54,7 +54,7 @@ As mentioned, after creating embedding, each data point is represented as a vect

Points that are close to each other in vector space are considered similar (or appear in similar contexts), and points that are far away are considered dissimilar. To quantify this closeness, we use distance as a metric which can be measured in the following way -

- 1. **Euclidean Distance (L2)**: It calculates the straight-line distance between two points (vectors) in a multidimensional space.
+ 1. **Euclidean Distance (l2)**: It calculates the straight-line distance between two points (vectors) in a multidimensional space.
2. **Cosine Similarity**: It measures the cosine of the angle between two vectors, providing a normalized measure of similarity based on their direction.
3. **Dot product**: It is calculated as the sum of the products of their corresponding components. To measure relatedness it considers both the magnitude and direction of the vectors.
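The three measures in plain numpy, for two toy vectors:

```python
import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = np.array([2.0, 1.0, 4.0])

l2 = np.linalg.norm(a - b)                                  # Euclidean (l2) distance
cosine = (a @ b) / (np.linalg.norm(a) * np.linalg.norm(b))  # cosine similarity
dot = a @ b                                                 # dot product
```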
docs/src/embeddings/variables_and_secrets.md (new file, 53 lines)

# Variables and Secrets

Most embedding configuration options are saved in the table's metadata. However, this isn't always appropriate. For example, API keys should never be stored in the metadata. Additionally, other configuration options might be best set at runtime, such as the `device` configuration that controls whether to use GPU or CPU for inference. If you hardcoded this to GPU, you wouldn't be able to run the code on a server without one.

To handle these cases, you can set variables on the embedding registry and reference them in the embedding configuration. These variables are available during the runtime of your program, but not saved in the table's metadata. When the table is loaded from a different process, the variables must be set again.

To set a variable, use the `set_var()` / `setVar()` method on the embedding registry. To reference a variable, use the syntax `$env:VARIABLE_NAME`. If there is a default value, you can use the syntax `$env:VARIABLE_NAME:DEFAULT_VALUE`.

## Using variables to set secrets

Sensitive configuration, such as API keys, must be set either as environment variables or as variables on the embedding registry. If you pass in a hardcoded value, LanceDB will raise an error. Instead, if you want to set an API key via configuration, use a variable:

=== "Python"

```python
--8<-- "python/python/tests/docs/test_embeddings_optional.py:register_secret"
```

=== "Typescript"

```typescript
--8<-- "nodejs/examples/embedding.test.ts:register_secret"
```

## Using variables to set the device parameter

Many embedding functions that run locally have a `device` parameter that controls whether to use GPU or CPU for inference. Because not all computers have a GPU, it's helpful to be able to set the `device` parameter at runtime rather than have it hard-coded in the embedding configuration. To make it work even if the variable isn't set, you can provide a default value of `cpu` in the embedding configuration.

Some embedding libraries even have a method to detect which devices are available, which could be used to dynamically set the device at runtime. For example, in Python you can check if a CUDA GPU is available using `torch.cuda.is_available()`.

```python
--8<-- "python/python/tests/docs/test_embeddings_optional.py:register_device"
```
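A sketch of the pattern described above, combining a secret and a device variable with a default (the variable and provider names are illustrative):

```python
from lancedb.embeddings import get_registry

registry = get_registry()

# Available at runtime, never persisted in the table's metadata.
registry.set_var("my_api_key", "sk-...")

openai_func = registry.get("openai").create(api_key="$env:my_api_key")

# Fall back to CPU when the `device` variable isn't set.
st_func = registry.get("sentence-transformers").create(device="$env:device:cpu")
```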
@@ -8,15 +8,5 @@ LanceDB provides language APIs, allowing you to embed a database in your languag
* 👾 [JavaScript](examples_js.md) examples
* 🦀 Rust examples (coming soon)

- ## Python Applications powered by LanceDB
+ !!! tip "Hosted LanceDB"
+     If you want S3 cost-efficiency and local performance via a simple serverless API, check out **LanceDB Cloud**. For private deployments, high performance at extreme scale, or if you have strict security requirements, talk to us about **LanceDB Enterprise**. [Learn more](https://docs.lancedb.com/)
- | Project Name | Description |
- | --- | --- |
- | **Ultralytics Explorer 🚀**<br>[](https://docs.ultralytics.com/datasets/explorer/)<br>[](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/docs/en/datasets/explorer/explorer.ipynb) | - 🔍 **Explore CV Datasets**: Semantic search, SQL queries, vector similarity, natural language.<br>- 🖥️ **GUI & Python API**: Seamless dataset interaction.<br>- ⚡ **Efficient & Scalable**: Leverages LanceDB for large datasets.<br>- 📊 **Detailed Analysis**: Easily analyze data patterns.<br>- 🌐 **Browser GUI Demo**: Create embeddings, search images, run queries. |
- | **Website Chatbot🤖**<br>[](https://github.com/lancedb/lancedb-vercel-chatbot)<br>[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flancedb%2Flancedb-vercel-chatbot&env=OPENAI_API_KEY&envDescription=OpenAI%20API%20Key%20for%20chat%20completion.&project-name=lancedb-vercel-chatbot&repository-name=lancedb-vercel-chatbot&demo-title=LanceDB%20Chatbot%20Demo&demo-description=Demo%20website%20chatbot%20with%20LanceDB.&demo-url=https%3A%2F%2Flancedb.vercel.app&demo-image=https%3A%2F%2Fi.imgur.com%2FazVJtvr.png) | - 🌐 **Chatbot from Sitemap/Docs**: Create a chatbot using site or document context.<br>- 🚀 **Embed LanceDB in Next.js**: Lightweight, on-prem storage.<br>- 🧠 **AI-Powered Context Retrieval**: Efficiently access relevant data.<br>- 🔧 **Serverless & Native JS**: Seamless integration with Next.js.<br>- ⚡ **One-Click Deploy on Vercel**: Quick and easy setup. |
-
- ## Nodejs Applications powered by LanceDB
-
- | Project Name | Description |
- | --- | --- |
- | **Langchain Writing Assistant✍️**<br>[](https://github.com/lancedb/vectordb-recipes/tree/main/applications/node/lanchain_writing_assistant) | - **📂 Data Source Integration**: Use your own data by specifying a data source file, and the app instantly processes it to provide insights.<br>- **🧠 Intelligent Suggestions**: Powered by LangChain.js and LanceDB, it improves writing productivity and accuracy.<br>- **💡 Enhanced Writing Experience**: It delivers real-time contextual insights and factual suggestions while the user writes. |
docs/src/extra_js/reo.js (new file, 1 line)

!function(){var e,t,n;e="9627b71b382d201",t=function(){Reo.init({clientID:"9627b71b382d201"})},(n=document.createElement("script")).src="https://static.reo.dev/"+e+"/reo.js",n.defer=!0,n.onload=t,document.head.appendChild(n)}();
docs/src/guides/multi-vector.md (new file, 85 lines)

# Late interaction & MultiVector embedding type

Late interaction is a technique used in retrieval that calculates the relevance of a query to a document by comparing their multi-vector representations. The key difference between late interaction and other popular methods:



[Illustration from https://jina.ai/news/what-is-colbert-and-late-interaction-and-why-they-matter-in-search/]

<b>No interaction:</b> Refers to independently embedding the query and document, which are then compared to calculate similarity without any interaction between them. This is typically used in vector search operations.

<b>Partial interaction:</b> Refers to a specific approach where the similarity computation happens primarily between query vectors and document vectors, without extensive interaction between individual components of each. An example of this is dual-encoder models like BERT.

<b>Early full interaction:</b> Refers to techniques like cross-encoders that process query and docs in pairs with full interaction across various stages of encoding. This is a powerful but relatively slow technique. Because it requires processing query and docs in pairs, doc embeddings can't be pre-computed for fast retrieval. This is why cross-encoders are typically used as reranking models combined with vector search. Learn more about [LanceDB Reranking support](https://lancedb.github.io/lancedb/reranking/).

<b>Late interaction:</b> A technique that computes the document and query embeddings independently; the interaction, or scoring, happens during the retrieval process. This is typically used in retrieval models like ColBERT. Unlike early interaction, it speeds up the retrieval process without compromising the depth of semantic analysis.

## Internals of ColBERT

Let's take a look at the steps involved in performing late-interaction-based retrieval using ColBERT:

• ColBERT employs BERT-based encoders for both queries `(fQ)` and documents `(fD)`
• A single BERT model is shared between query and document encoders; special tokens distinguish input types: `[Q]` for queries and `[D]` for documents

**Query Encoder (fQ):**
• Query q is tokenized into WordPiece tokens: `q1, q2, ..., ql`. The `[Q]` token is prepended right after BERT's `[CLS]` token
• If query length < Nq, it's padded with `[MASK]` tokens up to Nq.
• The padded sequence goes through BERT's transformer architecture
• Final embeddings are L2-normalized.

**Document Encoder (fD):**
• Document d is tokenized into tokens `d1, d2, ..., dm`. The `[D]` token is prepended after the `[CLS]` token
• Unlike queries, documents are NOT padded with `[MASK]` tokens
• Document tokens are processed through BERT and the same linear layer

**Late Interaction:**
• Late interaction estimates the relevance score `S(q,d)` using the embeddings `Eq` and `Ed`. Late interaction happens after independent encoding
• For each query embedding, maximum similarity is computed against all document embeddings
• The similarity measure can be cosine similarity or squared L2 distance

**MaxSim Calculation:**

```
S(q, d) := Σ_{i ∈ |Eq|}  max_{j ∈ |Ed|}  Eq_i · Ed_j^T
```

• This finds the best-matching document embedding for each query embedding
• Captures relevance based on the strongest local matches between contextual embeddings
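The MaxSim score in plain numpy, for toy multi-vector representations:

```python
import numpy as np

rng = np.random.default_rng(0)
Eq = rng.normal(size=(4, 128))   # 4 query token embeddings
Ed = rng.normal(size=(7, 128))   # 7 document token embeddings
Eq /= np.linalg.norm(Eq, axis=1, keepdims=True)
Ed /= np.linalg.norm(Ed, axis=1, keepdims=True)

sim = Eq @ Ed.T                  # token-to-token similarities
score = sim.max(axis=1).sum()    # best doc token per query token, then sum
print(score)
```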
## LanceDB MultiVector type

LanceDB supports a multivector column type, which is useful when you have multiple vectors for a single item (e.g. with ColBERT and ColPali).

You can index a column with the multivector type and search on it; the query can be a single vector or multiple vectors. For now, only the cosine metric is supported for multivector search. The vector value type can be float16, float32 or float64. LanceDB integrates the [ConteXtualized Token Retriever (XTR)](https://arxiv.org/abs/2304.01982), which introduces a simple yet novel objective function that encourages the model to retrieve the most important document tokens first.

```python
import lancedb
import numpy as np
import pyarrow as pa

db = lancedb.connect("data/multivector_demo")
schema = pa.schema(
    [
        pa.field("id", pa.int64()),
        # float16, float32, and float64 are supported
        pa.field("vector", pa.list_(pa.list_(pa.float32(), 256))),
    ]
)
data = [
    {
        "id": i,
        "vector": np.random.random(size=(2, 256)).tolist(),
    }
    for i in range(1024)
]
tbl = db.create_table("my_table", data=data, schema=schema)

# only cosine similarity is supported for multi-vectors
tbl.create_index(metric="cosine")

# query with single vector
query = np.random.random(256).astype(np.float16)
tbl.search(query).to_arrow()

# query with multiple vectors
query = np.random.random(size=(2, 256))
tbl.search(query).to_arrow()
```

Find more about vector search in LanceDB [here](https://lancedb.github.io/lancedb/search/#multivector-type).
@@ -518,7 +518,7 @@ After a table has been created, you can always add more data to it using the `ad
--8<-- "python/python/tests/docs/test_guide_tables.py:add_table_from_polars"
```
=== "Async API"

```python
--8<-- "python/python/tests/docs/test_guide_tables.py:add_table_async_from_polars"
```
@@ -601,6 +601,38 @@ After a table has been created, you can always add more data to it using the `ad
)
```

+ ## Upserting into a table
+
+ Upserting lets you insert new rows or update existing rows in a table. To upsert
+ in LanceDB, use the merge insert API.
+
+ === "Python"
+
+ === "Sync API"
+
+ ```python
+ --8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic"
+ ```
+ **API Reference**: [lancedb.table.Table.merge_insert][]
+
+ === "Async API"
+
+ ```python
+ --8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic_async"
+ ```
+ **API Reference**: [lancedb.table.AsyncTable.merge_insert][]
+
+ === "Typescript[^1]"
+
+ === "@lancedb/lancedb"
+
+ ```typescript
+ --8<-- "nodejs/examples/merge_insert.test.ts:upsert_basic"
+ ```
+ **API Reference**: [lancedb.Table.mergeInsert](../js/classes/Table.md/#mergeInsert)
+
+ Read more in the guide on [merge insert](tables/merge_insert.md).
+
## Deleting from a table

Use the `delete()` method on tables to delete rows from a table. To choose which rows to delete, provide a filter that matches on the metadata columns. This can delete any number of rows that match the filter.
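A sketch of the upsert pattern those snippets demonstrate (the table and column names are illustrative):

```python
tbl = db.create_table(
    "users",
    data=[{"id": 0, "name": "alice"}, {"id": 1, "name": "bob"}],
    mode="overwrite",
)

new_users = [{"id": 1, "name": "bobby"}, {"id": 2, "name": "carol"}]

# Update rows whose `id` matches, insert the rest.
(
    tbl.merge_insert("id")
    .when_matched_update_all()
    .when_not_matched_insert_all()
    .execute(new_users)
)
```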
@@ -630,7 +662,7 @@ Use the `delete()` method on tables to delete rows from a table. To choose which
```python
--8<-- "python/python/tests/docs/test_guide_tables.py:delete_specific_row_async"
```

### Delete from a list of values
=== "Sync API"

@@ -838,7 +870,7 @@ a table:

You can add new columns to the table with the `add_columns` method. New columns
are filled with values based on a SQL expression. For example, you can add a new
column `y` to the table, fill it with the value of `x * 2` and set the expected
data type for it.

=== "Python"
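A sketch of that `add_columns` call (the cast syntax follows the SQL dialect used for expressions and is an assumption — check the guide's full examples):

```python
# Add a column `y` computed from `x`, with an explicit target type.
tbl.add_columns({"y": "cast(x * 2 as int64)"})
```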
docs/src/guides/tables/merge_insert.md (new file, 135 lines)

The merge insert command is a flexible API that can be used to perform:

1. Upsert
2. Insert-if-not-exists
3. Replace range

It works by joining the input data with the target table on a key you provide. Often this key is a unique row id. You can then specify what to do when there is a match and when there is not a match. For example, for upsert you want to update if the row has a match and insert if it doesn't. Whereas for insert-if-not-exists, you only want to insert if the row doesn't have a match.

You can also read more in the API reference:

* Python
* Sync: [lancedb.table.Table.merge_insert][]
* Async: [lancedb.table.AsyncTable.merge_insert][]
* Typescript: [lancedb.Table.mergeInsert](../../js/classes/Table.md/#mergeinsert)

!!! tip "Use scalar indices to speed up merge insert"

The merge insert command needs to perform a join between the input data and the target table on the `on` key you provide. This requires scanning that entire column, which can be expensive for large tables. To speed up this operation, you can create a scalar index on the `on` column, which allows LanceDB to find matches without having to scan the whole table — see the sketch below.

Read more about scalar indices in the [Building a Scalar Index](../scalar_index.md) guide.
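Concretely, the tip above amounts to one extra call (assuming the merge key is a column named `id`):

```python
# A BTREE scalar index on the merge key lets merge_insert find matches
# without scanning the whole column.
tbl.create_scalar_index("id")
```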
!!! info "Embedding Functions"

Like the create table and add APIs, the merge insert API will automatically compute embeddings if the table has an embedding definition in its schema. If the input data doesn't contain the source column, or the vector column is already filled, then the embeddings won't be computed. See the [Embedding Functions](../../embeddings/embedding_functions.md) guide for more information.

## Upsert

Upsert updates rows if they exist and inserts them if they don't. To do this with merge insert, enable both `when_matched_update_all()` and `when_not_matched_insert_all()`.

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:upsert_basic_async"
```

=== "Typescript"

=== "@lancedb/lancedb"

```typescript
--8<-- "nodejs/examples/merge_insert.test.ts:upsert_basic"
```

!!! note "Providing subsets of columns"

If a column is nullable, it can be omitted from input data and it will be considered `null`. Columns can also be provided in any order.

## Insert-if-not-exists

To avoid inserting duplicate rows, you can use the insert-if-not-exists command. This will only insert rows that do not have a match in the target table. To do this with merge insert, enable just `when_not_matched_insert_all()`.

=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:insert_if_not_exists"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:insert_if_not_exists_async"
```

=== "Typescript"

=== "@lancedb/lancedb"

```typescript
--8<-- "nodejs/examples/merge_insert.test.ts:insert_if_not_exists"
```

## Replace range

You can also replace a range of rows in the target table with the input data. For example, if you have a table of document chunks, where each chunk has both a `doc_id` and a `chunk_id`, you can replace all chunks for a given `doc_id` with updated chunks. This can be tricky otherwise, because if you use upsert and the new data has fewer chunks, you will end up with extra chunks. To avoid this, add another clause to delete any chunks for the document that are not in the new data, with `when_not_matched_by_source_delete`, as sketched below.
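A sketch of that replace-range pattern (the column names follow the example in the text; the delete-condition string is an assumption about how the filter would be phrased):

```python
new_chunks = [
    {"doc_id": 1, "chunk_id": 0, "text": "updated chunk 0"},
    {"doc_id": 1, "chunk_id": 1, "text": "updated chunk 1"},
]

# Upsert the new chunks for doc_id = 1 and drop any of its old chunks
# that are missing from the new data.
(
    tbl.merge_insert(["doc_id", "chunk_id"])
    .when_matched_update_all()
    .when_not_matched_insert_all()
    .when_not_matched_by_source_delete("doc_id = 1")
    .execute(new_chunks)
)
```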
=== "Python"

=== "Sync API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:replace_range"
```

=== "Async API"

```python
--8<-- "python/python/tests/docs/test_merge_insert.py:replace_range_async"
```

=== "Typescript"

=== "@lancedb/lancedb"

```typescript
--8<-- "nodejs/examples/merge_insert.test.ts:replace_range"
```
@@ -4,6 +4,9 @@ LanceDB is an open-source vector database for AI that's designed to store, manag

Both the database and the underlying data format are designed from the ground up to be **easy-to-use**, **scalable** and **cost-effective**.

+ !!! tip "Hosted LanceDB"
+     If you want S3 cost-efficiency and local performance via a simple serverless API, check out **LanceDB Cloud**. For private deployments, high performance at extreme scale, or if you have strict security requirements, talk to us about **LanceDB Enterprise**. [Learn more](https://docs.lancedb.com/)



## Truly multi-modal
@@ -20,7 +23,7 @@ LanceDB **OSS** is an **open-source**, batteries-included embedded vector databa

LanceDB **Cloud** is a SaaS (software-as-a-service) solution that runs serverless in the cloud, making the storage clearly separated from compute. It's designed to be cost-effective and highly scalable without breaking the bank. LanceDB Cloud is currently in private beta with general availability coming soon, but you can apply for early access with the private beta release by signing up below.

- [Try out LanceDB Cloud](https://noteforms.com/forms/lancedb-mailing-list-cloud-kty1o5?notionforms=1&utm_source=notionforms){ .md-button .md-button--primary }
+ [Try out LanceDB Cloud (Public Beta) Now](https://cloud.lancedb.com){ .md-button .md-button--primary }

## Why use LanceDB?
@@ -108,7 +108,7 @@ This method creates a scalar(for non-vector cols) or a vector index on a table.
|:---|:---|:---|:---|
|`vector_col`|`Optional[str]`| Provide if you want to create index on a vector column. |`None`|
|`col_name`|`Optional[str]`| Provide if you want to create index on a non-vector column. |`None`|
- |`metric`|`Optional[str]` |Provide the metric to use for vector index. choice of metrics: 'L2', 'dot', 'cosine'. |`L2`|
+ |`metric`|`Optional[str]` |Provide the metric to use for vector index. choice of metrics: 'l2', 'dot', 'cosine'. |`l2`|
|`num_partitions`|`Optional[int]`|Number of partitions to use for the index.|`256`|
|`num_sub_vectors`|`Optional[int]` |Number of sub-vectors to use for the index.|`96`|
|`index_cache_size`|`Optional[int]` |Size of the index cache.|`None`|
@@ -125,7 +125,7 @@ The exhaustive list of parameters for `LanceDBVectorStore` vector store are :
```
- **_table_exists(self, tbl_name: `Optional[str]` = `None`) -> `bool`** : Returns `True` if `tbl_name` exists in database.
- __create_index(
- self, scalar: `Optional[bool]` = False, col_name: `Optional[str]` = None, num_partitions: `Optional[int]` = 256, num_sub_vectors: `Optional[int]` = 96, index_cache_size: `Optional[int]` = None, metric: `Optional[str]` = "L2",
+ self, scalar: `Optional[bool]` = False, col_name: `Optional[str]` = None, num_partitions: `Optional[int]` = 256, num_sub_vectors: `Optional[int]` = 96, index_cache_size: `Optional[int]` = None, metric: `Optional[str]` = "l2",
) -> `None`__ : Creates a scalar(for non-vector cols) or a vector index on a table.
Make sure your vector column has enough data before creating an index on it.
@@ -10,7 +10,7 @@ Distance metrics type.

- [Cosine](MetricType.md#cosine)
- [Dot](MetricType.md#dot)
- - [L2](MetricType.md#l2)
+ - [l2](MetricType.md#l2)

## Enumeration Members
@@ -85,7 +85,7 @@ ___

• `Optional` **metric\_type**: [`MetricType`](../enums/MetricType.md)

- Metric type, L2 or Cosine
+ Metric type, l2 or Cosine

#### Defined in
@@ -15,11 +15,9 @@ npm install @lancedb/lancedb
This will download the appropriate native library for your platform. We currently
support:

- - Linux (x86_64 and aarch64)
+ - Linux (x86_64 and aarch64 on glibc and musl)
- MacOS (Intel and ARM/M1/M2)
- - Windows (x86_64 only)
+ - Windows (x86_64 and aarch64)

- We do not yet support musl-based Linux (such as Alpine Linux) or aarch64 Windows.

## Usage
@@ -36,41 +34,8 @@ const results = await table.vectorSearch([0.1, 0.3]).limit(20).toArray();
console.log(results);
```

- The [quickstart](../basic.md) contains a more complete example.
+ The [quickstart](https://lancedb.github.io/lancedb/basic/) contains a more complete example.

## Development

- ```sh
+ See [CONTRIBUTING.md](_media/CONTRIBUTING.md) for information on how to contribute to LanceDB.
- npm run build
- npm run test
- ```
-
- ### Running lint / format
-
- LanceDb uses [biome](https://biomejs.dev/) for linting and formatting. if you are using VSCode you will need to install the official [Biome](https://marketplace.visualstudio.com/items?itemName=biomejs.biome) extension.
- To manually lint your code you can run:
-
- ```sh
- npm run lint
- ```
-
- to automatically fix all fixable issues:
-
- ```sh
- npm run lint-fix
- ```
-
- If you do not have your workspace root set to the `nodejs` directory, unfortunately the extension will not work. You can still run the linting and formatting commands manually.
-
- ### Generating docs
-
- ```sh
- npm run docs
-
- cd ../docs
- # Asssume the virtual environment was created
- # python3 -m venv venv
- # pip install -r requirements.txt
- . ./venv/bin/activate
- mkdocs build
- ```
docs/src/js/_media/CONTRIBUTING.md (new file, 76 lines)

# Contributing to LanceDB Typescript

This document outlines the process for contributing to LanceDB Typescript. For general contribution guidelines, see [CONTRIBUTING.md](../CONTRIBUTING.md).

## Project layout

The Typescript package is a wrapper around the Rust library, `lancedb`. We use the [napi-rs](https://napi.rs/) library to create the bindings between Rust and Typescript.

* `src/`: Rust bindings source code
* `lancedb/`: Typescript package source code
* `__test__/`: Unit tests
* `examples/`: An npm package with the examples shown in the documentation

## Development environment

To set up your development environment, you will need to install the following:

1. Node.js 14 or later
2. Rust's package manager, Cargo. Use [rustup](https://rustup.rs/) to install.
3. [protoc](https://grpc.io/docs/protoc-installation/) (Protocol Buffers compiler)

Initial setup:

```shell
npm install
```

### Commit Hooks

It is **highly recommended** to install the [pre-commit](https://pre-commit.com/) hooks to ensure that your code is formatted correctly and passes basic checks before committing:

```shell
pre-commit install
```

## Development

Most common development commands can be run using the npm scripts.

Build the package:

```shell
npm install
npm run build
```

Lint:

```shell
npm run lint
```

Format and fix lints:

```shell
npm run lint-fix
```

Run tests:

```shell
npm test
```

To run a single test:

```shell
# Single file: table.test.ts
npm test -- table.test.ts
# Single test: 'merge insert' in table.test.ts
npm test -- table.test.ts --testNamePattern=merge\ insert
```
@@ -23,18 +23,6 @@ be closed when they are garbage collected.
Any created tables are independent and will continue to work even if
the underlying connection has been closed.

- ## Constructors
-
- ### new Connection()
-
- ```ts
- new Connection(): Connection
- ```
-
- #### Returns
-
- [`Connection`](Connection.md)
-
## Methods

### close()
@@ -71,7 +59,7 @@ Creates a new empty Table
* **name**: `string`
The name of the table.

- * **schema**: `SchemaLike`
+ * **schema**: [`SchemaLike`](../type-aliases/SchemaLike.md)
The schema of the table

* **options?**: `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)>
@@ -117,7 +105,7 @@ Creates a new Table and initialize it with new data.
* **name**: `string`
The name of the table.

- * **data**: `TableLike` \| `Record`<`string`, `unknown`>[]
+ * **data**: [`TableLike`](../type-aliases/TableLike.md) \| `Record`<`string`, `unknown`>[]
Non-empty Array of Records
to be inserted into the table
@@ -143,6 +131,20 @@ Return a brief description of the connection

***

+ ### dropAllTables()
+
+ ```ts
+ abstract dropAllTables(): Promise<void>
+ ```
+
+ Drop all tables in the database.
+
+ #### Returns
+
+ `Promise`<`void`>
+
+ ***
+
### dropTable()

```ts
@@ -189,7 +191,7 @@ Open a table in the database.
* **name**: `string`
The name of the table

- * **options?**: `Partial`<`OpenTableOptions`>
+ * **options?**: `Partial`<[`OpenTableOptions`](../interfaces/OpenTableOptions.md)>

#### Returns
@@ -72,11 +72,9 @@ The results of a full text search are ordered by relevance measured by BM25.

You can combine filters with full text search.

- For now, the full text search index only supports English, and doesn't support phrase search.
-
#### Parameters

- * **options?**: `Partial`<`FtsOptions`>
+ * **options?**: `Partial`<[`FtsOptions`](../interfaces/FtsOptions.md)>

#### Returns
@@ -98,7 +96,7 @@ the vectors.

#### Parameters

- * **options?**: `Partial`<`HnswPqOptions`>
+ * **options?**: `Partial`<[`HnswPqOptions`](../interfaces/HnswPqOptions.md)>

#### Returns
@@ -120,7 +118,38 @@ the vectors.
|
|||||||
|
|
||||||
#### Parameters
|
#### Parameters
|
||||||
|
|
||||||
* **options?**: `Partial`<`HnswSqOptions`>
|
* **options?**: `Partial`<[`HnswSqOptions`](../interfaces/HnswSqOptions.md)>
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
[`Index`](Index.md)
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
### ivfFlat()
|
||||||
|
|
||||||
|
```ts
|
||||||
|
static ivfFlat(options?): Index
|
||||||
|
```
|
||||||
|
|
||||||
|
Create an IvfFlat index
|
||||||
|
|
||||||
|
This index groups vectors into partitions of similar vectors. Each partition keeps track of
|
||||||
|
a centroid which is the average value of all vectors in the group.
|
||||||
|
|
||||||
|
During a query the centroids are compared with the query vector to find the closest
|
||||||
|
partitions. The vectors in these partitions are then searched to find
|
||||||
|
the closest vectors.
|
||||||
|
|
||||||
|
The partitioning process is called IVF and the `num_partitions` parameter controls how
|
||||||
|
many groups to create.
|
||||||
|
|
||||||
|
Note that training an IVF FLAT index on a large dataset is a slow operation and
|
||||||
|
currently is also a memory intensive operation.
|
||||||
|
|
||||||
|
#### Parameters
|
||||||
|
|
||||||
|
* **options?**: `Partial`<[`IvfFlatOptions`](../interfaces/IvfFlatOptions.md)>
|
||||||
|
|
||||||
#### Returns
|
#### Returns
|
||||||
|
|
||||||
|
|||||||
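A sketch of creating the two index types above through `Table#createIndex` (not from the diff; the `config` field of `IndexOptions` and the camel-cased `numPartitions` option name are assumptions, and the column names are illustrative):

```ts
// Continuing the connection sketch above.
const table = await db.openTable("items");

// IVF_FLAT vector index: vectors are grouped into partitions around centroids.
await table.createIndex("vector", {
  config: lancedb.Index.ivfFlat({ numPartitions: 2 }), // option name assumed from IvfFlatOptions
});

// Full-text index on a string column, tuned with FtsOptions.
await table.createIndex("id", {
  config: lancedb.Index.fts({ withPosition: false }),
});
```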
**New file: docs/src/js/classes/MergeInsertBuilder.md (126 lines)**

# Class: MergeInsertBuilder

A builder used to create and run a merge insert operation.

## Constructors

* `new MergeInsertBuilder(native, schema): MergeInsertBuilder`: construct a MergeInsertBuilder. __Internal use only.__ Parameters: **native**: `NativeMergeInsertBuilder`; **schema**: `Schema`<`any`> \| `Promise`<`Schema`<`any`>>.

## Methods

* `execute(data): Promise<void>`: executes the merge insert operation. Nothing is returned, but the `Table` is updated. Parameter: **data**: [`Data`](../type-aliases/Data.md).
* `whenMatchedUpdateAll(options?): MergeInsertBuilder`: rows that exist in both the source table (new data) and the target table (old data) will be updated, replacing the old row with the corresponding matching row. If there are multiple matches then the behavior is undefined; currently this causes multiple copies of the row to be created, but that behavior is subject to change. An optional condition may be specified (**options.where?**: `string`); if it is, only matched rows that satisfy the condition are updated, and rows that do not satisfy it are left as they are. Failing to satisfy the condition does not turn a "matched row" into a "not matched" row. The condition should be an SQL string; use the prefix `target.` to refer to rows in the target table (old data) and `source.` for rows in the source table (new data), for example `"target.last_update < source.last_update"`.
* `whenNotMatchedBySourceDelete(options?): MergeInsertBuilder`: rows that exist only in the target table (old data) will be deleted. An optional condition (**options.where?**: `string`) can be provided to limit what data is deleted.
* `whenNotMatchedInsertAll(): MergeInsertBuilder`: rows that exist only in the source table (new data) will be inserted into the target table.
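For context, here is a merge-insert sketch built on `Table#mergeInsert` (documented under the Table changes below); the column names and both `where` conditions are illustrative assumptions:

```ts
// `table` is the Table from the earlier sketches.
// Upsert on "id": refresh stale matches, insert new rows, and drop rows missing from the source.
await table
  .mergeInsert("id")
  .whenMatchedUpdateAll({ where: "target.last_update < source.last_update" })
  .whenNotMatchedInsertAll()
  .whenNotMatchedBySourceDelete({ where: "last_update < 100" }) // assumed target-side condition
  .execute([
    { id: "a", vector: [1, 2, 3, 4], last_update: 200 },
    { id: "c", vector: [9, 9, 9, 9], last_update: 200 },
  ]);
```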
**Query class page (Query.md), modified:**

- @@ -8,30 +8,14 ("A builder for LanceDB queries"): adds a `## See` section pointing to [Table#query](Table.md#query) and [Table#search](Table.md#search), and removes the documented `new Query(tbl)` constructor (with its Overrides note).
- @@ -46,42 +30,6 (the `inner` property): removes the inherited `[asyncIterator]()` and protected `doCall()` method entries.
- @@ -92,7 +40,7 (`execute()`): the `options` parameter now links to [`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md).
- @@ -161,7 +109,7 (`fastSearch()`): "Use lancedb.Table#optimize" becomes the link "Use [Table#optimize](Table.md#optimize) to index all un-indexed data."
- @@ -189,7 +137,7 (`filter()`): the `#### Alias` heading for `where` becomes `#### See`.
- @@ -213,7 +161,7 (`fullTextSearch()`): the `options` parameter now links to [`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md).
- @@ -250,26 +198,6: removes the protected `nativeExecute()` entry.
- @@ -294,7 +222,7 (`nearestTo()`): the `vector` parameter now links to [`IntoVector`](../type-aliases/IntoVector.md).
- @@ -427,7 +355,7 and @@ -449,7 +377,7 (`toArray()` / `toArrow()`): the `options` parameters now link to [`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md).
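A small query-building sketch using the methods touched above (not from the diff; the filter expression and vector are illustrative):

```ts
// Build a query, narrow it with an SQL filter, then run a vector search over it.
const nearest = await table
  .query()
  .where("id != 'b'")      // filter() is the same method under its alias, where()
  .nearestTo([1, 2, 3, 4]) // upgrades the Query to a VectorQuery
  .limit(5)
  .toArray();
```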
**QueryBase class page (QueryBase.md), modified:**

- @@ -8,6 +8,11 ("Common methods supported by all query types"): adds a `## See` section listing [Query](Query.md) and [VectorQuery](VectorQuery.md).
- @@ -21,22 +26,6: removes the documented protected constructor `new QueryBase<NativeQueryType>(inner)`.
- @@ -47,38 +36,6 (the `inner` property): removes the `[asyncIterator]()` and protected `doCall()` method entries.
- @@ -89,7 +46,7 (`execute()`), @@ -314,7 +255,7 (`toArray()`), and @@ -332,7 +273,7 (`toArrow()`): `options` parameters now link to [`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md).
- @@ -150,7 +107,7 (`fastSearch()`): "Use lancedb.Table#optimize" becomes the link [Table#optimize](Table.md#optimize).
- @@ -174,7 +131,7 (`filter()`): `#### Alias` becomes `#### See` (for `where`).
- @@ -194,7 +151,7 (`fullTextSearch()`): the `options` parameter now links to [`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md).
- @@ -223,22 +180,6: removes the protected `nativeExecute()` entry.
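Because `QueryBase` implements `AsyncIterable<RecordBatch>`, results can also be streamed batch by batch rather than collected with `toArray()`; a short sketch:

```ts
for await (const batch of table.query().where("id IS NOT NULL")) {
  console.log(`received a RecordBatch with ${batch.numRows} rows`);
}
```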
**Table class page (Table.md), modified:**

- @@ -14,21 +14,13 ("will be freed when the Table is garbage collected..."): adds the paragraph "Tables are created using the methods [Connection#createTable](Connection.md#createtable) and [Connection#createEmptyTable](Connection.md#createemptytable). Existing tables are opened using [Connection#openTable](Connection.md#opentable)." and removes the documented `new Table()` constructor.
- @@ -216,6 +208,9 (`createIndex()`, "Indices on scalar columns will speed up filtering..."): adds "We currently don't support custom named indexes. The index name will always be `${column}_idx`." to the method description.
- @@ -226,11 +221,6: removes the old `#### Note` block that carried the same index-naming caveat.
- @@ -317,6 +307,28 ("then call ``cleanup_files`` to remove the old files"): adds a new abstract method `### dropIndex()`, signature `abstract dropIndex(name): Promise<void>`, "Drop an index from the table." Its **name**: `string` parameter is the name of the index. This does not delete the index from disk, it just removes it from the table; to delete the index, run [Table#optimize](Table.md#optimize) after dropping it, and use [Table.listIndices](Table.md#listindices) to find the names of the indices. Returns `Promise`<`void`>.
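A sketch of the drop-then-optimize flow described above (index and column names are illustrative):

```ts
// Index names default to `${column}_idx`: list them, drop one, then optimize
// so the underlying index files are actually cleaned up.
console.log(await table.listIndices());
await table.dropIndex("vector_idx");
await table.optimize();
```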
- @@ -336,6 +348,8 (`indexStats()`, "The stats of the index. If the index does not exist, it will return undefined"): adds "Use [Table.listIndices](Table.md#listindices) to find the names of the indices."
- @@ -376,7 +390,7 (`versions()`): the return type now links to [`Version`](../interfaces/Version.md)[].
- @@ -392,7 +406,7 (`mergeInsert()`): the return type now links to [`MergeInsertBuilder`](MergeInsertBuilder.md).
- @@ -436,7 +450,7 (`optimize()`): the return type now links to [`OptimizeStats`](../interfaces/OptimizeStats.md).
- @@ -553,7 +567,7, @@ -561,7 +575,7, and @@ -575,7 +589,7 (`search()`, "Create a search query to find the nearest neighbors of the given query"): the declared return type is reordered from `VectorQuery | Query` to `Query | VectorQuery`, and the `query` parameter ("the query, a vector or string") now links to [`IntoVector`](../type-aliases/IntoVector.md); the optional `queryType?`: `string` parameter is unchanged.
- @@ -694,7 +708,7 ("by `query`"): another `vector` parameter now links to [`IntoVector`](../type-aliases/IntoVector.md).
- @@ -717,38 +731,3 ("Retrieve the version of the table"): removes the static `parseTableData(data, options?, streaming?)` entry (which took `TableLike` \| `Record`<`string`, `unknown`>[] data plus `Partial`<[`CreateTableOptions`](../interfaces/CreateTableOptions.md)> and returned a `Promise` of an object with `buf: Buffer` and `mode: string`).
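A search sketch using the `search()` entry point above (not from the diff; whether the string form needs an existing FTS index is not covered by this excerpt, so treat that line as an assumption):

```ts
// Vector search: pass a vector, filter, project, and collect.
const hits = await table
  .search([1, 2, 3, 4])
  .where("id != 'b'")
  .select(["id"])
  .limit(3)
  .toArray();

// String queries go through the same method (full text search, assuming an FTS index exists).
const textHits = await table.search("hello world").limit(3).toArray();
```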
**VectorQuery class page (VectorQuery.md), modified:**

- @@ -10,30 +10,14 ("This builder can be reused to execute the query many times"): adds a `## See` section pointing to [Query#nearestTo](Query.md#nearestto) and removes the documented `new VectorQuery(inner)` constructor.
- @@ -48,22 +32,6 (the `inner` property): removes the inherited `[asyncIterator]()` entry.
- @@ -72,7 +40,7 (`addQueryVector()`): the `vector` parameter now links to [`IntoVector`](../type-aliases/IntoVector.md).
- @@ -128,6 +96,24 ("whose data type is a fixed-size-list of floats"): adds `### distanceRange()`, signature `distanceRange(lowerBound?, upperBound?): VectorQuery`, with both bounds of type `number`, returning [`VectorQuery`](VectorQuery.md).
- @@ -161,26 +147,6 and @@ -373,26 +339,6: remove the inherited `doCall()` and protected `nativeExecute()` entries.
- @@ -215,7 +181,7 (`execute()`), @@ -591,7 +553,7 (`toArray()`), and @@ -613,7 +575,7 (`toArrow()`): `options` parameters now link to [`QueryExecutionOptions`](../interfaces/QueryExecutionOptions.md).
- @@ -284,7 +250,7 (`fastSearch()`): links [Table#optimize](Table.md#optimize).
- @@ -312,7 +278,7 (`filter()`): `#### Alias` becomes `#### See` (for `where`).
- @@ -336,7 +302,7 (`fullTextSearch()`): the `options` parameter now links to [`FullTextSearchOptions`](../interfaces/FullTextSearchOptions.md).
- @@ -528,6 +474,22 ("distance between the query vector and the actual uncompressed vector"): adds `### rerank()`, signature `rerank(reranker): VectorQuery`, taking a [`Reranker`](../namespaces/rerankers/interfaces/Reranker.md) and returning [`VectorQuery`](VectorQuery.md).
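A sketch combining the new `distanceRange()` with the existing vector-query knobs (the window values are illustrative, and the diff does not say whether the bounds are inclusive):

```ts
// Constrain a cosine-distance search to a distance window before collecting results.
const nearby = await table
  .query()
  .nearestTo([1, 2, 3, 4])
  .distanceType("cosine")
  .distanceRange(0.0, 0.5) // keep hits whose distance falls inside the window
  .limit(10)
  .toArray();
```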
**WriteMode enumeration page (WriteMode.md), deleted (@@ -1,33 +0,0):**

The page documenting the `WriteMode` enumeration ("Write mode for writing a table.") with members `Append: "Append"`, `Create: "Create"`, and `Overwrite: "Overwrite"` is removed; it also disappears from the globals index further down.
**connect() function page (functions/connect.md), modified:**

- @@ -6,10 +6,10: the first overload is renamed from `connect(uri, opts)` to `connect(uri, options)`, with signature `function connect(uri, options?): Promise<Connection>`.
- @@ -26,7 +26,8 ("with `db://` then it connects to a remote database."): the renamed `options?` parameter (`Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)>) gains the description "The options to use when connecting to the database".
- @@ -49,10 +50,10 and @@ -65,7 +66,8: the second overload is likewise renamed from `connect(opts)` to `connect(options)`, with the same description added for its `Partial`<[`ConnectionOptions`](../interfaces/ConnectionOptions.md)> & `object` parameter.
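The options-only overload in a sketch; the `uri` field inside the options object and the `apiKey` field are assumptions, since this excerpt only shows the overload's outer type `Partial<ConnectionOptions> & object`:

```ts
import { connect } from "@lancedb/lancedb";

// Second overload: everything, including the uri, travels in one options object.
const db = await connect({
  uri: "db://my-remote-database",      // assumed field name in the `& object` part
  apiKey: process.env.LANCEDB_API_KEY, // assumed ConnectionOptions field
});
```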
**makeArrowTable() function page (functions/makeArrowTable.md), modified:**

- @@ -22,8 +22,6 ("when creating a table or adding data to it"): removes the note "Note that it currently does not support nulls."
- @@ -31,6 +29,9 ("is inferred it will always be nullable."): adds "If not all fields are found in the data, then a subset of the schema will be returned."
- @@ -38,6 +39,7 (type-inference rules): adds `- bigint => Int64` to the list, alongside boolean => Bool, number => Float64, String => Utf8, Buffer => Binary, and Record<String, any> => Struct.
- @@ -57,6 +59,7 (the first example, importing `fromTableToBuffer` and `makeArrowTable` from "../arrow" and the field classes from "apache-arrow"): the snippet now opens with a `ts` code fence.
- @@ -78,42 +81,40 (the `vectorColumns` example): the fence language is normalized from `typescript` to `ts`; the example itself (a schema with `vec1` and `vec2` as Float16 fixed-size lists, built via the `vectorColumns` option and checked with `assert.deepEqual(table.schema, schema)`) is unchanged.
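A sketch of the new `bigint => Int64` inference rule (not from the diff; field names are illustrative):

```ts
import { makeArrowTable } from "@lancedb/lancedb";

// bigint values are inferred as Int64; inferred fields are always nullable.
const table = makeArrowTable([
  { id: 1n, score: 0.5 },
  { id: 2n, score: 0.75 },
]);
console.log(table.schema.fields.map((f) => `${f.name}: ${f.type}`));
// expected along the lines of: [ "id: Int64", "score: Float64" ]
```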
**New file: docs/src/js/functions/packBits.md (19 lines)**

# Function: packBits()

`function packBits(data): number[]`

* **Parameters**: **data**: `number`[]
* **Returns**: `number`[]
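The page documents only the signature, so the interpretation in this sketch, packing an array of 0/1 flags into bytes as one would for a binary vector column, is an assumption:

```ts
import { packBits } from "@lancedb/lancedb";

// Assumed semantics: eight bit flags packed into a single byte value.
const packed = packBits([1, 0, 1, 1, 0, 0, 0, 1]);
console.log(packed); // a number[] of packed bytes, if the assumption holds
```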
**globals.md (package index), modified:**

- @@ -7,16 +7,14 (Namespaces): adds `- [rerankers](namespaces/rerankers/README.md)` and removes the `## Enumerations` section, which contained only [WriteMode](enumerations/WriteMode.md); under Classes, adds `- [MergeInsertBuilder](classes/MergeInsertBuilder.md)`.
- @@ -30,25 +28,43 (Interfaces): adds CompactionStats, FtsOptions, FullTextSearchOptions, HnswPqOptions, HnswSqOptions, IvfFlatOptions, OpenTableOptions, OptimizeStats, QueryExecutionOptions, RemovalStats, and Version; removes WriteOptions. Under Type Aliases, adds DataLike, FieldLike, IntoSql, IntoVector, RecordBatchLike, SchemaLike, and TableLike alongside Data. Under Functions, adds `- [packBits](functions/packBits.md)`.
**ClientConfig interface page (ClientConfig.md), modified:**

- @@ -8,6 +8,14 (Properties): adds a new optional property ahead of `retryConfig?`: `### extraHeaders?`, declared as `optional extraHeaders: Record<string, string>;`.
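A remote-connection sketch showing where `extraHeaders` plugs in; the surrounding fields (`apiKey`, `clientConfig`) are assumptions, as they are not part of this excerpt:

```ts
import { connect } from "@lancedb/lancedb";

const remote = await connect("db://my-remote-database", {
  apiKey: process.env.LANCEDB_API_KEY, // assumed ConnectionOptions field
  clientConfig: {                      // assumed to accept a ClientConfig
    extraHeaders: { "x-request-source": "docs-example" },
  },
});
```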
**ColumnAlteration interface page (ColumnAlteration.md), modified:**

- @@ -16,7 +16,7 ("must be provided."): the `dataType?` property widens from `optional dataType: string;` to `optional dataType: string | DataType<Type, any>;` ("A new data type for the column. If not provided then the data type will not be changed.").
**New file: docs/src/js/interfaces/CompactionStats.md (49 lines)**

# Interface: CompactionStats

Statistics about a compaction operation.

## Properties

* **filesAdded** (`number`): the number of new, compacted data files added
* **filesRemoved** (`number`): the number of data files removed
* **fragmentsAdded** (`number`): the number of new, compacted fragments added
* **fragmentsRemoved** (`number`): the number of fragments removed
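These counters are most likely surfaced through `Table#optimize`; the sketch below assumes `OptimizeStats` exposes a `compaction` field of this shape, which is not confirmed by this excerpt:

```ts
const stats = await table.optimize();
console.log(
  `compaction: +${stats.compaction.filesAdded} / -${stats.compaction.filesRemoved} data files`,
);
```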
**CreateTableOptions interface page (CreateTableOptions.md), modified:**

- @@ -8,7 +8,7 and @@ -19,6 +19,10: `dataStorageVersion?` is now marked deprecated (heading struck through as `~~dataStorageVersion?~~`) with the note "Pass `new_table_data_storage_version` to storageOptions instead."; its description ("The version of the data storage format to use.", default `stable`, set to "legacy" for the old format) is unchanged.
- @@ -29,7 +33,7 and @@ -41,6 +45,10: `enableV2ManifestPaths?` is likewise marked deprecated, with the note "Pass `new_table_enable_v2_manifest_paths` to storageOptions instead." (its warning about datasets becoming unreadable for LanceDB prior to 0.10.0, and the pointer to `LocalTable#migrateManifestPathsV2` for existing datasets, are unchanged).
- @@ -90,17 +98,3 ("but can be overridden here... The available options are described at https://lancedb.github.io/lancedb/guides/storage/"): removes the `useLegacyFormat?` property entirely (it was already described as deprecated in favor of the data storage version).
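A sketch of the migration the deprecation notes point at, routing the storage-format switches through `storageOptions` (the option keys come from the notes above; their value formats are assumptions):

```ts
const rows = [{ id: "a", vector: [1, 2, 3, 4] }];

// Instead of dataStorageVersion / enableV2ManifestPaths on CreateTableOptions:
await db.createTable("items_v2", rows, {
  storageOptions: {
    new_table_data_storage_version: "stable",   // value format assumed
    new_table_enable_v2_manifest_paths: "true", // value format assumed
  },
});
```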
**New file: docs/src/js/interfaces/FtsOptions.md (103 lines)**

# Interface: FtsOptions

Options to create a full text search index.

## Properties

* **asciiFolding?** (`boolean`): whether to remove punctuation
* **baseTokenizer?** (`"raw" | "simple" | "whitespace"`): the tokenizer to use when building the index (default `"simple"`). "simple" splits the text into tokens using whitespace and punctuation as delimiters, "whitespace" splits on whitespace only, and "raw" does not split at all and indexes the entire text as a single token.
* **language?** (`string`): language for stemming and stop words; only used when `stem` or `removeStopWords` is true
* **lowercase?** (`boolean`): whether to lowercase tokens
* **maxTokenLength?** (`number`): maximum token length; tokens longer than this are ignored
* **removeStopWords?** (`boolean`): whether to remove stop words
* **stem?** (`boolean`): whether to stem tokens
* **withPosition?** (`boolean`): whether to build the index with positions (`true` by default). If set to false, the index will not store the positions of the tokens in the text, which makes it smaller and faster to build, but phrase queries will not be supported.
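A sketch of a tuned full-text index using these options (the `config` field of `IndexOptions` is assumed, as in the earlier index sketch, and the column name and language value are illustrative):

```ts
await table.createIndex("description", {
  config: lancedb.Index.fts({
    stem: true,
    removeStopWords: true,
    language: "English",  // value format assumed
    lowercase: true,
    withPosition: true,   // keep positions so phrase queries stay possible
  }),
});
```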
**New file: docs/src/js/interfaces/FullTextSearchOptions.md (22 lines)**

# Interface: FullTextSearchOptions

Options that control the behavior of a full text search.

## Properties

* **columns?** (`string | string[]`): the columns to search. If not specified, all indexed columns will be searched. For now, only one column can be searched.
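Query-time usage of this option through `fullTextSearch()` (a sketch; the query string and column name are illustrative):

```ts
// Restrict the full text search to a single indexed column.
const matches = await table
  .query()
  .fullTextSearch("wireless headphones", { columns: "description" })
  .limit(10)
  .toArray();
```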
**New file: docs/src/js/interfaces/HnswPqOptions.md (149 lines)**

# Interface: HnswPqOptions

Options to create an `HNSW_PQ` index.

## Properties

* **distanceType?** (`"l2" | "cosine" | "dot"`): the distance metric used to train the index (default `"l2"`). "l2" is Euclidean distance, which accounts for both magnitude and direction and has a range of [0, ∞). "cosine" is derived from cosine similarity, is unaffected by vector magnitude, and has a range of [0, 2]. "dot" is the dot product, with a range of (-∞, ∞); for normalized vectors (l2 norm of 1) it is equivalent to cosine distance.
* **efConstruction?** (`number`): the number of candidates to evaluate during construction of the HNSW graph (default 300). This trades build speed against accuracy: higher is more accurate but slower. 150 to 300 is the typical range, 100 is a minimum for good quality search results, and in most cases there is no benefit beyond 500. It should not be set lower than `ef` in the search phase.
* **m?** (`number`): the number of neighbors to select for each vector in the HNSW graph (default 20). This trades search speed against accuracy: higher is more accurate but slower.
* **maxIterations?** (`number`): max iterations to train kmeans (default 50). Training the IVF partitions uses kmeans, and this controls how many iterations to run. Increasing it might improve index quality, but in most cases the parameter is unused because kmeans converges in fewer iterations; when kmeans does not appear to converge, a larger value is unlikely to make it converge anyway.
* **numPartitions?** (`number`): the number of IVF partitions to create. For HNSW a small number is recommended; setting this to 1 works well for most tables. Each partition becomes its own HNSW graph, so for very large tables, where training a single graph would require too much memory, a higher value reduces the peak memory used during training.
* **numSubVectors?** (`number`): the number of PQ sub-vectors, which controls how much vectors are compressed during quantization (more sub-vectors means less compression). The default is the vector dimension divided by 16, or divided by 8 when the dimension is not evenly divisible by 16; these two cases are highly preferred because 8 or 16 values per sub-vector allows efficient SIMD instructions. If the dimension is not divisible by 8 either, a single sub-vector is used, which is not ideal and will likely result in poor performance.
* **sampleRate?** (`number`): the rate used to calculate the number of kmeans training vectors (default 256). Partitions are computed by running kmeans over a random sample of the data; the total number of training vectors is `sample_rate * num_partitions`. Increasing it might improve index quality, but in most cases the default is sufficient.
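A sketch of an `HNSW_PQ` index built with these options (again assuming the `config` field of `IndexOptions`; the values are illustrative, with the defaults noted above):

```ts
await table.createIndex("vector", {
  config: lancedb.Index.hnswPq({
    distanceType: "cosine",
    m: 20,
    efConstruction: 300,
    numPartitions: 1, // one HNSW graph is usually enough
  }),
});
```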
128 docs/src/js/interfaces/HnswSqOptions.md Normal file
@@ -0,0 +1,128 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / HnswSqOptions

# Interface: HnswSqOptions

Options to create an `HNSW_SQ` index

## Properties

### distanceType?

```ts
optional distanceType: "l2" | "cosine" | "dot";
```

The distance metric used to train the index.

Default value is "l2".

The following distance types are available:

"l2" - Euclidean distance. This is a very common distance metric that
accounts for both magnitude and direction when determining the distance
between vectors. l2 distance has a range of [0, ∞).

"cosine" - Cosine distance. Cosine distance is a distance metric
calculated from the cosine similarity between two vectors. Cosine
similarity is a measure of similarity between two non-zero vectors of an
inner product space. It is defined to equal the cosine of the angle
between them. Unlike l2, the cosine distance is not affected by the
magnitude of the vectors. Cosine distance has a range of [0, 2].

"dot" - Dot product. Dot distance is the dot product of two vectors. Dot
distance has a range of (-∞, ∞). If the vectors are normalized (i.e. their
l2 norm is 1), then dot distance is equivalent to the cosine distance.

***

### efConstruction?

```ts
optional efConstruction: number;
```

The number of candidates to evaluate during the construction of the HNSW graph.

The default value is 300.

This value controls the tradeoff between build speed and accuracy.
The higher the value the more accurate the build but the slower it will be.
150 to 300 is the typical range. 100 is a minimum for good quality search
results. In most cases, there is no benefit to setting this higher than 500.
This value should be set to a value that is not less than `ef` in the search phase.

***

### m?

```ts
optional m: number;
```

The number of neighbors to select for each vector in the HNSW graph.

The default value is 20.

This value controls the tradeoff between search speed and accuracy.
The higher the value the more accurate the search but the slower it will be.

***

### maxIterations?

```ts
optional maxIterations: number;
```

Max iterations to train kmeans.

The default value is 50.

When training an IVF index we use kmeans to calculate the partitions. This parameter
controls how many iterations of kmeans to run.

Increasing this might improve the quality of the index but in most cases the parameter
is unused because kmeans will converge with fewer iterations. The parameter is only
used in cases where kmeans does not appear to converge. In those cases it is unlikely
that setting this larger will lead to the index converging anyways.

***

### numPartitions?

```ts
optional numPartitions: number;
```

The number of IVF partitions to create.

For HNSW, we recommend a small number of partitions. Setting this to 1 works
well for most tables. For very large tables, training just one HNSW graph
will require too much memory. Each partition becomes its own HNSW graph, so
setting this value higher reduces the peak memory use of training.

***

### sampleRate?

```ts
optional sampleRate: number;
```

The rate used to calculate the number of training vectors for kmeans.

Default value is 256.

When an IVF index is trained, we need to calculate partitions. These are groups
of vectors that are similar to each other. To do this we use an algorithm called kmeans.

Running kmeans on a large dataset can be slow. To speed this up we run kmeans on a
random sample of the data. This parameter controls the size of the sample. The total
number of vectors used to train the index is `sample_rate * num_partitions`.

Increasing this value might improve the quality of the index but in most cases the
default should be sufficient.
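For orientation, here is a minimal sketch of how these options might be passed when building an index with the TypeScript SDK. It assumes an existing table with a `vector` column and that `lancedb.Index.hnswSq` accepts the `HnswSqOptions` documented above; the database path, table name, and option values are illustrative only.

```ts
import * as lancedb from "@lancedb/lancedb";

async function buildHnswSqIndex() {
  // Assumed setup: a local database with a table that has a "vector" column.
  const db = await lancedb.connect("/tmp/lancedb-example");
  const table = await db.openTable("my_table");

  // Build an HNSW_SQ index. Every option is optional and falls back to the
  // defaults described above (l2, m = 20, efConstruction = 300, ...).
  await table.createIndex("vector", {
    config: lancedb.Index.hnswSq({
      distanceType: "cosine",
      m: 20,
      efConstruction: 300,
    }),
  });
}
```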
@@ -30,6 +30,17 @@ The type of the index

***

+### loss?
+
+```ts
+optional loss: number;
+```
+
+The KMeans loss value of the index.
+It is only present for vector indices.
+
+***

### numIndexedRows

```ts
112 docs/src/js/interfaces/IvfFlatOptions.md Normal file
@@ -0,0 +1,112 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / IvfFlatOptions

# Interface: IvfFlatOptions

Options to create an `IVF_FLAT` index

## Properties

### distanceType?

```ts
optional distanceType: "l2" | "cosine" | "dot" | "hamming";
```

Distance type to use to build the index.

Default value is "l2".

This is used when training the index to calculate the IVF partitions
(vectors are grouped in partitions with similar vectors according to this
distance type).

The distance type used to train an index MUST match the distance type used
to search the index. Failure to do so will yield inaccurate results.

The following distance types are available:

"l2" - Euclidean distance. This is a very common distance metric that
accounts for both magnitude and direction when determining the distance
between vectors. l2 distance has a range of [0, ∞).

"cosine" - Cosine distance. Cosine distance is a distance metric
calculated from the cosine similarity between two vectors. Cosine
similarity is a measure of similarity between two non-zero vectors of an
inner product space. It is defined to equal the cosine of the angle
between them. Unlike l2, the cosine distance is not affected by the
magnitude of the vectors. Cosine distance has a range of [0, 2].

Note: the cosine distance is undefined when one (or both) of the vectors
are all zeros (there is no direction). These vectors are invalid and may
never be returned from a vector search.

"dot" - Dot product. Dot distance is the dot product of two vectors. Dot
distance has a range of (-∞, ∞). If the vectors are normalized (i.e. their
l2 norm is 1), then dot distance is equivalent to the cosine distance.

"hamming" - Hamming distance. Hamming distance is a distance metric
calculated from the number of bits that are different between two vectors.
Hamming distance has a range of [0, dimension]. Note that the hamming distance
is only valid for binary vectors.

***

### maxIterations?

```ts
optional maxIterations: number;
```

Max iterations to train IVF kmeans.

When training an IVF FLAT index we use kmeans to calculate the partitions. This parameter
controls how many iterations of kmeans to run.

Increasing this might improve the quality of the index but in most cases these extra
iterations have diminishing returns.

The default value is 50.

***

### numPartitions?

```ts
optional numPartitions: number;
```

The number of IVF partitions to create.

This value should generally scale with the number of rows in the dataset.
By default the number of partitions is the square root of the number of
rows.

If this value is too large then the first part of the search (picking the
right partition) will be slow. If this value is too small then the second
part of the search (searching within a partition) will be slow.

***

### sampleRate?

```ts
optional sampleRate: number;
```

The number of vectors, per partition, to sample when training IVF kmeans.

When an IVF FLAT index is trained, we need to calculate partitions. These are groups
of vectors that are similar to each other. To do this we use an algorithm called kmeans.

Running kmeans on a large dataset can be slow. To speed this up we run kmeans on a
random sample of the data. This parameter controls the size of the sample. The total
number of vectors used to train the index is `sample_rate * num_partitions`.

Increasing this value might improve the quality of the index but in most cases the
default should be sufficient.

The default value is 256.
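As with the other index types, these options would be passed through an index builder. A minimal sketch, assuming an `Index.ivfFlat` builder that accepts `IvfFlatOptions` (note that the `hamming` distance type is only meaningful for binary vectors); the values shown are illustrative:

```ts
import * as lancedb from "@lancedb/lancedb";

async function buildIvfFlatIndex(table: lancedb.Table) {
  // 256 partitions is a guess for a mid-sized table; the default
  // (square root of the row count) is usually a reasonable starting point.
  await table.createIndex("vector", {
    config: lancedb.Index.ivfFlat({ distanceType: "l2", numPartitions: 256 }),
  });
}
```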
@@ -31,13 +31,13 @@ The following distance types are available:

"l2" - Euclidean distance. This is a very common distance metric that
accounts for both magnitude and direction when determining the distance
-between vectors. L2 distance has a range of [0, ∞).
+between vectors. l2 distance has a range of [0, ∞).

"cosine" - Cosine distance. Cosine distance is a distance metric
calculated from the cosine similarity between two vectors. Cosine
similarity is a measure of similarity between two non-zero vectors of an
inner product space. It is defined to equal the cosine of the angle
-between them. Unlike L2, the cosine distance is not affected by the
+between them. Unlike l2, the cosine distance is not affected by the
magnitude of the vectors. Cosine distance has a range of [0, 2].

Note: the cosine distance is undefined when one (or both) of the vectors
@@ -46,7 +46,7 @@ never be returned from a vector search.

"dot" - Dot product. Dot distance is the dot product of two vectors. Dot
distance has a range of (-∞, ∞). If the vectors are normalized (i.e. their
-L2 norm is 1), then dot distance is equivalent to the cosine distance.
+l2 norm is 1), then dot distance is equivalent to the cosine distance.

***

@@ -68,6 +68,21 @@ The default value is 50.

***

+### numBits?
+
+```ts
+optional numBits: number;
+```
+
+Number of bits per sub-vector.
+
+This value controls how much each subvector is compressed. The more bits the more
+accurate the index will be but the slower search. The default is 8 bits.
+
+The number of bits must be 4 or 8.
+
+***

### numPartitions?

```ts
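The new `numBits` option trades index size against accuracy together with the number of sub-vectors. A minimal sketch, assuming `Index.ivfPq` accepts these options; table, column, and values are illustrative:

```ts
import * as lancedb from "@lancedb/lancedb";

async function buildIvfPqIndex(table: lancedb.Table) {
  await table.createIndex("vector", {
    config: lancedb.Index.ivfPq({
      numPartitions: 256,
      numSubVectors: 16, // the vector dimension should be divisible by this value
      numBits: 8,        // 4 or 8; fewer bits mean a smaller index but lower accuracy
    }),
  });
}
```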
40 docs/src/js/interfaces/OpenTableOptions.md Normal file
@@ -0,0 +1,40 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / OpenTableOptions

# Interface: OpenTableOptions

## Properties

### indexCacheSize?

```ts
optional indexCacheSize: number;
```

Set the size of the index cache, specified as a number of entries

The exact meaning of an "entry" will depend on the type of index:
- IVF: there is one entry for each IVF partition
- BTREE: there is one entry for the entire index

This cache applies to the entire opened table, across all indices.
Setting this value higher will increase performance on larger datasets
at the expense of more RAM

***

### storageOptions?

```ts
optional storageOptions: Record<string, string>;
```

Configuration for object storage.

Options already set on the connection will be inherited by the table,
but can be overridden here.

The available options are described at https://lancedb.github.io/lancedb/guides/storage/
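A short sketch of how these options might be supplied when opening a table. The bucket name is made up, and the storage key shown is only an example of the options documented at the link above:

```ts
import * as lancedb from "@lancedb/lancedb";

async function openWithOptions() {
  const db = await lancedb.connect("s3://my-bucket/my-db");
  // A larger index cache speeds up repeated queries at the cost of more RAM.
  const table = await db.openTable("my_table", {
    indexCacheSize: 512,
    storageOptions: { timeout: "60s" },
  });
  return table;
}
```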
29 docs/src/js/interfaces/OptimizeStats.md Normal file
@@ -0,0 +1,29 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / OptimizeStats

# Interface: OptimizeStats

Statistics about an optimize operation

## Properties

### compaction

```ts
compaction: CompactionStats;
```

Statistics about the compaction operation

***

### prune

```ts
prune: RemovalStats;
```

Statistics about the removal operation
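These statistics are what an optimize call reports back. A minimal sketch, assuming `table.optimize()` resolves to an `OptimizeStats` object as described above (the `prune` fields are the `RemovalStats` documented later in this diff):

```ts
import type { Table } from "@lancedb/lancedb";

async function reportOptimize(table: Table) {
  const stats = await table.optimize();
  // Removal statistics from pruning old versions, plus the compaction summary.
  console.log("bytes removed:", stats.prune.bytesRemoved);
  console.log("old versions removed:", stats.prune.oldVersionsRemoved);
  console.log("compaction:", stats.compaction);
}
```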
22 docs/src/js/interfaces/QueryExecutionOptions.md Normal file
@@ -0,0 +1,22 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / QueryExecutionOptions

# Interface: QueryExecutionOptions

Options that control the behavior of a particular query execution

## Properties

### maxBatchLength?

```ts
optional maxBatchLength: number;
```

The maximum number of rows to return in a single batch

Batches may have fewer rows if the underlying data is stored
in smaller chunks.
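A sketch of where such options could be applied. It assumes the query's execution method (here `toArrow`) accepts a `QueryExecutionOptions` object, which may differ between SDK versions:

```ts
import type { Table } from "@lancedb/lancedb";

async function scanInSmallBatches(table: Table) {
  // Assumption: execution options are passed to toArrow(); this caps each
  // returned RecordBatch at 1024 rows.
  const result = await table.query().toArrow({ maxBatchLength: 1024 });
  console.log(result.numRows);
}
```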
29 docs/src/js/interfaces/RemovalStats.md Normal file
@@ -0,0 +1,29 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / RemovalStats

# Interface: RemovalStats

Statistics about a cleanup operation

## Properties

### bytesRemoved

```ts
bytesRemoved: number;
```

The number of bytes removed

***

### oldVersionsRemoved

```ts
oldVersionsRemoved: number;
```

The number of old versions removed
31 docs/src/js/interfaces/Version.md Normal file
@@ -0,0 +1,31 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / Version

# Interface: Version

## Properties

### metadata

```ts
metadata: Record<string, string>;
```

***

### timestamp

```ts
timestamp: Date;
```

***

### version

```ts
version: number;
```
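A sketch only: it assumes the table exposes a `listVersions()` method that resolves to the `Version` objects described above, which may not be available in every SDK version.

```ts
import type { Table, Version } from "@lancedb/lancedb";

// Print the table history: version number, commit time, and metadata.
async function printHistory(table: Table) {
  const versions: Version[] = await table.listVersions();
  for (const v of versions) {
    console.log(v.version, v.timestamp.toISOString(), v.metadata);
  }
}
```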
@@ -1,19 +0,0 @@
-[**@lancedb/lancedb**](../README.md) • **Docs**
-
-***
-
-[@lancedb/lancedb](../globals.md) / WriteOptions
-
-# Interface: WriteOptions
-
-Write options when creating a Table.
-
-## Properties
-
-### mode?
-
-```ts
-optional mode: WriteMode;
-```
-
-Write mode for writing to a table.
@@ -17,6 +17,14 @@

### Interfaces

- [EmbeddingFunctionConfig](interfaces/EmbeddingFunctionConfig.md)
+- [EmbeddingFunctionConstructor](interfaces/EmbeddingFunctionConstructor.md)
+- [EmbeddingFunctionCreate](interfaces/EmbeddingFunctionCreate.md)
+- [FieldOptions](interfaces/FieldOptions.md)
+- [FunctionOptions](interfaces/FunctionOptions.md)
+
+### Type Aliases
+
+- [CreateReturnType](type-aliases/CreateReturnType.md)

### Functions
@@ -8,6 +8,23 @@

An embedding function that automatically creates vector representation for a given column.

+It's important subclasses pass the **original** options to the super constructor
+and then pass those options to `resolveVariables` to resolve any variables before
+using them.
+
+## Example
+
+```ts
+class MyEmbeddingFunction extends EmbeddingFunction {
+  constructor(optionsRaw: {model: string, timeout: number}) {
+    super(optionsRaw);
+    const options = this.resolveVariables(optionsRaw);
+    this.model = options.model;
+    this.timeout = options.timeout;
+  }
+}
+```

## Extended by

- [`TextEmbeddingFunction`](TextEmbeddingFunction.md)
@@ -16,7 +33,7 @@ An embedding function that automatically creates vector representation for a giv

• **T** = `any`

-• **M** *extends* `FunctionOptions` = `FunctionOptions`
+• **M** *extends* [`FunctionOptions`](../interfaces/FunctionOptions.md) = [`FunctionOptions`](../interfaces/FunctionOptions.md)

## Constructors
@@ -82,12 +99,33 @@ The datatype of the embeddings

***

+### getSensitiveKeys()
+
+```ts
+protected getSensitiveKeys(): string[]
+```
+
+Provide a list of keys in the function options that should be treated as
+sensitive. If users pass raw values for these keys, they will be rejected.
+
+#### Returns
+
+`string`[]
+
+***

### init()?

```ts
optional init(): Promise<void>
```

+Optionally load any resources needed for the embedding function.
+
+This method is called after the embedding function has been initialized
+but before any embeddings are computed. It is useful for loading local models
+or other resources that are needed for the embedding function to work.
+
#### Returns

`Promise`<`void`>
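In a subclass, this hook is simply overridden to name the option keys that must arrive as `$var:` references rather than raw values. A small sketch of a hypothetical subclass (declared `abstract` so the remaining abstract members of `EmbeddingFunction` can be omitted here):

```ts
import { EmbeddingFunction } from "@lancedb/lancedb/embedding";

// Sketch of a custom embedding function whose "apiKey" option must be
// supplied as a variable reference (e.g. "$var:api_key"), never as a raw string.
abstract class MySensitiveEmbedding extends EmbeddingFunction {
  protected getSensitiveKeys(): string[] {
    return ["apiKey"];
  }
}
```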
@@ -108,6 +146,24 @@ The number of dimensions of the embeddings

***

+### resolveVariables()
+
+```ts
+protected resolveVariables(config): Partial<M>
+```
+
+Apply variables to the config.
+
+#### Parameters
+
+* **config**: `Partial`<`M`>
+
+#### Returns
+
+`Partial`<`M`>
+
+***

### sourceField()

```ts
@@ -118,53 +174,31 @@ sourceField is used in combination with `LanceSchema` to provide a declarative d

#### Parameters

-* **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
+* **optionsOrDatatype**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
The options for the field or the datatype

#### Returns

-[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
+[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]

#### See

-lancedb.LanceSchema
+[LanceSchema](../functions/LanceSchema.md)

***

### toJSON()

```ts
-abstract toJSON(): Partial<M>
+toJSON(): Record<string, any>
```

-Convert the embedding function to a JSON object
-It is used to serialize the embedding function to the schema
-It's important that any object returned by this method contains all the necessary
-information to recreate the embedding function
-
-It should return the same object that was passed to the constructor
-If it does not, the embedding function will not be able to be recreated, or could be recreated incorrectly
+Get the original arguments to the constructor, to serialize them so they
+can be used to recreate the embedding function later.

#### Returns

-`Partial`<`M`>
+`Record`<`string`, `any`>

-#### Example
-
-```ts
-class MyEmbeddingFunction extends EmbeddingFunction {
-  constructor(options: {model: string, timeout: number}) {
-    super();
-    this.model = options.model;
-    this.timeout = options.timeout;
-  }
-  toJSON() {
-    return {
-      model: this.model,
-      timeout: this.timeout,
-    };
-  }
-```

***

@@ -178,12 +212,13 @@ vectorField is used in combination with `LanceSchema` to provide a declarative d

#### Parameters

-* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
+* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
+The options for the field

#### Returns

-[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
+[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]

#### See

-lancedb.LanceSchema
+[LanceSchema](../functions/LanceSchema.md)
@@ -51,7 +51,7 @@ Fetch an embedding function by name

#### Type Parameters

-• **T** *extends* [`EmbeddingFunction`](EmbeddingFunction.md)<`unknown`, `FunctionOptions`>
+• **T** *extends* [`EmbeddingFunction`](EmbeddingFunction.md)<`unknown`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>

#### Parameters

@@ -60,7 +60,7 @@ Fetch an embedding function by name

#### Returns

-`undefined` \| `EmbeddingFunctionCreate`<`T`>
+`undefined` \| [`EmbeddingFunctionCreate`](../interfaces/EmbeddingFunctionCreate.md)<`T`>

***

@@ -80,6 +80,28 @@ getTableMetadata(functions): Map<string, string>

***

+### getVar()
+
+```ts
+getVar(name): undefined | string
+```
+
+Get a variable.
+
+#### Parameters
+
+* **name**: `string`
+
+#### Returns
+
+`undefined` \| `string`
+
+#### See
+
+[setVar](EmbeddingFunctionRegistry.md#setvar)
+
+***

### length()

```ts
@@ -104,7 +126,7 @@ Register an embedding function

#### Type Parameters

-• **T** *extends* `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>> = `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>
+• **T** *extends* [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>> = [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>

#### Parameters

@@ -145,3 +167,31 @@ reset the registry to the initial state

#### Returns

`void`

+***
+
+### setVar()
+
+```ts
+setVar(name, value): void
+```
+
+Set a variable. These can be accessed in the embedding function
+configuration using the syntax `$var:variable_name`. If they are not
+set, an error will be thrown letting you know which key is unset. If you
+want to supply a default value, you can add an additional part in the
+configuration like so: `$var:variable_name:default_value`. Default values
+can be used for runtime configurations that are not sensitive, such as
+whether to use a GPU for inference.
+
+The name must not contain colons. The default value can contain colons.
+
+#### Parameters
+
+* **name**: `string`
+
+* **value**: `string`
+
+#### Returns
+
+`void`
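Putting `setVar`/`getVar` together with the `$var:` syntax described above, a configuration might look like the sketch below. The option keys and variable names are illustrative, not tied to a specific embedding integration; `getRegistry` is assumed to be importable from the embedding namespace.

```ts
import { getRegistry } from "@lancedb/lancedb/embedding";

// Store the secret once at startup...
getRegistry().setVar("api_key", process.env.MY_API_KEY ?? "");

// ...and reference it from an embedding function configuration. Only the
// "$var:" placeholder is persisted; the raw value never leaves the registry.
const embeddingConfig = {
  apiKey: "$var:api_key",
  // A default after a second colon is allowed for non-sensitive settings.
  device: "$var:device:cpu",
};

console.log("api_key is set:", getRegistry().getVar("api_key") !== undefined);
```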
@@ -14,7 +14,7 @@ an abstract class for implementing embedding functions that take text as input

## Type Parameters

-• **M** *extends* `FunctionOptions` = `FunctionOptions`
+• **M** *extends* [`FunctionOptions`](../interfaces/FunctionOptions.md) = [`FunctionOptions`](../interfaces/FunctionOptions.md)

## Constructors

@@ -114,12 +114,37 @@ abstract generateEmbeddings(texts, ...args): Promise<number[][] | Float32Array[]

***

+### getSensitiveKeys()
+
+```ts
+protected getSensitiveKeys(): string[]
+```
+
+Provide a list of keys in the function options that should be treated as
+sensitive. If users pass raw values for these keys, they will be rejected.
+
+#### Returns
+
+`string`[]
+
+#### Inherited from
+
+[`EmbeddingFunction`](EmbeddingFunction.md).[`getSensitiveKeys`](EmbeddingFunction.md#getsensitivekeys)
+
+***

### init()?

```ts
optional init(): Promise<void>
```

+Optionally load any resources needed for the embedding function.
+
+This method is called after the embedding function has been initialized
+but before any embeddings are computed. It is useful for loading local models
+or other resources that are needed for the embedding function to work.
+
#### Returns

`Promise`<`void`>
@@ -148,6 +173,28 @@ The number of dimensions of the embeddings

***

+### resolveVariables()
+
+```ts
+protected resolveVariables(config): Partial<M>
+```
+
+Apply variables to the config.
+
+#### Parameters
+
+* **config**: `Partial`<`M`>
+
+#### Returns
+
+`Partial`<`M`>
+
+#### Inherited from
+
+[`EmbeddingFunction`](EmbeddingFunction.md).[`resolveVariables`](EmbeddingFunction.md#resolvevariables)
+
+***

### sourceField()

```ts
@@ -158,11 +205,11 @@ sourceField is used in combination with `LanceSchema` to provide a declarative d

#### Returns

-[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
+[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]

#### See

-lancedb.LanceSchema
+[LanceSchema](../functions/LanceSchema.md)

#### Overrides

@@ -173,37 +220,15 @@ lancedb.LanceSchema

### toJSON()

```ts
-abstract toJSON(): Partial<M>
+toJSON(): Record<string, any>
```

-Convert the embedding function to a JSON object
-It is used to serialize the embedding function to the schema
-It's important that any object returned by this method contains all the necessary
-information to recreate the embedding function
-
-It should return the same object that was passed to the constructor
-If it does not, the embedding function will not be able to be recreated, or could be recreated incorrectly
+Get the original arguments to the constructor, to serialize them so they
+can be used to recreate the embedding function later.

#### Returns

-`Partial`<`M`>
+`Record`<`string`, `any`>

-#### Example
-
-```ts
-class MyEmbeddingFunction extends EmbeddingFunction {
-  constructor(options: {model: string, timeout: number}) {
-    super();
-    this.model = options.model;
-    this.timeout = options.timeout;
-  }
-  toJSON() {
-    return {
-      model: this.model,
-      timeout: this.timeout,
-    };
-  }
-```

#### Inherited from

@@ -221,15 +246,16 @@ vectorField is used in combination with `LanceSchema` to provide a declarative d

#### Parameters

-* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<`FieldOptions`<`DataType`<`Type`, `any`>>>
+* **optionsOrDatatype?**: `DataType`<`Type`, `any`> \| `Partial`<[`FieldOptions`](../interfaces/FieldOptions.md)<`DataType`<`Type`, `any`>>>
+The options for the field

#### Returns

-[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, `FunctionOptions`>>]
+[`DataType`<`Type`, `any`>, `Map`<`string`, [`EmbeddingFunction`](EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]

#### See

-lancedb.LanceSchema
+[LanceSchema](../functions/LanceSchema.md)

#### Inherited from
@@ -14,7 +14,7 @@ Create a schema with embedding functions.

## Parameters

-* **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>]>
+* **fields**: `Record`<`string`, `object` \| [`object`, `Map`<`string`, [`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>]>

## Returns

@@ -20,7 +20,7 @@ function register(name?): (ctor) => any

### Parameters

-* **ctor**: `EmbeddingFunctionConstructor`<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, `FunctionOptions`>>
+* **ctor**: [`EmbeddingFunctionConstructor`](../interfaces/EmbeddingFunctionConstructor.md)<[`EmbeddingFunction`](../classes/EmbeddingFunction.md)<`any`, [`FunctionOptions`](../interfaces/FunctionOptions.md)>>

### Returns
@@ -0,0 +1,27 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / EmbeddingFunctionConstructor

# Interface: EmbeddingFunctionConstructor<T>

## Type Parameters

• **T** *extends* [`EmbeddingFunction`](../classes/EmbeddingFunction.md) = [`EmbeddingFunction`](../classes/EmbeddingFunction.md)

## Constructors

### new EmbeddingFunctionConstructor()

```ts
new EmbeddingFunctionConstructor(modelOptions?): T
```

#### Parameters

* **modelOptions?**: `T`\[`"TOptions"`\]

#### Returns

`T`
@@ -0,0 +1,27 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / EmbeddingFunctionCreate

# Interface: EmbeddingFunctionCreate<T>

## Type Parameters

• **T** *extends* [`EmbeddingFunction`](../classes/EmbeddingFunction.md)

## Methods

### create()

```ts
create(options?): CreateReturnType<T>
```

#### Parameters

* **options?**: `T`\[`"TOptions"`\]

#### Returns

[`CreateReturnType`](../type-aliases/CreateReturnType.md)<`T`>
27 docs/src/js/namespaces/embedding/interfaces/FieldOptions.md Normal file
@@ -0,0 +1,27 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / FieldOptions

# Interface: FieldOptions<T>

## Type Parameters

• **T** *extends* `DataType` = `DataType`

## Properties

### datatype

```ts
datatype: T;
```

***

### dims?

```ts
optional dims: number;
```
@@ -0,0 +1,13 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / FunctionOptions

# Interface: FunctionOptions

Options for a given embedding function

## Indexable

\[`key`: `string`\]: `any`
@@ -0,0 +1,15 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [embedding](../README.md) / CreateReturnType

# Type Alias: CreateReturnType<T>

```ts
type CreateReturnType<T>: T extends object ? Promise<T> : T;
```

## Type Parameters

• **T**
17 docs/src/js/namespaces/rerankers/README.md Normal file
@@ -0,0 +1,17 @@
[**@lancedb/lancedb**](../../README.md) • **Docs**

***

[@lancedb/lancedb](../../globals.md) / rerankers

# rerankers

## Index

### Classes

- [RRFReranker](classes/RRFReranker.md)

### Interfaces

- [Reranker](interfaces/Reranker.md)
48 docs/src/js/namespaces/rerankers/classes/RRFReranker.md Normal file
@@ -0,0 +1,48 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [rerankers](../README.md) / RRFReranker

# Class: RRFReranker

Reranks the results using the Reciprocal Rank Fusion (RRF) algorithm.

## Methods

### rerankHybrid()

```ts
rerankHybrid(
   query,
   vecResults,
   ftsResults): Promise<RecordBatch<any>>
```

#### Parameters

* **query**: `string`

* **vecResults**: `RecordBatch`<`any`>

* **ftsResults**: `RecordBatch`<`any`>

#### Returns

`Promise`<`RecordBatch`<`any`>>

***

### create()

```ts
static create(k): Promise<RRFReranker>
```

#### Parameters

* **k**: `number` = `60`

#### Returns

`Promise`<[`RRFReranker`](RRFReranker.md)>
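A brief sketch of plugging the reranker into a hybrid query. It assumes the query builder exposes `fullTextSearch`, `nearestTo`, and `rerank`, and that the reranker is importable from a `rerankers` subpath; the table, column, and query text are illustrative, and exact method names may differ by SDK version.

```ts
import type { Table } from "@lancedb/lancedb";
import { RRFReranker } from "@lancedb/lancedb/rerankers"; // assumed import path

async function hybridSearch(table: Table, queryVec: number[]) {
  // k = 60 matches the documented default for RRF.
  const reranker = await RRFReranker.create(60);
  return table
    .query()
    .fullTextSearch("vector databases")
    .nearestTo(queryVec)
    .rerank(reranker)
    .limit(10)
    .toArray();
}
```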
30 docs/src/js/namespaces/rerankers/interfaces/Reranker.md Normal file
@@ -0,0 +1,30 @@
[**@lancedb/lancedb**](../../../README.md) • **Docs**

***

[@lancedb/lancedb](../../../globals.md) / [rerankers](../README.md) / Reranker

# Interface: Reranker

## Methods

### rerankHybrid()

```ts
rerankHybrid(
   query,
   vecResults,
   ftsResults): Promise<RecordBatch<any>>
```

#### Parameters

* **query**: `string`

* **vecResults**: `RecordBatch`<`any`>

* **ftsResults**: `RecordBatch`<`any`>

#### Returns

`Promise`<`RecordBatch`<`any`>>
11 docs/src/js/type-aliases/DataLike.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / DataLike

# Type Alias: DataLike

```ts
type DataLike: Data | object;
```
11 docs/src/js/type-aliases/FieldLike.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / FieldLike

# Type Alias: FieldLike

```ts
type FieldLike: Field | object;
```
19 docs/src/js/type-aliases/IntoSql.md Normal file
@@ -0,0 +1,19 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / IntoSql

# Type Alias: IntoSql

```ts
type IntoSql:
  | string
  | number
  | boolean
  | null
  | Date
  | ArrayBufferLike
  | Buffer
  | IntoSql[];
```
11 docs/src/js/type-aliases/IntoVector.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / IntoVector

# Type Alias: IntoVector

```ts
type IntoVector: Float32Array | Float64Array | number[] | Promise<Float32Array | Float64Array | number[]>;
```
11 docs/src/js/type-aliases/RecordBatchLike.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / RecordBatchLike

# Type Alias: RecordBatchLike

```ts
type RecordBatchLike: RecordBatch | object;
```
11 docs/src/js/type-aliases/SchemaLike.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / SchemaLike

# Type Alias: SchemaLike

```ts
type SchemaLike: Schema | object;
```
11 docs/src/js/type-aliases/TableLike.md Normal file
@@ -0,0 +1,11 @@
[**@lancedb/lancedb**](../README.md) • **Docs**

***

[@lancedb/lancedb](../globals.md) / TableLike

# Type Alias: TableLike

```ts
type TableLike: ArrowTable | object;
```
667 docs/src/notebooks/Multivector_on_LanceDB.ipynb Normal file
File diff suppressed because one or more lines are too long
@@ -1,17 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2023 LanceDB Developers
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
"""Dataset hf://poloclub/diffusiondb
"""
@@ -114,14 +114,17 @@
    }
   ],
   "source": [
-    "data = [\n",
-    "    {\"vector\": [1.1, 1.2], \"lat\": 45.5, \"long\": -122.7},\n",
-    "    {\"vector\": [0.2, 1.8], \"lat\": 40.1, \"long\": -74.1},\n",
-    "]\n",
+    "import pandas as pd\n",
    "\n",
-    "db.create_table(\"table2\", data)\n",
-    "\n",
-    "db[\"table2\"].head() "
+    "data = pd.DataFrame(\n",
+    "    {\n",
+    "        \"vector\": [[1.1, 1.2, 1.3, 1.4], [0.2, 1.8, 0.4, 3.6]],\n",
+    "        \"lat\": [45.5, 40.1],\n",
+    "        \"long\": [-122.7, -74.1],\n",
+    "    }\n",
+    ")\n",
+    "db.create_table(\"my_table_pandas\", data)\n",
+    "db[\"my_table_pandas\"].head()"
   ]
  },
  {
@@ -164,7 +167,7 @@
    "import pyarrow as pa\n",
    "\n",
    "custom_schema = pa.schema([\n",
-    "pa.field(\"vector\", pa.list_(pa.float32(), 2)),\n",
+    "pa.field(\"vector\", pa.list_(pa.float32(), 4)),\n",
    "pa.field(\"lat\", pa.float32()),\n",
    "pa.field(\"long\", pa.float32())\n",
    "])\n",
@@ -9,23 +9,50 @@ LanceDB supports [Polars](https://github.com/pola-rs/polars), a blazingly fast D

First, we connect to a LanceDB database.

+=== "Sync API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:import-lancedb"
+    --8<-- "python/python/tests/docs/test_python.py:connect_to_lancedb"
+    ```
+
+=== "Async API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:import-lancedb"
+    --8<-- "python/python/tests/docs/test_python.py:connect_to_lancedb_async"
+    ```
+
-```py
---8<-- "python/python/tests/docs/test_python.py:import-lancedb"
---8<-- "python/python/tests/docs/test_python.py:connect_to_lancedb"
-```

We can load a Polars `DataFrame` to LanceDB directly.

-```py
---8<-- "python/python/tests/docs/test_python.py:import-polars"
---8<-- "python/python/tests/docs/test_python.py:create_table_polars"
-```
+=== "Sync API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:import-polars"
+    --8<-- "python/python/tests/docs/test_python.py:create_table_polars"
+    ```
+
+=== "Async API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:import-polars"
+    --8<-- "python/python/tests/docs/test_python.py:create_table_polars_async"
+    ```

We can now perform similarity search via the LanceDB Python API.

-```py
---8<-- "python/python/tests/docs/test_python.py:vector_search_polars"
-```
+=== "Sync API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:vector_search_polars"
+    ```
+
+=== "Async API"
+
+    ```py
+    --8<-- "python/python/tests/docs/test_python.py:vector_search_polars_async"
+    ```

In addition to the selected columns, LanceDB also returns a vector
and also the `_distance` column which is the distance between the query
@@ -112,4 +139,3 @@ The reason it's beneficial to not convert the LanceDB Table
to a DataFrame is because the table can potentially be way larger
than memory, and Polars LazyFrames allow us to work with such
larger-than-memory datasets by not loading it into memory all at once.
-
@@ -2,14 +2,19 @@

[Pydantic](https://docs.pydantic.dev/latest/) is a data validation library in Python.
LanceDB integrates with Pydantic for schema inference, data ingestion, and query result casting.
+Using [LanceModel][lancedb.pydantic.LanceModel], users can seamlessly
+integrate Pydantic with the rest of the LanceDB APIs.

-## Schema
-
-LanceDB supports to create Apache Arrow Schema from a
-[Pydantic BaseModel](https://docs.pydantic.dev/latest/api/main/#pydantic.main.BaseModel)
-via [pydantic_to_schema()](python.md#lancedb.pydantic.pydantic_to_schema) method.
-
-::: lancedb.pydantic.pydantic_to_schema
+```python
+--8<-- "python/python/tests/docs/test_pydantic_integration.py:imports"
+--8<-- "python/python/tests/docs/test_pydantic_integration.py:base_model"
+--8<-- "python/python/tests/docs/test_pydantic_integration.py:set_url"
+--8<-- "python/python/tests/docs/test_pydantic_integration.py:base_example"
+```

## Vector Field

@@ -34,3 +39,9 @@ Current supported type conversions:
| `list` | `pyarrow.List` |
| `BaseModel` | `pyarrow.Struct` |
| `Vector(n)` | `pyarrow.FixedSizeList(float32, n)` |
+
+LanceDB supports to create Apache Arrow Schema from a
+[Pydantic BaseModel][pydantic.BaseModel]
+via [pydantic_to_schema()](python.md#lancedb.pydantic.pydantic_to_schema) method.
+
+::: lancedb.pydantic.pydantic_to_schema
Some files were not shown because too many files have changed in this diff.