Mirror of https://github.com/lancedb/lancedb.git (synced 2025-12-23 05:19:58 +00:00)

Compare commits: 118 commits, python-v0.… ... python-v0.…
| SHA1 |
|---|
| bc0814767b |
| 8960a8e535 |
| a8568ddc72 |
| 55f88346d0 |
| dfb9a28795 |
| a797f5fe59 |
| 3cd84c9375 |
| 5ca83fdc99 |
| 33cc9b682f |
| b3e5ac6d2a |
| 0fe844034d |
| f41eb899dc |
| e7022b990e |
| ea86dad4b7 |
| a45656b8b6 |
| bc19a75f65 |
| 8e348ab4bd |
| 96914a619b |
| 3c62806b6a |
| 72f339a0b3 |
| b9e3cfbdca |
| 5e30648f45 |
| 76fc16c7a1 |
| 007f9c1af8 |
| 27e4ad3f11 |
| df42943ccf |
| 3eec9ea740 |
| 11fcdb1194 |
| 95a5a0d713 |
| c3043a54c6 |
| d5586c9c32 |
| d39e7d23f4 |
| ddceda4ff7 |
| 70f92f19a6 |
| d9fb6457e1 |
| 56b4fd2bd9 |
| 7c133ec416 |
| 1dbb4cd1e2 |
| af65417d19 |
| 01dd6c5e75 |
| 1e85b57c82 |
| 16eff254ea |
| 1b2463c5dd |
| 92f74f955f |
| 53b5ea3f92 |
| 291ed41c3e |
| fdda7b1a76 |
| eb2cbedf19 |
| bc139000bd |
| dbea3a7544 |
| 3bb7c546d7 |
| 2f4b70ecfe |
| 1ad1c0820d |
| db712b0f99 |
| fd1a5ce788 |
| def087fc85 |
| 43f920182a |
| 718963d1fb |
| e4dac751e7 |
| aae02953eb |
| 1d9f76bdda |
| affdfc4d48 |
| 41b77f5e25 |
| eb8b3b8c54 |
| f69c3e0595 |
| 8511edaaab |
| 657aba3c05 |
| 2e197ef387 |
| 4f512af024 |
| 5349e8b1db |
| 5e01810438 |
| 6eaaee59f8 |
| 055efdcdb6 |
| bc582bb702 |
| df9c41f342 |
| 0bd6ac945e |
| c9d5475333 |
| 3850d5fb35 |
| b37c58342e |
| a06e64f22d |
| e983198f0e |
| 76e7b4abf8 |
| 5f6eb4651e |
| 805c78bb20 |
| 4746281b21 |
| 7b3b6bdccd |
| 37e1124c0f |
| 93f037ee41 |
| e4fc06825a |
| fe89a373a2 |
| 3d3915edef |
| e2e8b6aee4 |
| 12dbca5248 |
| a6babfa651 |
| 75ede86fab |
| becd649130 |
| 9d2fb7d602 |
| fdb5d6fdf1 |
| 2f13fa225f |
| e933de003d |
| 05fd387425 |
| 82a1da554c |
| a7c0d80b9e |
| 71323a064a |
| df48454b70 |
| 6603414885 |
| c256f6c502 |
| cc03f90379 |
| 975da09b02 |
| c32e17b497 |
| 0528abdf97 |
| 1090c311e8 |
| e767cbb374 |
| 3d7c48feca |
| 08d62550bb |
| b272408b05 |
| 46ffa87cd4 |
| cd9fc37b95 |
@@ -1,22 +0,0 @@
[bumpversion]
current_version = 0.4.17
commit = True
message = Bump version: {current_version} → {new_version}
tag = True
tag_name = v{new_version}

[bumpversion:file:node/package.json]

[bumpversion:file:nodejs/package.json]

[bumpversion:file:nodejs/npm/darwin-x64/package.json]

[bumpversion:file:nodejs/npm/darwin-arm64/package.json]

[bumpversion:file:nodejs/npm/linux-x64-gnu/package.json]

[bumpversion:file:nodejs/npm/linux-arm64-gnu/package.json]

[bumpversion:file:rust/ffi/node/Cargo.toml]

[bumpversion:file:rust/lancedb/Cargo.toml]
.bumpversion.toml (new file, 57 lines)
@@ -0,0 +1,57 @@
[tool.bumpversion]
current_version = "0.5.2"
parse = """(?x)
    (?P<major>0|[1-9]\\d*)\\.
    (?P<minor>0|[1-9]\\d*)\\.
    (?P<patch>0|[1-9]\\d*)
    (?:-(?P<pre_l>[a-zA-Z-]+)\\.(?P<pre_n>0|[1-9]\\d*))?
"""
serialize = [
    "{major}.{minor}.{patch}-{pre_l}.{pre_n}",
    "{major}.{minor}.{patch}",
]
search = "{current_version}"
replace = "{new_version}"
regex = false
ignore_missing_version = false
ignore_missing_files = false
tag = true
sign_tags = false
tag_name = "v{new_version}"
tag_message = "Bump version: {current_version} → {new_version}"
allow_dirty = true
commit = true
message = "Bump version: {current_version} → {new_version}"
commit_args = ""

[tool.bumpversion.parts.pre_l]
values = ["beta", "final"]
optional_value = "final"

[[tool.bumpversion.files]]
filename = "node/package.json"
search = "\"version\": \"{current_version}\","
replace = "\"version\": \"{new_version}\","

[[tool.bumpversion.files]]
filename = "nodejs/package.json"
search = "\"version\": \"{current_version}\","
replace = "\"version\": \"{new_version}\","

# nodejs binary packages
[[tool.bumpversion.files]]
glob = "nodejs/npm/*/package.json"
search = "\"version\": \"{current_version}\","
replace = "\"version\": \"{new_version}\","

# Cargo files
# ------------
[[tool.bumpversion.files]]
filename = "rust/ffi/node/Cargo.toml"
search = "\nversion = \"{current_version}\""
replace = "\nversion = \"{new_version}\""

[[tool.bumpversion.files]]
filename = "rust/lancedb/Cargo.toml"
search = "\nversion = \"{current_version}\""
replace = "\nversion = \"{new_version}\""
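The `parse` regex above is what lets one config handle both stable and `-beta.N` versions. As a quick sanity check — a minimal sketch using Python's `re` module with the same verbose pattern, unescaped for Python — it splits a pre-release version into the named groups that `serialize` writes back out:

```python
import re

# Same (?x) verbose pattern as in .bumpversion.toml, with TOML's \\ unescaped.
PARSE = re.compile(r"""(?x)
    (?P<major>0|[1-9]\d*)\.
    (?P<minor>0|[1-9]\d*)\.
    (?P<patch>0|[1-9]\d*)
    (?:-(?P<pre_l>[a-zA-Z-]+)\.(?P<pre_n>0|[1-9]\d*))?
""")

print(PARSE.match("0.5.2-beta.0").groupdict())
# {'major': '0', 'minor': '5', 'patch': '2', 'pre_l': 'beta', 'pre_n': '0'}
print(PARSE.match("0.5.2").groupdict())
# {'major': '0', 'minor': '5', 'patch': '2', 'pre_l': None, 'pre_n': None}
```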
.github/labeler.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
version: 1
appendOnly: true
# Labels are applied based on conventional commits standard
# https://www.conventionalcommits.org/en/v1.0.0/
# These labels are later used in release notes. See .github/release.yml
labels:
  # If the PR title has an ! before the : it will be considered a breaking change
  # For example, `feat!: add new feature` will be considered a breaking change
  - label: breaking-change
    title: "^[^:]+!:.*"
  - label: breaking-change
    body: "BREAKING CHANGE"
  - label: enhancement
    title: "^feat(\\(.+\\))?!?:.*"
  - label: bug
    title: "^fix(\\(.+\\))?!?:.*"
  - label: documentation
    title: "^docs(\\(.+\\))?!?:.*"
  - label: performance
    title: "^perf(\\(.+\\))?!?:.*"
  - label: ci
    title: "^ci(\\(.+\\))?!?:.*"
  - label: chore
    title: "^(chore|test|build|style)(\\(.+\\))?!?:.*"
  - label: Python
    files:
      - "^python\\/.*"
  - label: Rust
    files:
      - "^rust\\/.*"
  - label: typescript
    files:
      - "^node\\/.*"
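To see how these title patterns behave, here is a small hedged Python check. The patterns are copied from the config above; the PR titles are made-up examples:

```python
import re

# Patterns copied from .github/labeler.yml; example titles are hypothetical.
rules = {
    "breaking-change": r"^[^:]+!:.*",
    "enhancement": r"^feat(\(.+\))?!?:.*",
    "bug": r"^fix(\(.+\))?!?:.*",
}

for title in ["feat!: drop the sync API", "feat(python): add reranking", "fix: close file handles"]:
    labels = [name for name, pat in rules.items() if re.match(pat, title)]
    print(f"{title!r} -> {labels}")
# 'feat!: drop the sync API' -> ['breaking-change', 'enhancement']
# 'feat(python): add reranking' -> ['enhancement']
# 'fix: close file handles' -> ['bug']
```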
.github/release_notes.json (new file, 41 lines)
@@ -0,0 +1,41 @@
{
  "ignore_labels": ["chore"],
  "pr_template": "- ${{TITLE}} by @${{AUTHOR}} in ${{URL}}",
  "categories": [
    {
      "title": "## 🏆 Highlights",
      "labels": ["highlight"]
    },
    {
      "title": "## 🛠 Breaking Changes",
      "labels": ["breaking-change"]
    },
    {
      "title": "## ⚠️ Deprecations ",
      "labels": ["deprecation"]
    },
    {
      "title": "## 🎉 New Features",
      "labels": ["enhancement"]
    },
    {
      "title": "## 🐛 Bug Fixes",
      "labels": ["bug"]
    },
    {
      "title": "## 📚 Documentation",
      "labels": ["documentation"]
    },
    {
      "title": "## 🚀 Performance Improvements",
      "labels": ["performance"]
    },
    {
      "title": "## Other Changes"
    },
    {
      "title": "## 🔧 Build and CI",
      "labels": ["ci"]
    }
  ]
}
@@ -46,6 +46,7 @@ runs:
      with:
        command: build
        working-directory: python
        docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
        target: aarch64-unknown-linux-gnu
        manylinux: ${{ inputs.manylinux }}
        args: ${{ inputs.args }}
.github/workflows/build_mac_wheel/action.yml (1 change)
@@ -21,5 +21,6 @@ runs:
      with:
        command: build
        args: ${{ inputs.args }}
        docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
        working-directory: python
        interpreter: 3.${{ inputs.python-minor-version }}
@@ -26,6 +26,7 @@ runs:
      with:
        command: build
        args: ${{ inputs.args }}
        docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
        working-directory: python
    - uses: actions/upload-artifact@v3
      with:
.github/workflows/cargo-publish.yml (8 changes)
@@ -1,8 +1,12 @@
name: Cargo Publish

on:
  release:
    types: [ published ]
  push:
    tags-ignore:
      # We don't publish pre-releases for Rust. Crates.io is just a source
      # distribution, so we don't need to publish pre-releases.
      - 'v*-beta*'
      - '*-v*' # for example, python-vX.Y.Z

env:
  # This env var is used by Swatinem/rust-cache@v2 for the cache
.github/workflows/dev.yml (new file, 81 lines)
@@ -0,0 +1,81 @@
name: PR Checks

on:
  pull_request_target:
    types: [opened, edited, synchronize, reopened]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  labeler:
    permissions:
      pull-requests: write
    name: Label PR
    runs-on: ubuntu-latest
    steps:
      - uses: srvaroa/labeler@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  commitlint:
    permissions:
      pull-requests: write
    name: Verify PR title / description conforms to semantic-release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: "18"
      # These rules are disabled because Github will always ensure there
      # is a blank line between the title and the body and Github will
      # word wrap the description field to ensure a reasonable max line
      # length.
      - run: npm install @commitlint/config-conventional
      - run: >
          echo 'module.exports = {
          "rules": {
          "body-max-line-length": [0, "always", Infinity],
          "footer-max-line-length": [0, "always", Infinity],
          "body-leading-blank": [0, "always"]
          }
          }' > .commitlintrc.js
      - run: npx commitlint --extends @commitlint/config-conventional --verbose <<< $COMMIT_MSG
        env:
          COMMIT_MSG: >
            ${{ github.event.pull_request.title }}

            ${{ github.event.pull_request.body }}
      - if: failure()
        uses: actions/github-script@v6
        with:
          script: |
            const message = `**ACTION NEEDED**

            Lance follows the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) for release automation.

            The PR title and description are used as the merge commit message.\
            Please update your PR title and description to match the specification.

            For details on the error please inspect the "PR Title Check" action.
            `
            // Get list of current comments
            const comments = await github.paginate(github.rest.issues.listComments, {
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number
            });
            // Check if this job already commented
            for (const comment of comments) {
              if (comment.body === message) {
                return // Already commented
              }
            }
            // Post the comment about Conventional Commits
            github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: message
            })
            core.setFailed(message)
.github/workflows/java.yml (new file, 85 lines)
@@ -0,0 +1,85 @@
name: Build and Run Java JNI Tests
on:
  push:
    branches:
      - main
  pull_request:
    paths:
      - java/**
      - rust/**
      - .github/workflows/java.yml
env:
  # This env var is used by Swatinem/rust-cache@v2 for the cache
  # key, so we set it to make sure it is always consistent.
  CARGO_TERM_COLOR: always
  # Disable full debug symbol generation to speed up CI build and keep memory down
  # "1" means line tables only, which is useful for panic tracebacks.
  RUSTFLAGS: "-C debuginfo=1"
  RUST_BACKTRACE: "1"
  # according to: https://matklad.github.io/2021/09/04/fast-rust-builds.html
  # CI builds are faster with incremental disabled.
  CARGO_INCREMENTAL: "0"
  CARGO_BUILD_JOBS: "1"
jobs:
  linux-build:
    runs-on: ubuntu-22.04
    name: ubuntu-22.04 + Java 11 & 17
    defaults:
      run:
        working-directory: ./java
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - uses: Swatinem/rust-cache@v2
        with:
          workspaces: java/core/lancedb-jni
      - name: Run cargo fmt
        run: cargo fmt --check
        working-directory: ./java/core/lancedb-jni
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt install -y protobuf-compiler libssl-dev
      - name: Install Java 17
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: 17
          cache: "maven"
      - run: echo "JAVA_17=$JAVA_HOME" >> $GITHUB_ENV
      - name: Install Java 11
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: 11
          cache: "maven"
      - name: Java Style Check
        run: mvn checkstyle:check
      # Disable because of issues in lancedb rust core code
      # - name: Rust Clippy
      #   working-directory: java/core/lancedb-jni
      #   run: cargo clippy --all-targets -- -D warnings
      - name: Running tests with Java 11
        run: mvn clean test
      - name: Running tests with Java 17
        run: |
          export JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS \
            -XX:+IgnoreUnrecognizedVMOptions \
            --add-opens=java.base/java.lang=ALL-UNNAMED \
            --add-opens=java.base/java.lang.invoke=ALL-UNNAMED \
            --add-opens=java.base/java.lang.reflect=ALL-UNNAMED \
            --add-opens=java.base/java.io=ALL-UNNAMED \
            --add-opens=java.base/java.net=ALL-UNNAMED \
            --add-opens=java.base/java.nio=ALL-UNNAMED \
            --add-opens=java.base/java.util=ALL-UNNAMED \
            --add-opens=java.base/java.util.concurrent=ALL-UNNAMED \
            --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED \
            --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
            --add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
            --add-opens=java.base/sun.nio.cs=ALL-UNNAMED \
            --add-opens=java.base/sun.security.action=ALL-UNNAMED \
            --add-opens=java.base/sun.util.calendar=ALL-UNNAMED \
            --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
            -Djdk.reflect.useDirectMethodHandle=false \
            -Dio.netty.tryReflectionSetAccessible=true"
          JAVA_HOME=$JAVA_17 mvn clean test
.github/workflows/make-release-commit.yml (88 changes)
@@ -1,37 +1,62 @@
name: Create release commit

# This workflow increments versions, tags the version, and pushes it.
# When a tag is pushed, another workflow is triggered that creates a GH release
# and uploads the binaries. This workflow is only for creating the tag.

# This script will enforce that a minor version is incremented if there are any
# breaking changes since the last minor increment. However, it isn't able to
# differentiate between breaking changes in Node versus Python. If you wish to
# bypass this check, you can manually increment the version and push the tag.
on:
  workflow_dispatch:
    inputs:
      dry_run:
        description: 'Dry run (create the local commit/tags but do not push it)'
        required: true
        default: "false"
        type: choice
        options:
          - "true"
          - "false"
      part:
        default: false
        type: boolean
      type:
        description: 'What kind of release is this?'
        required: true
        default: 'patch'
        default: 'preview'
        type: choice
        options:
          - patch
          - minor
          - major
          - preview
          - stable
      python:
        description: 'Make a Python release'
        required: true
        default: true
        type: boolean
      other:
        description: 'Make a Node/Rust release'
        required: true
        default: true
        type: boolean
      bump-minor:
        description: 'Bump minor version'
        required: true
        default: false
        type: boolean

jobs:
  bump-version:
  make-release:
    # Creates tag and GH release. The GH release will trigger the build and release jobs.
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Check out main
        uses: actions/checkout@v4
      - name: Output Inputs
        run: echo "${{ toJSON(github.event.inputs) }}"
      - uses: actions/checkout@v4
        with:
          ref: main
          persist-credentials: false
          fetch-depth: 0
          lfs: true
          # It's important we use our token here, as the default token will NOT
          # trigger any workflows watching for new tags. See:
          # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
          token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
      - name: Set git configs for bumpversion
        shell: bash
        run: |
@@ -41,19 +66,34 @@ jobs:
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Bump version, create tag and commit
      - name: Bump Python version
        if: ${{ inputs.python }}
        working-directory: python
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          pip install bump2version
          bumpversion --verbose ${{ inputs.part }}
      - name: Push new version and tag
        if: ${{ inputs.dry_run }} == "false"
          # Need to get the commit before bumping the version, so we can
          # determine if there are breaking changes in the next step as well.
          echo "COMMIT_BEFORE_BUMP=$(git rev-parse HEAD)" >> $GITHUB_ENV

          pip install bump-my-version PyGithub packaging
          bash ../ci/bump_version.sh ${{ inputs.type }} ${{ inputs.bump-minor }} python-v
      - name: Bump Node/Rust version
        if: ${{ inputs.other }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          pip install bump-my-version PyGithub packaging
          bash ci/bump_version.sh ${{ inputs.type }} ${{ inputs.bump-minor }} v $COMMIT_BEFORE_BUMP
      - name: Push new version tag
        if: ${{ !inputs.dry_run }}
        uses: ad-m/github-push-action@master
        with:
          # Need to use PAT here too to trigger next workflow. See comment above.
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
          branch: main
          branch: ${{ github.ref }}
          tags: true
      - uses: ./.github/workflows/update_package_lock
        if: ${{ inputs.dry_run }} == "false"
        if: ${{ !inputs.dry_run && inputs.other }}
        with:
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}

          github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/nodejs.yml (3 changes)
@@ -52,8 +52,7 @@ jobs:
          cargo fmt --all -- --check
          cargo clippy --all --all-features -- -D warnings
          npm ci
          npm run lint
          npm run chkformat
          npm run lint-ci
  linux:
    name: Linux (NodeJS ${{ matrix.node-version }})
    timeout-minutes: 30
.github/workflows/npm-publish.yml (104 changes)
@@ -1,8 +1,9 @@
name: NPM Publish

on:
  release:
    types: [published]
  push:
    tags:
      - "v*"

jobs:
  node:
@@ -110,12 +111,11 @@ jobs:
            runner: ubuntu-latest
          - arch: aarch64
            # For successful fat LTO builds, we need a large runner to avoid OOM errors.
            runner: buildjet-16vcpu-ubuntu-2204-arm
            runner: warp-ubuntu-latest-arm64-4x
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Buildjet aarch64 runners have only 1.5 GB RAM per core, vs 3.5 GB per core for
      # x86_64 runners. To avoid OOM errors on ARM, we create a swap file.
      # To avoid OOM errors on ARM, we create a swap file.
      - name: Configure aarch64 build
        if: ${{ matrix.config.arch == 'aarch64' }}
        run: |
@@ -274,9 +274,15 @@ jobs:
        env:
          NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
        run: |
          # Tag beta as "preview" instead of default "latest". See lancedb
          # npm publish step for more info.
          if [[ $GITHUB_REF =~ refs/tags/v(.*)-beta.* ]]; then
            PUBLISH_ARGS="--tag preview"
          fi

          mv */*.tgz .
          for filename in *.tgz; do
            npm publish $filename
            npm publish $PUBLISH_ARGS $filename
          done

  release-nodejs:
@@ -316,11 +322,23 @@ jobs:
      - name: Publish to NPM
        env:
          NODE_AUTH_TOKEN: ${{ secrets.LANCEDB_NPM_REGISTRY_TOKEN }}
        run: npm publish --access public
        # By default, things are published to the latest tag. This is what is
        # installed by default if the user does not specify a version. This is
        # good for stable releases, but for pre-releases, we want to publish to
        # the "preview" tag so they can install with `npm install lancedb@preview`.
        # See: https://medium.com/@mbostock/prereleases-and-npm-e778fc5e2420
        run: |
          if [[ $GITHUB_REF =~ refs/tags/v(.*)-beta.* ]]; then
            npm publish --access public --tag preview
          else
            npm publish --access public
          fi

  update-package-lock:
    needs: [release]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -331,11 +349,13 @@ jobs:
          lfs: true
      - uses: ./.github/workflows/update_package_lock
        with:
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
          github_token: ${{ secrets.GITHUB_TOKEN }}

  update-package-lock-nodejs:
    needs: [release-nodejs]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -346,4 +366,70 @@ jobs:
          lfs: true
      - uses: ./.github/workflows/update_package_lock_nodejs
        with:
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
          github_token: ${{ secrets.GITHUB_TOKEN }}

  gh-release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          lfs: true
      - name: Extract version
        id: extract_version
        env:
          GITHUB_REF: ${{ github.ref }}
        run: |
          set -e
          echo "Extracting tag and version from $GITHUB_REF"
          if [[ $GITHUB_REF =~ refs/tags/v(.*) ]]; then
            VERSION=${BASH_REMATCH[1]}
            TAG=v$VERSION
            echo "tag=$TAG" >> $GITHUB_OUTPUT
            echo "version=$VERSION" >> $GITHUB_OUTPUT
          else
            echo "Failed to extract version from $GITHUB_REF"
            exit 1
          fi
          echo "Extracted version $VERSION from $GITHUB_REF"
          if [[ $VERSION =~ beta ]]; then
            echo "This is a beta release"

            # Get last release (that is not this one)
            FROM_TAG=$(git tag --sort='version:refname' \
              | grep ^v \
              | grep -vF "$TAG" \
              | python ci/semver_sort.py v \
              | tail -n 1)
          else
            echo "This is a stable release"
            # Get last stable tag (ignore betas)
            FROM_TAG=$(git tag --sort='version:refname' \
              | grep ^v \
              | grep -vF "$TAG" \
              | grep -v beta \
              | python ci/semver_sort.py v \
              | tail -n 1)
          fi
          echo "Found from tag $FROM_TAG"
          echo "from_tag=$FROM_TAG" >> $GITHUB_OUTPUT
      - name: Create Release Notes
        id: release_notes
        uses: mikepenz/release-changelog-builder-action@v4
        with:
          configuration: .github/release_notes.json
          toTag: ${{ steps.extract_version.outputs.tag }}
          fromTag: ${{ steps.extract_version.outputs.from_tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create GH release
        uses: softprops/action-gh-release@v2
        with:
          prerelease: ${{ contains('beta', github.ref) }}
          tag_name: ${{ steps.extract_version.outputs.tag }}
          token: ${{ secrets.GITHUB_TOKEN }}
          generate_release_notes: false
          name: Node/Rust LanceDB v${{ steps.extract_version.outputs.version }}
          body: ${{ steps.release_notes.outputs.changelog }}
.github/workflows/pypi-publish.yml (107 changes)
@@ -1,18 +1,16 @@
name: PyPI Publish

on:
  release:
    types: [published]
  push:
    tags:
      - 'python-v*'

jobs:
  linux:
    # Only runs on tags that matches the python-make-release action
    if: startsWith(github.ref, 'refs/tags/python-v')
    name: Python ${{ matrix.config.platform }} manylinux${{ matrix.config.manylinux }}
    timeout-minutes: 60
    strategy:
      matrix:
        python-minor-version: ["8"]
        config:
          - platform: x86_64
            manylinux: "2_17"
@@ -34,25 +32,22 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.${{ matrix.python-minor-version }}
          python-version: 3.8
      - uses: ./.github/workflows/build_linux_wheel
        with:
          python-minor-version: ${{ matrix.python-minor-version }}
          python-minor-version: 8
          args: "--release --strip ${{ matrix.config.extra_args }}"
          arm-build: ${{ matrix.config.platform == 'aarch64' }}
          manylinux: ${{ matrix.config.manylinux }}
      - uses: ./.github/workflows/upload_wheel
        with:
          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          repo: "pypi"
          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          fury_token: ${{ secrets.FURY_TOKEN }}
  mac:
    # Only runs on tags that matches the python-make-release action
    if: startsWith(github.ref, 'refs/tags/python-v')
    timeout-minutes: 60
    runs-on: ${{ matrix.config.runner }}
    strategy:
      matrix:
        python-minor-version: ["8"]
        config:
          - target: x86_64-apple-darwin
            runner: macos-13
@@ -63,7 +58,6 @@ jobs:
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
          lfs: true
      - name: Set up Python
@@ -72,38 +66,95 @@ jobs:
          python-version: 3.12
      - uses: ./.github/workflows/build_mac_wheel
        with:
          python-minor-version: ${{ matrix.python-minor-version }}
          python-minor-version: 8
          args: "--release --strip --target ${{ matrix.config.target }} --features fp16kernels"
      - uses: ./.github/workflows/upload_wheel
        with:
          python-minor-version: ${{ matrix.python-minor-version }}
          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          repo: "pypi"
          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          fury_token: ${{ secrets.FURY_TOKEN }}
  windows:
    # Only runs on tags that matches the python-make-release action
    if: startsWith(github.ref, 'refs/tags/python-v')
    timeout-minutes: 60
    runs-on: windows-latest
    strategy:
      matrix:
        python-minor-version: ["8"]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
          lfs: true
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.${{ matrix.python-minor-version }}
          python-version: 3.8
      - uses: ./.github/workflows/build_windows_wheel
        with:
          python-minor-version: ${{ matrix.python-minor-version }}
          python-minor-version: 8
          args: "--release --strip"
          vcpkg_token: ${{ secrets.VCPKG_GITHUB_PACKAGES }}
      - uses: ./.github/workflows/upload_wheel
        with:
          python-minor-version: ${{ matrix.python-minor-version }}
          token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          repo: "pypi"
          pypi_token: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          fury_token: ${{ secrets.FURY_TOKEN }}
  gh-release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          lfs: true
      - name: Extract version
        id: extract_version
        env:
          GITHUB_REF: ${{ github.ref }}
        run: |
          set -e
          echo "Extracting tag and version from $GITHUB_REF"
          if [[ $GITHUB_REF =~ refs/tags/python-v(.*) ]]; then
            VERSION=${BASH_REMATCH[1]}
            TAG=python-v$VERSION
            echo "tag=$TAG" >> $GITHUB_OUTPUT
            echo "version=$VERSION" >> $GITHUB_OUTPUT
          else
            echo "Failed to extract version from $GITHUB_REF"
            exit 1
          fi
          echo "Extracted version $VERSION from $GITHUB_REF"
          if [[ $VERSION =~ beta ]]; then
            echo "This is a beta release"

            # Get last release (that is not this one)
            FROM_TAG=$(git tag --sort='version:refname' \
              | grep ^python-v \
              | grep -vF "$TAG" \
              | python ci/semver_sort.py python-v \
              | tail -n 1)
          else
            echo "This is a stable release"
            # Get last stable tag (ignore betas)
            FROM_TAG=$(git tag --sort='version:refname' \
              | grep ^python-v \
              | grep -vF "$TAG" \
              | grep -v beta \
              | python ci/semver_sort.py python-v \
              | tail -n 1)
          fi
          echo "Found from tag $FROM_TAG"
          echo "from_tag=$FROM_TAG" >> $GITHUB_OUTPUT
      - name: Create Python Release Notes
        id: python_release_notes
        uses: mikepenz/release-changelog-builder-action@v4
        with:
          configuration: .github/release_notes.json
          toTag: ${{ steps.extract_version.outputs.tag }}
          fromTag: ${{ steps.extract_version.outputs.from_tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create Python GH release
        uses: softprops/action-gh-release@v2
        with:
          prerelease: ${{ contains('beta', github.ref) }}
          tag_name: ${{ steps.extract_version.outputs.tag }}
          token: ${{ secrets.GITHUB_TOKEN }}
          generate_release_notes: false
          name: Python LanceDB v${{ steps.extract_version.outputs.version }}
          body: ${{ steps.python_release_notes.outputs.changelog }}
.github/workflows/python-make-release-commit.yml (deleted file, 56 lines)
@@ -1,56 +0,0 @@
name: Python - Create release commit

on:
  workflow_dispatch:
    inputs:
      dry_run:
        description: 'Dry run (create the local commit/tags but do not push it)'
        required: true
        default: "false"
        type: choice
        options:
          - "true"
          - "false"
      part:
        description: 'What kind of release is this?'
        required: true
        default: 'patch'
        type: choice
        options:
          - patch
          - minor
          - major

jobs:
  bump-version:
    runs-on: ubuntu-latest
    steps:
      - name: Check out main
        uses: actions/checkout@v4
        with:
          ref: main
          persist-credentials: false
          fetch-depth: 0
          lfs: true
      - name: Set git configs for bumpversion
        shell: bash
        run: |
          git config user.name 'Lance Release'
          git config user.email 'lance-dev@lancedb.com'
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Bump version, create tag and commit
        working-directory: python
        run: |
          pip install bump2version
          bumpversion --verbose ${{ inputs.part }}
      - name: Push new version and tag
        if: ${{ inputs.dry_run }} == "false"
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
          branch: main
          tags: true
.github/workflows/python.yml (6 changes)
@@ -65,7 +65,7 @@ jobs:
          workspaces: python
      - name: Install
        run: |
          pip install -e .[tests,dev,embeddings]
          pip install --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests,dev,embeddings]
          pip install tantivy
          pip install mlx
      - name: Doctest
@@ -75,7 +75,7 @@ jobs:
    timeout-minutes: 30
    strategy:
      matrix:
        python-minor-version: ["8", "11"]
        python-minor-version: ["9", "11"]
    runs-on: "ubuntu-22.04"
    defaults:
      run:
@@ -189,7 +189,7 @@ jobs:
      - name: Install lancedb
        run: |
          pip install "pydantic<2"
          pip install -e .[tests]
          pip install --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests]
          pip install tantivy
      - name: Run tests
        run: pytest -m "not slow and not s3_test" -x -v --durations=30 python/tests
.github/workflows/run_tests/action.yml (2 changes)
@@ -15,7 +15,7 @@ runs:
    - name: Install lancedb
      shell: bash
      run: |
        pip3 install $(ls target/wheels/lancedb-*.whl)[tests,dev]
        pip3 install --extra-index-url https://pypi.fury.io/lancedb/ $(ls target/wheels/lancedb-*.whl)[tests,dev]
    - name: Setup localstack for integration tests
      if: ${{ inputs.integration == 'true' }}
      shell: bash
.github/workflows/rust.yml (4 changes)
@@ -74,11 +74,11 @@ jobs:
        run: |
          sudo apt update
          sudo apt install -y protobuf-compiler libssl-dev
      - name: Build
        run: cargo build --all-features
      - name: Start S3 integration test environment
        working-directory: .
        run: docker compose up --detach --wait
      - name: Build
        run: cargo build --all-features
      - name: Run tests
        run: cargo test --all-features
      - name: Run examples
.github/workflows/upload_wheel/action.yml (53 changes)
@@ -2,28 +2,43 @@ name: upload-wheel

description: "Upload wheels to Pypi"
inputs:
  os:
    required: true
    description: "ubuntu-22.04 or macos-13"
  repo:
    required: false
    description: "pypi or testpypi"
    default: "pypi"
  token:
  pypi_token:
    required: true
    description: "release token for the repo"
  fury_token:
    required: true
    description: "release token for the fury repo"

runs:
  using: "composite"
  steps:
    - name: Install dependencies
      shell: bash
      run: |
        python -m pip install --upgrade pip
        pip install twine
    - name: Publish wheel
      env:
        TWINE_USERNAME: __token__
        TWINE_PASSWORD: ${{ inputs.token }}
      shell: bash
      run: twine upload --repository ${{ inputs.repo }} target/wheels/lancedb-*.whl
    - name: Install dependencies
      shell: bash
      run: |
        python -m pip install --upgrade pip
        pip install twine
    - name: Choose repo
      shell: bash
      id: choose_repo
      run: |
        if [ ${{ github.ref }} == "*beta*" ]; then
          echo "repo=fury" >> $GITHUB_OUTPUT
        else
          echo "repo=pypi" >> $GITHUB_OUTPUT
        fi
    - name: Publish to PyPI
      shell: bash
      env:
        FURY_TOKEN: ${{ inputs.fury_token }}
        PYPI_TOKEN: ${{ inputs.pypi_token }}
      run: |
        if [ ${{ steps.choose_repo.outputs.repo }} == "fury" ]; then
          WHEEL=$(ls target/wheels/lancedb-*.whl 2> /dev/null | head -n 1)
          echo "Uploading $WHEEL to Fury"
          curl -f -F package=@$WHEEL https://$FURY_TOKEN@push.fury.io/lancedb/
        else
          twine upload --repository ${{ steps.choose_repo.outputs.repo }} \
            --username __token__ \
            --password $PYPI_TOKEN \
            target/wheels/lancedb-*.whl
        fi
.gitignore (2 changes)
@@ -6,7 +6,7 @@
venv

.vscode

.zed
rust/target
rust/Cargo.lock
@@ -10,9 +10,12 @@ repos:
    rev: v0.2.2
    hooks:
      - id: ruff
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v3.1.0
  - repo: local
    hooks:
      - id: prettier
      - id: local-biome-check
        name: biome check
        entry: npx @biomejs/biome@1.7.3 check --config-path nodejs/biome.json nodejs/
        language: system
        types: [text]
        files: "nodejs/.*"
        exclude: nodejs/lancedb/native.d.ts|nodejs/dist/.*
Cargo.toml (36 changes)
@@ -1,5 +1,11 @@
[workspace]
members = ["rust/ffi/node", "rust/lancedb", "nodejs", "python"]
members = [
    "rust/ffi/node",
    "rust/lancedb",
    "nodejs",
    "python",
    "java/core/lancedb-jni",
]
# Python package needs to be built by maturin.
exclude = ["python"]
resolver = "2"
@@ -14,22 +20,24 @@ keywords = ["lancedb", "lance", "database", "vector", "search"]
categories = ["database-implementations"]

[workspace.dependencies]
lance = { "version" = "=0.10.15", "features" = ["dynamodb"] }
lance-index = { "version" = "=0.10.15" }
lance-linalg = { "version" = "=0.10.15" }
lance-testing = { "version" = "=0.10.15" }
lance = { "version" = "=0.13.0", "features" = ["dynamodb"] }
lance-index = { "version" = "=0.13.0" }
lance-linalg = { "version" = "=0.13.0" }
lance-testing = { "version" = "=0.13.0" }
lance-datafusion = { "version" = "=0.13.0" }
# Note that this one does not include pyarrow
arrow = { version = "50.0", optional = false }
arrow-array = "50.0"
arrow-data = "50.0"
arrow-ipc = "50.0"
arrow-ord = "50.0"
arrow-schema = "50.0"
arrow-arith = "50.0"
arrow-cast = "50.0"
arrow = { version = "51.0", optional = false }
arrow-array = "51.0"
arrow-data = "51.0"
arrow-ipc = "51.0"
arrow-ord = "51.0"
arrow-schema = "51.0"
arrow-arith = "51.0"
arrow-cast = "51.0"
async-trait = "0"
chrono = "0.4.35"
half = { "version" = "=2.3.1", default-features = false, features = [
datafusion-physical-plan = "37.1"
half = { "version" = "=2.4.1", default-features = false, features = [
    "num-traits",
] }
futures = "0"
@@ -20,7 +20,7 @@

<hr />

LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrevial, filtering and management of embeddings.
LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.

The key features of LanceDB include:

@@ -36,7 +36,7 @@ The key features of LanceDB include:

* GPU support in building vector index(*).

* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.
* Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/docs/integrations/vectorstores/lancedb/), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.

LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.

@@ -83,5 +83,5 @@ result = table.search([100, 100]).limit(2).to_pandas()
```

## Blogs, Tutorials & Videos
* 📈 <a href="https://blog.eto.ai/benchmarking-random-access-in-lance-ed690757a826">2000x better performance with Lance over Parquet</a>
* 📈 <a href="https://blog.lancedb.com/benchmarking-random-access-in-lance/">2000x better performance with Lance over Parquet</a>
* 🤖 <a href="https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb">Build a question and answer bot with LanceDB</a>
ci/bump_version.sh (new file, 51 lines)
@@ -0,0 +1,51 @@
set -e

RELEASE_TYPE=${1:-"stable"}
BUMP_MINOR=${2:-false}
TAG_PREFIX=${3:-"v"} # Such as "python-v"
HEAD_SHA=${4:-$(git rev-parse HEAD)}

readonly SELF_DIR=$(cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

PREV_TAG=$(git tag --sort='version:refname' | grep ^$TAG_PREFIX | python $SELF_DIR/semver_sort.py $TAG_PREFIX | tail -n 1)
echo "Found previous tag $PREV_TAG"

# Initially, we don't want to tag if we are doing stable, because we will bump
# again later. See comment at end for why.
if [[ "$RELEASE_TYPE" == 'stable' ]]; then
  BUMP_ARGS="--no-tag"
fi

# If last is stable and not bumping minor
if [[ $PREV_TAG != *beta* ]]; then
  if [[ "$BUMP_MINOR" != "false" ]]; then
    # X.Y.Z -> X.(Y+1).0-beta.0
    bump-my-version bump -vv $BUMP_ARGS minor
  else
    # X.Y.Z -> X.Y.(Z+1)-beta.0
    bump-my-version bump -vv $BUMP_ARGS patch
  fi
else
  if [[ "$BUMP_MINOR" != "false" ]]; then
    # X.Y.Z-beta.N -> X.(Y+1).0-beta.0
    bump-my-version bump -vv $BUMP_ARGS minor
  else
    # X.Y.Z-beta.N -> X.Y.Z-beta.(N+1)
    bump-my-version bump -vv $BUMP_ARGS pre_n
  fi
fi

# The above bump will always bump to a pre-release version. If we are releasing
# a stable version, bump the pre-release level ("pre_l") to make it stable.
if [[ $RELEASE_TYPE == 'stable' ]]; then
  # X.Y.Z-beta.N -> X.Y.Z
  bump-my-version bump -vv pre_l
fi

# Validate that we have incremented version appropriately for breaking changes
NEW_TAG=$(git describe --tags --exact-match HEAD)
NEW_VERSION=$(echo $NEW_TAG | sed "s/^$TAG_PREFIX//")
LAST_STABLE_RELEASE=$(git tag --sort='version:refname' | grep ^$TAG_PREFIX | grep -v beta | grep -vF "$NEW_TAG" | python $SELF_DIR/semver_sort.py $TAG_PREFIX | tail -n 1)
LAST_STABLE_VERSION=$(echo $LAST_STABLE_RELEASE | sed "s/^$TAG_PREFIX//")

python $SELF_DIR/check_breaking_changes.py $LAST_STABLE_RELEASE $HEAD_SHA $LAST_STABLE_VERSION $NEW_VERSION
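Read together, the script's branches form a small decision table over the previous tag. A minimal Python sketch of that table, based only on the script's own inline comments (the version strings in the comments are illustrative, not real tags):

```python
def bump_parts(prev_is_beta: bool, release_type: str, bump_minor: bool) -> list[str]:
    """Which bump-my-version parts ci/bump_version.sh invokes, in order."""
    parts = ["minor" if bump_minor else ("pre_n" if prev_is_beta else "patch")]
    if release_type == "stable":
        parts.append("pre_l")  # promotes X.Y.Z-beta.N to X.Y.Z
    return parts

# Previous tag stable (e.g. 0.5.2), preview release requested:
print(bump_parts(prev_is_beta=False, release_type="preview", bump_minor=False))
# ['patch']          -> 0.5.2 becomes 0.5.3-beta.0
# Previous tag a beta (e.g. 0.5.3-beta.0), another preview:
print(bump_parts(prev_is_beta=True, release_type="preview", bump_minor=False))
# ['pre_n']          -> 0.5.3-beta.0 becomes 0.5.3-beta.1
# Previous tag a beta, stable release requested:
print(bump_parts(prev_is_beta=True, release_type="stable", bump_minor=False))
# ['pre_n', 'pre_l'] -> 0.5.3-beta.1 becomes 0.5.3-beta.2, then 0.5.3
```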
ci/check_breaking_changes.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""
Check whether there are any breaking changes in the PRs between the base and head commits.
If there are, assert that we have incremented the minor version.
"""
import argparse
import os
from packaging.version import parse

from github import Github

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("base")
    parser.add_argument("head")
    parser.add_argument("last_stable_version")
    parser.add_argument("current_version")
    args = parser.parse_args()

    repo = Github(os.environ["GITHUB_TOKEN"]).get_repo(os.environ["GITHUB_REPOSITORY"])
    commits = repo.compare(args.base, args.head).commits
    prs = (pr for commit in commits for pr in commit.get_pulls())

    for pr in prs:
        if any(label.name == "breaking-change" for label in pr.labels):
            print(f"Breaking change in PR: {pr.html_url}")
            break
    else:
        print("No breaking changes found.")
        exit(0)

    last_stable_version = parse(args.last_stable_version)
    current_version = parse(args.current_version)
    if current_version.minor <= last_stable_version.minor:
        print("Minor version is not greater than the last stable version.")
        exit(1)
ci/semver_sort.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""
Takes a list of semver strings and sorts them in ascending order.
"""

import sys
from packaging.version import parse, InvalidVersion

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("prefix", default="v")
    args = parser.parse_args()

    # Read the input from stdin
    lines = sys.stdin.readlines()

    # Parse the versions
    versions = []
    for line in lines:
        line = line.strip()
        try:
            version_str = line.removeprefix(args.prefix)
            version = parse(version_str)
        except InvalidVersion:
            # There are old tags that don't follow the semver format
            print(f"Invalid version: {line}", file=sys.stderr)
            continue
        versions.append((line, version))

    # Sort the versions
    versions.sort(key=lambda x: x[1])

    # Print the sorted versions as original strings
    for line, _ in versions:
        print(line)
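The reason this script exists rather than relying on `git tag --sort='version:refname'` alone is that `packaging` orders pre-releases below their final release, which the release workflows depend on when picking the "last" tag. A quick illustration of the same parse-and-sort step (the tag names here are made up, in the shape this repo uses):

```python
from packaging.version import parse

# Hypothetical tags; packaging treats "-beta.N" as a pre-release of the final version.
tags = ["v0.5.2", "v0.5.2-beta.2", "v0.4.17", "v0.5.2-beta.1"]
ordered = sorted(tags, key=lambda t: parse(t.removeprefix("v")))
print(ordered)
# ['v0.4.17', 'v0.5.2-beta.1', 'v0.5.2-beta.2', 'v0.5.2']
```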
@@ -106,6 +106,9 @@ nav:
      - Versioning & Reproducibility: notebooks/reproducibility.ipynb
      - Configuring Storage: guides/storage.md
      - Sync -> Async Migration Guide: migration.md
      - Tuning retrieval performance:
          - Choosing right query type: guides/tuning_retrievers/1_query_types.md
          - Reranking: guides/tuning_retrievers/2_reranking.md
  - 🧬 Managing embeddings:
      - Overview: embeddings/index.md
      - Embedding functions: embeddings/embedding_functions.md
@@ -119,9 +122,11 @@ nav:
      - Polars: python/polars_arrow.md
      - DuckDB: python/duckdb.md
      - LangChain:
          - LangChain 🔗: https://python.langchain.com/docs/integrations/vectorstores/lancedb/
          - LangChain 🔗: integrations/langchain.md
          - LangChain JS/TS 🔗: https://js.langchain.com/docs/integrations/vectorstores/lancedb
      - LlamaIndex 🦙: https://docs.llamaindex.ai/en/stable/examples/vector_stores/LanceDBIndexDemo/
      - LlamaIndex 🦙:
          - LlamaIndex docs: integrations/llamaIndex.md
          - LlamaIndex demo: https://docs.llamaindex.ai/en/stable/examples/vector_stores/LanceDBIndexDemo/
      - Pydantic: python/pydantic.md
      - Voxel51: integrations/voxel51.md
      - PromptTools: integrations/prompttools.md
@@ -152,7 +157,7 @@ nav:
      - Overview: cloud/index.md
      - API reference:
          - 🐍 Python: python/saas-python.md
          - 👾 JavaScript: javascript/saas-modules.md
          - 👾 JavaScript: javascript/modules.md

  - Quick start: basic.md
  - Concepts:
@@ -181,6 +186,9 @@ nav:
      - Versioning & Reproducibility: notebooks/reproducibility.ipynb
      - Configuring Storage: guides/storage.md
      - Sync -> Async Migration Guide: migration.md
      - Tuning retrieval performance:
          - Choosing right query type: guides/tuning_retrievers/1_query_types.md
          - Reranking: guides/tuning_retrievers/2_reranking.md
  - Managing Embeddings:
      - Overview: embeddings/index.md
      - Embedding functions: embeddings/embedding_functions.md
@@ -219,7 +227,7 @@ nav:
      - Overview: cloud/index.md
      - API reference:
          - 🐍 Python: python/saas-python.md
          - 👾 JavaScript: javascript/saas-modules.md
          - 👾 JavaScript: javascript/modules.md

extra_css:
  - styles/global.css
@@ -44,6 +44,36 @@

!!! info "Please also make sure you're using the same version of Arrow as in the [lancedb crate](https://github.com/lancedb/lancedb/blob/main/Cargo.toml)"

### Preview releases

Stable releases are created about every 2 weeks. For the latest features and bug
fixes, you can install the preview release. These releases receive the same
level of testing as stable releases, but are not guaranteed to be available for
more than 6 months after they are released. Once your application is stable, we
recommend switching to stable releases.

=== "Python"

    ```shell
    pip install --pre --extra-index-url https://pypi.fury.io/lancedb/ lancedb
    ```

=== "Typescript"

    ```shell
    npm install vectordb@preview
    ```

=== "Rust"

    We don't push preview releases to crates.io, but you can reference the tag
    in GitHub within your Cargo dependencies:

    ```toml
    [dependencies]
    lancedb = { git = "https://github.com/lancedb/lancedb.git", tag = "vX.Y.Z-beta.N" }
    ```

## Connect to a database

=== "Python"

@@ -150,6 +180,9 @@ table.

!!! info "Under the hood, LanceDB reads in the Apache Arrow data and persists it to disk using the [Lance format](https://www.github.com/lancedb/lance)."

!!! info "Automatic embedding generation with Embedding API"
    When working with embedding models, it is recommended to use the LanceDB embedding API to automatically create vector representations of the data and queries in the background. See the [quickstart example](#using-the-embedding-api) or the embedding API [guide](./embeddings/).

### Create an empty table

Sometimes you may not have the data to insert into the table at creation time.
@@ -164,6 +197,9 @@ similar to a `CREATE TABLE` statement in SQL.
    --8<-- "python/python/tests/docs/test_basic.py:create_empty_table_async"
    ```

!!! note "You can define schema in Pydantic"
    LanceDB comes with Pydantic support, which allows you to define the schema of your data using Pydantic models. This makes it easy to work with LanceDB tables and data. Learn more about all supported types in the [tables guide](./guides/tables.md).

=== "Typescript"

    ```typescript
@@ -394,6 +430,19 @@ Use the `drop_table()` method on the database to remove a table.
    })
    ```

## Using the Embedding API
You can use the embedding API when working with embedding models. It automatically vectorizes the data at ingestion and query time, and comes with built-in integrations with popular embedding models like OpenAI, Hugging Face, Sentence Transformers, CLIP and more.

=== "Python"

    ```python
    --8<-- "python/python/tests/docs/test_embeddings_optional.py:imports"
    --8<-- "python/python/tests/docs/test_embeddings_optional.py:openai_embeddings"
    ```

Learn about using the existing integrations and creating custom embedding functions in the [embedding API guide](./embeddings/).


## What's next

This section covered the very basics of using LanceDB. If you're learning about vector databases for the first time, you may want to read the page on [indexing](concepts/index_ivfpq.md) to get familiar with the concepts.
@@ -206,6 +206,44 @@ print(actual.text)
|
||||
```
|
||||
|
||||
|
||||
### Ollama embeddings
|
||||
Generate embeddings via the [ollama](https://github.com/ollama/ollama-python) python library. More details:
|
||||
|
||||
- [Ollama docs on embeddings](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings)
|
||||
- [Ollama blog on embeddings](https://ollama.com/blog/embedding-models)
|
||||
|
||||
| Parameter | Type | Default Value | Description |
|
||||
|------------------------|----------------------------|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `name` | `str` | `nomic-embed-text` | The name of the model. |
|
||||
| `host` | `str` | `http://localhost:11434` | The Ollama host to connect to. |
|
||||
| `options` | `ollama.Options` or `dict` | `None` | Additional model parameters listed in the documentation for the Modelfile such as `temperature`. |
|
||||
| `keep_alive` | `float` or `str` | `"5m"` | Controls how long the model will stay loaded into memory following the request. |
|
||||
| `ollama_client_kwargs` | `dict` | `{}` | kwargs that can be past to the `ollama.Client`. |
|
||||
|
||||
```python
|
||||
import lancedb
|
||||
from lancedb.pydantic import LanceModel, Vector
|
||||
from lancedb.embeddings import get_registry
|
||||
|
||||
db = lancedb.connect("/tmp/db")
|
||||
func = get_registry().get("ollama").create(name="nomic-embed-text")
|
||||
|
||||
class Words(LanceModel):
|
||||
text: str = func.SourceField()
|
||||
vector: Vector(func.ndims()) = func.VectorField()
|
||||
|
||||
table = db.create_table("words", schema=Words, mode="overwrite")
|
||||
table.add([
|
||||
{"text": "hello world"},
|
||||
{"text": "goodbye world"}
|
||||
])
|
||||
|
||||
query = "greetings"
|
||||
actual = table.search(query).limit(1).to_pydantic(Words)[0]
|
||||
print(actual.text)
|
||||
```
|
||||
|
||||
|
||||
### OpenAI embeddings
|
||||
LanceDB registers the OpenAI embeddings function in the registry by default, as `openai`. Below are the parameters that you can customize when creating the instances:
|
||||
|
||||
@@ -327,6 +365,68 @@ tbl.add(df)
|
||||
rs = tbl.search("hello").limit(1).to_pandas()
|
||||
```
|
||||
|
||||
### Cohere Embeddings
|
||||
Using cohere API requires cohere package, which can be installed using `pip install cohere`. Cohere embeddings are used to generate embeddings for text data. The embeddings can be used for various tasks like semantic search, clustering, and classification.
|
||||
You also need to set the `COHERE_API_KEY` environment variable to use the Cohere API.
|
||||
|
||||
Supported models are:
|
||||
```
|
||||
* embed-english-v3.0
|
||||
* embed-multilingual-v3.0
|
||||
* embed-english-light-v3.0
|
||||
* embed-multilingual-light-v3.0
|
||||
* embed-english-v2.0
|
||||
* embed-english-light-v2.0
|
||||
* embed-multilingual-v2.0
|
||||
```
|
||||
|
||||
Supported parameters (to be passed in `create` method) are:
|
||||
|
||||
| Parameter | Type | Default Value | Description |
|
||||
|---|---|---|---|
|
||||
| `name` | `str` | `"embed-english-v2.0"` | The model ID of the cohere model to use. Supported base models for Text Embeddings: embed-english-v3.0, embed-multilingual-v3.0, embed-english-light-v3.0, embed-multilingual-light-v3.0, embed-english-v2.0, embed-english-light-v2.0, embed-multilingual-v2.0 |
|
||||
| `source_input_type` | `str` | `"search_document"` | The type of input data to be used for the source column. |
|
||||
| `query_input_type` | `str` | `"search_query"` | The type of input data to be used for the query. |
|
||||
|
||||
Cohere supports following input types:
|
||||
| Input Type | Description |
|
||||
|-------------------------|---------------------------------------|
|
||||
| "`search_document`" | Used for embeddings stored in a vector|
|
||||
| | database for search use-cases. |
|
||||
| "`search_query`" | Used for embeddings of search queries |
|
||||
| | run against a vector DB |
|
||||
| "`semantic_similarity`" | Specifies the given text will be used |
|
||||
| | for Semantic Textual Similarity (STS) |
|
||||
| "`classification`" | Used for embeddings passed through a |
|
||||
| | text classifier. |
|
||||
| "`clustering`" | Used for the embeddings run through a |
|
||||
| | clustering algorithm |
|
||||
|
||||

Usage Example:

```python
import lancedb
from lancedb.pydantic import LanceModel, Vector
from lancedb.embeddings import EmbeddingFunctionRegistry

cohere = (
    EmbeddingFunctionRegistry.get_instance()
    .get("cohere")
    .create(name="embed-multilingual-v2.0")
)


class TextModel(LanceModel):
    text: str = cohere.SourceField()
    vector: Vector(cohere.ndims()) = cohere.VectorField()


data = [{"text": "hello world"},
        {"text": "goodbye world"}]

db = lancedb.connect("~/.lancedb")
tbl = db.create_table("test", schema=TextModel, mode="overwrite")

tbl.add(data)
```

### AWS Bedrock Text Embedding Functions
AWS Bedrock supports multiple base models for generating text embeddings. You need to set up AWS credentials to use this embedding function.
You can do so using `awscli`, adding your session token as well:
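
The configuration snippet that follows in the original docs is elided by this diff; as a minimal illustration, session credentials can also be supplied via the standard AWS environment variables (all values below are placeholders):

```python
import os

# Placeholders; the real values come from your AWS account / `aws configure`.
os.environ["AWS_ACCESS_KEY_ID"] = "..."
os.environ["AWS_SECRET_ACCESS_KEY"] = "..."
os.environ["AWS_SESSION_TOKEN"] = "..."
```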

@@ -2,6 +2,9 @@ Representing multi-modal data as vector embeddings is becoming a standard practi

For this purpose, LanceDB introduces an **embedding functions API** that allows you to set things up once, during the configuration stage of your project. After this, the table remembers it, effectively making the embedding functions *disappear in the background* so you don't have to worry about manually passing callables, and can instead simply focus on the rest of your data engineering pipeline.

!!! Note "LanceDB Cloud doesn't support embedding functions yet"
    LanceDB Cloud does not support embedding functions yet. You need to generate embeddings before ingesting into the table or querying.

!!! warning
    Using the embedding function registry means that you don't have to explicitly generate the embeddings yourself.
    However, if your embedding function changes, you'll have to re-configure your table with the new embedding function.

@@ -46,7 +49,7 @@ For this purpose, LanceDB introduces an **embedding functions API**, that allow

```python
class Pets(LanceModel):
-    vector: Vector(clip.ndims) = clip.VectorField()
+    vector: Vector(clip.ndims()) = clip.VectorField()
     image_uri: str = clip.SourceField()
```

@@ -149,7 +152,7 @@ You can also use the integration for adding utility operations in the schema. Fo

```python
class Pets(LanceModel):
-    vector: Vector(clip.ndims) = clip.VectorField()
+    vector: Vector(clip.ndims()) = clip.VectorField()
     image_uri: str = clip.SourceField()

     @property
```

@@ -2,7 +2,6 @@

LanceDB provides support for full-text search via [Tantivy](https://github.com/quickwit-oss/tantivy) (currently Python only), allowing you to incorporate keyword-based search (based on BM25) in your retrieval solutions. Our goal is to push the FTS integration down to the Rust level in the future, so that it's available for Rust and JavaScript users as well. Follow along at [this GitHub issue](https://github.com/lancedb/lance/issues/1195).

-A hybrid search solution combining vector and full-text search is also on the way.

## Installation

@@ -55,6 +54,16 @@ This returns the result as a list of dictionaries as follows.
!!! note
    LanceDB automatically searches on the existing FTS index if the input to the search is of type `str`. If you provide a vector as input, LanceDB will search the ANN index instead.

## Tokenization
By default, the text is tokenized by splitting on punctuation and whitespace, and tokens longer than 40 characters are removed. For language-specific tokenization, provide the `tokenizer_name` argument with the two-letter language code followed by "_stem"; for English this would be "en_stem".

```python
table.create_fts_index("text", tokenizer_name="en_stem")
```

The following [languages](https://docs.rs/tantivy/latest/tantivy/tokenizer/enum.Language.html) are currently supported.


## Index multiple columns

If you have multiple string columns to index, there's no need to combine them manually -- simply pass them all as a list to `create_fts_index`, as shown below:
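
A minimal sketch, assuming a table with string columns `title` and `body`:

```python
# Both columns go into a single FTS index; a match in either column
# satisfies the query.
table.create_fts_index(["title", "body"], replace=True)
table.search("vector database").limit(5).to_pandas()
```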

@@ -140,6 +149,7 @@ is treated as a phrase query.
In general, a query that's declared as a phrase query will be wrapped in double quotes during parsing, with nested
double quotes replaced by single quotes.
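
For example, a hedged sketch of issuing a phrase query against the indexed `text` column; the quoting convention here follows the parsing rule described above:

```python
# The inner double quotes mark the whole string as a phrase query.
table.search('"the quick brown fox"').limit(5).to_pandas()
```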

## Configurations

By default, LanceDB configures a 1GB heap size limit for creating the index. You can

@@ -299,6 +299,14 @@ LanceDB can also connect to S3-compatible stores, such as MinIO. To do so, you m

This can also be done with the ``AWS_ENDPOINT`` and ``AWS_DEFAULT_REGION`` environment variables.

!!! tip "Local servers"

    For local development, the server often has an `http` endpoint rather than a
    secure `https` endpoint. In this case, you must also set the `ALLOW_HTTP`
    environment variable to `true` to allow non-TLS connections, or pass the
    storage option `allow_http` as `true`. If you do not do this, you will get
    an error like `URL scheme is not allowed`.
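
A minimal sketch of connecting to a local MinIO server through these variables (the endpoint, credentials, and bucket name are placeholders):

```python
import os

import lancedb

os.environ["AWS_ENDPOINT"] = "http://localhost:9000"   # placeholder MinIO endpoint
os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
os.environ["AWS_ACCESS_KEY_ID"] = "minioadmin"         # placeholder credentials
os.environ["AWS_SECRET_ACCESS_KEY"] = "minioadmin"
os.environ["ALLOW_HTTP"] = "true"                      # the endpoint above is plain http

db = lancedb.connect("s3://my-bucket/lancedb")         # assumes `my-bucket` exists
```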

#### S3 Express

LanceDB supports [S3 Express One Zone](https://aws.amazon.com/s3/storage-classes/express-one-zone/) endpoints, but this requires additional configuration. Also, S3 Express endpoints only support connecting from an EC2 instance within the same region.

@@ -452,6 +452,27 @@ After a table has been created, you can always add more data to it using the var
    tbl.add(pydantic_model_items)
    ```

    ??? "Ingesting Pydantic models with LanceDB embedding API"
        When using LanceDB's embedding API, you can add Pydantic models directly to the table. LanceDB will automatically convert the `vector` field to a vector before adding it to the table. You need to specify the default value of the `vector` field as `None` to allow LanceDB to automatically vectorize the data.

        ```python
        import lancedb
        from lancedb.pydantic import LanceModel, Vector
        from lancedb.embeddings import get_registry

        db = lancedb.connect("~/tmp")
        embed_fcn = get_registry().get("huggingface").create(name="BAAI/bge-small-en-v1.5")

        class Schema(LanceModel):
            text: str = embed_fcn.SourceField()
            vector: Vector(embed_fcn.ndims()) = embed_fcn.VectorField(default=None)

        tbl = db.create_table("my_table", schema=Schema, mode="overwrite")
        models = [Schema(text="hello"), Schema(text="world")]
        tbl.add(models)
        ```

=== "JavaScript"
|
||||
|
||||
@@ -636,6 +657,31 @@ The `values` parameter is used to provide the new values for the columns as lite
|
||||
|
||||
When rows are updated, they are moved out of the index. The row will still show up in ANN queries, but the query will not be as fast as it would be if the row was in the index. If you update a large proportion of rows, consider rebuilding the index afterwards.
|
||||
|
||||
## Drop a table
|
||||
|
||||
Use the `drop_table()` method on the database to remove a table.
|
||||
|
||||
=== "Python"
|
||||
|
||||
```python
|
||||
--8<-- "python/python/tests/docs/test_basic.py:drop_table"
|
||||
--8<-- "python/python/tests/docs/test_basic.py:drop_table_async"
|
||||
```
|
||||
|
||||
This permanently removes the table and is not recoverable, unlike deleting rows.
|
||||
By default, if the table does not exist an exception is raised. To suppress this,
|
||||
you can pass in `ignore_missing=True`.
|
||||
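
    As a concrete sketch (assuming a connection `db` created with `lancedb.connect`):

    ```python
    db.drop_table("my_table")                       # raises if the table is missing
    db.drop_table("my_table", ignore_missing=True)  # no-op if the table is missing
    ```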

=== "Javascript/Typescript"

    ```typescript
    --8<-- "docs/src/basic_legacy.ts:drop_table"
    ```

    This permanently removes the table and is not recoverable, unlike deleting rows.
    If the table does not exist, an exception is raised.


## Consistency

In LanceDB OSS, users can set the `read_consistency_interval` parameter on connections to achieve different levels of read consistency. This parameter determines how frequently the database synchronizes with the underlying storage system to check for updates made by other processes. If another process updates a table, the database will not see the changes until the next synchronization.
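
A minimal sketch of setting it on a connection; `timedelta(0)` would give strong consistency at the cost of a storage check on every read:

```python
from datetime import timedelta

import lancedb

# Check storage for updates from other processes at most every 5 seconds.
db = lancedb.connect("~/.lancedb", read_consistency_interval=timedelta(seconds=5))
```
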
128 docs/src/guides/tuning_retrievers/1_query_types.md Normal file
@@ -0,0 +1,128 @@
## Improving retriever performance
Vector DBs are used as retrievers in recommender or chatbot-based systems for retrieving relevant data based on user queries. For example, the retriever is a critical component of Retrieval Augmented Generation (RAG) architectures. In this section, we will discuss how to improve the performance of retrievers.

There are several ways to improve the performance of retrievers. Some of the common techniques are:

* Using different query types
* Using hybrid search
* Fine-tuning the embedding models
* Using different embedding models

Using a different embedding model is very specific to the use case and the data, so we will not discuss it here. In this section, we will discuss the first three techniques.


!!! note "Note"
    We'll be using a simple metric called "hit-rate" for evaluating the performance of the retriever across this guide. Hit-rate is the percentage of queries for which the retriever returned the correct answer in the top-k results. For example, if the retriever returned the correct answer in the top-3 results for 70% of the queries, then the hit-rate@3 is 0.7.

## The dataset
We'll be using a QA dataset generated from the Llama2 review paper. The dataset contains 221 query, context, and answer triplets. The queries and answers were generated using GPT-4 from a given context. The full script used to generate the dataset can be found in this [repo](https://github.com/lancedb/ragged). The dataset itself can be downloaded from [here](https://github.com/AyushExel/assets/blob/main/data_qa.csv).

### Using different query types
Let's set up the embeddings and the dataset first. We'll use LanceDB's `huggingface` embeddings integration for this guide.

```python
import lancedb
import pandas as pd
from lancedb.embeddings import get_registry
from lancedb.pydantic import Vector, LanceModel

db = lancedb.connect("~/lancedb/query_types")
df = pd.read_csv("data_qa.csv")

embed_fcn = get_registry().get("huggingface").create(name="BAAI/bge-small-en-v1.5")

class Schema(LanceModel):
    context: str = embed_fcn.SourceField()
    vector: Vector(embed_fcn.ndims()) = embed_fcn.VectorField()

table = db.create_table("qa", schema=Schema)
table.add(df[["context"]].to_dict(orient="records"))

queries = df["query"].tolist()
```

|
||||
Now that we have the dataset and embeddings table set up, here's how you can run different query types on the dataset.
|
||||
|
||||
* <b> Vector Search: </b>
|
||||
|
||||
```python
|
||||
table.search(quries[0], query_type="vector").limit(5).to_pandas()
|
||||
```
|
||||
By default, LanceDB uses vector search query type for searching and it automatically converts the input query to a vector before searching when using embedding API. So, the following statement is equivalent to the above statement.
|
||||
|
||||
```python
|
||||
table.search(quries[0]).limit(5).to_pandas()
|
||||
```
|
||||
|
||||
Vector or semantic search is useful when you want to find documents that are similar to the query in terms of meaning.
|
||||
|
||||
---
|
||||
|
||||
* <b>Full-text Search:</b>

FTS requires creating an index on the column you want to search on; `replace=True` replaces the existing index if one exists.
Once the index is created, you can search using the `fts` query type.
```python
table.create_fts_index("context", replace=True)
table.search(queries[0], query_type="fts").limit(5).to_pandas()
```

Full-text search is useful when you want to find documents that contain the query terms.

---

* <b>Hybrid Search:</b>

Hybrid search is a combination of vector and full-text search. Here's how you can run a hybrid search query on the dataset.
```python
table.search(queries[0], query_type="hybrid").limit(5).to_pandas()
```
Hybrid search requires a reranker to combine and rank the results from vector and full-text search. We'll cover reranking as a concept in the next section.

Hybrid search is useful when you want to combine the benefits of both vector and full-text search.

!!! note "Note"
    By default, hybrid search uses the `LinearCombinationReranker`, which combines the scores from vector and full-text search using a weighted linear combination. It is the simplest reranker implementation available in LanceDB. You can also use other rerankers, like `CrossEncoderReranker` or `CohereReranker`, for reranking the results.
    Learn more about rerankers [here](https://lancedb.github.io/lancedb/reranking/).

### Hit rate evaluation results

Now that we have seen how to run different query types on the dataset, let's evaluate the hit-rate of each query type.
For brevity, the entire evaluation script is not shown here; you can find the complete evaluation and benchmarking utility scripts [here](https://github.com/lancedb/ragged).
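
A minimal sketch of what such a hit-rate@k evaluation can look like, assuming each query's ground-truth context is the row it was generated from (the helper below is illustrative, not taken from the ragged repo):

```python
def hit_rate_at_k(table, df, k=5, query_type="vector"):
    # Fraction of queries whose ground-truth context appears in the top-k results.
    hits = 0
    for query, context in zip(df["query"], df["context"]):
        results = table.search(query, query_type=query_type).limit(k).to_pandas()
        if context in results["context"].tolist():
            hits += 1
    return hits / len(df)

print(hit_rate_at_k(table, df, k=5, query_type="vector"))
```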

Here are the hit-rate results for the dataset:

| Query Type | Hit-rate@5 |
| --- | --- |
| Vector Search | 0.640 |
| Full-text Search | 0.595 |
| Hybrid Search (w/ LinearCombinationReranker) | 0.645 |

**Choosing a query type** is very specific to the use case and the data. This synthetic dataset has been generated to be semantically challenging, i.e., the queries don't have many keywords in common with the context, so vector search performs better than full-text search. However, in real-world scenarios, full-text search might perform better than vector search. Hybrid search is a good choice when you want to combine the benefits of both.

### Evaluation results on other datasets

The hit-rate results can vary based on the dataset and the query type. Here are the hit-rate results for other datasets using the same embedding function.

* <b>SQuAD Dataset:</b>

| Query Type | Hit-rate@5 |
| --- | --- |
| Vector Search | 0.822 |
| Full-text Search | 0.835 |
| Hybrid Search (w/ LinearCombinationReranker) | 0.8874 |

* <b>Uber10K SEC filing Dataset:</b>

| Query Type | Hit-rate@5 |
| --- | --- |
| Vector Search | 0.608 |
| Full-text Search | 0.82 |
| Hybrid Search (w/ LinearCombinationReranker) | 0.80 |

In these standard datasets, FTS performs much better than vector search because the queries have many keywords in common with the context. So, in general, choosing the query type is very specific to the use case and the data.

78 docs/src/guides/tuning_retrievers/2_reranking.md Normal file
@@ -0,0 +1,78 @@
Continuing from the previous example, we can now rerank the results using more complex rerankers.

## Reranking search results
You can rerank any search results using a reranker. The syntax for reranking is as follows:

```python
from lancedb.rerankers import LinearCombinationReranker

reranker = LinearCombinationReranker()
table.search(queries[0], query_type="hybrid").rerank(reranker=reranker).limit(5).to_pandas()
```
Based on the `query_type`, the `rerank()` function can accept other arguments as well. For example, hybrid search accepts a `normalize` param that determines the score normalization method.
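
For instance (treat this as a sketch; `"rank"` is one of the normalization options):

```python
# Normalize by result rank instead of raw score before combining.
table.search(queries[0], query_type="hybrid").rerank(
    normalize="rank", reranker=reranker
).limit(5).to_pandas()
```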

!!! note "Note"
    LanceDB provides a `Reranker` base class that can be extended to implement custom rerankers. Each reranker must implement the `rerank_hybrid` method; the `rerank_vector` and `rerank_fts` methods are optional. For example, the `LinearCombinationReranker` only implements the `rerank_hybrid` method, so it can only be used for reranking hybrid search results.
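
A minimal sketch of a custom reranker under these rules; it assumes the base class provides a `merge_results` helper (present in recent versions) that concatenates and deduplicates the two result sets:

```python
import pyarrow as pa

from lancedb.rerankers import Reranker


class PassthroughReranker(Reranker):
    """Illustrative only: merges vector and FTS hits without re-scoring."""

    def rerank_hybrid(
        self, query: str, vector_results: pa.Table, fts_results: pa.Table
    ) -> pa.Table:
        # A real reranker would compute a combined relevance score here
        # and sort the merged table by it.
        return self.merge_results(vector_results, fts_results)
```
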
## Choosing a Reranker
There are many rerankers available in LanceDB, like `CrossEncoderReranker`, `CohereReranker`, and `ColBERT`. The choice of reranker depends on the dataset and the application. You can even implement your own custom reranker by extending the `Reranker` class. For more details about each available reranker and a performance comparison, refer to the [rerankers](https://lancedb.github.io/lancedb/reranking/) documentation.

In this example, we'll use the `CohereReranker` to rerank the search results. It requires `cohere` to be installed and `COHERE_API_KEY` to be set in the environment. To get your API key, sign up on [Cohere](https://cohere.ai/).

```python
from lancedb.rerankers import CohereReranker

# use Cohere reranker v3
reranker = CohereReranker(model_name="rerank-english-v3.0")  # default model is "rerank-english-v2.0"
```

### Reranking search results
Now we can rerank the results of every query type using the `CohereReranker`:

```python
# rerank hybrid search results
table.search(queries[0], query_type="hybrid").rerank(reranker=reranker).limit(5).to_pandas()

# rerank vector search results
table.search(queries[0], query_type="vector").rerank(reranker=reranker).limit(5).to_pandas()

# rerank fts search results
table.search(queries[0], query_type="fts").rerank(reranker=reranker).limit(5).to_pandas()
```

Each reranker can accept additional arguments. For example, `CohereReranker` accepts `top_k` and `batch_size` params to control the number of documents to rerank and the batch size, respectively. Similarly, a custom reranker can accept any number of arguments based on its implementation; for example, a reranker could accept a `filter` that implements custom logic to filter out documents before reranking.

## Results

Let's take a look at the same datasets from the previous sections, using the same embedding table but with the Cohere reranker applied to all query types.

!!! note "Note"
    When reranking fts or vector search results, the search results are over-fetched by a factor of 2 and then reranked; from the reranked set, the `top_k` (5 in this case) results are taken. This is done because reranking would have no effect on the hit-rate if we only fetched the `top_k` results.

### Synthetic Llama2 paper dataset

| Query Type | Hit-rate@5 |
| --- | --- |
| Vector | 0.640 |
| FTS | 0.595 |
| Reranked vector | 0.677 |
| Reranked fts | 0.672 |
| Hybrid | 0.759 |

### SQuAD Dataset

### Uber10K SEC filing Dataset

| Query Type | Hit-rate@5 |
| --- | --- |
| Vector | 0.608 |
| FTS | 0.824 |
| Reranked vector | 0.671 |
| Reranked fts | 0.843 |
| Hybrid | 0.849 |

@@ -5,7 +5,9 @@ Hybrid Search is a broad (often misused) term. It can mean anything from combini

## The challenge of (re)ranking search results
Once you have a group of the most relevant search results from multiple search sources, you'd likely standardize the scores and rank them accordingly. This process can also be seen as another independent step: reranking.
There are two approaches for reranking search results from multiple sources; a concrete sketch of the first follows the list.

* <b>Score-based</b>: Calculate final relevance scores based on a weighted linear combination of individual search algorithm scores. Example: a weighted linear combination of semantic search and keyword-based search scores.

* <b>Relevance-based</b>: Discard the existing scores and calculate the relevance of each search result - query pair. Example: cross-encoder models.
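
A minimal sketch of the score-based approach (the weight and score names are illustrative):

```python
def linear_combination(vector_score: float, fts_score: float, weight: float = 0.7) -> float:
    # Assumes both scores are already normalized to [0, 1];
    # `weight` favors the vector score over the FTS score.
    return weight * vector_score + (1 - weight) * fts_score
```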

Even though there are many strategies for reranking search results, none works for all cases. Moreover, evaluating them is itself a challenge, and reranking can be dataset- and application-specific, so it's hard to generalize.

@@ -13,7 +13,7 @@ Get started using these examples and quick links.
| Integrations | |
|---|---:|
| <h3> LlamaIndex </h3>LlamaIndex is a simple, flexible data framework for connecting custom data sources to large language models. LlamaIndex integrates with LanceDB as the serverless VectorDB. <h3>[Learn More](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html) </h3> |<img src="../assets/llama-index.jpg" alt="image" width="150" height="auto">|
-| <h3>Langchain</h3>Langchain allows building applications with LLMs through composability <h3>[Learn More](https://python.langchain.com/docs/integrations/vectorstores/lancedb) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
+| <h3>Langchain</h3>Langchain allows building applications with LLMs through composability <h3>[Learn More](https://lancedb.github.io/lancedb/integrations/langchain/) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
| <h3>Langchain TS</h3> Javascript bindings for Langchain. It integrates with LanceDB's serverless vectordb, allowing you to build powerful AI applications through composability using only serverless functions. <h3>[Learn More](https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/lancedb) | <img src="../assets/langchain.png" alt="image" width="150" height="auto">|
| <h3>Voxel51</h3> An open source toolkit that enables you to build better computer vision workflows by improving the quality of your datasets and delivering insights about your models.<h3>[Learn More](./voxel51.md) | <img src="../assets/voxel.gif" alt="image" width="150" height="auto">|
| <h3>PromptTools</h3> Offers a set of free, open-source tools for testing and experimenting with models, prompts, and configurations. The core idea is to enable developers to evaluate prompts using familiar interfaces like code and notebooks. You can use it to experiment with different configurations of LanceDB, and test how LanceDB integrates with the LLM of your choice.<h3>[Learn More](./prompttools.md) | <img src="../assets/prompttools.jpeg" alt="image" width="150" height="auto">|

92 docs/src/integrations/langchain.md Normal file
@@ -0,0 +1,92 @@
# Langchain
![Illustration](../assets/langchain.png)

## Quick Start
You can load your document data using Langchain's loaders; this example uses `TextLoader`, with `OpenAIEmbeddings` as the embedding model.
```python
import os

from langchain.document_loaders import TextLoader
from langchain.vectorstores import LanceDB
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import CharacterTextSplitter

os.environ["OPENAI_API_KEY"] = "sk-..."

loader = TextLoader("../../modules/state_of_the_union.txt")  # Replace with your data path
documents = loader.load()

documents = CharacterTextSplitter().split_documents(documents)
embeddings = OpenAIEmbeddings()

docsearch = LanceDB.from_documents(documents, embeddings)
query = "What did the president say about Ketanji Brown Jackson"
docs = docsearch.similarity_search(query)
print(docs[0].page_content)
```

## Documentation
In the above example, the `LanceDB` vector store class object is created using the `from_documents()` method, which is a `classmethod` that returns the initialized class object.
You can also use the `LanceDB.from_texts(texts: List[str], embedding: Embeddings)` class method.

The exhaustive list of parameters for the `LanceDB` vector store is:
- `connection`: (Optional) `lancedb.db.LanceDBConnection` connection object to use. If not provided, a new connection will be created.
- `embedding`: Langchain embedding model.
- `vector_key`: (Optional) Column name to use for vectors in the table. Defaults to `'vector'`.
- `id_key`: (Optional) Column name to use for ids in the table. Defaults to `'id'`.
- `text_key`: (Optional) Column name to use for text in the table. Defaults to `'text'`.
- `table_name`: (Optional) Name of your table in the database. Defaults to `'vectorstore'`.
- `api_key`: (Optional) API key to use for the LanceDB cloud database. Defaults to `None`.
- `region`: (Optional) Region to use for the LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
- `mode`: (Optional) Mode to use for adding data to the table. Defaults to `'overwrite'`.

```python
db_url = "db://lang_test"  # url of db you created
api_key = "xxxxx"  # your API key
region = "us-east-1-dev"  # your selected region

vector_store = LanceDB(
    uri=db_url,
    api_key=api_key,  # (don't include for local API)
    region=region,  # (don't include for local API)
    embedding=embeddings,
    table_name='langchain_test'  # Optional
)
```

### Methods
To add texts and store the respective embeddings automatically:
##### add_texts()
- `texts`: `Iterable` of strings to add to the vectorstore.
- `metadatas`: Optional `list[dict()]` of metadatas associated with the texts.
- `ids`: Optional `list` of ids to associate with the texts.

```python
vector_store.add_texts(texts=['test_123'], metadatas=[{'source': 'wiki'}])

# Additionally, to explore the table you can load it into a df or save it in a csv file:
tbl = vector_store.get_table()
print("tbl:", tbl)
pd_df = tbl.to_pandas()
pd_df.to_csv("docsearch.csv", index=False)

# you can also create a new vector store object using an older connection object:
vector_store = LanceDB(connection=tbl, embedding=embeddings)
```
For index creation, make sure your table has enough data in it. An ANN index is usually not needed for datasets of ~100K vectors or fewer. For large-scale (>1M) or higher-dimension vectors, it is beneficial to create an ANN index.
##### create_index()
- `col_name`: `Optional[str] = None`
- `vector_col`: `Optional[str] = None`
- `num_partitions`: `Optional[int] = 256`
- `num_sub_vectors`: `Optional[int] = 96`
- `index_cache_size`: `Optional[int] = None`

```python
# for creating a vector index
vector_store.create_index(vector_col='vector', metric='cosine')

# for creating a scalar index (for non-vector columns)
vector_store.create_index(col_name='text')
```

139 docs/src/integrations/llamaIndex.md Normal file
@@ -0,0 +1,139 @@
# Llama-Index
![Illustration](../assets/llama-index.jpg)

## Quick start
You need to install the integration via `pip install llama-index-vector-stores-lancedb` in order to use it. You can run the script below to try it out:
```python
import logging
import sys

# Uncomment to see debug logs
# logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
# logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

from llama_index.core import SimpleDirectoryReader, Document, StorageContext
from llama_index.core import VectorStoreIndex
from llama_index.vector_stores.lancedb import LanceDBVectorStore
import textwrap
import openai

openai.api_key = "sk-..."

documents = SimpleDirectoryReader("./data/your-data-dir/").load_data()
print("Document ID:", documents[0].doc_id, "Document Hash:", documents[0].hash)

## For LanceDB cloud:
# vector_store = LanceDBVectorStore(
#     uri="db://db_name",  # your remote DB URI
#     api_key="sk_..",  # lancedb cloud api key
#     region="your-region"  # the region you configured
#     ...
# )

vector_store = LanceDBVectorStore(
    uri="./lancedb", mode="overwrite", query_type="vector"
)
storage_context = StorageContext.from_defaults(vector_store=vector_store)

index = VectorStoreIndex.from_documents(
    documents, storage_context=storage_context
)
lance_filter = "metadata.file_name = 'paul_graham_essay.txt' "
retriever = index.as_retriever(vector_store_kwargs={"where": lance_filter})
response = retriever.retrieve("What did the author do growing up?")
```

### Filtering
For metadata filtering, you can use a Lance SQL-like string filter as demonstrated in the example above. Additionally, you can also filter using the `MetadataFilters` class from LlamaIndex:
```python
from llama_index.core.vector_stores import (
    MetadataFilters,
    FilterOperator,
    FilterCondition,
    MetadataFilter,
)

query_filters = MetadataFilters(
    filters=[
        MetadataFilter(
            key="creation_date", operator=FilterOperator.EQ, value="2024-05-23"
        ),
        MetadataFilter(
            key="file_size", value=75040, operator=FilterOperator.GT
        ),
    ],
    condition=FilterCondition.AND,
)
```

### Hybrid Search
For complete documentation, refer [here](https://lancedb.github.io/lancedb/hybrid_search/hybrid_search/). This example uses the `colbert` reranker. Make sure to install the necessary dependencies for the reranker you choose.
```python
from lancedb.rerankers import ColbertReranker

reranker = ColbertReranker()
vector_store._add_reranker(reranker)

query_engine = index.as_query_engine(
    filters=query_filters,
    vector_store_kwargs={
        "query_type": "hybrid",
    }
)

response = query_engine.query("How much did Viaweb charge per month?")
```

In the above snippet, you can change or specify the `query_type` again when creating the engine/retriever.

## API reference
The exhaustive list of parameters for the `LanceDBVectorStore` vector store is:
- `connection`: Optional, `lancedb.db.LanceDBConnection` connection object to use. If not provided, a new connection will be created.
- `uri`: Optional[str], the uri of your database. Defaults to `"/tmp/lancedb"`.
- `table_name`: Optional[str], name of your table in the database. Defaults to `"vectors"`.
- `table`: Optional[Any], `lancedb.db.LanceTable` object to be passed. Defaults to `None`.
- `vector_column_name`: Optional[Any], column name to use for vectors in the table. Defaults to `'vector'`.
- `doc_id_key`: Optional[str], column name to use for document ids in the table. Defaults to `'doc_id'`.
- `text_key`: Optional[str], column name to use for text in the table. Defaults to `'text'`.
- `api_key`: Optional[str], API key to use for the LanceDB cloud database. Defaults to `None`.
- `region`: Optional[str], region to use for the LanceDB cloud database. Only for LanceDB Cloud, defaults to `None`.
- `nprobes`: Optional[int], the number of probes to use. Only applicable if an ANN index is created on the table; otherwise it's ignored. Defaults to `20`.
- `refine_factor`: Optional[int], refine the results by reading extra elements and re-ranking them in memory. Defaults to `None`.
- `reranker`: Optional[Any], the reranker to use for LanceDB. Defaults to `None`.
- `overfetch_factor`: Optional[int], the factor by which to fetch more results. Defaults to `1`.
- `mode`: Optional[str], the mode to use for LanceDB. Defaults to `"overwrite"`.
- `query_type`: Optional[str], the type of query to use for LanceDB. Defaults to `"vector"`.

### Methods

- __from_table(cls, table: lancedb.db.LanceTable) -> `LanceDBVectorStore`__: (class method) Creates an instance from a lancedb table.

- **_add_reranker(self, reranker: lancedb.rerankers.Reranker) -> `None`**: Adds a reranker to an existing vector store.
    - Usage:
    ```python
    from lancedb.rerankers import ColbertReranker

    reranker = ColbertReranker()
    vector_store._add_reranker(reranker)
    ```
- **_table_exists(self, tbl_name: `Optional[str]` = `None`) -> `bool`**: Returns `True` if `tbl_name` exists in the database.
- __create_index(self, scalar: `Optional[bool]` = False, col_name: `Optional[str]` = None, num_partitions: `Optional[int]` = 256, num_sub_vectors: `Optional[int]` = 96, index_cache_size: `Optional[int]` = None, metric: `Optional[str]` = "L2") -> `None`__: Creates a scalar (for non-vector cols) or a vector index on a table.
  Make sure your vector column has enough data before creating an index on it.

- __add(self, nodes: `List[BaseNode]`, **add_kwargs: `Any`) -> `List[str]`__: Adds nodes to the table.

- **delete(self, ref_doc_id: `str`) -> `None`**: Deletes the nodes belonging to the given `ref_doc_id`.
- **delete_nodes(self, node_ids: `List[str]`) -> `None`**: Deletes nodes with the given `node_ids`.
- __query(self, query: `VectorStoreQuery`, **kwargs: `Any`) -> `VectorStoreQueryResult`__: Queries the index (`VectorStoreIndex`) for the top-k most similar nodes. Accepts a LlamaIndex `VectorStoreQuery` object.

@@ -36,7 +36,7 @@
    }
   ],
   "source": [
-    "!pip install --quiet openai datasets \n",
+    "!pip install --quiet openai datasets\n",
    "!pip install --quiet -U lancedb"
   ]
  },
@@ -213,7 +213,7 @@
    "if \"OPENAI_API_KEY\" not in os.environ:\n",
    "    # OR set the key here as a variable\n",
    "    os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"\n",
-    "    \n",
+    "\n",
    "client = OpenAI()\n",
    "assert len(client.models.list().data) > 0"
   ]
@@ -234,9 +234,12 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "def embed_func(c): \n",
+    "def embed_func(c):\n",
    "    rs = client.embeddings.create(input=c, model=\"text-embedding-ada-002\")\n",
-    "    return [rs.data[0].embedding]"
+    "    return [\n",
+    "        data.embedding\n",
+    "        for data in rs.data\n",
+    "    ]"
   ]
  },
  {
@@ -514,7 +517,7 @@
    "    prompt_start +\n",
    "    \"\\n\\n---\\n\\n\".join(context.text) +\n",
    "    prompt_end\n",
-    "    ) \n",
+    "    )\n",
    "    return prompt"
   ]
  },

@@ -7,7 +7,7 @@ excluded_globs = [
    "../src/fts.md",
    "../src/embedding.md",
    "../src/examples/*.md",
-    "../src/integrations/voxel51.md",
+    "../src/integrations/*.md",
    "../src/guides/tables.md",
    "../src/python/duckdb.md",
    "../src/embeddings/*.md",
@@ -16,6 +16,7 @@ excluded_globs = [
    "../src/basic.md",
    "../src/hybrid_search/hybrid_search.md",
    "../src/reranking/*.md",
+    "../src/guides/tuning_retrievers/*.md",
 ]

 python_prefix = "py"

27 java/core/lancedb-jni/Cargo.toml Normal file
@@ -0,0 +1,27 @@
[package]
name = "lancedb-jni"
description = "JNI bindings for LanceDB"
# TODO modify lancedb/Cargo.toml for version and dependencies
version = "0.4.18"
edition.workspace = true
repository.workspace = true
readme.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
publish = false

[lib]
crate-type = ["cdylib"]

[dependencies]
lancedb = { path = "../../../rust/lancedb" }
lance = { workspace = true }
arrow = { workspace = true, features = ["ffi"] }
arrow-schema.workspace = true
tokio = "1.23"
jni = "0.21.1"
snafu.workspace = true
lazy_static.workspace = true
serde = { version = "^1" }
serde_json = { version = "1" }
130 java/core/lancedb-jni/src/connection.rs Normal file
@@ -0,0 +1,130 @@
use crate::ffi::JNIEnvExt;
use crate::traits::IntoJava;
use crate::{Error, RT};
use jni::objects::{JObject, JString, JValue};
use jni::JNIEnv;
pub const NATIVE_CONNECTION: &str = "nativeConnectionHandle";
use crate::Result;
use lancedb::connection::{connect, Connection};

#[derive(Clone)]
pub struct BlockingConnection {
    pub(crate) inner: Connection,
}

impl BlockingConnection {
    pub fn create(dataset_uri: &str) -> Result<Self> {
        let inner = RT.block_on(connect(dataset_uri).execute())?;
        Ok(Self { inner })
    }

    pub fn table_names(
        &self,
        start_after: Option<String>,
        limit: Option<i32>,
    ) -> Result<Vec<String>> {
        let mut op = self.inner.table_names();
        if let Some(start_after) = start_after {
            op = op.start_after(start_after);
        }
        if let Some(limit) = limit {
            op = op.limit(limit as u32);
        }
        Ok(RT.block_on(op.execute())?)
    }
}

impl IntoJava for BlockingConnection {
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a> {
        attach_native_connection(env, self)
    }
}

fn attach_native_connection<'local>(
    env: &mut JNIEnv<'local>,
    connection: BlockingConnection,
) -> JObject<'local> {
    let j_connection = create_java_connection_object(env);
    // This block sets a native Rust object (Connection) as a field in the Java object (j_connection).
    // Caution: This creates a potential for memory leaks. The Rust object (Connection) is not
    // automatically garbage-collected by Java, and its memory will not be freed unless
    // explicitly handled.
    //
    // To prevent memory leaks, ensure the following:
    // 1. The Java object (`j_connection`) should implement the `java.io.Closeable` interface.
    // 2. Users of this Java object should be instructed to always use it within a try-with-resources
    //    statement (or manually call the `close()` method) to ensure that `self.close()` is invoked.
    match unsafe { env.set_rust_field(&j_connection, NATIVE_CONNECTION, connection) } {
        Ok(_) => j_connection,
        Err(err) => {
            env.throw_new(
                "java/lang/RuntimeException",
                format!("Failed to set native handle for Connection: {}", err),
            )
            .expect("Error throwing exception");
            JObject::null()
        }
    }
}

fn create_java_connection_object<'a>(env: &mut JNIEnv<'a>) -> JObject<'a> {
    env.new_object("com/lancedb/lancedb/Connection", "()V", &[])
        .expect("Failed to create Java Lance Connection instance")
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_releaseNativeConnection(
    mut env: JNIEnv,
    j_connection: JObject,
) {
    let _: BlockingConnection = unsafe {
        env.take_rust_field(j_connection, NATIVE_CONNECTION)
            .expect("Failed to take native Connection handle")
    };
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_connect<'local>(
    mut env: JNIEnv<'local>,
    _obj: JObject,
    dataset_uri_object: JString,
) -> JObject<'local> {
    let dataset_uri: String = ok_or_throw!(env, env.get_string(&dataset_uri_object)).into();
    let blocking_connection = ok_or_throw!(env, BlockingConnection::create(&dataset_uri));
    blocking_connection.into_java(&mut env)
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_tableNames<'local>(
    mut env: JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> JObject<'local> {
    ok_or_throw!(
        env,
        inner_table_names(&mut env, j_connection, start_after_obj, limit_obj)
    )
}

fn inner_table_names<'local>(
    env: &mut JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> Result<JObject<'local>> {
    let start_after = env.get_string_opt(&start_after_obj)?;
    let limit = env.get_int_opt(&limit_obj)?;
    let conn =
        unsafe { env.get_rust_field::<_, _, BlockingConnection>(j_connection, NATIVE_CONNECTION) }?;
    let table_names = conn.table_names(start_after, limit)?;
    drop(conn);
    let j_names = env.new_object("java/util/ArrayList", "()V", &[])?;
    for item in table_names {
        let jstr_item = env.new_string(item)?;
        let item_jobj = JObject::from(jstr_item);
        let item_gen = JValue::Object(&item_jobj);
        env.call_method(&j_names, "add", "(Ljava/lang/Object;)Z", &[item_gen])?;
    }
    Ok(j_names)
}
225 java/core/lancedb-jni/src/error.rs Normal file
@@ -0,0 +1,225 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::str::Utf8Error;

use arrow_schema::ArrowError;
use jni::errors::Error as JniError;
use serde_json::Error as JsonError;
use snafu::{Location, Snafu};

type BoxedError = Box<dyn std::error::Error + Send + Sync + 'static>;

/// Java Exception types
pub enum JavaException {
    IllegalArgumentException,
    IOException,
    RuntimeException,
}

impl JavaException {
    pub fn as_str(&self) -> &str {
        match self {
            Self::IllegalArgumentException => "java/lang/IllegalArgumentException",
            Self::IOException => "java/io/IOException",
            Self::RuntimeException => "java/lang/RuntimeException",
        }
    }
}

/// TODO(lu) change to lancedb-jni
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum Error {
    #[snafu(display("JNI error: {message}, {location}"))]
    Jni { message: String, location: Location },
    #[snafu(display("Invalid argument: {message}, {location}"))]
    InvalidArgument { message: String, location: Location },
    #[snafu(display("IO error: {source}, {location}"))]
    IO {
        source: BoxedError,
        location: Location,
    },
    #[snafu(display("Arrow error: {message}, {location}"))]
    Arrow { message: String, location: Location },
    #[snafu(display("Index error: {message}, {location}"))]
    Index { message: String, location: Location },
    #[snafu(display("JSON error: {message}, {location}"))]
    JSON { message: String, location: Location },
    #[snafu(display("Dataset at path {path} was not found, {location}"))]
    DatasetNotFound { path: String, location: Location },
    #[snafu(display("Dataset already exists: {uri}, {location}"))]
    DatasetAlreadyExists { uri: String, location: Location },
    #[snafu(display("Table '{name}' already exists"))]
    TableAlreadyExists { name: String },
    #[snafu(display("Table '{name}' was not found"))]
    TableNotFound { name: String },
    #[snafu(display("Invalid table name '{name}': {reason}"))]
    InvalidTableName { name: String, reason: String },
    #[snafu(display("Embedding function '{name}' was not found: {reason}, {location}"))]
    EmbeddingFunctionNotFound {
        name: String,
        reason: String,
        location: Location,
    },
    #[snafu(display("Other Lance error: {message}, {location}"))]
    OtherLance { message: String, location: Location },
    #[snafu(display("Other LanceDB error: {message}, {location}"))]
    OtherLanceDB { message: String, location: Location },
}

impl Error {
    /// Throw as Java Exception
    pub fn throw(&self, env: &mut jni::JNIEnv) {
        match self {
            Self::InvalidArgument { .. }
            | Self::DatasetNotFound { .. }
            | Self::DatasetAlreadyExists { .. }
            | Self::TableAlreadyExists { .. }
            | Self::TableNotFound { .. }
            | Self::InvalidTableName { .. }
            | Self::EmbeddingFunctionNotFound { .. } => {
                self.throw_as(env, JavaException::IllegalArgumentException)
            }
            Self::IO { .. } | Self::Index { .. } => self.throw_as(env, JavaException::IOException),
            Self::Arrow { .. }
            | Self::JSON { .. }
            | Self::OtherLance { .. }
            | Self::OtherLanceDB { .. }
            | Self::Jni { .. } => self.throw_as(env, JavaException::RuntimeException),
        }
    }

    /// Throw as a concrete Java Exception
    pub fn throw_as(&self, env: &mut jni::JNIEnv, exception: JavaException) {
        let message = &format!(
            "Error when throwing Java exception: {}:{}",
            exception.as_str(),
            self
        );
        env.throw_new(exception.as_str(), self.to_string())
            .expect(message);
    }
}

pub type Result<T> = std::result::Result<T, Error>;

trait ToSnafuLocation {
    fn to_snafu_location(&'static self) -> snafu::Location;
}

impl ToSnafuLocation for std::panic::Location<'static> {
    fn to_snafu_location(&'static self) -> snafu::Location {
        snafu::Location::new(self.file(), self.line(), self.column())
    }
}

impl From<JniError> for Error {
    #[track_caller]
    fn from(source: JniError) -> Self {
        Self::Jni {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<Utf8Error> for Error {
    #[track_caller]
    fn from(source: Utf8Error) -> Self {
        Self::InvalidArgument {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<ArrowError> for Error {
    #[track_caller]
    fn from(source: ArrowError) -> Self {
        Self::Arrow {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<JsonError> for Error {
    #[track_caller]
    fn from(source: JsonError) -> Self {
        Self::JSON {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<lance::Error> for Error {
    #[track_caller]
    fn from(source: lance::Error) -> Self {
        match source {
            lance::Error::DatasetNotFound {
                path,
                source: _,
                location,
            } => Self::DatasetNotFound { path, location },
            lance::Error::DatasetAlreadyExists { uri, location } => {
                Self::DatasetAlreadyExists { uri, location }
            }
            lance::Error::IO { source, location } => Self::IO { source, location },
            lance::Error::Arrow { message, location } => Self::Arrow { message, location },
            lance::Error::Index { message, location } => Self::Index { message, location },
            lance::Error::InvalidInput { source, location } => Self::InvalidArgument {
                message: source.to_string(),
                location,
            },
            _ => Self::OtherLance {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}

impl From<lancedb::Error> for Error {
    #[track_caller]
    fn from(source: lancedb::Error) -> Self {
        match source {
            lancedb::Error::InvalidTableName { name, reason } => {
                Self::InvalidTableName { name, reason }
            }
            lancedb::Error::InvalidInput { message } => Self::InvalidArgument {
                message,
                location: std::panic::Location::caller().to_snafu_location(),
            },
            lancedb::Error::TableNotFound { name } => Self::TableNotFound { name },
            lancedb::Error::TableAlreadyExists { name } => Self::TableAlreadyExists { name },
            lancedb::Error::EmbeddingFunctionNotFound { name, reason } => {
                Self::EmbeddingFunctionNotFound {
                    name,
                    reason,
                    location: std::panic::Location::caller().to_snafu_location(),
                }
            }
            lancedb::Error::Arrow { source } => Self::Arrow {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
            lancedb::Error::Lance { source } => Self::from(source),
            _ => Self::OtherLanceDB {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}
204 java/core/lancedb-jni/src/ffi.rs Normal file
@@ -0,0 +1,204 @@
|
||||
// Copyright 2024 Lance Developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use core::slice;
|
||||
|
||||
use jni::objects::{JByteBuffer, JObjectArray, JString};
|
||||
use jni::sys::jobjectArray;
|
||||
use jni::{objects::JObject, JNIEnv};
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
|
||||
/// TODO(lu) import from lance-jni without duplicate
|
||||
/// Extend JNIEnv with helper functions.
|
||||
pub trait JNIEnvExt {
|
||||
/// Get integers from Java List<Integer> object.
|
||||
fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>>;
|
||||
|
||||
/// Get strings from Java List<String> object.
|
||||
fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>>;
|
||||
|
||||
/// Get strings from Java String[] object.
|
||||
/// Note that get Option<Vec<String>> from Java Optional<String[]> just doesn't work.
|
||||
#[allow(unused)]
|
||||
fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>>;
|
||||
|
||||
/// Get Option<String> from Java Optional<String>.
|
||||
fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>>;
|
||||
|
||||
/// Get Option<Vec<String>> from Java Optional<List<String>>.
|
||||
#[allow(unused)]
|
||||
fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>>;
|
||||
|
||||
/// Get Option<i32> from Java Optional<Integer>.
|
||||
fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>>;
|
||||
|
||||
/// Get Option<Vec<i32>> from Java Optional<List<Integer>>.
|
||||
fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>>;
|
||||
|
||||
/// Get Option<i64> from Java Optional<Long>.
|
||||
#[allow(unused)]
|
||||
fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>>;
|
||||
|
||||
/// Get Option<u64> from Java Optional<Long>.
|
||||
#[allow(unused)]
|
||||
fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>>;
|
||||
|
||||
/// Get Option<&[u8]> from Java Optional<ByteBuffer>.
|
||||
#[allow(unused)]
|
||||
fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>>;
|
||||
|
||||
    fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
    where
        F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>;
}

impl JNIEnvExt for JNIEnv<'_> {
    fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>> {
        let list = self.get_list(obj)?;
        let mut iter = list.iter(self)?;
        let mut results = Vec::with_capacity(list.size(self)? as usize);
        while let Some(elem) = iter.next(self)? {
            let int_obj = self.call_method(elem, "intValue", "()I", &[])?;
            let int_value = int_obj.i()?;
            results.push(int_value);
        }
        Ok(results)
    }

    fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>> {
        let list = self.get_list(obj)?;
        let mut iter = list.iter(self)?;
        let mut results = Vec::with_capacity(list.size(self)? as usize);
        while let Some(elem) = iter.next(self)? {
            let jstr = JString::from(elem);
            let val = self.get_string(&jstr)?;
            results.push(val.to_str()?.to_string())
        }
        Ok(results)
    }

    fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>> {
        let jobject_array = unsafe { JObjectArray::from_raw(obj) };
        let array_len = self.get_array_length(&jobject_array)?;
        let mut res: Vec<String> = Vec::new();
        for i in 0..array_len {
            let item: JString = self.get_object_array_element(&jobject_array, i)?.into();
            res.push(self.get_string(&item)?.into());
        }
        Ok(res)
    }

    fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_string_obj = java_obj_gen.l()?;
            let jstr = JString::from(java_string_obj);
            let val = env.get_string(&jstr)?;
            Ok(val.to_str()?.to_string())
        })
    }

    fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_strings(&java_list_obj)
        })
    }

    fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_int_obj = java_obj_gen.l()?;
            let int_obj = env.call_method(java_int_obj, "intValue", "()I", &[])?;
            let int_value = int_obj.i()?;
            Ok(int_value)
        })
    }

    fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_integers(&java_list_obj)
        })
    }

    fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            Ok(long_value)
        })
    }

    fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            Ok(long_value as u64)
        })
    }

    fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_byte_buffer_obj = java_obj_gen.l()?;
            let j_byte_buffer = JByteBuffer::from(java_byte_buffer_obj);
            let raw_data = env.get_direct_buffer_address(&j_byte_buffer)?;
            let capacity = env.get_direct_buffer_capacity(&j_byte_buffer)?;
            let data = unsafe { slice::from_raw_parts(raw_data, capacity) };
            Ok(data)
        })
    }

    fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
    where
        F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>,
    {
        if obj.is_null() {
            return Ok(None);
        }
        let is_present = self.call_method(obj, "isPresent", "()Z", &[])?;
        if !is_present.z()? {
            // TODO(lu): move retrieving the inner Java object into this helper,
            // since only a generic java.lang.Object can be fetched here
            Ok(None)
        } else {
            f(self, obj).map(Some)
        }
    }
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseInts(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // List<Integer>
) {
    ok_or_throw_without_return!(env, env.get_integers(&list_obj));
}

#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseIntsOpt(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // Optional<List<Integer>>
) {
    ok_or_throw_without_return!(env, env.get_ints_opt(&list_obj));
}
68
java/core/lancedb-jni/src/lib.rs
Normal file
@@ -0,0 +1,68 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use lazy_static::lazy_static;

// TODO import from lance-jni without duplicate
#[macro_export]
macro_rules! ok_or_throw {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return JObject::null();
            }
        }
    };
}

macro_rules! ok_or_throw_without_return {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return;
            }
        }
    };
}

#[macro_export]
macro_rules! ok_or_throw_with_return {
    ($env:expr, $result:expr, $ret:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return $ret;
            }
        }
    };
}

mod connection;
pub mod error;
mod ffi;
mod traits;

pub use error::{Error, Result};

lazy_static! {
    static ref RT: tokio::runtime::Runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .expect("Failed to create tokio runtime");
}
122
java/core/lancedb-jni/src/traits.rs
Normal file
@@ -0,0 +1,122 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use jni::objects::{JMap, JObject, JString, JValue};
use jni::JNIEnv;

use crate::Result;

pub trait FromJObject<T> {
    fn extract(&self) -> Result<T>;
}

/// Convert a Rust type into a Java Object.
pub trait IntoJava {
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a>;
}

impl FromJObject<i32> for JObject<'_> {
    fn extract(&self) -> Result<i32> {
        Ok(JValue::from(self).i()?)
    }
}

impl FromJObject<i64> for JObject<'_> {
    fn extract(&self) -> Result<i64> {
        Ok(JValue::from(self).j()?)
    }
}

impl FromJObject<f32> for JObject<'_> {
    fn extract(&self) -> Result<f32> {
        Ok(JValue::from(self).f()?)
    }
}

impl FromJObject<f64> for JObject<'_> {
    fn extract(&self) -> Result<f64> {
        Ok(JValue::from(self).d()?)
    }
}

pub trait FromJString {
    fn extract(&self, env: &mut JNIEnv) -> Result<String>;
}

impl FromJString for JString<'_> {
    fn extract(&self, env: &mut JNIEnv) -> Result<String> {
        Ok(env.get_string(self)?.into())
    }
}

pub trait JMapExt {
    #[allow(dead_code)]
    fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>>;

    #[allow(dead_code)]
    fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>>;

    #[allow(dead_code)]
    fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>>;

    #[allow(dead_code)]
    fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>>;

    #[allow(dead_code)]
    fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>>;
}

fn get_map_value<T>(env: &mut JNIEnv, map: &JMap, key: &str) -> Result<Option<T>>
where
    for<'a> JObject<'a>: FromJObject<T>,
{
    let key_obj: JObject = env.new_string(key)?.into();
    if let Some(value) = map.get(env, &key_obj)? {
        if value.is_null() {
            Ok(None)
        } else {
            Ok(Some(value.extract()?))
        }
    } else {
        Ok(None)
    }
}

impl JMapExt for JMap<'_, '_, '_> {
    fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>> {
        let key_obj: JObject = env.new_string(key)?.into();
        if let Some(value) = self.get(env, &key_obj)? {
            let value_str: JString = value.into();
            Ok(Some(value_str.extract(env)?))
        } else {
            Ok(None)
        }
    }

    fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>> {
        get_map_value(env, self, key)
    }

    fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>> {
        get_map_value(env, self, key)
    }

    fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>> {
        get_map_value(env, self, key)
    }

    fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>> {
        get_map_value(env, self, key)
    }
}
94
java/core/pom.xml
Normal file
@@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>com.lancedb</groupId>
    <artifactId>lancedb-parent</artifactId>
    <version>0.1-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>lancedb-core</artifactId>
  <name>LanceDB Core</name>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-vector</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-memory-netty</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-c-data</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.arrow</groupId>
      <artifactId>arrow-dataset</artifactId>
    </dependency>
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
    </dependency>
    <dependency>
      <groupId>org.questdb</groupId>
      <artifactId>jar-jni</artifactId>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <profiles>
    <profile>
      <id>build-jni</id>
      <activation>
        <activeByDefault>true</activeByDefault>
      </activation>
      <build>
        <plugins>
          <plugin>
            <groupId>org.questdb</groupId>
            <artifactId>rust-maven-plugin</artifactId>
            <version>1.1.1</version>
            <executions>
              <execution>
                <id>lancedb-jni</id>
                <goals>
                  <goal>build</goal>
                </goals>
                <configuration>
                  <path>lancedb-jni</path>
                  <!--<release>true</release>-->
                  <!-- Copy native libraries to target/classes for runtime access -->
                  <copyTo>${project.build.directory}/classes/nativelib</copyTo>
                  <copyWithPlatformDir>true</copyWithPlatformDir>
                </configuration>
              </execution>
              <execution>
                <id>lancedb-jni-test</id>
                <goals>
                  <goal>test</goal>
                </goals>
                <configuration>
                  <path>lancedb-jni</path>
                  <release>false</release>
                  <verbosity>-v</verbosity>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>
120
java/core/src/main/java/com/lancedb/lancedb/Connection.java
Normal file
@@ -0,0 +1,120 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.lancedb.lancedb;

import io.questdb.jar.jni.JarJniLoader;
import java.io.Closeable;
import java.util.List;
import java.util.Optional;

/**
 * Represents a LanceDB database.
 */
public class Connection implements Closeable {
  static {
    JarJniLoader.loadLib(Connection.class, "/nativelib", "lancedb_jni");
  }

  private long nativeConnectionHandle;

  /**
   * Connect to a LanceDB instance.
   */
  public static native Connection connect(String uri);

  /**
   * Get the names of all tables in the database. The names are sorted in
   * ascending order.
   *
   * @return the table names
   */
  public List<String> tableNames() {
    return tableNames(Optional.empty(), Optional.empty());
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(int limit) {
    return tableNames(Optional.empty(), Optional.of(limit));
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the supplied
   *                   value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @return the table names
   */
  public List<String> tableNames(String startAfter) {
    return tableNames(Optional.of(startAfter), Optional.empty());
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the supplied
   *                   value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(String startAfter, int limit) {
    return tableNames(Optional.of(startAfter), Optional.of(limit));
  }

  /**
   * Get the names of filtered tables in the database. The names are sorted in
   * ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically after the supplied
   *                   value. This can be combined with limit to implement pagination
   *                   by setting this to the last table name from the previous page.
   * @param limit The number of results to return.
   * @return the table names
   */
  public native List<String> tableNames(
      Optional<String> startAfter, Optional<Integer> limit);

  /**
   * Closes this connection and releases any system resources associated with it.
   * If the connection is already closed, then invoking this method has no effect.
   */
  @Override
  public void close() {
    if (nativeConnectionHandle != 0) {
      releaseNativeConnection(nativeConnectionHandle);
      nativeConnectionHandle = 0;
    }
  }

  /**
   * Native method to release the Lance connection resources associated with the
   * given handle.
   *
   * @param handle The native handle to the connection resource.
   */
  private native void releaseNativeConnection(long handle);

  private Connection() {}
}
135
java/core/src/test/java/com/lancedb/lancedb/ConnectionTest.java
Normal file
@@ -0,0 +1,135 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lancedb.lancedb;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.nio.file.Path;
import java.util.List;
import java.net.URL;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class ConnectionTest {
  private static final String[] TABLE_NAMES = {
    "dataset_version",
    "new_empty_dataset",
    "test",
    "write_stream"
  };

  @TempDir
  static Path tempDir; // Temporary directory for the tests
  private static URL lanceDbURL;

  @BeforeAll
  static void setUp() {
    ClassLoader classLoader = ConnectionTest.class.getClassLoader();
    lanceDbURL = classLoader.getResource("example_db");
  }

  @Test
  void emptyDB() {
    String databaseUri = tempDir.resolve("emptyDB").toString();
    try (Connection conn = Connection.connect(databaseUri)) {
      List<String> tableNames = conn.tableNames();
      assertTrue(tableNames.isEmpty());
    }
  }

  @Test
  void tableNames() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> tableNames = conn.tableNames();
      assertEquals(4, tableNames.size());
      for (int i = 0; i < TABLE_NAMES.length; i++) {
        assertEquals(TABLE_NAMES[i], tableNames.get(i));
      }
    }
  }

  @Test
  void tableNamesStartAfter() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      assertTableNamesStartAfter(conn, TABLE_NAMES[0], 3, TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[1], 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[2], 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[3], 0);
      assertTableNamesStartAfter(conn, "a_dataset", 4, TABLE_NAMES[0], TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "o_dataset", 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "v_dataset", 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "z_dataset", 0);
    }
  }

  private void assertTableNamesStartAfter(Connection conn, String startAfter, int expectedSize, String... expectedNames) {
    List<String> tableNames = conn.tableNames(startAfter);
    assertEquals(expectedSize, tableNames.size());
    for (int i = 0; i < expectedNames.length; i++) {
      assertEquals(expectedNames[i], tableNames.get(i));
    }
  }

  @Test
  void tableNamesLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      for (int i = 0; i <= TABLE_NAMES.length; i++) {
        List<String> tableNames = conn.tableNames(i);
        assertEquals(i, tableNames.size());
        for (int j = 0; j < i; j++) {
          assertEquals(TABLE_NAMES[j], tableNames.get(j));
        }
      }
    }
  }

  @Test
  void tableNamesStartAfterLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> tableNames = conn.tableNames(TABLE_NAMES[0], 2);
      assertEquals(2, tableNames.size());
      assertEquals(TABLE_NAMES[1], tableNames.get(0));
      assertEquals(TABLE_NAMES[2], tableNames.get(1));
      tableNames = conn.tableNames(TABLE_NAMES[1], 1);
      assertEquals(1, tableNames.size());
      assertEquals(TABLE_NAMES[2], tableNames.get(0));
      tableNames = conn.tableNames(TABLE_NAMES[2], 2);
      assertEquals(1, tableNames.size());
      assertEquals(TABLE_NAMES[3], tableNames.get(0));
      tableNames = conn.tableNames(TABLE_NAMES[3], 2);
      assertEquals(0, tableNames.size());
      tableNames = conn.tableNames(TABLE_NAMES[0], 0);
      assertEquals(0, tableNames.size());

      // Limit larger than the number of remaining tables
      tableNames = conn.tableNames(TABLE_NAMES[0], 10);
      assertEquals(3, tableNames.size());
      assertEquals(TABLE_NAMES[1], tableNames.get(0));
      assertEquals(TABLE_NAMES[2], tableNames.get(1));
      assertEquals(TABLE_NAMES[3], tableNames.get(2));

      // Start after a value not in the list
      tableNames = conn.tableNames("non_existent_table", 2);
      assertEquals(2, tableNames.size());
      assertEquals(TABLE_NAMES[2], tableNames.get(0));
      assertEquals(TABLE_NAMES[3], tableNames.get(1));

      // Start after the last table with a limit
      tableNames = conn.tableNames(TABLE_NAMES[3], 1);
      assertEquals(0, tableNames.size());
    }
  }
}
Binary files not shown (example_db test fixtures: Lance dataset manifest and data files).
129
java/pom.xml
Normal file
@@ -0,0 +1,129 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.lancedb</groupId>
  <artifactId>lancedb-parent</artifactId>
  <version>0.1-SNAPSHOT</version>
  <packaging>pom</packaging>

  <name>Lance Parent</name>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>11</maven.compiler.source>
    <maven.compiler.target>11</maven.compiler.target>
    <arrow.version>15.0.0</arrow.version>
  </properties>

  <modules>
    <module>core</module>
  </modules>

  <dependencyManagement>
    <dependencies>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-vector</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-memory-netty</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-c-data</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.apache.arrow</groupId>
        <artifactId>arrow-dataset</artifactId>
        <version>${arrow.version}</version>
      </dependency>
      <dependency>
        <groupId>org.questdb</groupId>
        <artifactId>jar-jni</artifactId>
        <version>1.1.1</version>
      </dependency>
      <dependency>
        <groupId>org.junit.jupiter</groupId>
        <artifactId>junit-jupiter</artifactId>
        <version>5.10.1</version>
      </dependency>
      <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20210307</version>
      </dependency>
    </dependencies>
  </dependencyManagement>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-checkstyle-plugin</artifactId>
        <version>3.3.1</version>
        <configuration>
          <configLocation>google_checks.xml</configLocation>
          <consoleOutput>true</consoleOutput>
          <failsOnError>true</failsOnError>
          <violationSeverity>warning</violationSeverity>
          <linkXRef>false</linkXRef>
        </configuration>
        <executions>
          <execution>
            <id>validate</id>
            <phase>validate</phase>
            <goals>
              <goal>check</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.1</version>
          <configuration>
            <compilerArgs>
              <arg>-h</arg>
              <arg>target/headers</arg>
            </compilerArgs>
          </configuration>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>3.2.5</version>
          <configuration>
            <argLine>--add-opens=java.base/java.nio=ALL-UNNAMED</argLine>
            <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
            <useSystemClassLoader>false</useSystemClassLoader>
          </configuration>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
74
node/package-lock.json
generated
@@ -1,12 +1,12 @@
 {
   "name": "vectordb",
-  "version": "0.4.17",
+  "version": "0.5.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "vectordb",
-      "version": "0.4.17",
+      "version": "0.5.2",
       "cpu": [
         "x64",
         "arm64"
@@ -52,11 +52,11 @@
       "uuid": "^9.0.0"
     },
     "optionalDependencies": {
-      "@lancedb/vectordb-darwin-arm64": "0.4.17",
-      "@lancedb/vectordb-darwin-x64": "0.4.17",
-      "@lancedb/vectordb-linux-arm64-gnu": "0.4.17",
-      "@lancedb/vectordb-linux-x64-gnu": "0.4.17",
-      "@lancedb/vectordb-win32-x64-msvc": "0.4.17"
+      "@lancedb/vectordb-darwin-arm64": "0.4.20",
+      "@lancedb/vectordb-darwin-x64": "0.4.20",
+      "@lancedb/vectordb-linux-arm64-gnu": "0.4.20",
+      "@lancedb/vectordb-linux-x64-gnu": "0.4.20",
+      "@lancedb/vectordb-win32-x64-msvc": "0.4.20"
     },
     "peerDependencies": {
       "@apache-arrow/ts": "^14.0.2",
@@ -333,6 +333,66 @@
       "@jridgewell/sourcemap-codec": "^1.4.10"
     }
   },
+    "node_modules/@lancedb/vectordb-darwin-arm64": {
+      "version": "0.4.20",
+      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.4.20.tgz",
+      "integrity": "sha512-ffP2K4sA5mQTgePyARw1y8dPN996FmpvyAYoWO+TSItaXlhcXvc+KVa5udNMCZMDYeEnEv2Xpj6k4PwW3oBz+A==",
+      "cpu": [
+        "arm64"
+      ],
+      "optional": true,
+      "os": [
+        "darwin"
+      ]
+    },
+    "node_modules/@lancedb/vectordb-darwin-x64": {
+      "version": "0.4.20",
+      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-darwin-x64/-/vectordb-darwin-x64-0.4.20.tgz",
+      "integrity": "sha512-GSYsXE20RIehDu30FjREhJdEzhnwOTV7ZsrSXagStzLY1gr7pyd7sfqxmmUtdD09di7LnQoiM71AOpPTa01YwQ==",
+      "cpu": [
+        "x64"
+      ],
+      "optional": true,
+      "os": [
+        "darwin"
+      ]
+    },
+    "node_modules/@lancedb/vectordb-linux-arm64-gnu": {
+      "version": "0.4.20",
+      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-arm64-gnu/-/vectordb-linux-arm64-gnu-0.4.20.tgz",
+      "integrity": "sha512-FpNOjOsz3nJVm6EBGyNgbOW2aFhsWZ/igeY45Z8hbZaaK2YBwrg/DASoNlUzgv6IR8cUaGJ2irNVJfsKR2cG6g==",
+      "cpu": [
+        "arm64"
+      ],
+      "optional": true,
+      "os": [
+        "linux"
+      ]
+    },
+    "node_modules/@lancedb/vectordb-linux-x64-gnu": {
+      "version": "0.4.20",
+      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-linux-x64-gnu/-/vectordb-linux-x64-gnu-0.4.20.tgz",
+      "integrity": "sha512-pOqWjrRZQSrLTlQPkjidRii7NZDw8Xu9pN6ouVu2JAK8n81FXaPtFCyAI+Y3v9GpnYDN0rvD4eQ36aHAVPsa2g==",
+      "cpu": [
+        "x64"
+      ],
+      "optional": true,
+      "os": [
+        "linux"
+      ]
+    },
+    "node_modules/@lancedb/vectordb-win32-x64-msvc": {
+      "version": "0.4.20",
+      "resolved": "https://registry.npmjs.org/@lancedb/vectordb-win32-x64-msvc/-/vectordb-win32-x64-msvc-0.4.20.tgz",
+      "integrity": "sha512-5J5SsYSJ7jRCmU/sgwVHdrGz43B/7R2T9OEoFTKyVAtqTZdu75rkytXyn9SyEayXVhlUOaw76N0ASm0hAoDS/A==",
+      "cpu": [
+        "x64"
+      ],
+      "optional": true,
+      "os": [
+        "win32"
+      ]
+    },
     "node_modules/@neon-rs/cli": {
       "version": "0.0.160",
       "resolved": "https://registry.npmjs.org/@neon-rs/cli/-/cli-0.0.160.tgz",
node/package.json
@@ -1,12 +1,12 @@
 {
   "name": "vectordb",
-  "version": "0.4.17",
+  "version": "0.5.2",
   "description": " Serverless, low-latency vector database for AI applications",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
     "tsc": "tsc -b",
-    "build": "npm run tsc && cargo-cp-artifact --artifact cdylib lancedb-node index.node -- cargo build --message-format=json",
+    "build": "npm run tsc && cargo-cp-artifact --artifact cdylib lancedb_node index.node -- cargo build --message-format=json",
     "build-release": "npm run build -- --release",
     "test": "npm run tsc && mocha -recursive dist/test",
     "integration-test": "npm run tsc && mocha -recursive dist/integration_test",
@@ -88,10 +88,10 @@
     }
   },
   "optionalDependencies": {
-    "@lancedb/vectordb-darwin-arm64": "0.4.17",
-    "@lancedb/vectordb-darwin-x64": "0.4.17",
-    "@lancedb/vectordb-linux-arm64-gnu": "0.4.17",
-    "@lancedb/vectordb-linux-x64-gnu": "0.4.17",
-    "@lancedb/vectordb-win32-x64-msvc": "0.4.17"
+    "@lancedb/vectordb-darwin-arm64": "0.4.20",
+    "@lancedb/vectordb-darwin-x64": "0.4.20",
+    "@lancedb/vectordb-linux-arm64-gnu": "0.4.20",
+    "@lancedb/vectordb-linux-x64-gnu": "0.4.20",
+    "@lancedb/vectordb-win32-x64-msvc": "0.4.20"
   }
 }
node/src/arrow.ts
@@ -27,23 +27,23 @@ import {
   RecordBatch,
   makeData,
   Struct,
-  Float,
+  type Float,
   DataType,
   Binary,
   Float32
-} from 'apache-arrow'
-import { type EmbeddingFunction } from './index'
-import { sanitizeSchema } from './sanitize'
+} from "apache-arrow";
+import { type EmbeddingFunction } from "./index";
+import { sanitizeSchema } from "./sanitize";

 /*
  * Options to control how a column should be converted to a vector array
  */
 export class VectorColumnOptions {
   /** Vector column type. */
-  type: Float = new Float32()
+  type: Float = new Float32();

-  constructor (values?: Partial<VectorColumnOptions>) {
-    Object.assign(this, values)
+  constructor(values?: Partial<VectorColumnOptions>) {
+    Object.assign(this, values);
   }
 }

@@ -60,7 +60,7 @@ export class MakeArrowTableOptions
    * The schema must be specified if there are no records (e.g. to make
    * an empty table)
    */
-  schema?: Schema
+  schema?: Schema;

   /*
    * Mapping from vector column name to expected type
@@ -80,7 +80,9 @@ export class MakeArrowTableOptions
    */
   vectorColumns: Record<string, VectorColumnOptions> = {
     vector: new VectorColumnOptions()
-  }
+  };
+
+  embeddings?: EmbeddingFunction<any>;

   /**
    * If true then string columns will be encoded with dictionary encoding
@@ -91,10 +93,10 @@ export class MakeArrowTableOptions
    *
    * If `schema` is provided then this property is ignored.
    */
-  dictionaryEncodeStrings: boolean = false
+  dictionaryEncodeStrings: boolean = false;

-  constructor (values?: Partial<MakeArrowTableOptions>) {
-    Object.assign(this, values)
+  constructor(values?: Partial<MakeArrowTableOptions>) {
+    Object.assign(this, values);
   }
 }
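For orientation, a minimal sketch (editor's illustration, not part of the diff) of how the options above combine; the column name "embedding" is hypothetical:

// Convert a hypothetical "embedding" column to FixedSizeList<Float32>
// instead of relying only on the default "vector" mapping shown above.
const opts = new MakeArrowTableOptions({
  vectorColumns: {
    embedding: new VectorColumnOptions({ type: new Float32() })
  },
  dictionaryEncodeStrings: false
});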
@@ -193,59 +195,68 @@ export class MakeArrowTableOptions
  * assert.deepEqual(table.schema, schema)
  * ```
  */
-export function makeArrowTable (
+export function makeArrowTable(
   data: Array<Record<string, any>>,
   options?: Partial<MakeArrowTableOptions>
 ): ArrowTable {
-  if (data.length === 0 && (options?.schema === undefined || options?.schema === null)) {
-    throw new Error('At least one record or a schema needs to be provided')
+  if (
+    data.length === 0 &&
+    (options?.schema === undefined || options?.schema === null)
+  ) {
+    throw new Error("At least one record or a schema needs to be provided");
   }

-  const opt = new MakeArrowTableOptions(options !== undefined ? options : {})
+  const opt = new MakeArrowTableOptions(options !== undefined ? options : {});
   if (opt.schema !== undefined && opt.schema !== null) {
-    opt.schema = sanitizeSchema(opt.schema)
+    opt.schema = sanitizeSchema(opt.schema);
+    opt.schema = validateSchemaEmbeddings(opt.schema, data, opt.embeddings);
   }
-  const columns: Record<string, Vector> = {}
+
+  const columns: Record<string, Vector> = {};
   // TODO: sample dataset to find missing columns
   // Prefer the field ordering of the schema, if present
-  const columnNames = ((opt.schema) != null) ? (opt.schema.names as string[]) : Object.keys(data[0])
+  const columnNames =
+    opt.schema != null ? (opt.schema.names as string[]) : Object.keys(data[0]);
   for (const colName of columnNames) {
-    if (data.length !== 0 && !Object.prototype.hasOwnProperty.call(data[0], colName)) {
+    if (
+      data.length !== 0 &&
+      !Object.prototype.hasOwnProperty.call(data[0], colName)
+    ) {
       // The field is present in the schema, but not in the data, skip it
-      continue
+      continue;
     }
     // Extract a single column from the records (transpose from row-major to col-major)
-    let values = data.map((datum) => datum[colName])
+    let values = data.map((datum) => datum[colName]);

     // By default (type === undefined) arrow will infer the type from the JS type
-    let type
+    let type;
     if (opt.schema !== undefined) {
       // If there is a schema provided, then use that for the type instead
-      type = opt.schema?.fields.filter((f) => f.name === colName)[0]?.type
+      type = opt.schema?.fields.filter((f) => f.name === colName)[0]?.type;
       if (DataType.isInt(type) && type.bitWidth === 64) {
         // wrap in BigInt to avoid bug: https://github.com/apache/arrow/issues/40051
         values = values.map((v) => {
           if (v === null) {
-            return v
+            return v;
           }
-          return BigInt(v)
-        })
+          return BigInt(v);
+        });
       }
     } else {
       // Otherwise, check to see if this column is one of the vector columns
       // defined by opt.vectorColumns and, if so, use the fixed size list type
-      const vectorColumnOptions = opt.vectorColumns[colName]
+      const vectorColumnOptions = opt.vectorColumns[colName];
       if (vectorColumnOptions !== undefined) {
-        type = newVectorType(values[0].length, vectorColumnOptions.type)
+        type = newVectorType(values[0].length, vectorColumnOptions.type);
       }
     }

     try {
       // Convert an Array of JS values to an arrow vector
-      columns[colName] = makeVector(values, type, opt.dictionaryEncodeStrings)
+      columns[colName] = makeVector(values, type, opt.dictionaryEncodeStrings);
     } catch (error: unknown) {
       // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-      throw Error(`Could not convert column "${colName}" to Arrow: ${error}`)
+      throw Error(`Could not convert column "${colName}" to Arrow: ${error}`);
     }
   }

@@ -260,97 +271,116 @@ export function makeArrowTable (
     // To work around this we first create a table with the wrong schema and
     // then patch the schema of the batches so we can use
     // `new ArrowTable(schema, batches)` which does not do any schema inference
-    const firstTable = new ArrowTable(columns)
-    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-    const batchesFixed = firstTable.batches.map(batch => new RecordBatch(opt.schema!, batch.data))
-    return new ArrowTable(opt.schema, batchesFixed)
+    const firstTable = new ArrowTable(columns);
+    const batchesFixed = firstTable.batches.map(
+      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+      (batch) => new RecordBatch(opt.schema!, batch.data)
+    );
+    return new ArrowTable(opt.schema, batchesFixed);
   } else {
-    return new ArrowTable(columns)
+    return new ArrowTable(columns);
   }
 }
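A quick usage sketch of the function above (editor's illustration; the records are hypothetical): with default options the "vector" column is mapped to a fixed-size list.

const table = makeArrowTable([
  { id: 1, vector: [0.1, 0.2] },
  { id: 2, vector: [1.1, 1.2] }
]);
// "vector" becomes FixedSizeList<Float32> of dimension 2 (the default
// vectorColumns mapping); "id" falls back to Arrow's own type inference.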
 /**
  * Create an empty Arrow table with the provided schema
  */
-export function makeEmptyTable (schema: Schema): ArrowTable {
-  return makeArrowTable([], { schema })
+export function makeEmptyTable(schema: Schema): ArrowTable {
+  return makeArrowTable([], { schema });
 }

 // Helper function to convert Array<Array<any>> to a variable sized list array
-function makeListVector (lists: any[][]): Vector<any> {
+function makeListVector(lists: any[][]): Vector<any> {
   if (lists.length === 0 || lists[0].length === 0) {
-    throw Error('Cannot infer list vector from empty array or empty list')
+    throw Error("Cannot infer list vector from empty array or empty list");
   }
-  const sampleList = lists[0]
-  let inferredType
+  const sampleList = lists[0];
+  let inferredType;
   try {
-    const sampleVector = makeVector(sampleList)
-    inferredType = sampleVector.type
+    const sampleVector = makeVector(sampleList);
+    inferredType = sampleVector.type;
   } catch (error: unknown) {
     // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-    throw Error(`Cannot infer list vector. Cannot infer inner type: ${error}`)
+    throw Error(`Cannot infer list vector. Cannot infer inner type: ${error}`);
   }

   const listBuilder = makeBuilder({
-    type: new List(new Field('item', inferredType, true))
-  })
+    type: new List(new Field("item", inferredType, true))
+  });
   for (const list of lists) {
-    listBuilder.append(list)
+    listBuilder.append(list);
   }
-  return listBuilder.finish().toVector()
+  return listBuilder.finish().toVector();
 }

 // Helper function to convert an Array of JS values to an Arrow Vector
-function makeVector (values: any[], type?: DataType, stringAsDictionary?: boolean): Vector<any> {
+function makeVector(
+  values: any[],
+  type?: DataType,
+  stringAsDictionary?: boolean
+): Vector<any> {
   if (type !== undefined) {
     // No need for inference, let Arrow create it
-    return vectorFromArray(values, type)
+    return vectorFromArray(values, type);
   }
   if (values.length === 0) {
-    throw Error('makeVector requires at least one value or the type must be specified')
+    throw Error(
+      "makeVector requires at least one value or the type must be specified"
+    );
   }
-  const sampleValue = values.find(val => val !== null && val !== undefined)
+  const sampleValue = values.find((val) => val !== null && val !== undefined);
   if (sampleValue === undefined) {
-    throw Error('makeVector cannot infer the type if all values are null or undefined')
+    throw Error(
+      "makeVector cannot infer the type if all values are null or undefined"
+    );
   }
   if (Array.isArray(sampleValue)) {
     // Default Arrow inference doesn't handle list types
-    return makeListVector(values)
+    return makeListVector(values);
   } else if (Buffer.isBuffer(sampleValue)) {
     // Default Arrow inference doesn't handle Buffer
-    return vectorFromArray(values, new Binary())
-  } else if (!(stringAsDictionary ?? false) && (typeof sampleValue === 'string' || sampleValue instanceof String)) {
+    return vectorFromArray(values, new Binary());
+  } else if (
+    !(stringAsDictionary ?? false) &&
+    (typeof sampleValue === "string" || sampleValue instanceof String)
+  ) {
     // If the type is string then don't use Arrow's default inference unless dictionaries are requested
     // because it will always use dictionary encoding for strings
-    return vectorFromArray(values, new Utf8())
+    return vectorFromArray(values, new Utf8());
   } else {
     // Convert a JS array of values to an arrow vector
-    return vectorFromArray(values)
+    return vectorFromArray(values);
   }
 }
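The inference rules in makeVector above can be summarized with a few illustrative inputs (hypothetical data, shown through makeArrowTable since makeVector is module-private):

makeArrowTable([{ tags: ["a", "b"] }]);       // nested arrays -> List (via makeListVector)
makeArrowTable([{ blob: Buffer.from([1]) }]); // Buffer -> Binary
makeArrowTable([{ name: "x" }]);              // string -> Utf8, not dictionary-encoded,
                                              // unless dictionaryEncodeStrings is true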
-async function applyEmbeddings<T> (table: ArrowTable, embeddings?: EmbeddingFunction<T>, schema?: Schema): Promise<ArrowTable> {
+async function applyEmbeddings<T>(
+  table: ArrowTable,
+  embeddings?: EmbeddingFunction<T>,
+  schema?: Schema
+): Promise<ArrowTable> {
   if (embeddings == null) {
-    return table
+    return table;
   }
   if (schema !== undefined && schema !== null) {
-    schema = sanitizeSchema(schema)
+    schema = sanitizeSchema(schema);
   }

   // Convert from ArrowTable to Record<String, Vector>
   const colEntries = [...Array(table.numCols).keys()].map((_, idx) => {
-    const name = table.schema.fields[idx].name
+    const name = table.schema.fields[idx].name;
     // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-    const vec = table.getChildAt(idx)!
-    return [name, vec]
-  })
-  const newColumns = Object.fromEntries(colEntries)
+    const vec = table.getChildAt(idx)!;
+    return [name, vec];
+  });
+  const newColumns = Object.fromEntries(colEntries);

-  const sourceColumn = newColumns[embeddings.sourceColumn]
-  const destColumn = embeddings.destColumn ?? 'vector'
-  const innerDestType = embeddings.embeddingDataType ?? new Float32()
+  const sourceColumn = newColumns[embeddings.sourceColumn];
+  const destColumn = embeddings.destColumn ?? "vector";
+  const innerDestType = embeddings.embeddingDataType ?? new Float32();
   if (sourceColumn === undefined) {
-    throw new Error(`Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`)
+    throw new Error(
+      `Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`
+    );
   }

   if (table.numRows === 0) {
@@ -358,45 +388,60 @@ async function applyEmbeddings<T> (table: ArrowTable, embeddings?: EmbeddingFunc
       // We have an empty table and it already has the embedding column so no work needs to be done
       // Note: we don't return an error like we did below because this is a common occurrence. For example,
       // if we call convertToTable with 0 records and a schema that includes the embedding
-      return table
+      return table;
     }
     if (embeddings.embeddingDimension !== undefined) {
-      const destType = newVectorType(embeddings.embeddingDimension, innerDestType)
-      newColumns[destColumn] = makeVector([], destType)
+      const destType = newVectorType(
+        embeddings.embeddingDimension,
+        innerDestType
+      );
+      newColumns[destColumn] = makeVector([], destType);
     } else if (schema != null) {
-      const destField = schema.fields.find(f => f.name === destColumn)
+      const destField = schema.fields.find((f) => f.name === destColumn);
       if (destField != null) {
-        newColumns[destColumn] = makeVector([], destField.type)
+        newColumns[destColumn] = makeVector([], destField.type);
       } else {
-        throw new Error(`Attempt to apply embeddings to an empty table failed because schema was missing embedding column '${destColumn}'`)
+        throw new Error(
+          `Attempt to apply embeddings to an empty table failed because schema was missing embedding column '${destColumn}'`
+        );
      }
     } else {
-      throw new Error('Attempt to apply embeddings to an empty table when the embeddings function does not specify `embeddingDimension`')
+      throw new Error(
+        "Attempt to apply embeddings to an empty table when the embeddings function does not specify `embeddingDimension`"
+      );
     }
   } else {
     if (Object.prototype.hasOwnProperty.call(newColumns, destColumn)) {
-      throw new Error(`Attempt to apply embeddings to table failed because column ${destColumn} already existed`)
+      throw new Error(
+        `Attempt to apply embeddings to table failed because column ${destColumn} already existed`
+      );
     }
     if (table.batches.length > 1) {
-      throw new Error('Internal error: `makeArrowTable` unexpectedly created a table with more than one batch')
+      throw new Error(
+        "Internal error: `makeArrowTable` unexpectedly created a table with more than one batch"
+      );
     }
-    const values = sourceColumn.toArray()
-    const vectors = await embeddings.embed(values as T[])
+    const values = sourceColumn.toArray();
+    const vectors = await embeddings.embed(values as T[]);
     if (vectors.length !== values.length) {
-      throw new Error('Embedding function did not return an embedding for each input element')
+      throw new Error(
+        "Embedding function did not return an embedding for each input element"
+      );
     }
-    const destType = newVectorType(vectors[0].length, innerDestType)
-    newColumns[destColumn] = makeVector(vectors, destType)
+    const destType = newVectorType(vectors[0].length, innerDestType);
+    newColumns[destColumn] = makeVector(vectors, destType);
   }

-  const newTable = new ArrowTable(newColumns)
+  const newTable = new ArrowTable(newColumns);
   if (schema != null) {
-    if (schema.fields.find(f => f.name === destColumn) === undefined) {
-      throw new Error(`When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`)
+    if (schema.fields.find((f) => f.name === destColumn) === undefined) {
+      throw new Error(
+        `When using embedding functions and specifying a schema the schema should include the embedding column but the column ${destColumn} was missing`
+      );
     }
-    return alignTable(newTable, schema)
+    return alignTable(newTable, schema);
   }
-  return newTable
+  return newTable;
 }
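To make the flow above concrete, a minimal sketch of an embedding function driving convertToTable (defined just below). This is an editor's illustration: it assumes EmbeddingFunction only requires sourceColumn and embed, the zero vectors are placeholders, and it must run inside an async context.

const fakeEmbedding: EmbeddingFunction<string> = {
  sourceColumn: "text",           // column to read inputs from
  embeddingDimension: 2,          // lets applyEmbeddings handle empty tables too
  embed: async (texts) => texts.map(() => [0, 0]) // placeholder vectors
};
const withVectors = await convertToTable([{ text: "hello" }], fakeEmbedding);
// Adds a "vector" column (the destColumn default), typed
// FixedSizeList<Float32> with dimension 2.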
 /*
@@ -417,21 +462,24 @@ async function applyEmbeddings<T> (table: ArrowTable, embeddings?: EmbeddingFunc
  * embedding columns. If no schema is provided then embedding columns will
  * be placed at the end of the table, after all of the input columns.
  */
-export async function convertToTable<T> (
+export async function convertToTable<T>(
   data: Array<Record<string, unknown>>,
   embeddings?: EmbeddingFunction<T>,
   makeTableOptions?: Partial<MakeArrowTableOptions>
 ): Promise<ArrowTable> {
-  const table = makeArrowTable(data, makeTableOptions)
-  return await applyEmbeddings(table, embeddings, makeTableOptions?.schema)
+  const table = makeArrowTable(data, makeTableOptions);
+  return await applyEmbeddings(table, embeddings, makeTableOptions?.schema);
 }

 // Creates the Arrow Type for a Vector column with dimension `dim`
-function newVectorType <T extends Float> (dim: number, innerType: T): FixedSizeList<T> {
+function newVectorType<T extends Float>(
+  dim: number,
+  innerType: T
+): FixedSizeList<T> {
   // Somewhere we always default to have the elements nullable, so we need to set it to true
   // otherwise we often get schema mismatches because the stored data always has schema with nullable elements
-  const children = new Field<T>('item', innerType, true)
-  return new FixedSizeList(dim, children)
+  const children = new Field<T>("item", innerType, true);
+  return new FixedSizeList(dim, children);
 }
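Spelled out with plain arrow types, the helper above builds (for, say, dim = 2):

// Equivalent construction; the `true` marks the child nullable so that
// schemas round-tripped through storage compare equal.
const vectorType = new FixedSizeList(2, new Field("item", new Float32(), true));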
 /**
@@ -441,17 +489,17 @@ function newVectorType <T extends Float> (dim: number, innerType: T): FixedSizeL
  *
  * `schema` is required if data is empty
  */
-export async function fromRecordsToBuffer<T> (
+export async function fromRecordsToBuffer<T>(
   data: Array<Record<string, unknown>>,
   embeddings?: EmbeddingFunction<T>,
   schema?: Schema
 ): Promise<Buffer> {
   if (schema !== undefined && schema !== null) {
-    schema = sanitizeSchema(schema)
+    schema = sanitizeSchema(schema);
   }
-  const table = await convertToTable(data, embeddings, { schema })
-  const writer = RecordBatchFileWriter.writeAll(table)
-  return Buffer.from(await writer.toUint8Array())
+  const table = await convertToTable(data, embeddings, { schema, embeddings });
+  const writer = RecordBatchFileWriter.writeAll(table);
+  return Buffer.from(await writer.toUint8Array());
 }

 /**
@@ -461,17 +509,17 @@ export async function fromRecordsToBuffer<T> (
  *
  * `schema` is required if data is empty
  */
-export async function fromRecordsToStreamBuffer<T> (
+export async function fromRecordsToStreamBuffer<T>(
   data: Array<Record<string, unknown>>,
   embeddings?: EmbeddingFunction<T>,
   schema?: Schema
 ): Promise<Buffer> {
   if (schema !== null && schema !== undefined) {
-    schema = sanitizeSchema(schema)
+    schema = sanitizeSchema(schema);
   }
-  const table = await convertToTable(data, embeddings, { schema })
-  const writer = RecordBatchStreamWriter.writeAll(table)
-  return Buffer.from(await writer.toUint8Array())
+  const table = await convertToTable(data, embeddings, { schema });
+  const writer = RecordBatchStreamWriter.writeAll(table);
+  return Buffer.from(await writer.toUint8Array());
 }

 /**
@@ -482,17 +530,17 @@ export async function fromRecordsToStreamBuffer<T> (
  *
  * `schema` is required if the table is empty
  */
-export async function fromTableToBuffer<T> (
+export async function fromTableToBuffer<T>(
   table: ArrowTable,
   embeddings?: EmbeddingFunction<T>,
   schema?: Schema
 ): Promise<Buffer> {
   if (schema !== null && schema !== undefined) {
-    schema = sanitizeSchema(schema)
+    schema = sanitizeSchema(schema);
   }
-  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
-  const writer = RecordBatchFileWriter.writeAll(tableWithEmbeddings)
-  return Buffer.from(await writer.toUint8Array())
+  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema);
+  const writer = RecordBatchFileWriter.writeAll(tableWithEmbeddings);
+  return Buffer.from(await writer.toUint8Array());
 }

 /**
@@ -503,49 +551,85 @@ export async function fromTableToBuffer<T> (
  *
  * `schema` is required if the table is empty
  */
-export async function fromTableToStreamBuffer<T> (
+export async function fromTableToStreamBuffer<T>(
   table: ArrowTable,
   embeddings?: EmbeddingFunction<T>,
   schema?: Schema
 ): Promise<Buffer> {
   if (schema !== null && schema !== undefined) {
-    schema = sanitizeSchema(schema)
+    schema = sanitizeSchema(schema);
   }
-  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema)
-  const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings)
-  return Buffer.from(await writer.toUint8Array())
+  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema);
+  const writer = RecordBatchStreamWriter.writeAll(tableWithEmbeddings);
+  return Buffer.from(await writer.toUint8Array());
 }

-function alignBatch (batch: RecordBatch, schema: Schema): RecordBatch {
-  const alignedChildren = []
+function alignBatch(batch: RecordBatch, schema: Schema): RecordBatch {
+  const alignedChildren = [];
   for (const field of schema.fields) {
     const indexInBatch = batch.schema.fields?.findIndex(
       (f) => f.name === field.name
-    )
+    );
     if (indexInBatch < 0) {
       throw new Error(
         `The column ${field.name} was not found in the Arrow Table`
-      )
+      );
     }
-    alignedChildren.push(batch.data.children[indexInBatch])
+    alignedChildren.push(batch.data.children[indexInBatch]);
   }
   const newData = makeData({
     type: new Struct(schema.fields),
     length: batch.numRows,
     nullCount: batch.nullCount,
     children: alignedChildren
-  })
-  return new RecordBatch(schema, newData)
+  });
+  return new RecordBatch(schema, newData);
 }

-function alignTable (table: ArrowTable, schema: Schema): ArrowTable {
+function alignTable(table: ArrowTable, schema: Schema): ArrowTable {
   const alignedBatches = table.batches.map((batch) =>
     alignBatch(batch, schema)
-  )
-  return new ArrowTable(schema, alignedBatches)
+  );
+  return new ArrowTable(schema, alignedBatches);
 }

 // Creates an empty Arrow Table
-export function createEmptyTable (schema: Schema): ArrowTable {
-  return new ArrowTable(sanitizeSchema(schema))
+export function createEmptyTable(schema: Schema): ArrowTable {
+  return new ArrowTable(sanitizeSchema(schema));
 }

+function validateSchemaEmbeddings(
+  schema: Schema<any>,
+  data: Array<Record<string, unknown>>,
+  embeddings: EmbeddingFunction<any> | undefined
+) {
+  const fields = [];
+  const missingEmbeddingFields = [];
+
+  // First we check if the field is a `FixedSizeList`
+  // Then we check if the data contains the field
+  // if it does not, we add it to the list of missing embedding fields
+  // Finally, we check if those missing embedding fields are `this._embeddings`
+  // if they are not, we throw an error
+  for (const field of schema.fields) {
+    if (field.type instanceof FixedSizeList) {
+      if (data.length !== 0 && data?.[0]?.[field.name] === undefined) {
+        missingEmbeddingFields.push(field);
+      } else {
+        fields.push(field);
+      }
+    } else {
+      fields.push(field);
+    }
+  }
+
+  if (missingEmbeddingFields.length > 0 && embeddings === undefined) {
+    throw new Error(
+      `Table has embeddings: "${missingEmbeddingFields
+        .map((f) => f.name)
+        .join(",")}", but no embedding function was provided`
+    );
+  }
+
+  return new Schema(fields, schema.metadata);
+}
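A sketch of the guard above in action (editor's illustration; the schema and records are hypothetical): a declared vector column with no matching data and no embedding function raises the error.

const schema = new Schema([
  new Field("text", new Utf8(), true),
  new Field("vector", new FixedSizeList(2, new Field("item", new Float32(), true)), true)
]);
makeArrowTable([{ text: "hello" }], { schema });
// -> Error: Table has embeddings: "vector", but no embedding function was provided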
@@ -12,19 +12,20 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import { type Schema, Table as ArrowTable, tableFromIPC } from 'apache-arrow'
import { type Schema, Table as ArrowTable, tableFromIPC } from "apache-arrow";
import {
createEmptyTable,
fromRecordsToBuffer,
fromTableToBuffer,
makeArrowTable
} from './arrow'
import type { EmbeddingFunction } from './embedding/embedding_function'
import { RemoteConnection } from './remote'
import { Query } from './query'
import { isEmbeddingFunction } from './embedding/embedding_function'
import { type Literal, toSQL } from './util'
import { type HttpMiddleware } from './middleware'
} from "./arrow";
import type { EmbeddingFunction } from "./embedding/embedding_function";
import { RemoteConnection } from "./remote";
import { Query } from "./query";
import { isEmbeddingFunction } from "./embedding/embedding_function";
import { type Literal, toSQL } from "./util";

import { type HttpMiddleware } from "./middleware";

const {
databaseNew,
@@ -48,14 +49,18 @@ const {
tableAlterColumns,
tableDropColumns
// eslint-disable-next-line @typescript-eslint/no-var-requires
} = require('../native.js')
} = require("../native.js");

export { Query }
export type { EmbeddingFunction }
export { OpenAIEmbeddingFunction } from './embedding/openai'
export { convertToTable, makeArrowTable, type MakeArrowTableOptions } from './arrow'
export { Query };
export type { EmbeddingFunction };
export { OpenAIEmbeddingFunction } from "./embedding/openai";
export {
convertToTable,
makeArrowTable,
type MakeArrowTableOptions
} from "./arrow";

const defaultAwsRegion = 'us-west-2'
const defaultAwsRegion = "us-west-2";

export interface AwsCredentials {
accessKeyId: string
@@ -128,19 +133,19 @@ export interface ConnectionOptions {
readConsistencyInterval?: number
}

function getAwsArgs (opts: ConnectionOptions): any[] {
const callArgs: any[] = []
const awsCredentials = opts.awsCredentials
function getAwsArgs(opts: ConnectionOptions): any[] {
const callArgs: any[] = [];
const awsCredentials = opts.awsCredentials;
if (awsCredentials !== undefined) {
callArgs.push(awsCredentials.accessKeyId)
callArgs.push(awsCredentials.secretKey)
callArgs.push(awsCredentials.sessionToken)
callArgs.push(awsCredentials.accessKeyId);
callArgs.push(awsCredentials.secretKey);
callArgs.push(awsCredentials.sessionToken);
} else {
callArgs.fill(undefined, 0, 3)
callArgs.fill(undefined, 0, 3);
}

callArgs.push(opts.awsRegion)
return callArgs
callArgs.push(opts.awsRegion);
return callArgs;
}

export interface CreateTableOptions<T> {
@@ -173,56 +178,56 @@ export interface CreateTableOptions<T> {
*
* @see {@link ConnectionOptions} for more details on the URI format.
*/
export async function connect (uri: string): Promise<Connection>
export async function connect(uri: string): Promise<Connection>;
/**
* Connect to a LanceDB instance with connection options.
*
* @param opts The {@link ConnectionOptions} to use when connecting to the database.
*/
export async function connect (
export async function connect(
opts: Partial<ConnectionOptions>
): Promise<Connection>
export async function connect (
): Promise<Connection>;
export async function connect(
arg: string | Partial<ConnectionOptions>
): Promise<Connection> {
let opts: ConnectionOptions
if (typeof arg === 'string') {
opts = { uri: arg }
let opts: ConnectionOptions;
if (typeof arg === "string") {
opts = { uri: arg };
} else {
const keys = Object.keys(arg)
if (keys.length === 1 && keys[0] === 'uri' && typeof arg.uri === 'string') {
opts = { uri: arg.uri }
const keys = Object.keys(arg);
if (keys.length === 1 && keys[0] === "uri" && typeof arg.uri === "string") {
opts = { uri: arg.uri };
} else {
opts = Object.assign(
{
uri: '',
uri: "",
awsCredentials: undefined,
awsRegion: defaultAwsRegion,
apiKey: undefined,
region: defaultAwsRegion
},
arg
)
);
}
}

if (opts.uri.startsWith('db://')) {
if (opts.uri.startsWith("db://")) {
// Remote connection
return new RemoteConnection(opts)
return new RemoteConnection(opts);
}

const storageOptions = opts.storageOptions ?? {};
if (opts.awsCredentials?.accessKeyId !== undefined) {
storageOptions.aws_access_key_id = opts.awsCredentials.accessKeyId
storageOptions.aws_access_key_id = opts.awsCredentials.accessKeyId;
}
if (opts.awsCredentials?.secretKey !== undefined) {
storageOptions.aws_secret_access_key = opts.awsCredentials.secretKey
storageOptions.aws_secret_access_key = opts.awsCredentials.secretKey;
}
if (opts.awsCredentials?.sessionToken !== undefined) {
storageOptions.aws_session_token = opts.awsCredentials.sessionToken
storageOptions.aws_session_token = opts.awsCredentials.sessionToken;
}
if (opts.awsRegion !== undefined) {
storageOptions.region = opts.awsRegion
storageOptions.region = opts.awsRegion;
}
// It's a pain to pass a record to Rust, so we convert it to an array of key-value pairs
const storageOptionsArr = Object.entries(storageOptions);
@@ -231,8 +236,8 @@ export async function connect (
opts.uri,
storageOptionsArr,
opts.readConsistencyInterval
)
return new LocalConnection(db, opts)
);
return new LocalConnection(db, opts);
}

/**
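Taken together, the overloads above accept either a bare URI string or a partial `ConnectionOptions`; a `db://` URI is routed to `RemoteConnection`, while anything else flows through the storage-option plumbing shown here. A short usage sketch (the bucket name and credentials are placeholders):

```ts
import * as lancedb from "vectordb";

// Local, file-based connection from a bare URI.
const local = await lancedb.connect("data/sample-lancedb");

// S3-backed connection; awsCredentials/awsRegion are copied into
// storageOptions (aws_access_key_id, ...) exactly as shown above.
const remote = await lancedb.connect({
  uri: "s3://my-bucket/my-db", // placeholder
  awsCredentials: {
    accessKeyId: "AKIA...", // placeholder
    secretKey: "...", // placeholder
  },
  awsRegion: "us-west-2",
});
```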
@@ -533,7 +538,11 @@ export interface Table<T = number[]> {
* @param data the new data to insert
* @param args parameters controlling how the operation should behave
*/
mergeInsert: (on: string, data: Array<Record<string, unknown>> | ArrowTable, args: MergeInsertArgs) => Promise<void>
mergeInsert: (
on: string,
data: Array<Record<string, unknown>> | ArrowTable,
args: MergeInsertArgs
) => Promise<void>

/**
* List the indicies on this table.
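A hedged sketch of how the `MergeInsertArgs` flags combine, assuming an open table `tbl` (column names and the filter string are illustrative):

```ts
// Upsert on "id": update rows that match, insert rows that don't, and
// delete target rows missing from the source — but only stale ones.
await tbl.mergeInsert("id", newRecords, {
  whenMatchedUpdateAll: true,
  whenNotMatchedInsertAll: true,
  // A string acts as a filter; `true` would apply the delete unconditionally.
  whenNotMatchedBySourceDelete: "updated_at < '2024-01-01'",
});
```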
@@ -558,7 +567,9 @@ export interface Table<T = number[]> {
* expressions will be evaluated for each row in the
* table, and can reference existing columns in the table.
*/
addColumns(newColumnTransforms: Array<{ name: string, valueSql: string }>): Promise<void>
addColumns(
newColumnTransforms: Array<{ name: string, valueSql: string }>
): Promise<void>

/**
* Alter the name or nullability of columns.
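For example, derived columns can be added with SQL value expressions, assuming an open table `tbl` (the column names below are illustrative):

```ts
// Each valueSql expression is evaluated per row and may reference
// existing columns of the table.
await tbl.addColumns([
  { name: "id_times_two", valueSql: "id * 2" },
  { name: "greeting", valueSql: "'hello'" }, // constant-valued column
]);
```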
@@ -684,38 +695,49 @@ export interface MergeInsertArgs {
whenNotMatchedBySourceDelete?: string | boolean
}

export enum IndexStatus {
Pending = "pending",
Indexing = "indexing",
Done = "done",
Failed = "failed"
}

export interface VectorIndex {
columns: string[]
name: string
uuid: string
status: IndexStatus
}

export interface IndexStats {
numIndexedRows: number | null
numUnindexedRows: number | null
indexType: string | null
distanceType: string | null
completedAt: string | null
}

/**
* A connection to a LanceDB database.
*/
export class LocalConnection implements Connection {
private readonly _options: () => ConnectionOptions
private readonly _db: any
private readonly _options: () => ConnectionOptions;
private readonly _db: any;

constructor (db: any, options: ConnectionOptions) {
this._options = () => options
this._db = db
constructor(db: any, options: ConnectionOptions) {
this._options = () => options;
this._db = db;
}

get uri (): string {
return this._options().uri
get uri(): string {
return this._options().uri;
}

/**
* Get the names of all tables in the database.
*/
async tableNames (): Promise<string[]> {
return databaseTableNames.call(this._db)
async tableNames(): Promise<string[]> {
return databaseTableNames.call(this._db);
}

/**
@@ -723,7 +745,7 @@ export class LocalConnection implements Connection {
*
* @param name The name of the table.
*/
async openTable (name: string): Promise<Table>
async openTable(name: string): Promise<Table>;

/**
* Open a table in the database.
@@ -734,23 +756,20 @@ export class LocalConnection implements Connection {
async openTable<T>(
name: string,
embeddings: EmbeddingFunction<T>
): Promise<Table<T>>
): Promise<Table<T>>;
async openTable<T>(
name: string,
embeddings?: EmbeddingFunction<T>
): Promise<Table<T>>
): Promise<Table<T>>;
async openTable<T>(
name: string,
embeddings?: EmbeddingFunction<T>
): Promise<Table<T>> {
const tbl = await databaseOpenTable.call(
this._db,
name,
)
const tbl = await databaseOpenTable.call(this._db, name);
if (embeddings !== undefined) {
return new LocalTable(tbl, name, this._options(), embeddings)
return new LocalTable(tbl, name, this._options(), embeddings);
} else {
return new LocalTable(tbl, name, this._options())
return new LocalTable(tbl, name, this._options());
}
}

@@ -760,32 +779,32 @@ export class LocalConnection implements Connection {
optsOrEmbedding?: WriteOptions | EmbeddingFunction<T>,
opt?: WriteOptions
): Promise<Table<T>> {
if (typeof name === 'string') {
let writeOptions: WriteOptions = new DefaultWriteOptions()
if (typeof name === "string") {
let writeOptions: WriteOptions = new DefaultWriteOptions();
if (opt !== undefined && isWriteOptions(opt)) {
writeOptions = opt
writeOptions = opt;
} else if (
optsOrEmbedding !== undefined &&
isWriteOptions(optsOrEmbedding)
) {
writeOptions = optsOrEmbedding
writeOptions = optsOrEmbedding;
}

let embeddings: undefined | EmbeddingFunction<T>
let embeddings: undefined | EmbeddingFunction<T>;
if (
optsOrEmbedding !== undefined &&
isEmbeddingFunction(optsOrEmbedding)
) {
embeddings = optsOrEmbedding
embeddings = optsOrEmbedding;
}
return await this.createTableImpl({
name,
data,
embeddingFunction: embeddings,
writeOptions
})
});
}
return await this.createTableImpl(name)
return await this.createTableImpl(name);
}

private async createTableImpl<T>({
@@ -801,27 +820,27 @@ export class LocalConnection implements Connection {
embeddingFunction?: EmbeddingFunction<T> | undefined
writeOptions?: WriteOptions | undefined
}): Promise<Table<T>> {
let buffer: Buffer
let buffer: Buffer;

function isEmpty (
function isEmpty(
data: Array<Record<string, unknown>> | ArrowTable<any>
): boolean {
if (data instanceof ArrowTable) {
return data.data.length === 0
return data.data.length === 0;
}
return data.length === 0
return data.length === 0;
}

if (data === undefined || isEmpty(data)) {
if (schema === undefined) {
throw new Error('Either data or schema needs to defined')
throw new Error("Either data or schema needs to defined");
}
buffer = await fromTableToBuffer(createEmptyTable(schema))
buffer = await fromTableToBuffer(createEmptyTable(schema));
} else if (data instanceof ArrowTable) {
buffer = await fromTableToBuffer(data, embeddingFunction, schema)
buffer = await fromTableToBuffer(data, embeddingFunction, schema);
} else {
// data is Array<Record<...>>
buffer = await fromRecordsToBuffer(data, embeddingFunction, schema)
buffer = await fromRecordsToBuffer(data, embeddingFunction, schema);
}

const tbl = await tableCreate.call(
@@ -830,11 +849,11 @@ export class LocalConnection implements Connection {
buffer,
writeOptions?.writeMode?.toString(),
...getAwsArgs(this._options())
)
);
if (embeddingFunction !== undefined) {
return new LocalTable(tbl, name, this._options(), embeddingFunction)
return new LocalTable(tbl, name, this._options(), embeddingFunction);
} else {
return new LocalTable(tbl, name, this._options())
return new LocalTable(tbl, name, this._options());
}
}

@@ -842,69 +861,69 @@ export class LocalConnection implements Connection {
* Drop an existing table.
* @param name The name of the table to drop.
*/
async dropTable (name: string): Promise<void> {
await databaseDropTable.call(this._db, name)
async dropTable(name: string): Promise<void> {
await databaseDropTable.call(this._db, name);
}

withMiddleware (middleware: HttpMiddleware): Connection {
return this
withMiddleware(middleware: HttpMiddleware): Connection {
return this;
}
}

export class LocalTable<T = number[]> implements Table<T> {
private _tbl: any
private readonly _name: string
private readonly _isElectron: boolean
private readonly _embeddings?: EmbeddingFunction<T>
private readonly _options: () => ConnectionOptions
private _tbl: any;
private readonly _name: string;
private readonly _isElectron: boolean;
private readonly _embeddings?: EmbeddingFunction<T>;
private readonly _options: () => ConnectionOptions;

constructor (tbl: any, name: string, options: ConnectionOptions)
constructor(tbl: any, name: string, options: ConnectionOptions);
/**
* @param tbl
* @param name
* @param options
* @param embeddings An embedding function to use when interacting with this table
*/
constructor (
constructor(
tbl: any,
name: string,
options: ConnectionOptions,
embeddings: EmbeddingFunction<T>
)
constructor (
);
constructor(
tbl: any,
name: string,
options: ConnectionOptions,
embeddings?: EmbeddingFunction<T>
) {
this._tbl = tbl
this._name = name
this._embeddings = embeddings
this._options = () => options
this._isElectron = this.checkElectron()
this._tbl = tbl;
this._name = name;
this._embeddings = embeddings;
this._options = () => options;
this._isElectron = this.checkElectron();
}

get name (): string {
return this._name
get name(): string {
return this._name;
}

/**
* Creates a search query to find the nearest neighbors of the given search term
* @param query The query search term
*/
search (query: T): Query<T> {
return new Query(query, this._tbl, this._embeddings)
search(query: T): Query<T> {
return new Query(query, this._tbl, this._embeddings);
}

/**
* Creates a filter query to find all rows matching the specified criteria
* @param value The filter criteria (like SQL where clause syntax)
*/
filter (value: string): Query<T> {
return new Query(undefined, this._tbl, this._embeddings).filter(value)
filter(value: string): Query<T> {
return new Query(undefined, this._tbl, this._embeddings).filter(value);
}

where = this.filter
where = this.filter;

/**
* Insert records into this Table.
@@ -912,16 +931,19 @@ export class LocalTable<T = number[]> implements Table<T> {
* @param data Records to be inserted into the Table
* @return The number of rows added to the table
*/
async add (
async add(
data: Array<Record<string, unknown>> | ArrowTable
): Promise<number> {
const schema = await this.schema
let tbl: ArrowTable
const schema = await this.schema;

let tbl: ArrowTable;

if (data instanceof ArrowTable) {
tbl = data
tbl = data;
} else {
tbl = makeArrowTable(data, { schema })
tbl = makeArrowTable(data, { schema, embeddings: this._embeddings });
}

return tableAdd
.call(
this._tbl,
@@ -930,8 +952,8 @@ export class LocalTable<T = number[]> implements Table<T> {
...getAwsArgs(this._options())
)
.then((newTable: any) => {
this._tbl = newTable
})
this._tbl = newTable;
});
}

/**
@@ -940,14 +962,14 @@ export class LocalTable<T = number[]> implements Table<T> {
* @param data Records to be inserted into the Table
* @return The number of rows added to the table
*/
async overwrite (
async overwrite(
data: Array<Record<string, unknown>> | ArrowTable
): Promise<number> {
let buffer: Buffer
let buffer: Buffer;
if (data instanceof ArrowTable) {
buffer = await fromTableToBuffer(data, this._embeddings)
buffer = await fromTableToBuffer(data, this._embeddings);
} else {
buffer = await fromRecordsToBuffer(data, this._embeddings)
buffer = await fromRecordsToBuffer(data, this._embeddings);
}
return tableAdd
.call(
@@ -957,8 +979,8 @@ export class LocalTable<T = number[]> implements Table<T> {
...getAwsArgs(this._options())
)
.then((newTable: any) => {
this._tbl = newTable
})
this._tbl = newTable;
});
}

/**
@@ -966,26 +988,26 @@ export class LocalTable<T = number[]> implements Table<T> {
*
* @param indexParams The parameters of this Index, @see VectorIndexParams.
*/
async createIndex (indexParams: VectorIndexParams): Promise<any> {
async createIndex(indexParams: VectorIndexParams): Promise<any> {
return tableCreateVectorIndex
.call(this._tbl, indexParams)
.then((newTable: any) => {
this._tbl = newTable
})
this._tbl = newTable;
});
}

async createScalarIndex (column: string, replace?: boolean): Promise<void> {
async createScalarIndex(column: string, replace?: boolean): Promise<void> {
if (replace === undefined) {
replace = true
replace = true;
}
return tableCreateScalarIndex.call(this._tbl, column, replace)
return tableCreateScalarIndex.call(this._tbl, column, replace);
}

/**
* Returns the number of rows in this table.
*/
async countRows (filter?: string): Promise<number> {
return tableCountRows.call(this._tbl, filter)
async countRows(filter?: string): Promise<number> {
return tableCountRows.call(this._tbl, filter);
}
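A brief usage sketch of the index helpers and the filtered row count, assuming an open table `tbl` (the `column` field follows `IvfPQIndexConfig` and, like the column names, is an assumption here):

```ts
// Vector index; VectorIndexParams is currently just IvfPQIndexConfig.
await tbl.createIndex({ type: "ivf_pq", column: "vector" });

// Scalar index on a metadata column; `replace` defaults to true above.
await tbl.createScalarIndex("id");

// countRows accepts an optional SQL-style filter.
const total = await tbl.countRows();
const matching = await tbl.countRows("id > 10");
```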

/**
@@ -993,10 +1015,10 @@ export class LocalTable<T = number[]> implements Table<T> {
*
* @param filter A filter in the same format used by a sql WHERE clause.
*/
async delete (filter: string): Promise<void> {
async delete(filter: string): Promise<void> {
return tableDelete.call(this._tbl, filter).then((newTable: any) => {
this._tbl = newTable
})
this._tbl = newTable;
});
}
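Because the filter is passed straight through to the native layer, it uses SQL WHERE-clause syntax; a couple of illustrative calls against an open table `tbl`:

```ts
// Delete a single row by key...
await tbl.delete("id = 2");

// ...or everything matching a compound predicate.
await tbl.delete("item IN ('a', 'b') AND price > 10.0");
```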

/**
@@ -1006,55 +1028,65 @@ export class LocalTable<T = number[]> implements Table<T> {
*
* @returns
*/
async update (args: UpdateArgs | UpdateSqlArgs): Promise<void> {
let filter: string | null
let updates: Record<string, string>
async update(args: UpdateArgs | UpdateSqlArgs): Promise<void> {
let filter: string | null;
let updates: Record<string, string>;

if ('valuesSql' in args) {
filter = args.where ?? null
updates = args.valuesSql
if ("valuesSql" in args) {
filter = args.where ?? null;
updates = args.valuesSql;
} else {
filter = args.where ?? null
updates = {}
filter = args.where ?? null;
updates = {};
for (const [key, value] of Object.entries(args.values)) {
updates[key] = toSQL(value)
updates[key] = toSQL(value);
}
}

return tableUpdate
.call(this._tbl, filter, updates)
.then((newTable: any) => {
this._tbl = newTable
})
this._tbl = newTable;
});
}

async mergeInsert (on: string, data: Array<Record<string, unknown>> | ArrowTable, args: MergeInsertArgs): Promise<void> {
let whenMatchedUpdateAll = false
let whenMatchedUpdateAllFilt = null
if (args.whenMatchedUpdateAll !== undefined && args.whenMatchedUpdateAll !== null) {
whenMatchedUpdateAll = true
async mergeInsert(
on: string,
data: Array<Record<string, unknown>> | ArrowTable,
args: MergeInsertArgs
): Promise<void> {
let whenMatchedUpdateAll = false;
let whenMatchedUpdateAllFilt = null;
if (
args.whenMatchedUpdateAll !== undefined &&
args.whenMatchedUpdateAll !== null
) {
whenMatchedUpdateAll = true;
if (args.whenMatchedUpdateAll !== true) {
whenMatchedUpdateAllFilt = args.whenMatchedUpdateAll
whenMatchedUpdateAllFilt = args.whenMatchedUpdateAll;
}
}
const whenNotMatchedInsertAll = args.whenNotMatchedInsertAll ?? false
let whenNotMatchedBySourceDelete = false
let whenNotMatchedBySourceDeleteFilt = null
if (args.whenNotMatchedBySourceDelete !== undefined && args.whenNotMatchedBySourceDelete !== null) {
whenNotMatchedBySourceDelete = true
const whenNotMatchedInsertAll = args.whenNotMatchedInsertAll ?? false;
let whenNotMatchedBySourceDelete = false;
let whenNotMatchedBySourceDeleteFilt = null;
if (
args.whenNotMatchedBySourceDelete !== undefined &&
args.whenNotMatchedBySourceDelete !== null
) {
whenNotMatchedBySourceDelete = true;
if (args.whenNotMatchedBySourceDelete !== true) {
whenNotMatchedBySourceDeleteFilt = args.whenNotMatchedBySourceDelete
whenNotMatchedBySourceDeleteFilt = args.whenNotMatchedBySourceDelete;
}
}

const schema = await this.schema
let tbl: ArrowTable
const schema = await this.schema;
let tbl: ArrowTable;
if (data instanceof ArrowTable) {
tbl = data
tbl = data;
} else {
tbl = makeArrowTable(data, { schema })
tbl = makeArrowTable(data, { schema });
}
const buffer = await fromTableToBuffer(tbl, this._embeddings, schema)
const buffer = await fromTableToBuffer(tbl, this._embeddings, schema);

this._tbl = await tableMergeInsert.call(
this._tbl,
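As the branches above show, `values` entries are run through `toSQL` while `valuesSql` entries are applied verbatim; a sketch of both call shapes against an open table `tbl` (column names are illustrative):

```ts
// Literal values: converted with toSQL() before being applied.
await tbl.update({
  where: "id = 1",
  values: { name: "Alice", score: 10 },
});

// Raw SQL expressions: applied as-is and may reference existing columns.
await tbl.update({
  where: "score < 100",
  valuesSql: { score: "score + 1" },
});
```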
@@ -1065,7 +1097,7 @@ export class LocalTable<T = number[]> implements Table<T> {
whenNotMatchedBySourceDelete,
whenNotMatchedBySourceDeleteFilt,
buffer
)
);
}

/**
@@ -1083,16 +1115,16 @@ export class LocalTable<T = number[]> implements Table<T> {
* uphold this promise can lead to corrupted tables.
* @returns
*/
async cleanupOldVersions (
async cleanupOldVersions(
olderThan?: number,
deleteUnverified?: boolean
): Promise<CleanupStats> {
return tableCleanupOldVersions
.call(this._tbl, olderThan, deleteUnverified)
.then((res: { newTable: any, metrics: CleanupStats }) => {
this._tbl = res.newTable
return res.metrics
})
this._tbl = res.newTable;
return res.metrics;
});
}

/**
@@ -1106,62 +1138,64 @@ export class LocalTable<T = number[]> implements Table<T> {
* for most tables.
* @returns Metrics about the compaction operation.
*/
async compactFiles (options?: CompactionOptions): Promise<CompactionMetrics> {
const optionsArg = options ?? {}
async compactFiles(options?: CompactionOptions): Promise<CompactionMetrics> {
const optionsArg = options ?? {};
return tableCompactFiles
.call(this._tbl, optionsArg)
.then((res: { newTable: any, metrics: CompactionMetrics }) => {
this._tbl = res.newTable
return res.metrics
})
this._tbl = res.newTable;
return res.metrics;
});
}

async listIndices (): Promise<VectorIndex[]> {
return tableListIndices.call(this._tbl)
async listIndices(): Promise<VectorIndex[]> {
return tableListIndices.call(this._tbl);
}

async indexStats (indexUuid: string): Promise<IndexStats> {
return tableIndexStats.call(this._tbl, indexUuid)
async indexStats(indexUuid: string): Promise<IndexStats> {
return tableIndexStats.call(this._tbl, indexUuid);
}

get schema (): Promise<Schema> {
get schema(): Promise<Schema> {
// empty table
return this.getSchema()
return this.getSchema();
}

private async getSchema (): Promise<Schema> {
const buffer = await tableSchema.call(this._tbl, this._isElectron)
const table = tableFromIPC(buffer)
return table.schema
private async getSchema(): Promise<Schema> {
const buffer = await tableSchema.call(this._tbl, this._isElectron);
const table = tableFromIPC(buffer);
return table.schema;
}

// See https://github.com/electron/electron/issues/2288
private checkElectron (): boolean {
private checkElectron(): boolean {
try {
// eslint-disable-next-line no-prototype-builtins
return (
Object.prototype.hasOwnProperty.call(process?.versions, 'electron') ||
navigator?.userAgent?.toLowerCase()?.includes(' electron')
)
Object.prototype.hasOwnProperty.call(process?.versions, "electron") ||
navigator?.userAgent?.toLowerCase()?.includes(" electron")
);
} catch (e) {
return false
return false;
}
}

async addColumns (newColumnTransforms: Array<{ name: string, valueSql: string }>): Promise<void> {
return tableAddColumns.call(this._tbl, newColumnTransforms)
async addColumns(
newColumnTransforms: Array<{ name: string, valueSql: string }>
): Promise<void> {
return tableAddColumns.call(this._tbl, newColumnTransforms);
}

async alterColumns (columnAlterations: ColumnAlteration[]): Promise<void> {
return tableAlterColumns.call(this._tbl, columnAlterations)
async alterColumns(columnAlterations: ColumnAlteration[]): Promise<void> {
return tableAlterColumns.call(this._tbl, columnAlterations);
}

async dropColumns (columnNames: string[]): Promise<void> {
return tableDropColumns.call(this._tbl, columnNames)
async dropColumns(columnNames: string[]): Promise<void> {
return tableDropColumns.call(this._tbl, columnNames);
}

withMiddleware (middleware: HttpMiddleware): Table<T> {
return this
withMiddleware(middleware: HttpMiddleware): Table<T> {
return this;
}
}

@@ -1184,7 +1218,7 @@ export interface CompactionOptions {
*/
targetRowsPerFragment?: number
/**
* The maximum number of rows per group. Defaults to 1024.
* The maximum number of T per group. Defaults to 1024.
*/
maxRowsPerGroup?: number
/**
@@ -1284,21 +1318,21 @@ export interface IvfPQIndexConfig {
*/
index_cache_size?: number

type: 'ivf_pq'
type: "ivf_pq"
}

export type VectorIndexParams = IvfPQIndexConfig
export type VectorIndexParams = IvfPQIndexConfig;

/**
* Write mode for writing a table.
*/
export enum WriteMode {
/** Create a new {@link Table}. */
Create = 'create',
Create = "create",
/** Overwrite the existing {@link Table} if presented. */
Overwrite = 'overwrite',
Overwrite = "overwrite",
/** Append new data to the table. */
Append = 'append',
Append = "append",
}

/**
@@ -1310,14 +1344,14 @@ export interface WriteOptions {
}

export class DefaultWriteOptions implements WriteOptions {
writeMode = WriteMode.Create
writeMode = WriteMode.Create;
}

export function isWriteOptions (value: any): value is WriteOptions {
export function isWriteOptions(value: any): value is WriteOptions {
return (
Object.keys(value).length === 1 &&
(value.writeMode === undefined || typeof value.writeMode === 'string')
)
(value.writeMode === undefined || typeof value.writeMode === "string")
);
}
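`isWriteOptions` is what lets `createTable` tell a write-options bag apart from an embedding function among its optional arguments; a sketch of passing a write mode through, assuming an open connection `db` (table name and rows are placeholders):

```ts
import { WriteMode } from "vectordb";

// Overwrite the table if it already exists.
const tbl = await db.createTable("my_table", rows, {
  writeMode: WriteMode.Overwrite,
});
```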

/**
@@ -1327,15 +1361,15 @@ export enum MetricType {
/**
* Euclidean distance
*/
L2 = 'l2',
L2 = "l2",

/**
* Cosine distance
*/
Cosine = 'cosine',
Cosine = "cosine",

/**
* Dot product
*/
Dot = 'dot',
Dot = "dot",
}

@@ -51,7 +51,7 @@ describe('LanceDB Mirrored Store Integration test', function () {

const dir = tmpdir()
console.log(dir)
const conn = await lancedb.connect(`s3://lancedb-integtest?mirroredStore=${dir}`)
const conn = await lancedb.connect({ uri: `s3://lancedb-integtest?mirroredStore=${dir}`, storageOptions: { allowHttp: 'true' } })
const data = Array(200).fill({ vector: Array(128).fill(1.0), id: 0 })
data.push(...Array(200).fill({ vector: Array(128).fill(1.0), id: 1 }))
data.push(...Array(200).fill({ vector: Array(128).fill(1.0), id: 2 }))

@@ -140,6 +140,9 @@ export class RemoteConnection implements Connection {
schema = nameOrOpts.schema
embeddings = nameOrOpts.embeddingFunction
tableName = nameOrOpts.name
if (data === undefined) {
data = nameOrOpts.data
}
}

let buffer: Buffer
@@ -506,7 +509,8 @@ export class RemoteTable<T = number[]> implements Table<T> {
return (await results.body()).indexes?.map((index: any) => ({
columns: index.columns,
name: index.index_name,
uuid: index.index_uuid
uuid: index.index_uuid,
status: index.status
}))
}

@@ -517,7 +521,10 @@ export class RemoteTable<T = number[]> implements Table<T> {
const body = await results.body()
return {
numIndexedRows: body?.num_indexed_rows,
numUnindexedRows: body?.num_unindexed_rows
numUnindexedRows: body?.num_unindexed_rows,
indexType: body?.index_type,
distanceType: body?.distance_type,
completedAt: body?.completed_at
}
}
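With the extra fields above, the remote responses line up with the `VectorIndex` and `IndexStats` shapes declared earlier; a usage sketch against an open table `tbl`:

```ts
const indices = await tbl.listIndices();
for (const index of indices) {
  // index.status is one of: pending | indexing | done | failed
  console.log(index.name, index.columns, index.status);
  const stats = await tbl.indexStats(index.uuid);
  console.log(stats.numIndexedRows, stats.indexType, stats.distanceType);
}
```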

@@ -32,7 +32,7 @@ import {
Bool,
Date_,
Decimal,
DataType,
type DataType,
Dictionary,
Binary,
Float32,
@@ -74,12 +74,12 @@ import {
DurationNanosecond,
DurationMicrosecond,
DurationMillisecond,
DurationSecond,
DurationSecond
} from "apache-arrow";
import type { IntBitWidth, TimeBitWidth } from "apache-arrow/type";

function sanitizeMetadata(
metadataLike?: unknown,
metadataLike?: unknown
): Map<string, string> | undefined {
if (metadataLike === undefined || metadataLike === null) {
return undefined;
@@ -90,7 +90,7 @@ function sanitizeMetadata(
for (const item of metadataLike) {
if (!(typeof item[0] === "string" || !(typeof item[1] === "string"))) {
throw Error(
"Expected metadata, if present, to be a Map<string, string> but it had non-string keys or values",
"Expected metadata, if present, to be a Map<string, string> but it had non-string keys or values"
);
}
}
@@ -105,7 +105,7 @@ function sanitizeInt(typeLike: object) {
typeof typeLike.isSigned !== "boolean"
) {
throw Error(
"Expected an Int Type to have a `bitWidth` and `isSigned` property",
"Expected an Int Type to have a `bitWidth` and `isSigned` property"
);
}
return new Int(typeLike.isSigned, typeLike.bitWidth as IntBitWidth);
@@ -128,7 +128,7 @@ function sanitizeDecimal(typeLike: object) {
typeof typeLike.bitWidth !== "number"
) {
throw Error(
"Expected a Decimal Type to have `scale`, `precision`, and `bitWidth` properties",
"Expected a Decimal Type to have `scale`, `precision`, and `bitWidth` properties"
);
}
return new Decimal(typeLike.scale, typeLike.precision, typeLike.bitWidth);
@@ -149,7 +149,7 @@ function sanitizeTime(typeLike: object) {
typeof typeLike.bitWidth !== "number"
) {
throw Error(
"Expected a Time type to have `unit` and `bitWidth` properties",
"Expected a Time type to have `unit` and `bitWidth` properties"
);
}
return new Time(typeLike.unit, typeLike.bitWidth as TimeBitWidth);
@@ -172,7 +172,7 @@ function sanitizeTypedTimestamp(
| typeof TimestampNanosecond
| typeof TimestampMicrosecond
| typeof TimestampMillisecond
| typeof TimestampSecond,
| typeof TimestampSecond
) {
let timezone = null;
if ("timezone" in typeLike && typeof typeLike.timezone === "string") {
@@ -191,7 +191,7 @@ function sanitizeInterval(typeLike: object) {
function sanitizeList(typeLike: object) {
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a List type to have an array-like `children` property",
"Expected a List type to have an array-like `children` property"
);
}
if (typeLike.children.length !== 1) {
@@ -203,7 +203,7 @@ function sanitizeList(typeLike: object) {
function sanitizeStruct(typeLike: object) {
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a Struct type to have an array-like `children` property",
"Expected a Struct type to have an array-like `children` property"
);
}
return new Struct(typeLike.children.map((child) => sanitizeField(child)));
@@ -216,47 +216,47 @@ function sanitizeUnion(typeLike: object) {
typeof typeLike.mode !== "number"
) {
throw Error(
"Expected a Union type to have `typeIds` and `mode` properties",
"Expected a Union type to have `typeIds` and `mode` properties"
);
}
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a Union type to have an array-like `children` property",
"Expected a Union type to have an array-like `children` property"
);
}

return new Union(
typeLike.mode,
typeLike.typeIds as any,
typeLike.children.map((child) => sanitizeField(child)),
typeLike.children.map((child) => sanitizeField(child))
);
}

function sanitizeTypedUnion(
typeLike: object,
UnionType: typeof DenseUnion | typeof SparseUnion,
UnionType: typeof DenseUnion | typeof SparseUnion
) {
if (!("typeIds" in typeLike)) {
throw Error(
"Expected a DenseUnion/SparseUnion type to have a `typeIds` property",
"Expected a DenseUnion/SparseUnion type to have a `typeIds` property"
);
}
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a DenseUnion/SparseUnion type to have an array-like `children` property",
"Expected a DenseUnion/SparseUnion type to have an array-like `children` property"
);
}

return new UnionType(
typeLike.typeIds as any,
typeLike.children.map((child) => sanitizeField(child)),
typeLike.children.map((child) => sanitizeField(child))
);
}

function sanitizeFixedSizeBinary(typeLike: object) {
if (!("byteWidth" in typeLike) || typeof typeLike.byteWidth !== "number") {
throw Error(
"Expected a FixedSizeBinary type to have a `byteWidth` property",
"Expected a FixedSizeBinary type to have a `byteWidth` property"
);
}
return new FixedSizeBinary(typeLike.byteWidth);
@@ -268,7 +268,7 @@ function sanitizeFixedSizeList(typeLike: object) {
}
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a FixedSizeList type to have an array-like `children` property",
"Expected a FixedSizeList type to have an array-like `children` property"
);
}
if (typeLike.children.length !== 1) {
@@ -276,14 +276,14 @@ function sanitizeFixedSizeList(typeLike: object) {
}
return new FixedSizeList(
typeLike.listSize,
sanitizeField(typeLike.children[0]),
sanitizeField(typeLike.children[0])
);
}

function sanitizeMap(typeLike: object) {
if (!("children" in typeLike) || !Array.isArray(typeLike.children)) {
throw Error(
"Expected a Map type to have an array-like `children` property",
"Expected a Map type to have an array-like `children` property"
);
}
if (!("keysSorted" in typeLike) || typeof typeLike.keysSorted !== "boolean") {
@@ -291,7 +291,7 @@ function sanitizeMap(typeLike: object) {
}
return new Map_(
typeLike.children.map((field) => sanitizeField(field)) as any,
typeLike.keysSorted,
typeLike.keysSorted
);
}

@@ -319,7 +319,7 @@ function sanitizeDictionary(typeLike: object) {
sanitizeType(typeLike.dictionary),
sanitizeType(typeLike.indices) as any,
typeLike.id,
typeLike.isOrdered,
typeLike.isOrdered
);
}

@@ -454,7 +454,7 @@ function sanitizeField(fieldLike: unknown): Field {
!("nullable" in fieldLike)
) {
throw Error(
"The field passed in is missing a `type`/`name`/`nullable` property",
"The field passed in is missing a `type`/`name`/`nullable` property"
);
}
const type = sanitizeType(fieldLike.type);
@@ -489,7 +489,7 @@ export function sanitizeSchema(schemaLike: unknown): Schema {
}
if (!("fields" in schemaLike)) {
throw Error(
"The schema passed in does not appear to be a schema (no 'fields' property)",
"The schema passed in does not appear to be a schema (no 'fields' property)"
);
}
let metadata;
@@ -498,11 +498,11 @@ export function sanitizeSchema(schemaLike: unknown): Schema {
}
if (!Array.isArray(schemaLike.fields)) {
throw Error(
"The schema passed in had a 'fields' property but it was not an array",
"The schema passed in had a 'fields' property but it was not an array"
);
}
const sanitizedFields = schemaLike.fields.map((field) =>
sanitizeField(field),
sanitizeField(field)
);
return new Schema(sanitizedFields, metadata);
}
File diff suppressed because it is too large
@@ -1,3 +0,0 @@
**/dist/**/*
**/native.js
**/native.d.ts
nodejs/.gitignore (vendored, new file)
@@ -0,0 +1 @@
yarn.lock
@@ -1 +0,0 @@
.eslintignore
@@ -43,29 +43,20 @@ npm run test

### Running lint / format

LanceDb uses eslint for linting. VSCode does not need any plugins to use eslint. However, it
may need some additional configuration. Make sure that eslint.experimental.useFlatConfig is
set to true. Also, if your vscode root folder is the repo root then you will need to set
the eslint.workingDirectories to ["nodejs"]. To manually lint your code you can run:
LanceDb uses [biome](https://biomejs.dev/) for linting and formatting. If you are using VSCode you will need to install the official [Biome](https://marketplace.visualstudio.com/items?itemName=biomejs.biome) extension.
To manually lint your code you can run:

```sh
npm run lint
```

LanceDb uses prettier for formatting. If you are using VSCode you will need to install the
"Prettier - Code formatter" extension. You should then configure it to be the default formatter
for typescript and you should enable format on save. To manually check your code's format you
can run:
To automatically fix all fixable issues, run:

```sh
npm run chkformat
npm run lint-fix
```

If you need to manually format your code you can run:

```sh
npx prettier --write .
```
If you do not have your workspace root set to the `nodejs` directory, unfortunately the extension will not work. You can still run the linting and formatting commands manually.

### Generating docs

@@ -13,32 +13,27 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import {
convertToTable,
fromTableToBuffer,
makeArrowTable,
makeEmptyTable,
} from "../dist/arrow";
import {
Field,
FixedSizeList,
Float16,
Float32,
Int32,
tableFromIPC,
Schema,
Float64,
type Table,
Binary,
Bool,
Utf8,
Struct,
List,
DataType,
Dictionary,
Int64,
Field,
FixedSizeList,
Float,
Precision,
Float16,
Float32,
Float64,
Int32,
Int64,
List,
MetadataVersion,
Precision,
Schema,
Struct,
type Table,
Type,
Utf8,
tableFromIPC,
} from "apache-arrow";
import {
Dictionary as OldDictionary,
@@ -46,14 +41,25 @@ import {
FixedSizeList as OldFixedSizeList,
Float32 as OldFloat32,
Int32 as OldInt32,
Struct as OldStruct,
Schema as OldSchema,
Struct as OldStruct,
TimestampNanosecond as OldTimestampNanosecond,
Utf8 as OldUtf8,
} from "apache-arrow-old";
import { type EmbeddingFunction } from "../dist/embedding/embedding_function";
import {
convertToTable,
fromTableToBuffer,
makeArrowTable,
makeEmptyTable,
} from "../lancedb/arrow";
import {
EmbeddingFunction,
FieldOptions,
FunctionOptions,
} from "../lancedb/embedding/embedding_function";
import { EmbeddingFunctionConfig } from "../lancedb/embedding/registry";

// eslint-disable-next-line @typescript-eslint/no-explicit-any
// biome-ignore lint/suspicious/noExplicitAny: skip
function sampleRecords(): Array<Record<string, any>> {
return [
{
@@ -280,23 +286,46 @@ describe("The function makeArrowTable", function () {
});
});

class DummyEmbedding implements EmbeddingFunction<string> {
public readonly sourceColumn = "string";
public readonly embeddingDimension = 2;
public readonly embeddingDataType = new Float16();
class DummyEmbedding extends EmbeddingFunction<string> {
toJSON(): Partial<FunctionOptions> {
return {};
}

async embed(data: string[]): Promise<number[][]> {
async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
return data.map(() => [0.0, 0.0]);
}

ndims(): number {
return 2;
}

embeddingDataType() {
return new Float16();
}
}

class DummyEmbeddingWithNoDimension implements EmbeddingFunction<string> {
public readonly sourceColumn = "string";
class DummyEmbeddingWithNoDimension extends EmbeddingFunction<string> {
toJSON(): Partial<FunctionOptions> {
return {};
}

async embed(data: string[]): Promise<number[][]> {
embeddingDataType(): Float {
return new Float16();
}

async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
return data.map(() => [0.0, 0.0]);
}
}
const dummyEmbeddingConfig: EmbeddingFunctionConfig = {
sourceColumn: "string",
function: new DummyEmbedding(),
};

const dummyEmbeddingConfigWithNoDimension: EmbeddingFunctionConfig = {
sourceColumn: "string",
function: new DummyEmbeddingWithNoDimension(),
};

describe("convertToTable", function () {
it("will infer data types correctly", async function () {
@@ -331,7 +360,7 @@ describe("convertToTable", function () {

it("will apply embeddings", async function () {
const records = sampleRecords();
const table = await convertToTable(records, new DummyEmbedding());
const table = await convertToTable(records, dummyEmbeddingConfig);
expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(true);
expect(table.getChild("vector")?.type.children[0].type.toString()).toEqual(
new Float16().toString(),
@@ -340,7 +369,7 @@ describe("convertToTable", function () {

it("will fail if missing the embedding source column", async function () {
await expect(
convertToTable([{ id: 1 }], new DummyEmbedding()),
convertToTable([{ id: 1 }], dummyEmbeddingConfig),
).rejects.toThrow("'string' was not present");
});

@@ -351,7 +380,7 @@ describe("convertToTable", function () {
const table = makeEmptyTable(schema);

// If the embedding specifies the dimension we are fine
await fromTableToBuffer(table, new DummyEmbedding());
await fromTableToBuffer(table, dummyEmbeddingConfig);

// We can also supply a schema and should be ok
const schemaWithEmbedding = new Schema([
@@ -364,13 +393,13 @@ describe("convertToTable", function () {
]);
await fromTableToBuffer(
table,
new DummyEmbeddingWithNoDimension(),
dummyEmbeddingConfigWithNoDimension,
schemaWithEmbedding,
);

// Otherwise we will get an error
await expect(
fromTableToBuffer(table, new DummyEmbeddingWithNoDimension()),
fromTableToBuffer(table, dummyEmbeddingConfigWithNoDimension),
).rejects.toThrow("does not specify `embeddingDimension`");
});

@@ -383,7 +412,7 @@ describe("convertToTable", function () {
false,
),
]);
const table = await convertToTable([], new DummyEmbedding(), { schema });
const table = await convertToTable([], dummyEmbeddingConfig, { schema });
expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(true);
expect(table.getChild("vector")?.type.children[0].type.toString()).toEqual(
new Float16().toString(),
@@ -393,16 +422,17 @@ describe("convertToTable", function () {
it("will complain if embeddings present but schema missing embedding column", async function () {
const schema = new Schema([new Field("string", new Utf8(), false)]);
await expect(
convertToTable([], new DummyEmbedding(), { schema }),
convertToTable([], dummyEmbeddingConfig, { schema }),
).rejects.toThrow("column vector was missing");
});

it("will provide a nice error if run twice", async function () {
const records = sampleRecords();
const table = await convertToTable(records, new DummyEmbedding());
const table = await convertToTable(records, dummyEmbeddingConfig);

// fromTableToBuffer will try and apply the embeddings again
await expect(
fromTableToBuffer(table, new DummyEmbedding()),
fromTableToBuffer(table, dummyEmbeddingConfig),
).rejects.toThrow("already existed");
});
});
@@ -438,7 +468,7 @@ describe("when using two versions of arrow", function () {
new OldField("ts_no_tz", new OldTimestampNanosecond(null)),
]),
),
// eslint-disable-next-line @typescript-eslint/no-explicit-any
// biome-ignore lint/suspicious/noExplicitAny: skip
]) as any;
schema.metadataVersion = MetadataVersion.V5;
const table = makeArrowTable([], { schema });

@@ -12,13 +12,15 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import { Field, Float64, Schema } from "apache-arrow";
import * as tmp from "tmp";

import { Connection, connect } from "../dist/index.js";
import { Connection, Table, connect } from "../lancedb";

describe("when connecting", () => {
let tmpDir: tmp.DirResult;
beforeEach(() => (tmpDir = tmp.dirSync({ unsafeCleanup: true })));
beforeEach(() => {
tmpDir = tmp.dirSync({ unsafeCleanup: true });
});
afterEach(() => tmpDir.removeCallback());

it("should connect", async () => {
@@ -55,6 +57,18 @@ describe("given a connection", () => {
expect(db.isOpen()).toBe(false);
await expect(db.tableNames()).rejects.toThrow("Connection is closed");
});
it("should be able to create a table from an object arg `createTable(options)`, or args `createTable(name, data, options)`", async () => {
let tbl = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
await expect(tbl.countRows()).resolves.toBe(2);

tbl = await db.createTable({
name: "test",
data: [{ id: 3 }],
mode: "overwrite",
});

await expect(tbl.countRows()).resolves.toBe(1);
});

it("should fail if creating table twice, unless overwrite is true", async () => {
let tbl = await db.createTable("test", [{ id: 1 }, { id: 2 }]);
@@ -85,4 +99,39 @@ describe("given a connection", () => {
tables = await db.tableNames({ startAfter: "a" });
expect(tables).toEqual(["b", "c"]);
});

it("should create tables in v2 mode", async () => {
const db = await connect(tmpDir.name);
const data = [...Array(10000).keys()].map((i) => ({ id: i }));

// Create in v1 mode
let table = await db.createTable("test", data);

const isV2 = async (table: Table) => {
const data = await table.query().toArrow({ maxBatchLength: 100000 });
console.log(data.batches.length);
return data.batches.length < 5;
};

await expect(isV2(table)).resolves.toBe(false);

// Create in v2 mode
table = await db.createTable("test_v2", data, { useLegacyFormat: false });

await expect(isV2(table)).resolves.toBe(true);

await table.add(data);

await expect(isV2(table)).resolves.toBe(true);

// Create empty in v2 mode
const schema = new Schema([new Field("id", new Float64(), true)]);

table = await db.createEmptyTable("test_v2_empty", schema, {
useLegacyFormat: false,
});

await table.add(data);
await expect(isV2(table)).resolves.toBe(true);
});
});

nodejs/__test__/embedding.test.ts (new file, 314 lines)
@@ -0,0 +1,314 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import * as tmp from "tmp";

import { connect } from "../lancedb";
import {
Field,
FixedSizeList,
Float,
Float16,
Float32,
Float64,
Schema,
Utf8,
} from "../lancedb/arrow";
import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
import { getRegistry, register } from "../lancedb/embedding/registry";

describe("embedding functions", () => {
let tmpDir: tmp.DirResult;
beforeEach(() => {
tmpDir = tmp.dirSync({ unsafeCleanup: true });
});
afterEach(() => {
tmpDir.removeCallback();
getRegistry().reset();
});

it("should be able to create a table with an embedding function", async () => {
class MockEmbeddingFunction extends EmbeddingFunction<string> {
toJSON(): object {
return {};
}
ndims() {
return 3;
}
embeddingDataType(): Float {
return new Float32();
}
async computeQueryEmbeddings(_data: string) {
return [1, 2, 3];
}
async computeSourceEmbeddings(data: string[]) {
return Array.from({ length: data.length }).fill([
1, 2, 3,
]) as number[][];
}
}
const func = new MockEmbeddingFunction();
const db = await connect(tmpDir.name);
const table = await db.createTable(
"test",
[
{ id: 1, text: "hello" },
{ id: 2, text: "world" },
],
{
embeddingFunction: {
function: func,
sourceColumn: "text",
},
},
);
// biome-ignore lint/suspicious/noExplicitAny: test
const arr = (await table.query().toArray()) as any;
expect(arr[0].vector).toBeDefined();

// we round trip through JSON to make sure the vector properly gets converted to an array
// otherwise it'll be a TypedArray or Vector
const vector0 = JSON.parse(JSON.stringify(arr[0].vector));
expect(vector0).toEqual([1, 2, 3]);
});

it("should be able to create an empty table with an embedding function", async () => {
@register()
class MockEmbeddingFunction extends EmbeddingFunction<string> {
toJSON(): object {
return {};
}
ndims() {
return 3;
}
embeddingDataType(): Float {
return new Float32();
}
async computeQueryEmbeddings(_data: string) {
return [1, 2, 3];
}
async computeSourceEmbeddings(data: string[]) {
return Array.from({ length: data.length }).fill([
1, 2, 3,
]) as number[][];
}
}
const schema = new Schema([
new Field("text", new Utf8(), true),
new Field(
"vector",
new FixedSizeList(3, new Field("item", new Float32(), true)),
true,
),
]);

const func = new MockEmbeddingFunction();
const db = await connect(tmpDir.name);
const table = await db.createEmptyTable("test", schema, {
embeddingFunction: {
function: func,
sourceColumn: "text",
},
});
const outSchema = await table.schema();
expect(outSchema.metadata.get("embedding_functions")).toBeDefined();
await table.add([{ text: "hello world" }]);

// biome-ignore lint/suspicious/noExplicitAny: test
const arr = (await table.query().toArray()) as any;
expect(arr[0].vector).toBeDefined();

// we round trip through JSON to make sure the vector properly gets converted to an array
// otherwise it'll be a TypedArray or Vector
const vector0 = JSON.parse(JSON.stringify(arr[0].vector));
expect(vector0).toEqual([1, 2, 3]);
});
it("should error when appending to a table with an unregistered embedding function", async () => {
@register("mock")
class MockEmbeddingFunction extends EmbeddingFunction<string> {
toJSON(): object {
return {};
}
ndims() {
return 3;
}
embeddingDataType(): Float {
return new Float32();
}
async computeQueryEmbeddings(_data: string) {
return [1, 2, 3];
}
async computeSourceEmbeddings(data: string[]) {
return Array.from({ length: data.length }).fill([
1, 2, 3,
]) as number[][];
}
}
const func = getRegistry().get<MockEmbeddingFunction>("mock")!.create();

const schema = LanceSchema({
id: new Float64(),
text: func.sourceField(new Utf8()),
vector: func.vectorField(),
});

const db = await connect(tmpDir.name);
await db.createTable(
"test",
[
{ id: 1, text: "hello" },
{ id: 2, text: "world" },
],
{
schema,
},
);

getRegistry().reset();
const db2 = await connect(tmpDir.name);

const tbl = await db2.openTable("test");

expect(tbl.add([{ id: 3, text: "hello" }])).rejects.toThrow(
`Function "mock" not found in registry`,
);
});
test.each([new Float16(), new Float32(), new Float64()])(
"should be able to provide manual embeddings with multiple float datatype",
async (floatType) => {
class MockEmbeddingFunction extends EmbeddingFunction<string> {
toJSON(): object {
return {};
}
ndims() {
return 3;
}
embeddingDataType(): Float {
return floatType;
}
async computeQueryEmbeddings(_data: string) {
return [1, 2, 3];
}
async computeSourceEmbeddings(data: string[]) {
return Array.from({ length: data.length }).fill([
1, 2, 3,
]) as number[][];
}
}
const data = [{ text: "hello" }, { text: "hello world" }];

const schema = new Schema([
new Field("vector", new FixedSizeList(3, new Field("item", floatType))),
new Field("text", new Utf8()),
]);
const func = new MockEmbeddingFunction();

const name = "test";
const db = await connect(tmpDir.name);

const table = await db.createTable(name, data, {
schema,
embeddingFunction: {
sourceColumn: "text",
function: func,
},
});
const res = await table.query().toArray();

expect([...res[0].vector]).toEqual([1, 2, 3]);
},
);

test.each([new Float16(), new Float32(), new Float64()])(
|
||||
"should be able to provide auto embeddings with multiple float datatypes",
|
||||
async (floatType) => {
|
||||
@register("test1")
|
||||
class MockEmbeddingFunctionWithoutNDims extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {};
|
||||
}
|
||||
|
||||
embeddingDataType(): Float {
|
||||
return floatType;
|
||||
}
|
||||
async computeQueryEmbeddings(_data: string) {
|
||||
return [1, 2, 3];
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return Array.from({ length: data.length }).fill([
|
||||
1, 2, 3,
|
||||
]) as number[][];
|
||||
}
|
||||
}
|
||||
@register("test")
|
||||
class MockEmbeddingFunction extends EmbeddingFunction<string> {
|
||||
toJSON(): object {
|
||||
return {};
|
||||
}
|
||||
ndims() {
|
||||
return 3;
|
||||
}
|
||||
embeddingDataType(): Float {
|
||||
return floatType;
|
||||
}
|
||||
async computeQueryEmbeddings(_data: string) {
|
||||
return [1, 2, 3];
|
||||
}
|
||||
async computeSourceEmbeddings(data: string[]) {
|
||||
return Array.from({ length: data.length }).fill([
|
||||
1, 2, 3,
|
||||
]) as number[][];
|
||||
}
|
||||
}
|
||||
const func = getRegistry().get<MockEmbeddingFunction>("test")!.create();
|
||||
const func2 = getRegistry()
|
||||
.get<MockEmbeddingFunctionWithoutNDims>("test1")!
|
||||
.create();
|
||||
|
||||
const schema = LanceSchema({
|
||||
text: func.sourceField(new Utf8()),
|
||||
vector: func.vectorField(floatType),
|
||||
});
|
||||
|
||||
const schema2 = LanceSchema({
|
||||
text: func2.sourceField(new Utf8()),
|
||||
vector: func2.vectorField({ datatype: floatType, dims: 3 }),
|
||||
});
|
||||
const schema3 = LanceSchema({
|
||||
text: func2.sourceField(new Utf8()),
|
||||
vector: func.vectorField({
|
||||
datatype: new FixedSizeList(3, new Field("item", floatType, true)),
|
||||
dims: 3,
|
||||
}),
|
||||
});
|
||||
|
||||
const expectedSchema = new Schema([
|
||||
new Field("text", new Utf8(), true),
|
||||
new Field(
|
||||
"vector",
|
||||
new FixedSizeList(3, new Field("item", floatType, true)),
|
||||
true,
|
||||
),
|
||||
]);
|
||||
const stringSchema = JSON.stringify(schema, null, 2);
|
||||
const stringSchema2 = JSON.stringify(schema2, null, 2);
|
||||
const stringSchema3 = JSON.stringify(schema3, null, 2);
|
||||
const stringExpectedSchema = JSON.stringify(expectedSchema, null, 2);
|
||||
|
||||
expect(stringSchema).toEqual(stringExpectedSchema);
|
||||
expect(stringSchema2).toEqual(stringExpectedSchema);
|
||||
expect(stringSchema3).toEqual(stringExpectedSchema);
|
||||
},
|
||||
);
|
||||
});
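
// ---------------------------------------------------------------------------
// Editor's addition: a hedged usage sketch, not part of this commit. It
// condenses the pattern the tests above exercise: subclass EmbeddingFunction,
// then attach it at table-creation time via the `embeddingFunction` option so
// the "vector" column is computed automatically on insert. The database path,
// table name, and `ConstEmbedding` class are illustrative only.
// ---------------------------------------------------------------------------
import { connect } from "../lancedb";
import { Float, Float32 } from "../lancedb/arrow";
import { EmbeddingFunction } from "../lancedb/embedding";

class ConstEmbedding extends EmbeddingFunction<string> {
  toJSON(): object {
    return {};
  }
  ndims() {
    return 3;
  }
  embeddingDataType(): Float {
    return new Float32();
  }
  async computeQueryEmbeddings(_query: string) {
    return [1, 2, 3];
  }
  async computeSourceEmbeddings(data: string[]) {
    return data.map(() => [1, 2, 3]);
  }
}

async function createTableWithEmbeddings(dbPath: string) {
  const db = await connect(dbPath);
  // Rows gain a "vector" column computed by ConstEmbedding on insert.
  return db.createTable("docs", [{ text: "hello" }, { text: "world" }], {
    embeddingFunction: { function: new ConstEmbedding(), sourceColumn: "text" },
  });
}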

169
nodejs/__test__/registry.test.ts
Normal file
@@ -0,0 +1,169 @@
// Copyright 2024 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as arrow from "apache-arrow";
import * as arrowOld from "apache-arrow-old";

import * as tmp from "tmp";

import { connect } from "../lancedb";
import { EmbeddingFunction, LanceSchema } from "../lancedb/embedding";
import { getRegistry, register } from "../lancedb/embedding/registry";

describe.each([arrow, arrowOld])("LanceSchema", (arrow) => {
  test("should preserve input order", async () => {
    const schema = LanceSchema({
      id: new arrow.Int32(),
      text: new arrow.Utf8(),
      vector: new arrow.Float32(),
    });
    expect(schema.fields.map((x) => x.name)).toEqual(["id", "text", "vector"]);
  });
});

describe("Registry", () => {
  let tmpDir: tmp.DirResult;
  beforeEach(() => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
  });

  afterEach(() => {
    tmpDir.removeCallback();
    getRegistry().reset();
  });

  it("should register a new item to the registry", async () => {
    @register("mock-embedding")
    class MockEmbeddingFunction extends EmbeddingFunction<string> {
      toJSON(): object {
        return {
          someText: "hello",
        };
      }
      constructor() {
        super();
      }
      ndims() {
        return 3;
      }
      embeddingDataType(): arrow.Float {
        return new arrow.Float32();
      }
      async computeSourceEmbeddings(data: string[]) {
        return data.map(() => [1, 2, 3]);
      }
    }
    const func = getRegistry()
      .get<MockEmbeddingFunction>("mock-embedding")!
      .create();

    const schema = LanceSchema({
      id: new arrow.Int32(),
      text: func.sourceField(new arrow.Utf8()),
      vector: func.vectorField(),
    });

    const db = await connect(tmpDir.name);
    const table = await db.createTable(
      "test",
      [
        { id: 1, text: "hello" },
        { id: 2, text: "world" },
      ],
      { schema },
    );
    const expected = [
      [1, 2, 3],
      [1, 2, 3],
    ];
    const actual = await table.query().toArrow();
    const vectors = actual
      .getChild("vector")
      ?.toArray()
      .map((x: unknown) => {
        if (x instanceof arrow.Vector) {
          return [...x];
        } else {
          return x;
        }
      });
    expect(vectors).toEqual(expected);
  });
  test("should error if registering with the same name", async () => {
    class MockEmbeddingFunction extends EmbeddingFunction<string> {
      toJSON(): object {
        return {
          someText: "hello",
        };
      }
      constructor() {
        super();
      }
      ndims() {
        return 3;
      }
      embeddingDataType(): arrow.Float {
        return new arrow.Float32();
      }
      async computeSourceEmbeddings(data: string[]) {
        return data.map(() => [1, 2, 3]);
      }
    }
    register("mock-embedding")(MockEmbeddingFunction);
    expect(() => register("mock-embedding")(MockEmbeddingFunction)).toThrow(
      'Embedding function with alias "mock-embedding" already exists',
    );
  });
  test("schema should contain correct metadata", async () => {
    class MockEmbeddingFunction extends EmbeddingFunction<string> {
      toJSON(): object {
        return {
          someText: "hello",
        };
      }
      constructor() {
        super();
      }
      ndims() {
        return 3;
      }
      embeddingDataType(): arrow.Float {
        return new arrow.Float32();
      }
      async computeSourceEmbeddings(data: string[]) {
        return data.map(() => [1, 2, 3]);
      }
    }
    const func = new MockEmbeddingFunction();

    const schema = LanceSchema({
      id: new arrow.Int32(),
      text: func.sourceField(new arrow.Utf8()),
      vector: func.vectorField(),
    });
    const expectedMetadata = new Map<string, string>([
      [
        "embedding_functions",
        JSON.stringify([
          {
            sourceColumn: "text",
            vectorColumn: "vector",
            name: "MockEmbeddingFunction",
            model: { someText: "hello" },
          },
        ]),
      ],
    ]);
    expect(schema.metadata).toEqual(expectedMetadata);
  });
});
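
// ---------------------------------------------------------------------------
// Editor's addition: a hedged sketch, not part of this commit. It condenses
// the registry flow tested above: `register(alias)` stores the class, the
// registry recreates an instance by alias, and `LanceSchema` records the
// source/vector column pairing in the schema's "embedding_functions"
// metadata. The alias and helper name are illustrative.
// ---------------------------------------------------------------------------
import { Int32, Utf8 } from "../lancedb/arrow";
import { LanceSchema } from "../lancedb/embedding";
import { getRegistry } from "../lancedb/embedding/registry";

function schemaForRegisteredFunction(alias: string) {
  // get() returns undefined if the alias was never registered (e.g. after
  // getRegistry().reset()); the non-null assertion mirrors the tests above.
  const func = getRegistry().get(alias)!.create();
  return LanceSchema({
    id: new Int32(),
    text: func.sourceField(new Utf8()),
    vector: func.vectorField(),
  });
}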

@@ -14,7 +14,11 @@

/* eslint-disable @typescript-eslint/naming-convention */

import { connect } from "../dist";
import {
  CreateKeyCommand,
  KMSClient,
  ScheduleKeyDeletionCommand,
} from "@aws-sdk/client-kms";
import {
  CreateBucketCommand,
  DeleteBucketCommand,
@@ -23,11 +27,7 @@ import {
  ListObjectsV2Command,
  S3Client,
} from "@aws-sdk/client-s3";
import {
  CreateKeyCommand,
  ScheduleKeyDeletionCommand,
  KMSClient,
} from "@aws-sdk/client-kms";
import { connect } from "../lancedb";

// Skip these tests unless the S3_TEST environment variable is set
const maybeDescribe = process.env.S3_TEST ? describe : describe.skip;
@@ -63,9 +63,10 @@ class S3Bucket {
    // Delete the bucket if it already exists
    try {
      await this.deleteBucket(client, name);
    } catch (e) {
    } catch {
      // It's fine if the bucket doesn't exist
    }
    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
    await client.send(new CreateBucketCommand({ Bucket: name }));
    return new S3Bucket(name);
  }
@@ -78,27 +79,32 @@ class S3Bucket {
  static async deleteBucket(client: S3Client, name: string) {
    // Must delete all objects before we can delete the bucket
    const objects = await client.send(
      // biome-ignore lint/style/useNamingConvention: we dont control s3's api
      new ListObjectsV2Command({ Bucket: name }),
    );
    if (objects.Contents) {
      for (const object of objects.Contents) {
        await client.send(
          // biome-ignore lint/style/useNamingConvention: we dont control s3's api
          new DeleteObjectCommand({ Bucket: name, Key: object.Key }),
        );
      }
    }

    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
    await client.send(new DeleteBucketCommand({ Bucket: name }));
  }

  public async assertAllEncrypted(path: string, keyId: string) {
    const client = S3Bucket.s3Client();
    const objects = await client.send(
      // biome-ignore lint/style/useNamingConvention: we dont control s3's api
      new ListObjectsV2Command({ Bucket: this.name, Prefix: path }),
    );
    if (objects.Contents) {
      for (const object of objects.Contents) {
        const metadata = await client.send(
          // biome-ignore lint/style/useNamingConvention: we dont control s3's api
          new HeadObjectCommand({ Bucket: this.name, Key: object.Key }),
        );
        expect(metadata.ServerSideEncryption).toBe("aws:kms");
@@ -137,6 +143,7 @@ class KmsKey {

  public async delete() {
    const client = KmsKey.kmsClient();
    // biome-ignore lint/style/useNamingConvention: we dont control s3's api
    await client.send(new ScheduleKeyDeletionCommand({ KeyId: this.keyId }));
  }
}

@@ -16,23 +16,33 @@ import * as fs from "fs";
import * as path from "path";
import * as tmp from "tmp";

import { Table, connect } from "../dist";
import {
  Schema,
  Field,
  Float32,
  Int32,
  FixedSizeList,
  Int64,
  Float64,
} from "apache-arrow";
import { makeArrowTable } from "../dist/arrow";
import { Index } from "../dist/indices";
import * as arrow from "apache-arrow";
import * as arrowOld from "apache-arrow-old";

describe("Given a table", () => {
import { Table, connect } from "../lancedb";
import {
  Table as ArrowTable,
  Field,
  FixedSizeList,
  Float32,
  Float64,
  Int32,
  Int64,
  Schema,
  makeArrowTable,
} from "../lancedb/arrow";
import { EmbeddingFunction, LanceSchema, register } from "../lancedb/embedding";
import { Index } from "../lancedb/indices";

// biome-ignore lint/suspicious/noExplicitAny: <explanation>
describe.each([arrow, arrowOld])("Given a table", (arrow: any) => {
  let tmpDir: tmp.DirResult;
  let table: Table;
  const schema = new Schema([new Field("id", new Float64(), true)]);

  const schema = new arrow.Schema([
    new arrow.Field("id", new arrow.Float64(), true),
  ]);

  beforeEach(async () => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
    const conn = await connect(tmpDir.name);
@@ -83,6 +93,177 @@ describe("Given a table", () => {
    expect(await table.countRows("id == 7")).toBe(1);
    expect(await table.countRows("id == 10")).toBe(1);
  });

  // https://github.com/lancedb/lancedb/issues/1293
  test.each([new arrow.Float16(), new arrow.Float32(), new arrow.Float64()])(
    "can create empty table with non default float type: %s",
    async (floatType) => {
      const db = await connect(tmpDir.name);

      const data = [
        { text: "hello", vector: Array(512).fill(1.0) },
        { text: "hello world", vector: Array(512).fill(1.0) },
      ];
      const f64Schema = new arrow.Schema([
        new arrow.Field("text", new arrow.Utf8(), true),
        new arrow.Field(
          "vector",
          new arrow.FixedSizeList(512, new arrow.Field("item", floatType)),
          true,
        ),
      ]);

      const f64Table = await db.createEmptyTable("f64", f64Schema, {
        mode: "overwrite",
      });
      try {
        await f64Table.add(data);
        const res = await f64Table.query().toArray();
        expect(res.length).toBe(2);
      } catch (e) {
        expect(e).toBeUndefined();
      }
    },
  );

  it("should return the table as an instance of an arrow table", async () => {
    const arrowTbl = await table.toArrow();
    expect(arrowTbl).toBeInstanceOf(ArrowTable);
  });
});

describe("merge insert", () => {
  let tmpDir: tmp.DirResult;
  let table: Table;

  beforeEach(async () => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
    const conn = await connect(tmpDir.name);

    table = await conn.createTable("some_table", [
      { a: 1, b: "a" },
      { a: 2, b: "b" },
      { a: 3, b: "c" },
    ]);
  });
  afterEach(() => tmpDir.removeCallback());

  test("upsert", async () => {
    const newData = [
      { a: 2, b: "x" },
      { a: 3, b: "y" },
      { a: 4, b: "z" },
    ];
    await table
      .mergeInsert("a")
      .whenMatchedUpdateAll()
      .whenNotMatchedInsertAll()
      .execute(newData);
    const expected = [
      { a: 1, b: "a" },
      { a: 2, b: "x" },
      { a: 3, b: "y" },
      { a: 4, b: "z" },
    ];

    expect(
      JSON.parse(JSON.stringify((await table.toArrow()).toArray())),
    ).toEqual(expected);
  });
  test("conditional update", async () => {
    const newData = [
      { a: 2, b: "x" },
      { a: 3, b: "y" },
      { a: 4, b: "z" },
    ];
    await table
      .mergeInsert("a")
      .whenMatchedUpdateAll({ where: "target.b = 'b'" })
      .execute(newData);

    const expected = [
      { a: 1, b: "a" },
      { a: 2, b: "x" },
      { a: 3, b: "c" },
    ];
    // round trip to arrow and back to json to avoid comparing arrow objects to js object
    // biome-ignore lint/suspicious/noExplicitAny: test
    let res: any[] = JSON.parse(
      JSON.stringify((await table.toArrow()).toArray()),
    );
    res = res.sort((a, b) => a.a - b.a);

    expect(res).toEqual(expected);
  });

  test("insert if not exists", async () => {
    const newData = [
      { a: 2, b: "x" },
      { a: 3, b: "y" },
      { a: 4, b: "z" },
    ];
    await table.mergeInsert("a").whenNotMatchedInsertAll().execute(newData);
    const expected = [
      { a: 1, b: "a" },
      { a: 2, b: "b" },
      { a: 3, b: "c" },
      { a: 4, b: "z" },
    ];
    // biome-ignore lint/suspicious/noExplicitAny: <explanation>
    let res: any[] = JSON.parse(
      JSON.stringify((await table.toArrow()).toArray()),
    );
    res = res.sort((a, b) => a.a - b.a);
    expect(res).toEqual(expected);
  });
  test("replace range", async () => {
    const newData = [
      { a: 2, b: "x" },
      { a: 4, b: "z" },
    ];
    await table
      .mergeInsert("a")
      .whenMatchedUpdateAll()
      .whenNotMatchedInsertAll()
      .whenNotMatchedBySourceDelete({ where: "a > 2" })
      .execute(newData);

    const expected = [
      { a: 1, b: "a" },
      { a: 2, b: "x" },
      { a: 4, b: "z" },
    ];
    // biome-ignore lint/suspicious/noExplicitAny: <explanation>
    let res: any[] = JSON.parse(
      JSON.stringify((await table.toArrow()).toArray()),
    );
    res = res.sort((a, b) => a.a - b.a);
    expect(res).toEqual(expected);
  });
  test("replace range no condition", async () => {
    const newData = [
      { a: 2, b: "x" },
      { a: 4, b: "z" },
    ];
    await table
      .mergeInsert("a")
      .whenMatchedUpdateAll()
      .whenNotMatchedInsertAll()
      .whenNotMatchedBySourceDelete()
      .execute(newData);

    const expected = [
      { a: 2, b: "x" },
      { a: 4, b: "z" },
    ];

    // biome-ignore lint/suspicious/noExplicitAny: test
    let res: any[] = JSON.parse(
      JSON.stringify((await table.toArrow()).toArray()),
    );
    res = res.sort((a, b) => a.a - b.a);
    expect(res).toEqual(expected);
  });
});

describe("When creating an index", () => {
@@ -124,6 +305,7 @@ describe("When creating an index", () => {
    const indices = await tbl.listIndices();
    expect(indices.length).toBe(1);
    expect(indices[0]).toEqual({
      name: "vec_idx",
      indexType: "IvfPq",
      columns: ["vec"],
    });
@@ -180,6 +362,24 @@ describe("When creating an index", () => {
    for await (const r of tbl.query().where("id > 1").select(["id"])) {
      expect(r.numRows).toBe(298);
    }
    // should also work with 'filter' alias
    for await (const r of tbl.query().filter("id > 1").select(["id"])) {
      expect(r.numRows).toBe(298);
    }
  });

  test("should be able to get index stats", async () => {
    await tbl.createIndex("id");

    const stats = await tbl.indexStats("id_idx");
    expect(stats).toBeDefined();
    expect(stats?.numIndexedRows).toEqual(300);
    expect(stats?.numUnindexedRows).toEqual(0);
  });

  test("when getting stats on non-existent index", async () => {
    const stats = await tbl.indexStats("some non-existent index");
    expect(stats).toBeUndefined();
  });

  // TODO: Move this test to the query API test (making sure we can reject queries
@@ -419,3 +619,127 @@ describe("when dealing with versioning", () => {
    );
  });
});

describe("when optimizing a dataset", () => {
  let tmpDir: tmp.DirResult;
  let table: Table;
  beforeEach(async () => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
    const con = await connect(tmpDir.name);
    table = await con.createTable("vectors", [{ id: 1 }]);
    await table.add([{ id: 2 }]);
  });
  afterEach(() => {
    tmpDir.removeCallback();
  });

  it("compacts files", async () => {
    const stats = await table.optimize();
    expect(stats.compaction.filesAdded).toBe(1);
    expect(stats.compaction.filesRemoved).toBe(2);
    expect(stats.compaction.fragmentsAdded).toBe(1);
    expect(stats.compaction.fragmentsRemoved).toBe(2);
  });

  it("cleanups old versions", async () => {
    const stats = await table.optimize({ cleanupOlderThan: new Date() });
    expect(stats.prune.bytesRemoved).toBeGreaterThan(0);
    expect(stats.prune.oldVersionsRemoved).toBe(3);
  });
});

describe("table.search", () => {
  let tmpDir: tmp.DirResult;
  beforeEach(() => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
  });
  afterEach(() => tmpDir.removeCallback());

  test("can search using a string", async () => {
    @register()
    class MockEmbeddingFunction extends EmbeddingFunction<string> {
      toJSON(): object {
        return {};
      }
      ndims() {
        return 1;
      }
      embeddingDataType(): arrow.Float {
        return new Float32();
      }

      // Hardcoded embeddings for the sake of testing
      async computeQueryEmbeddings(_data: string) {
        switch (_data) {
          case "greetings":
            return [0.1];
          case "farewell":
            return [0.2];
          default:
            return null as never;
        }
      }

      // Hardcoded embeddings for the sake of testing
      async computeSourceEmbeddings(data: string[]) {
        return data.map((s) => {
          switch (s) {
            case "hello world":
              return [0.1];
            case "goodbye world":
              return [0.2];
            default:
              return null as never;
          }
        });
      }
    }

    const func = new MockEmbeddingFunction();
    const schema = LanceSchema({
      text: func.sourceField(new arrow.Utf8()),
      vector: func.vectorField(),
    });
    const db = await connect(tmpDir.name);
    const data = [{ text: "hello world" }, { text: "goodbye world" }];
    const table = await db.createTable("test", data, { schema });

    const results = await table.search("greetings").then((r) => r.toArray());
    expect(results[0].text).toBe(data[0].text);

    const results2 = await table.search("farewell").then((r) => r.toArray());
    expect(results2[0].text).toBe(data[1].text);
  });

  test("rejects if no embedding function provided", async () => {
    const db = await connect(tmpDir.name);
    const data = [
      { text: "hello world", vector: [0.1, 0.2, 0.3] },
      { text: "goodbye world", vector: [0.4, 0.5, 0.6] },
    ];
    const table = await db.createTable("test", data);

    expect(table.search("hello")).rejects.toThrow(
      "No embedding functions are defined in the table",
    );
  });

  test.each([
    [0.4, 0.5, 0.599], // number[]
    Float32Array.of(0.4, 0.5, 0.599), // Float32Array
    Float64Array.of(0.4, 0.5, 0.599), // Float64Array
  ])("can search using vectorlike datatypes", async (vectorlike) => {
    const db = await connect(tmpDir.name);
    const data = [
      { text: "hello world", vector: [0.1, 0.2, 0.3] },
      { text: "goodbye world", vector: [0.4, 0.5, 0.6] },
    ];
    const table = await db.createTable("test", data);

    // biome-ignore lint/suspicious/noExplicitAny: test
    const results: any[] = await table.search(vectorlike).toArray();

    expect(results.length).toBe(2);
    expect(results[0].text).toBe(data[1].text);
  });
});
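
// ---------------------------------------------------------------------------
// Editor's addition: a hedged sketch, not part of this commit. The
// merge-insert builder exercised above, written as a plain upsert helper:
// rows that match on the key column are updated, the rest are inserted.
// The key column "a" matches the tests; the helper name is illustrative.
// ---------------------------------------------------------------------------
import { Table } from "../lancedb";

async function upsert(table: Table, rows: Record<string, unknown>[]) {
  await table
    .mergeInsert("a")
    .whenMatchedUpdateAll()
    .whenNotMatchedInsertAll()
    .execute(rows);
}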

142
nodejs/biome.json
Normal file
@@ -0,0 +1,142 @@
{
  "$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
  "organizeImports": {
    "enabled": true
  },
  "files": {
    "ignore": [
      "**/dist/**/*",
      "**/native.js",
      "**/native.d.ts",
      "**/npm/**/*",
      "**/.vscode/**"
    ]
  },
  "formatter": {
    "indentStyle": "space"
  },
  "linter": {
    "enabled": true,
    "rules": {
      "recommended": false,
      "complexity": {
        "noBannedTypes": "error",
        "noExtraBooleanCast": "error",
        "noMultipleSpacesInRegularExpressionLiterals": "error",
        "noUselessCatch": "error",
        "noUselessThisAlias": "error",
        "noUselessTypeConstraint": "error",
        "noWith": "error"
      },
      "correctness": {
        "noConstAssign": "error",
        "noConstantCondition": "error",
        "noEmptyCharacterClassInRegex": "error",
        "noEmptyPattern": "error",
        "noGlobalObjectCalls": "error",
        "noInnerDeclarations": "error",
        "noInvalidConstructorSuper": "error",
        "noNewSymbol": "error",
        "noNonoctalDecimalEscape": "error",
        "noPrecisionLoss": "error",
        "noSelfAssign": "error",
        "noSetterReturn": "error",
        "noSwitchDeclarations": "error",
        "noUndeclaredVariables": "error",
        "noUnreachable": "error",
        "noUnreachableSuper": "error",
        "noUnsafeFinally": "error",
        "noUnsafeOptionalChaining": "error",
        "noUnusedLabels": "error",
        "noUnusedVariables": "warn",
        "useIsNan": "error",
        "useValidForDirection": "error",
        "useYield": "error"
      },
      "style": {
        "noNamespace": "error",
        "useAsConstAssertion": "error",
        "useBlockStatements": "off",
        "useNamingConvention": {
          "level": "error",
          "options": {
            "strictCase": false
          }
        }
      },
      "suspicious": {
        "noAssignInExpressions": "error",
        "noAsyncPromiseExecutor": "error",
        "noCatchAssign": "error",
        "noClassAssign": "error",
        "noCompareNegZero": "error",
        "noControlCharactersInRegex": "error",
        "noDebugger": "error",
        "noDuplicateCase": "error",
        "noDuplicateClassMembers": "error",
        "noDuplicateObjectKeys": "error",
        "noDuplicateParameters": "error",
        "noEmptyBlockStatements": "error",
        "noExplicitAny": "warn",
        "noExtraNonNullAssertion": "error",
        "noFallthroughSwitchClause": "error",
        "noFunctionAssign": "error",
        "noGlobalAssign": "error",
        "noImportAssign": "error",
        "noMisleadingCharacterClass": "error",
        "noMisleadingInstantiator": "error",
        "noPrototypeBuiltins": "error",
        "noRedeclare": "error",
        "noShadowRestrictedNames": "error",
        "noUnsafeDeclarationMerging": "error",
        "noUnsafeNegation": "error",
        "useGetterReturn": "error",
        "useValidTypeof": "error"
      }
    },
    "ignore": ["**/dist/**/*", "**/native.js", "**/native.d.ts"]
  },
  "javascript": {
    "globals": []
  },
  "overrides": [
    {
      "include": [
        "**/*.ts",
        "**/*.tsx",
        "**/*.mts",
        "**/*.cts",
        "__test__/*.test.ts"
      ],
      "linter": {
        "rules": {
          "correctness": {
            "noConstAssign": "off",
            "noGlobalObjectCalls": "off",
            "noInvalidConstructorSuper": "off",
            "noNewSymbol": "off",
            "noSetterReturn": "off",
            "noUndeclaredVariables": "off",
            "noUnreachable": "off",
            "noUnreachableSuper": "off"
          },
          "style": {
            "noArguments": "error",
            "noVar": "error",
            "useConst": "error"
          },
          "suspicious": {
            "noDuplicateClassMembers": "off",
            "noDuplicateObjectKeys": "off",
            "noDuplicateParameters": "off",
            "noFunctionAssign": "off",
            "noImportAssign": "off",
            "noRedeclare": "off",
            "noUnsafeNegation": "off",
            "useGetterReturn": "off"
          }
        }
      }
    }
  ]
}

@@ -1,28 +0,0 @@
/* eslint-disable @typescript-eslint/naming-convention */
// @ts-check

const eslint = require("@eslint/js");
const tseslint = require("typescript-eslint");
const eslintConfigPrettier = require("eslint-config-prettier");
const jsdoc = require("eslint-plugin-jsdoc");

module.exports = tseslint.config(
  eslint.configs.recommended,
  jsdoc.configs["flat/recommended"],
  eslintConfigPrettier,
  ...tseslint.configs.recommended,
  {
    rules: {
      "@typescript-eslint/naming-convention": "error",
      "jsdoc/require-returns": "off",
      "jsdoc/require-param": "off",
      "jsdoc/require-jsdoc": [
        "error",
        {
          publicOnly: true,
        },
      ],
    },
    plugins: jsdoc,
  },
);

@@ -13,28 +13,126 @@
// limitations under the License.

import {
  Field,
  makeBuilder,
  RecordBatchFileWriter,
  Utf8,
  type Vector,
  FixedSizeList,
  vectorFromArray,
  type Schema,
  Table as ArrowTable,
  RecordBatchStreamWriter,
  List,
  RecordBatch,
  makeData,
  Struct,
  type Float,
  DataType,
  Binary,
  DataType,
  Field,
  FixedSizeBinary,
  FixedSizeList,
  Float,
  Float32,
  Int,
  LargeBinary,
  List,
  Null,
  RecordBatch,
  RecordBatchFileWriter,
  RecordBatchStreamWriter,
  Schema,
  Struct,
  Utf8,
  Vector,
  makeBuilder,
  makeData,
  type makeTable,
  vectorFromArray,
} from "apache-arrow";
import { type EmbeddingFunction } from "./embedding/embedding_function";
import { sanitizeSchema } from "./sanitize";
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
import { sanitizeField, sanitizeSchema, sanitizeType } from "./sanitize";
export * from "apache-arrow";

export type IntoVector = Float32Array | Float64Array | number[];

export function isArrowTable(value: object): value is ArrowTable {
  if (value instanceof ArrowTable) return true;
  return "schema" in value && "batches" in value;
}

export function isDataType(value: unknown): value is DataType {
  return (
    value instanceof DataType ||
    DataType.isNull(value) ||
    DataType.isInt(value) ||
    DataType.isFloat(value) ||
    DataType.isBinary(value) ||
    DataType.isLargeBinary(value) ||
    DataType.isUtf8(value) ||
    DataType.isLargeUtf8(value) ||
    DataType.isBool(value) ||
    DataType.isDecimal(value) ||
    DataType.isDate(value) ||
    DataType.isTime(value) ||
    DataType.isTimestamp(value) ||
    DataType.isInterval(value) ||
    DataType.isDuration(value) ||
    DataType.isList(value) ||
    DataType.isStruct(value) ||
    DataType.isUnion(value) ||
    DataType.isFixedSizeBinary(value) ||
    DataType.isFixedSizeList(value) ||
    DataType.isMap(value) ||
    DataType.isDictionary(value)
  );
}
export function isNull(value: unknown): value is Null {
  return value instanceof Null || DataType.isNull(value);
}
export function isInt(value: unknown): value is Int {
  return value instanceof Int || DataType.isInt(value);
}
export function isFloat(value: unknown): value is Float {
  return value instanceof Float || DataType.isFloat(value);
}
export function isBinary(value: unknown): value is Binary {
  return value instanceof Binary || DataType.isBinary(value);
}
export function isLargeBinary(value: unknown): value is LargeBinary {
  return value instanceof LargeBinary || DataType.isLargeBinary(value);
}
export function isUtf8(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isUtf8(value);
}
export function isLargeUtf8(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isLargeUtf8(value);
}
export function isBool(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isBool(value);
}
export function isDecimal(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isDecimal(value);
}
export function isDate(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isDate(value);
}
export function isTime(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isTime(value);
}
export function isTimestamp(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isTimestamp(value);
}
export function isInterval(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isInterval(value);
}
export function isDuration(value: unknown): value is Utf8 {
  return value instanceof Utf8 || DataType.isDuration(value);
}
export function isList(value: unknown): value is List {
  return value instanceof List || DataType.isList(value);
}
export function isStruct(value: unknown): value is Struct {
  return value instanceof Struct || DataType.isStruct(value);
}
export function isUnion(value: unknown): value is Struct {
  return value instanceof Struct || DataType.isUnion(value);
}
export function isFixedSizeBinary(value: unknown): value is FixedSizeBinary {
  return value instanceof FixedSizeBinary || DataType.isFixedSizeBinary(value);
}

export function isFixedSizeList(value: unknown): value is FixedSizeList {
  return value instanceof FixedSizeList || DataType.isFixedSizeList(value);
}

/** Data type accepted by NodeJS SDK */
export type Data = Record<string, unknown>[] | ArrowTable;
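
// ---------------------------------------------------------------------------
// Editor's addition: a hedged sketch, not part of this commit. The guards
// above accept both `instanceof` matches and Arrow's structural `DataType.isX`
// checks, which apparently keeps them working when a value comes from a second
// copy of apache-arrow (see the "apache-arrow-old" tests elsewhere in this
// change). The helper below is illustrative only.
// ---------------------------------------------------------------------------
function describeVectorField(field: Field): string {
  if (isFixedSizeList(field.type) && isFloat(field.type.children[0].type)) {
    return `float vector of dimension ${field.type.listSize}`;
  }
  return "not a vector column";
}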

@@ -85,6 +183,8 @@ export class MakeArrowTableOptions {
  vectorColumns: Record<string, VectorColumnOptions> = {
    vector: new VectorColumnOptions(),
  };
  embeddings?: EmbeddingFunction<unknown>;
  embeddingFunction?: EmbeddingFunctionConfig;

  /**
   * If true then string columns will be encoded with dictionary encoding
@@ -197,6 +297,7 @@ export class MakeArrowTableOptions {
export function makeArrowTable(
  data: Array<Record<string, unknown>>,
  options?: Partial<MakeArrowTableOptions>,
  metadata?: Map<string, string>,
): ArrowTable {
  if (
    data.length === 0 &&
@@ -208,6 +309,11 @@ export function makeArrowTable(
  const opt = new MakeArrowTableOptions(options !== undefined ? options : {});
  if (opt.schema !== undefined && opt.schema !== null) {
    opt.schema = sanitizeSchema(opt.schema);
    opt.schema = validateSchemaEmbeddings(
      opt.schema,
      data,
      options?.embeddingFunction,
    );
  }
  const columns: Record<string, Vector> = {};
  // TODO: sample dataset to find missing columns
@@ -287,21 +393,42 @@ export function makeArrowTable(
    // then patch the schema of the batches so we can use
    // `new ArrowTable(schema, batches)` which does not do any schema inference
    const firstTable = new ArrowTable(columns);
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const batchesFixed = firstTable.batches.map(
      (batch) => new RecordBatch(opt.schema!, batch.data),
    );
    return new ArrowTable(opt.schema, batchesFixed);
  } else {
    return new ArrowTable(columns);
    let schema: Schema;
    if (metadata !== undefined) {
      let schemaMetadata = opt.schema.metadata;
      if (schemaMetadata.size === 0) {
        schemaMetadata = metadata;
      } else {
        for (const [key, entry] of schemaMetadata.entries()) {
          schemaMetadata.set(key, entry);
        }
      }

      schema = new Schema(opt.schema.fields, schemaMetadata);
    } else {
      schema = opt.schema;
    }
    return new ArrowTable(schema, batchesFixed);
  }
  const tbl = new ArrowTable(columns);
  if (metadata !== undefined) {
    // biome-ignore lint/suspicious/noExplicitAny: <explanation>
    (<any>tbl.schema).metadata = metadata;
  }
  return tbl;
}

/**
 * Create an empty Arrow table with the provided schema
 */
export function makeEmptyTable(schema: Schema): ArrowTable {
  return makeArrowTable([], { schema });
export function makeEmptyTable(
  schema: Schema,
  metadata?: Map<string, string>,
): ArrowTable {
  return makeArrowTable([], { schema }, metadata);
}
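
// ---------------------------------------------------------------------------
// Editor's addition: a hedged sketch, not part of this commit, showing how
// the new `metadata` parameter threads through: both entry points can now
// stamp schema metadata (such as the "embedding_functions" entry written by
// LanceSchema) onto the resulting table. The metadata value is illustrative.
// ---------------------------------------------------------------------------
function emptyTableWithMetadata(schema: Schema): ArrowTable {
  const metadata = new Map<string, string>([
    ["embedding_functions", "[]"], // normally produced by the embedding registry
  ]);
  return makeEmptyTable(schema, metadata);
}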

/**
@@ -313,7 +440,7 @@ function makeListVector(lists: unknown[][]): Vector<unknown> {
    throw Error("Cannot infer list vector from empty array or empty list");
  }
  const sampleList = lists[0];
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  // biome-ignore lint/suspicious/noExplicitAny: skip
  let inferredType: any;
  try {
    const sampleVector = makeVector(sampleList);
@@ -337,7 +464,7 @@ function makeVector(
  values: unknown[],
  type?: DataType,
  stringAsDictionary?: boolean,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  // biome-ignore lint/suspicious/noExplicitAny: skip
): Vector<any> {
  if (type !== undefined) {
    // No need for inference, let Arrow create it
@@ -373,13 +500,74 @@
  }
}

/** Helper function to apply embeddings from metadata to an input table */
async function applyEmbeddingsFromMetadata(
  table: ArrowTable,
  schema: Schema,
): Promise<ArrowTable> {
  const registry = getRegistry();
  const functions = registry.parseFunctions(schema.metadata);

  const columns = Object.fromEntries(
    table.schema.fields.map((field) => [
      field.name,
      table.getChild(field.name)!,
    ]),
  );

  for (const functionEntry of functions.values()) {
    const sourceColumn = columns[functionEntry.sourceColumn];
    const destColumn = functionEntry.vectorColumn ?? "vector";
    if (sourceColumn === undefined) {
      throw new Error(
        `Cannot apply embedding function because the source column '${functionEntry.sourceColumn}' was not present in the data`,
      );
    }
    if (columns[destColumn] !== undefined) {
      throw new Error(
        `Attempt to apply embeddings to table failed because column ${destColumn} already existed`,
      );
    }
    if (table.batches.length > 1) {
      throw new Error(
        "Internal error: `makeArrowTable` unexpectedly created a table with more than one batch",
      );
    }
    const values = sourceColumn.toArray();

    const vectors =
      await functionEntry.function.computeSourceEmbeddings(values);
    if (vectors.length !== values.length) {
      throw new Error(
        "Embedding function did not return an embedding for each input element",
      );
    }
    let destType: DataType;
    const dtype = schema.fields.find((f) => f.name === destColumn)!.type;
    if (isFixedSizeList(dtype)) {
      destType = sanitizeType(dtype);
    } else {
      throw new Error(
        "Expected FixedSizeList as datatype for vector field, instead got: " +
          dtype,
      );
    }
    const vector = makeVector(vectors, destType);
    columns[destColumn] = vector;
  }
  const newTable = new ArrowTable(columns);
  return alignTable(newTable, schema);
}

/** Helper function to apply embeddings to an input table */
async function applyEmbeddings<T>(
  table: ArrowTable,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<ArrowTable> {
  if (embeddings == null) {
  if (schema?.metadata.has("embedding_functions")) {
    return applyEmbeddingsFromMetadata(table, schema!);
  } else if (embeddings == null || embeddings === undefined) {
    return table;
  }

@@ -397,8 +585,9 @@ async function applyEmbeddings<T>(
  const newColumns = Object.fromEntries(colEntries);

  const sourceColumn = newColumns[embeddings.sourceColumn];
  const destColumn = embeddings.destColumn ?? "vector";
  const innerDestType = embeddings.embeddingDataType ?? new Float32();
  const destColumn = embeddings.vectorColumn ?? "vector";
  const innerDestType =
    embeddings.function.embeddingDataType() ?? new Float32();
  if (sourceColumn === undefined) {
    throw new Error(
      `Cannot apply embedding function because the source column '${embeddings.sourceColumn}' was not present in the data`,
@@ -412,11 +601,9 @@ async function applyEmbeddings<T>(
    // if we call convertToTable with 0 records and a schema that includes the embedding
    return table;
  }
  if (embeddings.embeddingDimension !== undefined) {
    const destType = newVectorType(
      embeddings.embeddingDimension,
      innerDestType,
    );
  const dimensions = embeddings.function.ndims();
  if (dimensions !== undefined) {
    const destType = newVectorType(dimensions, innerDestType);
    newColumns[destColumn] = makeVector([], destType);
  } else if (schema != null) {
    const destField = schema.fields.find((f) => f.name === destColumn);
@@ -444,7 +631,9 @@ async function applyEmbeddings<T>(
    );
  }
  const values = sourceColumn.toArray();
  const vectors = await embeddings.embed(values as T[]);
  const vectors = await embeddings.function.computeSourceEmbeddings(
    values as T[],
  );
  if (vectors.length !== values.length) {
    throw new Error(
      "Embedding function did not return an embedding for each input element",
@@ -484,9 +673,9 @@ async function applyEmbeddings<T>(
 * embedding columns. If no schema is provided then embedding columns will
 * be placed at the end of the table, after all of the input columns.
 */
export async function convertToTable<T>(
export async function convertToTable(
  data: Array<Record<string, unknown>>,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  makeTableOptions?: Partial<MakeArrowTableOptions>,
): Promise<ArrowTable> {
  const table = makeArrowTable(data, makeTableOptions);
@@ -494,13 +683,13 @@ export async function convertToTable<T>(
}

/** Creates the Arrow Type for a Vector column with dimension `dim` */
function newVectorType<T extends Float>(
export function newVectorType<T extends Float>(
  dim: number,
  innerType: T,
): FixedSizeList<T> {
  // in Lance we always default to have the elements nullable, so we need to set it to true
  // otherwise we often get schema mismatches because the stored data always has schema with nullable elements
  const children = new Field<T>("item", innerType, true);
  const children = new Field("item", <T>sanitizeType(innerType), true);
  return new FixedSizeList(dim, children);
}
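
// ---------------------------------------------------------------------------
// Editor's addition: a hedged illustration, not part of this commit. For
// example, newVectorType(3, new Float32()) yields the same type as
// new FixedSizeList(3, new Field("item", new Float32(), true)): a
// 3-dimensional Float32 vector column with a nullable "item" child, matching
// how Lance stores vectors (see the comment above).
// ---------------------------------------------------------------------------
const exampleVectorType = newVectorType(3, new Float32());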

@@ -511,9 +700,9 @@ function newVectorType<T extends Float>(
 *
 * `schema` is required if data is empty
 */
export async function fromRecordsToBuffer<T>(
export async function fromRecordsToBuffer(
  data: Array<Record<string, unknown>>,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<Buffer> {
  if (schema !== undefined && schema !== null) {
@@ -531,9 +720,9 @@ export async function fromRecordsToBuffer<T>(
 *
 * `schema` is required if data is empty
 */
export async function fromRecordsToStreamBuffer<T>(
export async function fromRecordsToStreamBuffer(
  data: Array<Record<string, unknown>>,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<Buffer> {
  if (schema !== undefined && schema !== null) {
@@ -552,9 +741,9 @@ export async function fromRecordsToStreamBuffer<T>(
 *
 * `schema` is required if the table is empty
 */
export async function fromTableToBuffer<T>(
export async function fromTableToBuffer(
  table: ArrowTable,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<Buffer> {
  if (schema !== undefined && schema !== null) {
@@ -573,19 +762,19 @@ export async function fromTableToBuffer<T>(
 *
 * `schema` is required if the table is empty
 */
export async function fromDataToBuffer<T>(
export async function fromDataToBuffer(
  data: Data,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<Buffer> {
  if (schema !== undefined && schema !== null) {
    schema = sanitizeSchema(schema);
  }
  if (data instanceof ArrowTable) {
  if (isArrowTable(data)) {
    return fromTableToBuffer(data, embeddings, schema);
  } else {
    const table = await convertToTable(data);
    return fromTableToBuffer(table, embeddings, schema);
    const table = await convertToTable(data, embeddings, { schema });
    return fromTableToBuffer(table);
  }
}

@@ -597,9 +786,9 @@ export async function fromDataToBuffer<T>(
 *
 * `schema` is required if the table is empty
 */
export async function fromTableToStreamBuffer<T>(
export async function fromTableToStreamBuffer(
  table: ArrowTable,
  embeddings?: EmbeddingFunction<T>,
  embeddings?: EmbeddingFunctionConfig,
  schema?: Schema,
): Promise<Buffer> {
  const tableWithEmbeddings = await applyEmbeddings(table, embeddings, schema);
@@ -648,3 +837,54 @@ function alignTable(table: ArrowTable, schema: Schema): ArrowTable {
export function createEmptyTable(schema: Schema): ArrowTable {
  return new ArrowTable(sanitizeSchema(schema));
}

function validateSchemaEmbeddings(
  schema: Schema,
  data: Array<Record<string, unknown>>,
  embeddings: EmbeddingFunctionConfig | undefined,
) {
  const fields = [];
  const missingEmbeddingFields = [];

  // First we check if the field is a `FixedSizeList`
  // Then we check if the data contains the field
  // if it does not, we add it to the list of missing embedding fields
  // Finally, we check if those missing embedding fields are `this._embeddings`
  // if they are not, we throw an error
  for (let field of schema.fields) {
    if (isFixedSizeList(field.type)) {
      field = sanitizeField(field);

      if (data.length !== 0 && data?.[0]?.[field.name] === undefined) {
        if (schema.metadata.has("embedding_functions")) {
          const embeddings = JSON.parse(
            schema.metadata.get("embedding_functions")!,
          );
          if (
            // biome-ignore lint/suspicious/noExplicitAny: we don't know the type of `f`
            embeddings.find((f: any) => f["vectorColumn"] === field.name) ===
              undefined
          ) {
            missingEmbeddingFields.push(field);
          }
        } else {
          missingEmbeddingFields.push(field);
        }
      } else {
        fields.push(field);
      }
    } else {
      fields.push(field);
    }
  }

  if (missingEmbeddingFields.length > 0 && embeddings === undefined) {
    throw new Error(
      `Table has embeddings: "${missingEmbeddingFields
        .map((f) => f.name)
        .join(",")}", but no embedding function was provided`,
    );
  }

  return new Schema(fields, schema.metadata);
}

@@ -12,32 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import { fromTableToBuffer, makeArrowTable, makeEmptyTable } from "./arrow";
import { ConnectionOptions, Connection as LanceDbConnection } from "./native";
import { Table } from "./table";
import { Table as ArrowTable, Schema } from "apache-arrow";

/**
 * Connect to a LanceDB instance at the given URI.
 *
 * Accepted formats:
 *
 * - `/path/to/database` - local database
 * - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage
 * - `db://host:port` - remote database (LanceDB cloud)
 * @param {string} uri - The uri of the database. If the database uri starts
 * with `db://` then it connects to a remote database.
 * @see {@link ConnectionOptions} for more details on the URI format.
 */
export async function connect(
  uri: string,
  opts?: Partial<ConnectionOptions>,
): Promise<Connection> {
  opts = opts ?? {};
  opts.storageOptions = cleanseStorageOptions(opts.storageOptions);
  const nativeConn = await LanceDbConnection.new(uri, opts);
  return new Connection(nativeConn);
}
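
// ---------------------------------------------------------------------------
// Editor's addition: a hedged usage sketch, not part of this commit, for the
// `connect` function documented above. The paths are illustrative; the
// storageOptions key is an assumption based on the linked storage guide.
// ---------------------------------------------------------------------------
// const db = await connect("/path/to/database");            // local directory
// const remote = await connect("db://host:port");           // LanceDB cloud
// const s3db = await connect("s3://bucket/path/to/database", {
//   storageOptions: { region: "us-east-1" },                // assumed key
// });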
import { Table as ArrowTable, Data, Schema } from "./arrow";
import { fromTableToBuffer, makeEmptyTable } from "./arrow";
import { EmbeddingFunctionConfig, getRegistry } from "./embedding/registry";
import { Connection as LanceDbConnection } from "./native";
import { LocalTable, Table } from "./table";

export interface CreateTableOptions {
  /**
@@ -65,6 +44,14 @@ export interface CreateTableOptions {
   * The available options are described at https://lancedb.github.io/lancedb/guides/storage/
   */
  storageOptions?: Record<string, string>;
  /**
   * If true then data files will be written with the legacy format
   *
   * The default is true while the new format is in beta
   */
  useLegacyFormat?: boolean;
  schema?: Schema;
  embeddingFunction?: EmbeddingFunctionConfig;
}

export interface OpenTableOptions {
@@ -77,6 +64,18 @@ export interface OpenTableOptions {
   * The available options are described at https://lancedb.github.io/lancedb/guides/storage/
   */
  storageOptions?: Record<string, string>;
  /**
   * Set the size of the index cache, specified as a number of entries
   *
   * The exact meaning of an "entry" will depend on the type of index:
   * - IVF: there is one entry for each IVF partition
   * - BTREE: there is one entry for the entire index
   *
   * This cache applies to the entire opened table, across all indices.
   * Setting this value higher will increase performance on larger datasets
   * at the expense of more RAM
   */
  indexCacheSize?: number;
}
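
// ---------------------------------------------------------------------------
// Editor's addition: a hedged usage sketch, not part of this commit, for
// OpenTableOptions above. A larger indexCacheSize keeps more entries (IVF
// partitions, or a whole BTREE index) in memory, trading RAM for query speed.
// The table name and cache size are illustrative.
// ---------------------------------------------------------------------------
// const table = await db.openTable("my_table", { indexCacheSize: 512 });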

export interface TableNamesOptions {
@@ -91,7 +90,6 @@ export interface TableNamesOptions {
  /** An optional limit to the number of results to return. */
  limit?: number;
}

/**
 * A LanceDB Connection that allows you to open tables and create new ones.
 *
@@ -110,17 +108,15 @@ export interface TableNamesOptions {
 * Any created tables are independent and will continue to work even if
 * the underlying connection has been closed.
 */
export class Connection {
  readonly inner: LanceDbConnection;

  constructor(inner: LanceDbConnection) {
    this.inner = inner;
export abstract class Connection {
  [Symbol.for("nodejs.util.inspect.custom")](): string {
    return this.display();
  }

  /** Return true if the connection has not been closed */
  isOpen(): boolean {
    return this.inner.isOpen();
  }
  /**
   * Return true if the connection has not been closed
   */
  abstract isOpen(): boolean;

  /**
   * Close the connection, releasing any underlying resources.
@@ -129,14 +125,12 @@ export class Connection {
   *
   * Any attempt to use the connection after it is closed will result in an error.
   */
  close(): void {
    this.inner.close();
  }
  abstract close(): void;

  /** Return a brief description of the connection */
  display(): string {
    return this.inner.display();
  }
  /**
   * Return a brief description of the connection
   */
  abstract display(): string;

  /**
   * List all the table names in this database.
@@ -144,15 +138,86 @@ export class Connection {
   * Tables will be returned in lexicographical order.
   * @param {Partial<TableNamesOptions>} options - options to control the
   * paging / start point
   *
   */
  async tableNames(options?: Partial<TableNamesOptions>): Promise<string[]> {
    return this.inner.tableNames(options?.startAfter, options?.limit);
  }
  abstract tableNames(options?: Partial<TableNamesOptions>): Promise<string[]>;

  /**
   * Open a table in the database.
   * @param {string} name - The name of the table
   */
  abstract openTable(
    name: string,
    options?: Partial<OpenTableOptions>,
  ): Promise<Table>;

  /**
   * Creates a new Table and initialize it with new data.
   * @param {object} options - The options object.
   * @param {string} options.name - The name of the table.
   * @param {Data} options.data - Non-empty Array of Records to be inserted into the table
   *
   */
  abstract createTable(
    options: {
      name: string;
      data: Data;
    } & Partial<CreateTableOptions>,
  ): Promise<Table>;
  /**
   * Creates a new Table and initialize it with new data.
   * @param {string} name - The name of the table.
   * @param {Record<string, unknown>[] | ArrowTable} data - Non-empty Array of Records
   * to be inserted into the table
   */
  abstract createTable(
    name: string,
    data: Record<string, unknown>[] | ArrowTable,
    options?: Partial<CreateTableOptions>,
  ): Promise<Table>;

  /**
   * Creates a new empty Table
   * @param {string} name - The name of the table.
   * @param {Schema} schema - The schema of the table
   */
  abstract createEmptyTable(
    name: string,
    schema: Schema,
    options?: Partial<CreateTableOptions>,
  ): Promise<Table>;

  /**
   * Drop an existing table.
   * @param {string} name The name of the table to drop.
   */
  abstract dropTable(name: string): Promise<void>;
}

export class LocalConnection extends Connection {
  readonly inner: LanceDbConnection;

  constructor(inner: LanceDbConnection) {
    super();
    this.inner = inner;
  }

  isOpen(): boolean {
    return this.inner.isOpen();
  }

  close(): void {
    this.inner.close();
  }

  display(): string {
    return this.inner.display();
  }

  async tableNames(options?: Partial<TableNamesOptions>): Promise<string[]> {
    return this.inner.tableNames(options?.startAfter, options?.limit);
  }

  async openTable(
    name: string,
    options?: Partial<OpenTableOptions>,
@@ -160,49 +225,38 @@ export class Connection {
    const innerTable = await this.inner.openTable(
      name,
      cleanseStorageOptions(options?.storageOptions),
      options?.indexCacheSize,
    );
    return new Table(innerTable);

    return new LocalTable(innerTable);
  }

  /**
   * Creates a new Table and initialize it with new data.
   * @param {string} name - The name of the table.
   * @param {Record<string, unknown>[] | ArrowTable} data - Non-empty Array of Records
   * to be inserted into the table
   */
  async createTable(
    name: string,
    data: Record<string, unknown>[] | ArrowTable,
    nameOrOptions:
      | string
      | ({ name: string; data: Data } & Partial<CreateTableOptions>),
    data?: Record<string, unknown>[] | ArrowTable,
    options?: Partial<CreateTableOptions>,
  ): Promise<Table> {
    let mode: string = options?.mode ?? "create";
    const existOk = options?.existOk ?? false;

    if (mode === "create" && existOk) {
      mode = "exist_ok";
    if (typeof nameOrOptions !== "string" && "name" in nameOrOptions) {
      const { name, data, ...options } = nameOrOptions;
      return this.createTable(name, data, options);
    }

    let table: ArrowTable;
    if (data instanceof ArrowTable) {
      table = data;
    } else {
      table = makeArrowTable(data);
    if (data === undefined) {
      throw new Error("data is required");
    }
    const buf = await fromTableToBuffer(table);
|
||||
const { buf, mode } = await Table.parseTableData(data, options);
|
||||
const innerTable = await this.inner.createTable(
|
||||
name,
|
||||
nameOrOptions,
|
||||
buf,
|
||||
mode,
|
||||
cleanseStorageOptions(options?.storageOptions),
|
||||
options?.useLegacyFormat,
|
||||
);
|
||||
return new Table(innerTable);
|
||||
|
||||
return new LocalTable(innerTable);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new empty Table
|
||||
* @param {string} name - The name of the table.
|
||||
* @param {Schema} schema - The schema of the table
|
||||
*/
|
||||
async createEmptyTable(
|
||||
name: string,
|
||||
schema: Schema,
|
||||
@@ -214,22 +268,25 @@ export class Connection {
|
||||
if (mode === "create" && existOk) {
|
||||
mode = "exist_ok";
|
||||
}
|
||||
let metadata: Map<string, string> | undefined = undefined;
|
||||
if (options?.embeddingFunction !== undefined) {
|
||||
const embeddingFunction = options.embeddingFunction;
|
||||
const registry = getRegistry();
|
||||
metadata = registry.getTableMetadata([embeddingFunction]);
|
||||
}
|
||||
|
||||
const table = makeEmptyTable(schema);
|
||||
const table = makeEmptyTable(schema, metadata);
|
||||
const buf = await fromTableToBuffer(table);
|
||||
const innerTable = await this.inner.createEmptyTable(
|
||||
name,
|
||||
buf,
|
||||
mode,
|
||||
cleanseStorageOptions(options?.storageOptions),
|
||||
options?.useLegacyFormat,
|
||||
);
|
||||
return new Table(innerTable);
|
||||
return new LocalTable(innerTable);
|
||||
}
|
||||
|
||||
/**
|
||||
* Drop an existing table.
|
||||
* @param {string} name The name of the table to drop.
|
||||
*/
|
||||
async dropTable(name: string): Promise<void> {
|
||||
return this.inner.dropTable(name);
|
||||
}
|
||||
@@ -238,7 +295,7 @@ export class Connection {
|
||||
/**
|
||||
* Takes storage options and makes all the keys snake case.
|
||||
*/
|
||||
function cleanseStorageOptions(
|
||||
export function cleanseStorageOptions(
|
||||
options?: Record<string, string>,
|
||||
): Record<string, string> | undefined {
|
||||
if (options === undefined) {
|
||||
|
||||
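As a reading aid for the createTable rework above: the abstract Connection now declares two overloads, and LocalConnection re-dispatches the options-object form into the positional form. A minimal sketch of both call styles (the path, table names, rows, and the @lancedb/lancedb import are illustrative assumptions, not part of the diff):

    import { connect } from "@lancedb/lancedb";

    const db = await connect("/tmp/demo-db"); // hypothetical local path

    // Positional form: name, data, then options.
    const t1 = await db.createTable(
      "vectors",
      [{ id: 1, vector: [0.1, 0.2] }],
      { existOk: true }, // mapped to the "exist_ok" mode per the handling shown above
    );

    // Options-object form: one bag; LocalConnection destructures it and
    // recurses into the positional overload.
    const t2 = await db.createTable({
      name: "vectors2",
      data: [{ id: 1, vector: [0.1, 0.2] }],
      existOk: true,
    });

Either way, storage option keys pass through cleanseStorageOptions, which normalizes camelCase keys to the snake_case spelling expected downstream.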
nodejs/lancedb/embedding/embedding_function.ts
@@ -1,4 +1,4 @@
-// Copyright 2023 Lance Developers.
+// Copyright 2024 Lance Developers.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -12,67 +12,172 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-import { type Float } from "apache-arrow";
+import "reflect-metadata";
+import {
+  DataType,
+  Field,
+  FixedSizeList,
+  Float,
+  Float32,
+  type IntoVector,
+  isDataType,
+  isFixedSizeList,
+  isFloat,
+  newVectorType,
+} from "../arrow";
+import { sanitizeType } from "../sanitize";
+
+/**
+ * Options for a given embedding function
+ */
+export interface FunctionOptions {
+  // biome-ignore lint/suspicious/noExplicitAny: options can be anything
+  [key: string]: any;
+}

 /**
  * An embedding function that automatically creates vector representation for a given column.
  */
-export interface EmbeddingFunction<T> {
+export abstract class EmbeddingFunction<
+  // biome-ignore lint/suspicious/noExplicitAny: we don't know what the implementor will do
+  T = any,
+  M extends FunctionOptions = FunctionOptions,
+> {
   /**
-   * The name of the column that will be used as input for the Embedding Function.
+   * Convert the embedding function to a JSON object
+   * It is used to serialize the embedding function to the schema
+   * It's important that any object returned by this method contains all the necessary
+   * information to recreate the embedding function
+   *
+   * It should return the same object that was passed to the constructor
+   * If it does not, the embedding function will not be able to be recreated, or could be recreated incorrectly
+   *
+   * @example
+   * ```ts
+   * class MyEmbeddingFunction extends EmbeddingFunction {
+   *   constructor(options: {model: string, timeout: number}) {
+   *     super();
+   *     this.model = options.model;
+   *     this.timeout = options.timeout;
+   *   }
+   *   toJSON() {
+   *     return {
+   *       model: this.model,
+   *       timeout: this.timeout,
+   *     };
+   *   }
+   * ```
    */
-  sourceColumn: string;
+  abstract toJSON(): Partial<M>;

   /**
-   * The data type of the embedding
+   * sourceField is used in combination with `LanceSchema` to provide a declarative data model
    *
-   * The embedding function should return `number`. This will be converted into
-   * an Arrow float array. By default this will be Float32 but this property can
-   * be used to control the conversion.
+   * @param optionsOrDatatype - The options for the field or the datatype
+   *
+   * @see {@link lancedb.LanceSchema}
    */
-  embeddingDataType?: Float;
+  sourceField(
+    optionsOrDatatype: Partial<FieldOptions> | DataType,
+  ): [DataType, Map<string, EmbeddingFunction>] {
+    let datatype = isDataType(optionsOrDatatype)
+      ? optionsOrDatatype
+      : optionsOrDatatype?.datatype;
+    if (!datatype) {
+      throw new Error("Datatype is required");
+    }
+    datatype = sanitizeType(datatype);
+    const metadata = new Map<string, EmbeddingFunction>();
+    metadata.set("source_column_for", this);
+
+    return [datatype, metadata];
+  }

   /**
-   * The dimension of the embedding
+   * vectorField is used in combination with `LanceSchema` to provide a declarative data model
    *
-   * This is optional, normally this can be determined by looking at the results of
-   * `embed`. If this is not specified, and there is an attempt to apply the embedding
-   * to an empty table, then that process will fail.
+   * @param options - The options for the field
+   *
+   * @see {@link lancedb.LanceSchema}
    */
-  embeddingDimension?: number;
-
-  /**
-   * The name of the column that will contain the embedding
-   *
-   * By default this is "vector"
-   */
-  destColumn?: string;
-
-  /**
-   * Should the source column be excluded from the resulting table
-   *
-   * By default the source column is included. Set this to true and
-   * only the embedding will be stored.
-   */
-  excludeSource?: boolean;
+  vectorField(
+    optionsOrDatatype?: Partial<FieldOptions> | DataType,
+  ): [DataType, Map<string, EmbeddingFunction>] {
+    let dtype: DataType | undefined;
+    let vectorType: DataType;
+    let dims: number | undefined = this.ndims();
+
+    // `func.vectorField(new Float32())`
+    if (isDataType(optionsOrDatatype)) {
+      dtype = optionsOrDatatype;
+    } else {
+      // `func.vectorField({
+      //   datatype: new Float32(),
+      //   dims: 10
+      // })`
+      dims = dims ?? optionsOrDatatype?.dims;
+      dtype = optionsOrDatatype?.datatype;
+    }
+
+    if (dtype !== undefined) {
+      // `func.vectorField(new FixedSizeList(dims, new Field("item", new Float32(), true)))`
+      // or `func.vectorField({datatype: new FixedSizeList(dims, new Field("item", new Float32(), true))})`
+      if (isFixedSizeList(dtype)) {
+        vectorType = dtype;
+        // `func.vectorField(new Float32())`
+        // or `func.vectorField({datatype: new Float32()})`
+      } else if (isFloat(dtype)) {
+        // No `ndims` impl and no `{dims: n}` provided;
+        if (dims === undefined) {
+          throw new Error("ndims is required for vector field");
+        }
+        vectorType = newVectorType(dims, dtype);
+      } else {
+        throw new Error(
+          "Expected FixedSizeList or Float as datatype for vector field",
+        );
+      }
+    } else {
+      if (dims === undefined) {
+        throw new Error("ndims is required for vector field");
+      }
+      vectorType = new FixedSizeList(
+        dims,
+        new Field("item", new Float32(), true),
+      );
+    }
+    const metadata = new Map<string, EmbeddingFunction>();
+    metadata.set("vector_column_for", this);
+
+    return [vectorType, metadata];
+  }
+
+  /** The number of dimensions of the embeddings */
+  ndims(): number | undefined {
+    return undefined;
+  }
+
+  /** The datatype of the embeddings */
+  abstract embeddingDataType(): Float;

   /**
    * Creates a vector representation for the given values.
    */
-  embed: (data: T[]) => Promise<number[][]>;
+  abstract computeSourceEmbeddings(
+    data: T[],
+  ): Promise<number[][] | Float32Array[] | Float64Array[]>;
+
+  /**
+   Compute the embeddings for a single query
+   */
+  async computeQueryEmbeddings(data: T): Promise<IntoVector> {
+    return this.computeSourceEmbeddings([data]).then(
+      (embeddings) => embeddings[0],
+    );
+  }
 }

-/** Test if the input seems to be an embedding function */
-export function isEmbeddingFunction<T>(
-  value: unknown,
-): value is EmbeddingFunction<T> {
-  if (typeof value !== "object" || value === null) {
-    return false;
-  }
-  if (!("sourceColumn" in value) || !("embed" in value)) {
-    return false;
-  }
-  return (
-    typeof value.sourceColumn === "string" && typeof value.embed === "function"
-  );
-}
+export interface FieldOptions<T extends DataType = DataType> {
+  datatype: T;
+  dims?: number;
+}
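To make the new abstract contract concrete, here is a minimal sketch of a subclass implementing the abstract members above (ToyEmbedding and its two-dimensional "model" are illustrative, and the import paths are assumptions, not part of the diff):

    // Import paths are assumptions; EmbeddingFunction comes from the module above.
    import { EmbeddingFunction } from "./embedding_function";
    import { Float32, type Float } from "apache-arrow";

    class ToyEmbedding extends EmbeddingFunction<string> {
      // toJSON must carry everything needed to reconstruct the function;
      // this toy has no configuration to persist.
      toJSON() {
        return {};
      }

      // Known up front, so vectorField() can build a FixedSizeList even
      // when creating an empty table.
      ndims(): number {
        return 2;
      }

      embeddingDataType(): Float {
        return new Float32();
      }

      // Toy "model": embed each string as [length, 1].
      async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
        return data.map((s) => [s.length, 1]);
      }
    }

computeQueryEmbeddings is inherited: the base class implementation shown above embeds a single query value by delegating to computeSourceEmbeddings([data]).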
nodejs/lancedb/embedding/index.ts
@@ -1,2 +1,113 @@
-export { EmbeddingFunction, isEmbeddingFunction } from "./embedding_function";
-export { OpenAIEmbeddingFunction } from "./openai";
+// Copyright 2023 Lance Developers.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import { DataType, Field, Schema } from "../arrow";
+import { isDataType } from "../arrow";
+import { sanitizeType } from "../sanitize";
+import { EmbeddingFunction } from "./embedding_function";
+import { EmbeddingFunctionConfig, getRegistry } from "./registry";
+
+export { EmbeddingFunction } from "./embedding_function";
+
+// We need to explicitly export '*' so that the `register` decorator actually registers the class.
+export * from "./openai";
+export * from "./registry";
+
+/**
+ * Create a schema with embedding functions.
+ *
+ * @param fields
+ * @returns Schema
+ * @example
+ * ```ts
+ * class MyEmbeddingFunction extends EmbeddingFunction {
+ *   // ...
+ * }
+ * const func = new MyEmbeddingFunction();
+ * const schema = LanceSchema({
+ *   id: new Int32(),
+ *   text: func.sourceField(new Utf8()),
+ *   vector: func.vectorField(),
+ *   // optional: specify the datatype and/or dimensions
+ *   vector2: func.vectorField({ datatype: new Float32(), dims: 3}),
+ * });
+ *
+ * const table = await db.createTable("my_table", data, { schema });
+ * ```
+ */
+export function LanceSchema(
+  fields: Record<string, [object, Map<string, EmbeddingFunction>] | object>,
+): Schema {
+  const arrowFields: Field[] = [];
+
+  const embeddingFunctions = new Map<
+    EmbeddingFunction,
+    Partial<EmbeddingFunctionConfig>
+  >();
+  Object.entries(fields).forEach(([key, value]) => {
+    if (isDataType(value)) {
+      arrowFields.push(new Field(key, sanitizeType(value), true));
+    } else {
+      const [dtype, metadata] = value as [
+        object,
+        Map<string, EmbeddingFunction>,
+      ];
+      arrowFields.push(new Field(key, sanitizeType(dtype), true));
+      parseEmbeddingFunctions(embeddingFunctions, key, metadata);
+    }
+  });
+  const registry = getRegistry();
+  const metadata = registry.getTableMetadata(
+    Array.from(embeddingFunctions.values()) as EmbeddingFunctionConfig[],
+  );
+  const schema = new Schema(arrowFields, metadata);
+  return schema;
+}
+
+function parseEmbeddingFunctions(
+  embeddingFunctions: Map<EmbeddingFunction, Partial<EmbeddingFunctionConfig>>,
+  key: string,
+  metadata: Map<string, EmbeddingFunction>,
+): void {
+  if (metadata.has("source_column_for")) {
+    const embedFunction = metadata.get("source_column_for")!;
+    const current = embeddingFunctions.get(embedFunction);
+    if (current !== undefined) {
+      embeddingFunctions.set(embedFunction, {
+        ...current,
+        sourceColumn: key,
+      });
+    } else {
+      embeddingFunctions.set(embedFunction, {
+        sourceColumn: key,
+        function: embedFunction,
+      });
+    }
+  } else if (metadata.has("vector_column_for")) {
+    const embedFunction = metadata.get("vector_column_for")!;
+
+    const current = embeddingFunctions.get(embedFunction);
+    if (current !== undefined) {
+      embeddingFunctions.set(embedFunction, {
+        ...current,
+        vectorColumn: key,
+      });
+    } else {
+      embeddingFunctions.set(embedFunction, {
+        vectorColumn: key,
+        function: embedFunction,
+      });
+    }
+  }
+}
nodejs/lancedb/embedding/openai.ts
@@ -12,18 +12,32 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-import { type EmbeddingFunction } from "./embedding_function";
+import type OpenAI from "openai";
+import { Float, Float32 } from "../arrow";
+import { EmbeddingFunction } from "./embedding_function";
+import { register } from "./registry";

-export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
-  private readonly _openai: OpenAI;
-  private readonly _modelName: string;
+export type OpenAIOptions = {
+  apiKey?: string;
+  model?: string;
+};

-  constructor(
-    sourceColumn: string,
-    openAIKey: string,
-    modelName: string = "text-embedding-ada-002",
-  ) {
+@register("openai")
+export class OpenAIEmbeddingFunction extends EmbeddingFunction<
+  string,
+  OpenAIOptions
+> {
+  #openai: OpenAI;
+  #modelName: string;
+
+  constructor(options: OpenAIOptions = { model: "text-embedding-ada-002" }) {
+    super();
+    const openAIKey = options?.apiKey ?? process.env.OPENAI_API_KEY;
+    if (!openAIKey) {
+      throw new Error("OpenAI API key is required");
+    }
+    const modelName = options?.model ?? "text-embedding-ada-002";
     /**
      * @type {import("openai").default}
      */
@@ -36,18 +50,40 @@ export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
     throw new Error("please install openai@^4.24.1 using npm install openai");
   }

-    this.sourceColumn = sourceColumn;
     const configuration = {
       apiKey: openAIKey,
     };

-    this._openai = new Openai(configuration);
-    this._modelName = modelName;
+    this.#openai = new Openai(configuration);
+    this.#modelName = modelName;
   }

-  async embed(data: string[]): Promise<number[][]> {
-    const response = await this._openai.embeddings.create({
-      model: this._modelName,
+  toJSON() {
+    return {
+      model: this.#modelName,
+    };
+  }
+
+  ndims(): number {
+    switch (this.#modelName) {
+      case "text-embedding-ada-002":
+        return 1536;
+      case "text-embedding-3-large":
+        return 3072;
+      case "text-embedding-3-small":
+        return 1536;
+      default:
+        return null as never;
+    }
+  }
+
+  embeddingDataType(): Float {
+    return new Float32();
+  }
+
+  async computeSourceEmbeddings(data: string[]): Promise<number[][]> {
+    const response = await this.#openai.embeddings.create({
+      model: this.#modelName,
       input: data,
     });
@@ -58,5 +94,15 @@ export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
     return embeddings;
   }

-  sourceColumn: string;
+  async computeQueryEmbeddings(data: string): Promise<number[]> {
+    if (typeof data !== "string") {
+      throw new Error("Data must be a string");
+    }
+    const response = await this.#openai.embeddings.create({
+      model: this.#modelName,
+      input: data,
+    });
+
+    return response.data[0].embedding;
+  }
 }
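In call-site terms, the constructor rework above looks roughly like this (the model name is one of those listed in ndims(); the old positional form is shown only for contrast):

    // Before (0.4.x): positional arguments, with the source column baked in.
    // const func = new OpenAIEmbeddingFunction("text", myApiKey);

    // After: a single options object. The key falls back to
    // process.env.OPENAI_API_KEY, and the source column is declared
    // separately, e.g. func.sourceField(new Utf8()) inside a LanceSchema.
    const func = new OpenAIEmbeddingFunction({ model: "text-embedding-3-small" });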
nodejs/lancedb/embedding/registry.ts (new file, 176 lines)
@@ -0,0 +1,176 @@
+// Copyright 2024 Lance Developers.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import type { EmbeddingFunction } from "./embedding_function";
+import "reflect-metadata";
+
+export interface EmbeddingFunctionOptions {
+  [key: string]: unknown;
+}
+
+export interface EmbeddingFunctionFactory<
+  T extends EmbeddingFunction = EmbeddingFunction,
+> {
+  new (modelOptions?: EmbeddingFunctionOptions): T;
+}
+
+interface EmbeddingFunctionCreate<T extends EmbeddingFunction> {
+  create(options?: EmbeddingFunctionOptions): T;
+}
+
+/**
+ * This is a singleton class used to register embedding functions
+ * and fetch them by name. It also handles serializing and deserializing.
+ * You can implement your own embedding function by subclassing EmbeddingFunction
+ * or TextEmbeddingFunction and registering it with the registry
+ */
+export class EmbeddingFunctionRegistry {
+  #functions: Map<string, EmbeddingFunctionFactory> = new Map();
+
+  /**
+   * Register an embedding function
+   * @param name The name of the function
+   * @param func The function to register
+   * @throws Error if the function is already registered
+   */
+  register<T extends EmbeddingFunctionFactory = EmbeddingFunctionFactory>(
+    this: EmbeddingFunctionRegistry,
+    alias?: string,
+    // biome-ignore lint/suspicious/noExplicitAny: <explanation>
+  ): (ctor: T) => any {
+    const self = this;
+    return function (ctor: T) {
+      if (!alias) {
+        alias = ctor.name;
+      }
+      if (self.#functions.has(alias)) {
+        throw new Error(
+          `Embedding function with alias "${alias}" already exists`,
+        );
+      }
+      self.#functions.set(alias, ctor);
+      Reflect.defineMetadata("lancedb::embedding::name", alias, ctor);
+      return ctor;
+    };
+  }
+
+  /**
+   * Fetch an embedding function by name
+   * @param name The name of the function
+   */
+  get<T extends EmbeddingFunction<unknown> = EmbeddingFunction>(
+    name: string,
+  ): EmbeddingFunctionCreate<T> | undefined {
+    const factory = this.#functions.get(name);
+    if (!factory) {
+      return undefined;
+    }
+    return {
+      create: function (options: EmbeddingFunctionOptions) {
+        return new factory(options) as unknown as T;
+      },
+    };
+  }
+
+  /**
+   * reset the registry to the initial state
+   */
+  reset(this: EmbeddingFunctionRegistry) {
+    this.#functions.clear();
+  }
+
+  /**
+   * @ignore
+   */
+  parseFunctions(
+    this: EmbeddingFunctionRegistry,
+    metadata: Map<string, string>,
+  ): Map<string, EmbeddingFunctionConfig> {
+    if (!metadata.has("embedding_functions")) {
+      return new Map();
+    } else {
+      type FunctionConfig = {
+        name: string;
+        sourceColumn: string;
+        vectorColumn: string;
+        model: EmbeddingFunctionOptions;
+      };
+      const functions = <FunctionConfig[]>(
+        JSON.parse(metadata.get("embedding_functions")!)
+      );
+      return new Map(
+        functions.map((f) => {
+          const fn = this.get(f.name);
+          if (!fn) {
+            throw new Error(`Function "${f.name}" not found in registry`);
+          }
+          return [
+            f.name,
+            {
+              sourceColumn: f.sourceColumn,
+              vectorColumn: f.vectorColumn,
+              function: this.get(f.name)!.create(f.model),
+            },
+          ];
+        }),
+      );
+    }
+  }
+
+  // biome-ignore lint/suspicious/noExplicitAny: <explanation>
+  functionToMetadata(conf: EmbeddingFunctionConfig): Record<string, any> {
+    // biome-ignore lint/suspicious/noExplicitAny: <explanation>
+    const metadata: Record<string, any> = {};
+    const name = Reflect.getMetadata(
+      "lancedb::embedding::name",
+      conf.function.constructor,
+    );
+    metadata["sourceColumn"] = conf.sourceColumn;
+    metadata["vectorColumn"] = conf.vectorColumn ?? "vector";
+    metadata["name"] = name ?? conf.function.constructor.name;
+    metadata["model"] = conf.function.toJSON();
+    return metadata;
+  }
+
+  getTableMetadata(functions: EmbeddingFunctionConfig[]): Map<string, string> {
+    const metadata = new Map<string, string>();
+    const jsonData = functions.map((conf) => this.functionToMetadata(conf));
+    metadata.set("embedding_functions", JSON.stringify(jsonData));
+
+    return metadata;
+  }
+}
+
+const _REGISTRY = new EmbeddingFunctionRegistry();
+
+export function register(name?: string) {
+  return _REGISTRY.register(name);
+}
+
+/**
+ * Utility function to get the global instance of the registry
+ * @returns `EmbeddingFunctionRegistry` The global instance of the registry
+ * @example
+ * ```ts
+ * const registry = getRegistry();
+ * const openai = registry.get("openai").create();
+ * ```
+ */
+export function getRegistry(): EmbeddingFunctionRegistry {
+  return _REGISTRY;
+}
+
+export interface EmbeddingFunctionConfig {
+  sourceColumn: string;
+  vectorColumn?: string;
+  function: EmbeddingFunction;
+}
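Tying the registry to the decorator, a sketch of registering and later re-resolving a custom function (reusing the hypothetical ToyEmbedding from the embedding_function sketch above; the "toy" alias is an assumption, and the @register decorator needs the experimentalDecorators compiler option):

    import "reflect-metadata";
    // register and getRegistry from the registry module above;
    // ToyEmbedding from the earlier sketch.

    @register("toy") // records the alias in the registry and via reflect-metadata
    class ToyFunction extends ToyEmbedding {}

    // Later, e.g. when parseFunctions() deserializes table metadata,
    // the alias resolves back to a factory:
    const fn = getRegistry().get("toy")?.create();

If no alias is given, register() falls back to the constructor name, and re-registering an existing alias throws.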
nodejs/lancedb/index.ts
@@ -12,25 +12,43 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+import {
+  Connection,
+  LocalConnection,
+  cleanseStorageOptions,
+} from "./connection";
+
 import {
   ConnectionOptions,
   Connection as LanceDbConnection,
 } from "./native.js";

+import { RemoteConnection, RemoteConnectionOptions } from "./remote";
+
 export {
   WriteOptions,
   WriteMode,
   AddColumnsSql,
   ColumnAlteration,
   ConnectionOptions,
+  IndexStatistics,
+  IndexMetadata,
+  IndexConfig,
 } from "./native.js";

 export {
   makeArrowTable,
   MakeArrowTableOptions,
   Data,
   VectorColumnOptions,
 } from "./arrow";

 export {
-  connect,
   Connection,
   CreateTableOptions,
   TableNamesOptions,
 } from "./connection";
@@ -38,6 +56,87 @@ export {
   VectorQuery,
   RecordBatchIterator,
 } from "./query";

 export { Index, IndexOptions, IvfPqOptions } from "./indices";
-export { Table, AddDataOptions, IndexConfig, UpdateOptions } from "./table";
+export { Table, AddDataOptions, UpdateOptions } from "./table";
+
+export * as embedding from "./embedding";

+/**
+ * Connect to a LanceDB instance at the given URI.
+ *
+ * Accepted formats:
+ *
+ * - `/path/to/database` - local database
+ * - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage
+ * - `db://host:port` - remote database (LanceDB cloud)
+ * @param {string} uri - The uri of the database. If the database uri starts
+ * with `db://` then it connects to a remote database.
+ * @see {@link ConnectionOptions} for more details on the URI format.
+ * @example
+ * ```ts
+ * const conn = await connect("/path/to/database");
+ * ```
+ * @example
+ * ```ts
+ * const conn = await connect(
+ *   "s3://bucket/path/to/database",
+ *   {storageOptions: {timeout: "60s"}
+ * });
+ * ```
+ */
+export async function connect(
+  uri: string,
+  opts?: Partial<ConnectionOptions | RemoteConnectionOptions>,
+): Promise<Connection>;
+/**
+ * Connect to a LanceDB instance at the given URI.
+ *
+ * Accepted formats:
+ *
+ * - `/path/to/database` - local database
+ * - `s3://bucket/path/to/database` or `gs://bucket/path/to/database` - database on cloud storage
+ * - `db://host:port` - remote database (LanceDB cloud)
+ * @param options - The options to use when connecting to the database
+ * @see {@link ConnectionOptions} for more details on the URI format.
+ * @example
+ * ```ts
+ * const conn = await connect({
+ *   uri: "/path/to/database",
+ *   storageOptions: {timeout: "60s"}
+ * });
+ * ```
+ */
+export async function connect(
+  opts: Partial<RemoteConnectionOptions | ConnectionOptions> & { uri: string },
+): Promise<Connection>;
+export async function connect(
+  uriOrOptions:
+    | string
+    | (Partial<RemoteConnectionOptions | ConnectionOptions> & { uri: string }),
+  opts: Partial<ConnectionOptions | RemoteConnectionOptions> = {},
+): Promise<Connection> {
+  let uri: string | undefined;
+  if (typeof uriOrOptions !== "string") {
+    const { uri: uri_, ...options } = uriOrOptions;
+    uri = uri_;
+    opts = options;
+  } else {
+    uri = uriOrOptions;
+  }
+
+  if (!uri) {
+    throw new Error("uri is required");
+  }
+
+  if (uri?.startsWith("db://")) {
+    return new RemoteConnection(uri, opts as RemoteConnectionOptions);
+  }
+  opts = (opts as ConnectionOptions) ?? {};
+  (<ConnectionOptions>opts).storageOptions = cleanseStorageOptions(
+    (<ConnectionOptions>opts).storageOptions,
+  );
+  const nativeConn = await LanceDbConnection.new(uri, opts);
+  return new LocalConnection(nativeConn);
+}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user