Compare commits

..

2 Commits

Author SHA1 Message Date
lancedb automation
cb956d5464 chore: update lance dependency to v1.0.0-beta.1 2025-11-15 08:54:39 +00:00
Xuanwo
3f6b0e4588 ci: migrate codex to API key based
Removed Codex authentication steps and added OPENAI_API_KEY environment variable for Codex execution.
2025-11-15 14:27:04 +08:00
141 changed files with 2927 additions and 8258 deletions

View File

@@ -1,5 +1,5 @@
[tool.bumpversion] [tool.bumpversion]
current_version = "0.23.1" current_version = "0.22.3"
parse = """(?x) parse = """(?x)
(?P<major>0|[1-9]\\d*)\\. (?P<major>0|[1-9]\\d*)\\.
(?P<minor>0|[1-9]\\d*)\\. (?P<minor>0|[1-9]\\d*)\\.
@@ -72,9 +72,3 @@ search = "\nversion = \"{current_version}\""
filename = "nodejs/Cargo.toml" filename = "nodejs/Cargo.toml"
replace = "\nversion = \"{new_version}\"" replace = "\nversion = \"{new_version}\""
search = "\nversion = \"{current_version}\"" search = "\nversion = \"{current_version}\""
# Java documentation
[[tool.bumpversion.files]]
filename = "docs/src/java/java.md"
replace = "<version>{new_version}</version>"
search = "<version>{current_version}</version>"

View File

@@ -19,7 +19,7 @@ rustflags = [
"-Wclippy::string_add_assign", "-Wclippy::string_add_assign",
"-Wclippy::string_add", "-Wclippy::string_add",
"-Wclippy::string_lit_as_bytes", "-Wclippy::string_lit_as_bytes",
"-Wclippy::implicit_clone", "-Wclippy::string_to_string",
"-Wclippy::use_self", "-Wclippy::use_self",
"-Dclippy::cargo", "-Dclippy::cargo",
"-Dclippy::dbg_macro", "-Dclippy::dbg_macro",

View File

@@ -18,6 +18,6 @@ body:
label: Link label: Link
description: > description: >
Provide a link to the existing documentation, if applicable. Provide a link to the existing documentation, if applicable.
placeholder: ex. https://lancedb.com/docs/tables/... placeholder: ex. https://lancedb.github.io/lancedb/guides/tables/...
validations: validations:
required: false required: false

View File

@@ -31,7 +31,6 @@ runs:
with: with:
command: build command: build
working-directory: python working-directory: python
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'"
target: x86_64-unknown-linux-gnu target: x86_64-unknown-linux-gnu
manylinux: ${{ inputs.manylinux }} manylinux: ${{ inputs.manylinux }}
args: ${{ inputs.args }} args: ${{ inputs.args }}
@@ -46,7 +45,7 @@ runs:
with: with:
command: build command: build
working-directory: python working-directory: python
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'" docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
target: aarch64-unknown-linux-gnu target: aarch64-unknown-linux-gnu
manylinux: ${{ inputs.manylinux }} manylinux: ${{ inputs.manylinux }}
args: ${{ inputs.args }} args: ${{ inputs.args }}

View File

@@ -22,5 +22,5 @@ runs:
command: build command: build
# TODO: pass through interpreter # TODO: pass through interpreter
args: ${{ inputs.args }} args: ${{ inputs.args }}
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'" docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
working-directory: python working-directory: python

View File

@@ -26,7 +26,7 @@ runs:
with: with:
command: build command: build
args: ${{ inputs.args }} args: ${{ inputs.args }}
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'" docker-options: "-e PIP_EXTRA_INDEX_URL=https://pypi.fury.io/lancedb/"
working-directory: python working-directory: python
- uses: actions/upload-artifact@v4 - uses: actions/upload-artifact@v4
with: with:

View File

@@ -98,30 +98,3 @@ jobs:
printenv OPENAI_API_KEY | codex login --with-api-key printenv OPENAI_API_KEY | codex login --with-api-key
codex --config shell_environment_policy.ignore_default_excludes=true exec --dangerously-bypass-approvals-and-sandbox "$(cat /tmp/codex-prompt.txt)" codex --config shell_environment_policy.ignore_default_excludes=true exec --dangerously-bypass-approvals-and-sandbox "$(cat /tmp/codex-prompt.txt)"
- name: Trigger sophon dependency update
env:
TAG: ${{ inputs.tag }}
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
set -euo pipefail
VERSION="${TAG#refs/tags/}"
VERSION="${VERSION#v}"
LANCEDB_BRANCH="codex/update-lance-${VERSION//[^a-zA-Z0-9]/-}"
echo "Triggering sophon workflow with:"
echo " lance_ref: ${TAG#refs/tags/}"
echo " lancedb_ref: ${LANCEDB_BRANCH}"
gh workflow run codex-bump-lancedb-lance.yml \
--repo lancedb/sophon \
-f lance_ref="${TAG#refs/tags/}" \
-f lancedb_ref="${LANCEDB_BRANCH}"
- name: Show latest sophon workflow run
env:
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
set -euo pipefail
echo "Latest sophon workflow run:"
gh run list --repo lancedb/sophon --workflow codex-bump-lancedb-lance.yml --limit 1 --json databaseId,url,displayTitle

View File

@@ -24,7 +24,6 @@ env:
# according to: https://matklad.github.io/2021/09/04/fast-rust-builds.html # according to: https://matklad.github.io/2021/09/04/fast-rust-builds.html
# CI builds are faster with incremental disabled. # CI builds are faster with incremental disabled.
CARGO_INCREMENTAL: "0" CARGO_INCREMENTAL: "0"
PIP_EXTRA_INDEX_URL: "https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/"
jobs: jobs:
# Single deploy job since we're just deploying # Single deploy job since we're just deploying
@@ -50,8 +49,8 @@ jobs:
- name: Build Python - name: Build Python
working-directory: python working-directory: python
run: | run: |
python -m pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -e . python -m pip install --extra-index-url https://pypi.fury.io/lancedb/ -e .
python -m pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -r ../docs/requirements.txt python -m pip install --extra-index-url https://pypi.fury.io/lancedb/ -r ../docs/requirements.txt
- name: Set up node - name: Set up node
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:

View File

@@ -1,35 +1,76 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Build and publish Java packages name: Build and publish Java packages
on: on:
push: release:
tags: types: [released]
- "v*"
pull_request: pull_request:
paths: paths:
- .github/workflows/java-publish.yml - .github/workflows/java-publish.yml
jobs: jobs:
publish: macos-arm64:
name: Build and Publish name: Build on MacOS Arm64
runs-on: ubuntu-24.04 runs-on: macos-14
timeout-minutes: 45
defaults:
run:
working-directory: ./java/core/lancedb-jni
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: |
brew install protobuf
- name: Build release
run: |
cargo build --release
- uses: actions/upload-artifact@v4
with:
name: liblancedb_jni_darwin_aarch64.zip
path: target/release/liblancedb_jni.dylib
retention-days: 1
if-no-files-found: error
linux-arm64:
name: Build on Linux Arm64
runs-on: warp-ubuntu-2204-arm64-8x
timeout-minutes: 45
defaults:
run:
working-directory: ./java/core/lancedb-jni
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: Swatinem/rust-cache@v2
- uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache-workspaces: "./java/core/lancedb-jni"
# Disable full debug symbol generation to speed up CI build and keep memory down
# "1" means line tables only, which is useful for panic tracebacks.
rustflags: "-C debuginfo=1"
- name: Install dependencies
run: |
sudo apt -y -qq update
sudo apt install -y protobuf-compiler libssl-dev pkg-config
- name: Build release
run: |
cargo build --release
- uses: actions/upload-artifact@v4
with:
name: liblancedb_jni_linux_aarch64.zip
path: target/release/liblancedb_jni.so
retention-days: 1
if-no-files-found: error
linux-x86:
runs-on: warp-ubuntu-2204-x64-8x
timeout-minutes: 30 timeout-minutes: 30
needs: [macos-arm64, linux-arm64]
defaults: defaults:
run: run:
working-directory: ./java working-directory: ./java
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
- uses: Swatinem/rust-cache@v2
- name: Set up Java 8 - name: Set up Java 8
uses: actions/setup-java@v4 uses: actions/setup-java@v4
with: with:
@@ -41,30 +82,40 @@ jobs:
server-password: SONATYPE_TOKEN server-password: SONATYPE_TOKEN
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }} gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Set git config - name: Install dependencies
run: | run: |
git config --global user.email "dev+gha@lancedb.com" sudo apt -y -qq update
git config --global user.name "LanceDB Github Runner" sudo apt install -y protobuf-compiler libssl-dev pkg-config
- name: Download artifact
uses: actions/download-artifact@v4
- name: Copy native libs
run: |
mkdir -p ./core/target/classes/nativelib/darwin-aarch64 ./core/target/classes/nativelib/linux-aarch64
cp ../liblancedb_jni_darwin_aarch64.zip/liblancedb_jni.dylib ./core/target/classes/nativelib/darwin-aarch64/liblancedb_jni.dylib
cp ../liblancedb_jni_linux_aarch64.zip/liblancedb_jni.so ./core/target/classes/nativelib/linux-aarch64/liblancedb_jni.so
- name: Dry run - name: Dry run
if: github.event_name == 'pull_request' if: github.event_name == 'pull_request'
run: | run: |
./mvnw --batch-mode -DskipTests package -pl lancedb-core -am mvn --batch-mode -DskipTests -Drust.release.build=true package
- name: Publish - name: Set github
if: startsWith(github.ref, 'refs/tags/v') run: |
git config --global user.email "LanceDB Github Runner"
git config --global user.name "dev+gha@lancedb.com"
- name: Publish with Java 8
if: github.event_name == 'release'
run: | run: |
echo "use-agent" >> ~/.gnupg/gpg.conf echo "use-agent" >> ~/.gnupg/gpg.conf
echo "pinentry-mode loopback" >> ~/.gnupg/gpg.conf echo "pinentry-mode loopback" >> ~/.gnupg/gpg.conf
export GPG_TTY=$(tty) export GPG_TTY=$(tty)
./mvnw --batch-mode -DskipTests -DpushChanges=false -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} deploy -pl lancedb-core -am -P deploy-to-ossrh mvn --batch-mode -DskipTests -Drust.release.build=true -DpushChanges=false -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} deploy -P deploy-to-ossrh
env: env:
SONATYPE_USER: ${{ secrets.SONATYPE_USER }} SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
SONATYPE_TOKEN: ${{ secrets.SONATYPE_TOKEN }} SONATYPE_TOKEN: ${{ secrets.SONATYPE_TOKEN }}
report-failure: report-failure:
name: Report Workflow Failure name: Report Workflow Failure
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [publish] needs: [linux-arm64, linux-x86, macos-arm64]
if: always() && failure() && startsWith(github.ref, 'refs/tags/v') if: always() && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
permissions: permissions:
contents: read contents: read
issues: write issues: write

View File

@@ -1,46 +1,118 @@
# Licensed under the Apache License, Version 2.0 (the "License"); name: Build and Run Java JNI Tests
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Build Java LanceDB Core
on: on:
push: push:
branches: branches:
- main - main
paths: paths:
- java/** - java/**
- .github/workflows/java.yml
pull_request: pull_request:
paths: paths:
- java/** - java/**
- rust/**
- .github/workflows/java.yml - .github/workflows/java.yml
env:
# This env var is used by Swatinem/rust-cache@v2 for the cache
# key, so we set it to make sure it is always consistent.
CARGO_TERM_COLOR: always
# Disable full debug symbol generation to speed up CI build and keep memory down
# "1" means line tables only, which is useful for panic tracebacks.
RUSTFLAGS: "-C debuginfo=1"
RUST_BACKTRACE: "1"
# according to: https://matklad.github.io/2021/09/04/fast-rust-builds.html
# CI builds are faster with incremental disabled.
CARGO_INCREMENTAL: "0"
CARGO_BUILD_JOBS: "1"
jobs: jobs:
build-java: linux-build-java-11:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
name: Build name: ubuntu-22.04 + Java 11
defaults: defaults:
run: run:
working-directory: ./java working-directory: ./java
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Set up Java 17 - uses: Swatinem/rust-cache@v2
with:
workspaces: java/core/lancedb-jni
- uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: rustfmt
- name: Run cargo fmt
run: cargo fmt --check
working-directory: ./java/core/lancedb-jni
- name: Install dependencies
run: |
sudo apt update
sudo apt install -y protobuf-compiler libssl-dev
- name: Install Java 11
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: 11
cache: "maven"
- name: Java Style Check
run: mvn checkstyle:check
# Disable because of issues in lancedb rust core code
# - name: Rust Clippy
# working-directory: java/core/lancedb-jni
# run: cargo clippy --all-targets -- -D warnings
- name: Running tests with Java 11
run: mvn clean test
linux-build-java-17:
runs-on: ubuntu-22.04
name: ubuntu-22.04 + Java 17
defaults:
run:
working-directory: ./java
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: Swatinem/rust-cache@v2
with:
workspaces: java/core/lancedb-jni
- uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: rustfmt
- name: Run cargo fmt
run: cargo fmt --check
working-directory: ./java/core/lancedb-jni
- name: Install dependencies
run: |
sudo apt update
sudo apt install -y protobuf-compiler libssl-dev
- name: Install Java 17
uses: actions/setup-java@v4 uses: actions/setup-java@v4
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
cache: "maven" cache: "maven"
- run: echo "JAVA_17=$JAVA_HOME" >> $GITHUB_ENV
- name: Java Style Check - name: Java Style Check
run: ./mvnw checkstyle:check run: mvn checkstyle:check
- name: Build and install # Disable because of issues in lancedb rust core code
run: ./mvnw clean install # - name: Rust Clippy
# working-directory: java/core/lancedb-jni
# run: cargo clippy --all-targets -- -D warnings
- name: Running tests with Java 17
run: |
export JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS \
-XX:+IgnoreUnrecognizedVMOptions \
--add-opens=java.base/java.lang=ALL-UNNAMED \
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED \
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED \
--add-opens=java.base/java.io=ALL-UNNAMED \
--add-opens=java.base/java.net=ALL-UNNAMED \
--add-opens=java.base/java.nio=ALL-UNNAMED \
--add-opens=java.base/java.util=ALL-UNNAMED \
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED \
--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED \
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
--add-opens=java.base/sun.nio.cs=ALL-UNNAMED \
--add-opens=java.base/sun.security.action=ALL-UNNAMED \
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED \
--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
-Djdk.reflect.useDirectMethodHandle=false \
-Dio.netty.tryReflectionSetAccessible=true"
JAVA_HOME=$JAVA_17 mvn clean test

View File

@@ -1,62 +0,0 @@
name: Lance Release Timer
on:
schedule:
- cron: "*/10 * * * *"
workflow_dispatch:
permissions:
contents: read
actions: write
concurrency:
group: lance-release-timer
cancel-in-progress: false
jobs:
trigger-update:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check for new Lance tag
id: check
env:
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
python3 ci/check_lance_release.py --github-output "$GITHUB_OUTPUT"
- name: Look for existing PR
if: steps.check.outputs.needs_update == 'true'
id: pr
env:
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
set -euo pipefail
TITLE="chore: update lance dependency to v${{ steps.check.outputs.latest_version }}"
COUNT=$(gh pr list --search "\"$TITLE\" in:title" --state open --limit 1 --json number --jq 'length')
if [ "$COUNT" -gt 0 ]; then
echo "Open PR already exists for $TITLE"
echo "pr_exists=true" >> "$GITHUB_OUTPUT"
else
echo "No existing PR for $TITLE"
echo "pr_exists=false" >> "$GITHUB_OUTPUT"
fi
- name: Trigger codex update workflow
if: steps.check.outputs.needs_update == 'true' && steps.pr.outputs.pr_exists != 'true'
env:
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
set -euo pipefail
TAG=${{ steps.check.outputs.latest_tag }}
gh workflow run codex-update-lance-dependency.yml -f tag=refs/tags/$TAG
- name: Show latest codex workflow run
if: steps.check.outputs.needs_update == 'true' && steps.pr.outputs.pr_exists != 'true'
env:
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
run: |
set -euo pipefail
gh run list --workflow codex-update-lance-dependency.yml --limit 1 --json databaseId,url,displayTitle

View File

@@ -16,6 +16,9 @@ concurrency:
cancel-in-progress: true cancel-in-progress: true
env: env:
# Disable full debug symbol generation to speed up CI build and keep memory down
# "1" means line tables only, which is useful for panic tracebacks.
RUSTFLAGS: "-C debuginfo=1"
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
jobs: jobs:
@@ -40,20 +43,18 @@ jobs:
node-version: 20 node-version: 20
cache: 'npm' cache: 'npm'
cache-dependency-path: nodejs/package-lock.json cache-dependency-path: nodejs/package-lock.json
- uses: actions-rust-lang/setup-rust-toolchain@v1 - uses: Swatinem/rust-cache@v2
with:
components: rustfmt, clippy
- name: Install dependencies - name: Install dependencies
run: | run: |
sudo apt update sudo apt update
sudo apt install -y protobuf-compiler libssl-dev sudo apt install -y protobuf-compiler libssl-dev
- uses: Swatinem/rust-cache@v2 - uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Format Rust with:
run: cargo fmt --all -- --check components: rustfmt, clippy
- name: Lint Rust - name: Lint
run: cargo clippy --profile ci --all --all-features -- -D warnings
- name: Lint Typescript
run: | run: |
cargo fmt --all -- --check
cargo clippy --all --all-features -- -D warnings
npm ci npm ci
npm run lint-ci npm run lint-ci
- name: Lint examples - name: Lint examples
@@ -88,9 +89,8 @@ jobs:
npm install -g @napi-rs/cli npm install -g @napi-rs/cli
- name: Build - name: Build
run: | run: |
npm ci --include=optional npm ci
npm run build:debug -- --profile ci npm run build
npm run tsc
- name: Setup localstack - name: Setup localstack
working-directory: . working-directory: .
run: docker compose up --detach --wait run: docker compose up --detach --wait
@@ -146,9 +146,8 @@ jobs:
npm install -g @napi-rs/cli npm install -g @napi-rs/cli
- name: Build - name: Build
run: | run: |
npm ci --include=optional npm ci
npm run build:debug -- --profile ci npm run build
npm run tsc
- name: Test - name: Test
run: | run: |
npm run test npm run test

View File

@@ -97,6 +97,12 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
settings: settings:
- target: x86_64-apple-darwin
host: macos-latest
features: ","
pre_build: |-
brew install protobuf
rustup target add x86_64-apple-darwin
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
host: macos-latest host: macos-latest
features: fp16kernels features: fp16kernels

View File

@@ -10,9 +10,6 @@ on:
- .github/workflows/pypi-publish.yml - .github/workflows/pypi-publish.yml
- Cargo.toml # Change in dependency frequently breaks builds - Cargo.toml # Change in dependency frequently breaks builds
env:
PIP_EXTRA_INDEX_URL: "https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/"
jobs: jobs:
linux: linux:
name: Python ${{ matrix.config.platform }} manylinux${{ matrix.config.manylinux }} name: Python ${{ matrix.config.platform }} manylinux${{ matrix.config.manylinux }}
@@ -64,6 +61,8 @@ jobs:
strategy: strategy:
matrix: matrix:
config: config:
- target: x86_64-apple-darwin
runner: macos-13
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
runner: warp-macos-14-arm64-6x runner: warp-macos-14-arm64-6x
env: env:

View File

@@ -18,8 +18,6 @@ env:
# Color output for pytest is off by default. # Color output for pytest is off by default.
PYTEST_ADDOPTS: "--color=yes" PYTEST_ADDOPTS: "--color=yes"
FORCE_COLOR: "1" FORCE_COLOR: "1"
PIP_EXTRA_INDEX_URL: "https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/"
RUST_BACKTRACE: "1"
jobs: jobs:
lint: lint:
@@ -49,8 +47,8 @@ jobs:
type-check: type-check:
name: "Type Check" name: "Type Check"
timeout-minutes: 60 timeout-minutes: 30
runs-on: ubuntu-2404-8x-x64 runs-on: "ubuntu-22.04"
defaults: defaults:
run: run:
shell: bash shell: bash
@@ -78,8 +76,8 @@ jobs:
doctest: doctest:
name: "Doctest" name: "Doctest"
timeout-minutes: 60 timeout-minutes: 30
runs-on: ubuntu-2404-8x-x64 runs-on: "ubuntu-24.04"
defaults: defaults:
run: run:
shell: bash shell: bash
@@ -98,9 +96,12 @@ jobs:
run: | run: |
sudo apt update sudo apt update
sudo apt install -y protobuf-compiler sudo apt install -y protobuf-compiler
- uses: Swatinem/rust-cache@v2
with:
workspaces: python
- name: Install - name: Install
run: | run: |
pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests,dev,embeddings] pip install --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests,dev,embeddings]
pip install tantivy pip install tantivy
pip install mlx pip install mlx
- name: Doctest - name: Doctest
@@ -129,9 +130,10 @@ jobs:
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: 3.${{ matrix.python-minor-version }} python-version: 3.${{ matrix.python-minor-version }}
- uses: ./.github/workflows/build_linux_wheel - uses: Swatinem/rust-cache@v2
with: with:
args: --profile ci workspaces: python
- uses: ./.github/workflows/build_linux_wheel
- uses: ./.github/workflows/run_tests - uses: ./.github/workflows/run_tests
with: with:
integration: true integration: true
@@ -143,9 +145,16 @@ jobs:
- name: Delete wheels - name: Delete wheels
run: rm -rf target/wheels run: rm -rf target/wheels
platform: platform:
name: "Mac" name: "Mac: ${{ matrix.config.name }}"
timeout-minutes: 30 timeout-minutes: 30
runs-on: macos-14 strategy:
matrix:
config:
- name: x86
runner: macos-13
- name: Arm
runner: macos-14
runs-on: "${{ matrix.config.runner }}"
defaults: defaults:
run: run:
shell: bash shell: bash
@@ -159,9 +168,10 @@ jobs:
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: "3.12" python-version: "3.12"
- uses: ./.github/workflows/build_mac_wheel - uses: Swatinem/rust-cache@v2
with: with:
args: --profile ci workspaces: python
- uses: ./.github/workflows/build_mac_wheel
- uses: ./.github/workflows/run_tests - uses: ./.github/workflows/run_tests
# Make sure wheels are not included in the Rust cache # Make sure wheels are not included in the Rust cache
- name: Delete wheels - name: Delete wheels
@@ -188,9 +198,10 @@ jobs:
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: "3.12" python-version: "3.12"
- uses: ./.github/workflows/build_windows_wheel - uses: Swatinem/rust-cache@v2
with: with:
args: --profile ci workspaces: python
- uses: ./.github/workflows/build_windows_wheel
- uses: ./.github/workflows/run_tests - uses: ./.github/workflows/run_tests
# Make sure wheels are not included in the Rust cache # Make sure wheels are not included in the Rust cache
- name: Delete wheels - name: Delete wheels
@@ -219,7 +230,7 @@ jobs:
run: | run: |
pip install "pydantic<2" pip install "pydantic<2"
pip install pyarrow==16 pip install pyarrow==16
pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests] pip install --extra-index-url https://pypi.fury.io/lancedb/ -e .[tests]
pip install tantivy pip install tantivy
- name: Run tests - name: Run tests
run: pytest -m "not slow and not s3_test" -x -v --durations=30 python/tests run: pytest -m "not slow and not s3_test" -x -v --durations=30 python/tests

View File

@@ -15,7 +15,7 @@ runs:
- name: Install lancedb - name: Install lancedb
shell: bash shell: bash
run: | run: |
pip3 install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ $(ls target/wheels/lancedb-*.whl)[tests,dev] pip3 install --extra-index-url https://pypi.fury.io/lancedb/ $(ls target/wheels/lancedb-*.whl)[tests,dev]
- name: Setup localstack for integration tests - name: Setup localstack for integration tests
if: ${{ inputs.integration == 'true' }} if: ${{ inputs.integration == 'true' }}
shell: bash shell: bash

View File

@@ -18,7 +18,11 @@ env:
# This env var is used by Swatinem/rust-cache@v2 for the cache # This env var is used by Swatinem/rust-cache@v2 for the cache
# key, so we set it to make sure it is always consistent. # key, so we set it to make sure it is always consistent.
CARGO_TERM_COLOR: always CARGO_TERM_COLOR: always
# Disable full debug symbol generation to speed up CI build and keep memory down
# "1" means line tables only, which is useful for panic tracebacks.
RUSTFLAGS: "-C debuginfo=1"
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
CARGO_INCREMENTAL: 0
jobs: jobs:
lint: lint:
@@ -40,6 +44,8 @@ jobs:
with: with:
components: rustfmt, clippy components: rustfmt, clippy
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
with:
workspaces: rust
- name: Install dependencies - name: Install dependencies
run: | run: |
sudo apt update sudo apt update
@@ -47,7 +53,7 @@ jobs:
- name: Run format - name: Run format
run: cargo fmt --all -- --check run: cargo fmt --all -- --check
- name: Run clippy - name: Run clippy
run: cargo clippy --profile ci --workspace --tests --all-features -- -D warnings run: cargo clippy --workspace --tests --all-features -- -D warnings
build-no-lock: build-no-lock:
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
@@ -74,7 +80,7 @@ jobs:
sudo apt install -y protobuf-compiler libssl-dev sudo apt install -y protobuf-compiler libssl-dev
- name: Build all - name: Build all
run: | run: |
cargo build --profile ci --benches --all-features --tests cargo build --benches --all-features --tests
linux: linux:
timeout-minutes: 30 timeout-minutes: 30
@@ -97,8 +103,14 @@ jobs:
fetch-depth: 0 fetch-depth: 0
lfs: true lfs: true
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
with:
workspaces: rust
- name: Install dependencies - name: Install dependencies
run: sudo apt install -y protobuf-compiler libssl-dev run: |
# This shaves 2 minutes off this step in CI. This doesn't seem to be
# necessary in standard runners, but it is in the 4x runners.
sudo rm /var/lib/man-db/auto-update
sudo apt install -y protobuf-compiler libssl-dev
- uses: rui314/setup-mold@v1 - uses: rui314/setup-mold@v1
- name: Make Swap - name: Make Swap
run: | run: |
@@ -107,22 +119,22 @@ jobs:
sudo mkswap /swapfile sudo mkswap /swapfile
sudo swapon /swapfile sudo swapon /swapfile
- name: Build - name: Build
run: cargo build --profile ci --all-features --tests --locked --examples run: cargo build --all-features --tests --locked --examples
- name: Run feature tests - name: Run feature tests
run: CARGO_ARGS="--profile ci" make -C ./lancedb feature-tests run: make -C ./lancedb feature-tests
- name: Run examples - name: Run examples
run: cargo run --profile ci --example simple --locked run: cargo run --example simple --locked
- name: Run remote tests - name: Run remote tests
# Running this requires access to secrets, so skip if this is # Running this requires access to secrets, so skip if this is
# a PR from a fork. # a PR from a fork.
if: github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork if: github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork
run: CARGO_ARGS="--profile ci" make -C ./lancedb remote-tests run: make -C ./lancedb remote-tests
macos: macos:
timeout-minutes: 30 timeout-minutes: 30
strategy: strategy:
matrix: matrix:
mac-runner: ["macos-14", "macos-15"] mac-runner: ["macos-13", "macos-14"]
runs-on: "${{ matrix.mac-runner }}" runs-on: "${{ matrix.mac-runner }}"
defaults: defaults:
run: run:
@@ -136,6 +148,8 @@ jobs:
- name: CPU features - name: CPU features
run: sysctl -a | grep cpu run: sysctl -a | grep cpu
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
with:
workspaces: rust
- name: Install dependencies - name: Install dependencies
run: brew install protobuf run: brew install protobuf
- name: Run tests - name: Run tests
@@ -145,7 +159,7 @@ jobs:
ALL_FEATURES=`cargo metadata --format-version=1 --no-deps \ ALL_FEATURES=`cargo metadata --format-version=1 --no-deps \
| jq -r '.packages[] | .features | keys | .[]' \ | jq -r '.packages[] | .features | keys | .[]' \
| grep -v s3-test | sort | uniq | paste -s -d "," -` | grep -v s3-test | sort | uniq | paste -s -d "," -`
cargo test --profile ci --features $ALL_FEATURES --locked cargo test --features $ALL_FEATURES --locked
windows: windows:
runs-on: windows-2022 runs-on: windows-2022
@@ -159,21 +173,22 @@ jobs:
working-directory: rust/lancedb working-directory: rust/lancedb
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Set target
run: rustup target add ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
with:
workspaces: rust
- name: Install Protoc v21.12 - name: Install Protoc v21.12
run: choco install --no-progress protoc run: choco install --no-progress protoc
- name: Build - name: Build
run: | run: |
rustup target add ${{ matrix.target }}
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT $env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
cargo build --profile ci --features remote --tests --locked --target ${{ matrix.target }} cargo build --features remote --tests --locked --target ${{ matrix.target }}
- name: Run tests - name: Run tests
# Can only run tests when target matches host # Can only run tests when target matches host
if: ${{ matrix.target == 'x86_64-pc-windows-msvc' }} if: ${{ matrix.target == 'x86_64-pc-windows-msvc' }}
run: | run: |
$env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT $env:VCPKG_ROOT = $env:VCPKG_INSTALLATION_ROOT
cargo test --profile ci --features remote --locked cargo test --features remote --locked
msrv: msrv:
# Check the minimum supported Rust version # Check the minimum supported Rust version
@@ -198,7 +213,6 @@ jobs:
uses: dtolnay/rust-toolchain@master uses: dtolnay/rust-toolchain@master
with: with:
toolchain: ${{ matrix.msrv }} toolchain: ${{ matrix.msrv }}
- uses: Swatinem/rust-cache@v2
- name: Downgrade dependencies - name: Downgrade dependencies
# These packages have newer requirements for MSRV # These packages have newer requirements for MSRV
run: | run: |
@@ -212,4 +226,4 @@ jobs:
cargo update -p aws-sdk-sts --precise 1.51.0 cargo update -p aws-sdk-sts --precise 1.51.0
cargo update -p home --precise 0.5.9 cargo update -p home --precise 0.5.9
- name: cargo +${{ matrix.msrv }} check - name: cargo +${{ matrix.msrv }} check
run: cargo check --profile ci --workspace --tests --benches --all-features run: cargo check --workspace --tests --benches --all-features

1001
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
[workspace] [workspace]
members = ["rust/lancedb", "nodejs", "python"] members = ["rust/lancedb", "nodejs", "python", "java/core/lancedb-jni"]
# Python package needs to be built by maturin. # Python package needs to be built by maturin.
exclude = ["python"] exclude = ["python"]
resolver = "2" resolver = "2"
@@ -15,20 +15,20 @@ categories = ["database-implementations"]
rust-version = "1.78.0" rust-version = "1.78.0"
[workspace.dependencies] [workspace.dependencies]
lance = { "version" = "=1.0.1", default-features = false } lance = { "version" = "=1.0.0-beta.1", default-features = false, "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-core = "=1.0.1" lance-core = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-datagen = "=1.0.1" lance-datagen = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-file = "=1.0.1" lance-file = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-io = { "version" = "=1.0.1", default-features = false } lance-io = { "version" = "=1.0.0-beta.1", default-features = false, "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-index = "=1.0.1" lance-index = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-linalg = "=1.0.1" lance-linalg = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-namespace = "=1.0.1" lance-namespace = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-namespace-impls = { "version" = "=1.0.1", default-features = false } lance-namespace-impls = { "version" = "=1.0.0-beta.1", "features" = ["dir-aws", "dir-gcp", "dir-azure", "dir-oss", "rest"], "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-table = "=1.0.1" lance-table = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-testing = "=1.0.1" lance-testing = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-datafusion = "=1.0.1" lance-datafusion = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-encoding = "=1.0.1" lance-encoding = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
lance-arrow = "=1.0.1" lance-arrow = { "version" = "=1.0.0-beta.1", "tag" = "v1.0.0-beta.1", "git" = "https://github.com/lancedb/lance.git" }
ahash = "0.8" ahash = "0.8"
# Note that this one does not include pyarrow # Note that this one does not include pyarrow
arrow = { version = "56.2", optional = false } arrow = { version = "56.2", optional = false }
@@ -63,17 +63,3 @@ regex = "1.10"
lazy_static = "1" lazy_static = "1"
semver = "1.0.25" semver = "1.0.25"
chrono = "0.4" chrono = "0.4"
[profile.ci]
debug = "line-tables-only"
inherits = "dev"
incremental = false
# This rule applies to every package except workspace members (dependencies
# such as `arrow` and `tokio`). It disables debug info and related features on
# dependencies so their binaries stay smaller, improving cache reuse.
[profile.ci.package."*"]
debug = false
debug-assertions = false
strip = "debuginfo"
incremental = false

View File

@@ -15,7 +15,7 @@
# **The Multimodal AI Lakehouse** # **The Multimodal AI Lakehouse**
[**How to Install** ](#how-to-install) ✦ [**Detailed Documentation**](https://lancedb.com/docs) ✦ [**Tutorials and Recipes**](https://github.com/lancedb/vectordb-recipes/tree/main) ✦ [**Contributors**](#contributors) [**How to Install** ](#how-to-install) ✦ [**Detailed Documentation**](https://lancedb.github.io/lancedb/) ✦ [**Tutorials and Recipes**](https://github.com/lancedb/vectordb-recipes/tree/main) ✦ [**Contributors**](#contributors)
**The ultimate multimodal data platform for AI/ML applications.** **The ultimate multimodal data platform for AI/ML applications.**

View File

@@ -1,208 +0,0 @@
#!/usr/bin/env python3
"""Determine whether a newer Lance tag exists and expose results for CI."""
from __future__ import annotations
import argparse
import json
import os
import re
import subprocess
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable, List, Sequence, Tuple, Union
try: # Python >=3.11
import tomllib # type: ignore
except ModuleNotFoundError: # pragma: no cover - fallback for older Python
import tomli as tomllib # type: ignore
# GitHub repository whose tags are scanned for new Lance releases.
LANCE_REPO = "lance-format/lance"
# SemVer 2.0 pattern: captures major/minor/patch and the optional prerelease
# identifiers; an optional build-metadata suffix ("+...") is accepted but
# deliberately not captured, since build metadata has no precedence meaning.
SEMVER_RE = re.compile(
    r"^\s*(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[0-9A-Za-z.-]+))?"
    r"(?:\+[0-9A-Za-z.-]+)?\s*$"
)
@dataclass(frozen=True)
class SemVer:
    """A parsed semantic version ordered by SemVer 2.0 precedence rules.

    ``prerelease`` holds the dot-separated prerelease identifiers, numeric
    pieces already converted to ``int``; an empty tuple means a final release.
    """

    major: int
    minor: int
    patch: int
    prerelease: Tuple[Union[int, str], ...]

    def __lt__(self, other: "SemVer") -> bool:
        mine = (self.major, self.minor, self.patch)
        theirs = (other.major, other.minor, other.patch)
        if mine != theirs:
            return mine < theirs
        if self.prerelease == other.prerelease:
            return False
        # A final release outranks every prerelease of the same core version.
        if not self.prerelease:
            return False
        if not other.prerelease:
            return True
        for mine_id, their_id in zip(self.prerelease, other.prerelease):
            if mine_id == their_id:
                continue
            mine_is_num = isinstance(mine_id, int)
            their_is_num = isinstance(their_id, int)
            if mine_is_num and their_is_num:
                return mine_id < their_id
            if mine_is_num != their_is_num:
                # Numeric identifiers sort before alphanumeric ones.
                return mine_is_num
            return str(mine_id) < str(their_id)
        # All shared identifiers matched: the shorter prerelease sorts first.
        return len(self.prerelease) < len(other.prerelease)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, SemVer):
            return NotImplemented
        return (self.major, self.minor, self.patch, self.prerelease) == (
            other.major,
            other.minor,
            other.patch,
            other.prerelease,
        )
def parse_semver(raw: str) -> SemVer:
    """Parse *raw* (e.g. ``"1.0.0-beta.2"``) into a :class:`SemVer`.

    Numeric prerelease identifiers become ``int`` so they compare numerically.
    Raises ``ValueError`` when *raw* is not a valid semantic version.
    """
    match = SEMVER_RE.match(raw)
    if match is None:
        raise ValueError(f"Unsupported version format: {raw}")
    pre_text = match.group("prerelease")
    identifiers: Tuple[Union[int, str], ...] = ()
    if pre_text:
        identifiers = tuple(
            int(piece) if piece.isdigit() else piece
            for piece in pre_text.split(".")
        )
    return SemVer(
        major=int(match.group("major")),
        minor=int(match.group("minor")),
        patch=int(match.group("patch")),
        prerelease=identifiers,
    )
@dataclass
class TagInfo:
    """A Lance git tag paired with its parsed semantic version."""

    tag: str  # full git tag name, e.g. "v1.0.0-beta.2"
    version: str  # tag with the leading "v" stripped, e.g. "1.0.0-beta.2"
    semver: SemVer  # parsed form used for precedence comparisons
def run_command(cmd: Sequence[str]) -> str:
    """Run *cmd* and return its stripped stdout.

    Raises ``RuntimeError`` (including the command, exit code, and stderr)
    when the process exits nonzero.
    """
    proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
    if proc.returncode:
        joined = " ".join(cmd)
        raise RuntimeError(
            f"Command {joined} failed with {proc.returncode}: {proc.stderr.strip()}"
        )
    return proc.stdout.strip()
def fetch_remote_tags() -> List[TagInfo]:
    """List every semver-parseable ``v*`` tag of the Lance repo.

    Uses the GitHub CLI (``gh api``) to page through the repository's tag
    refs; refs that are not ``v``-prefixed or do not parse as semantic
    versions are skipped. Raises ``RuntimeError`` when nothing usable is
    returned (which would otherwise mask an API/auth failure).
    """
    raw_refs = run_command(
        [
            "gh",
            "api",
            "-X",
            "GET",
            f"repos/{LANCE_REPO}/git/refs/tags",
            "--paginate",
            "--jq",
            ".[].ref",
        ]
    )
    parsed: List[TagInfo] = []
    for raw in raw_refs.splitlines():
        ref = raw.strip()
        if not ref.startswith("refs/tags/v"):
            continue
        tag_name = ref.split("refs/tags/")[-1]
        version_text = tag_name.lstrip("v")
        try:
            semver = parse_semver(version_text)
        except ValueError:
            continue
        parsed.append(TagInfo(tag=tag_name, version=version_text, semver=semver))
    if not parsed:
        raise RuntimeError("No Lance tags could be parsed from GitHub API output")
    return parsed
def read_current_version(repo_root: Path) -> str:
cargo_path = repo_root / "Cargo.toml"
with cargo_path.open("rb") as fh:
data = tomllib.load(fh)
try:
deps = data["workspace"]["dependencies"]
entry = deps["lance"]
except KeyError as exc: # pragma: no cover - configuration guard
raise RuntimeError("Failed to locate workspace.dependencies.lance in Cargo.toml") from exc
if isinstance(entry, str):
raw_version = entry
elif isinstance(entry, dict):
raw_version = entry.get("version", "")
else: # pragma: no cover - defensive
raise RuntimeError("Unexpected lance dependency format")
raw_version = raw_version.strip()
if not raw_version:
raise RuntimeError("lance dependency does not declare a version")
return raw_version.lstrip("=")
def determine_latest_tag(tags: Iterable[TagInfo]) -> TagInfo:
    """Pick the tag whose parsed semantic version ranks highest."""
    return max(tags, key=lambda info: info.semver)
def write_outputs(args: argparse.Namespace, payload: dict) -> None:
    """Append ``key=value`` lines for *payload* to the GitHub Actions output file.

    A no-op when ``args.github_output`` is unset or empty (i.e. not running
    under GitHub Actions).
    """
    output_path = getattr(args, "github_output", None)
    if not output_path:
        return
    lines = [f"{key}={value}\n" for key, value in payload.items()]
    with open(output_path, "a", encoding="utf-8") as sink:
        sink.writelines(lines)
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: compare the pinned Lance version against the newest tag.

    Prints a JSON payload (current/latest version and tag plus a
    ``needs_update`` flag) to stdout and mirrors it into the GitHub Actions
    output file when one is configured. Always returns 0.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--repo-root",
        default=Path(__file__).resolve().parents[1],
        type=Path,
        help="Path to the lancedb repository root",
    )
    parser.add_argument(
        "--github-output",
        default=os.environ.get("GITHUB_OUTPUT"),
        help="Optional file path for writing GitHub Action outputs",
    )
    args = parser.parse_args(argv)

    pinned = read_current_version(Path(args.repo_root))
    pinned_semver = parse_semver(pinned)
    newest = determine_latest_tag(fetch_remote_tags())

    payload = {
        "current_version": pinned,
        "current_tag": f"v{pinned}",
        "latest_version": newest.version,
        "latest_tag": newest.tag,
        "needs_update": "true" if newest.semver > pinned_semver else "false",
    }
    print(json.dumps(payload))
    write_outputs(args, payload)
    return 0
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -16,7 +16,7 @@ check_command_exists() {
} }
if [[ ! -e ./lancedb ]]; then if [[ ! -e ./lancedb ]]; then
if [[ x${SOPHON_READ_TOKEN} != "x" ]]; then if [[ -v SOPHON_READ_TOKEN ]]; then
INPUT="lancedb-linux-x64" INPUT="lancedb-linux-x64"
gh release \ gh release \
--repo lancedb/lancedb \ --repo lancedb/lancedb \

View File

@@ -3,8 +3,6 @@ import re
import sys import sys
import json import json
LANCE_GIT_URL = "https://github.com/lance-format/lance.git"
def run_command(command: str) -> str: def run_command(command: str) -> str:
""" """
@@ -31,7 +29,7 @@ def get_latest_stable_version() -> str:
def get_latest_preview_version() -> str: def get_latest_preview_version() -> str:
lance_tags = run_command( lance_tags = run_command(
f"git ls-remote --tags {LANCE_GIT_URL} | grep 'refs/tags/v[0-9beta.-]\\+$'" "git ls-remote --tags https://github.com/lancedb/lance.git | grep 'refs/tags/v[0-9beta.-]\\+$'"
).splitlines() ).splitlines()
lance_tags = ( lance_tags = (
tag.split("refs/tags/")[1] tag.split("refs/tags/")[1]
@@ -178,8 +176,8 @@ def set_stable_version(version: str):
def set_preview_version(version: str): def set_preview_version(version: str):
""" """
Sets lines to Sets lines to
lance = { "version" = "=0.29.0", default-features = false, "features" = ["dynamodb"], "tag" = "v0.29.0-beta.2", "git" = LANCE_GIT_URL } lance = { "version" = "=0.29.0", default-features = false, "features" = ["dynamodb"], "tag" = "v0.29.0-beta.2", "git" = "https://github.com/lancedb/lance.git" }
lance-io = { "version" = "=0.29.0", default-features = false, "tag" = "v0.29.0-beta.2", "git" = LANCE_GIT_URL } lance-io = { "version" = "=0.29.0", default-features = false, "tag" = "v0.29.0-beta.2", "git" = "https://github.com/lancedb/lance.git" }
... ...
""" """
@@ -196,7 +194,7 @@ def set_preview_version(version: str):
config["features"] = features config["features"] = features
config["tag"] = f"v{version}" config["tag"] = f"v{version}"
config["git"] = LANCE_GIT_URL config["git"] = "https://github.com/lancedb/lance.git"
return dict_to_toml_line(package_name, config) return dict_to_toml_line(package_name, config)
@@ -229,29 +227,6 @@ def set_local_version():
update_cargo_toml(line_updater) update_cargo_toml(line_updater)
def update_lockfiles(version: str, fallback_to_git: bool = False):
"""
Update Cargo metadata and optionally fall back to using the git tag if the
requested crates.io version is unavailable.
"""
try:
print("Updating lockfiles...", file=sys.stderr, end="")
run_command("cargo metadata > /dev/null")
print(" done.", file=sys.stderr)
except Exception as e:
if fallback_to_git and "failed to select a version" in str(e):
print(
f" failed for crates.io v{version}, retrying with git tag...",
file=sys.stderr,
)
set_preview_version(version)
print("Updating lockfiles...", file=sys.stderr, end="")
run_command("cargo metadata > /dev/null")
print(" done.", file=sys.stderr)
else:
raise
parser = argparse.ArgumentParser(description="Set the version of the Lance package.") parser = argparse.ArgumentParser(description="Set the version of the Lance package.")
parser.add_argument( parser.add_argument(
"version", "version",
@@ -267,7 +242,6 @@ if args.version == "stable":
file=sys.stderr, file=sys.stderr,
) )
set_stable_version(latest_stable_version) set_stable_version(latest_stable_version)
update_lockfiles(latest_stable_version)
elif args.version == "preview": elif args.version == "preview":
latest_preview_version = get_latest_preview_version() latest_preview_version = get_latest_preview_version()
print( print(
@@ -275,10 +249,8 @@ elif args.version == "preview":
file=sys.stderr, file=sys.stderr,
) )
set_preview_version(latest_preview_version) set_preview_version(latest_preview_version)
update_lockfiles(latest_preview_version)
elif args.version == "local": elif args.version == "local":
set_local_version() set_local_version()
update_lockfiles("local")
else: else:
# Parse the version number. # Parse the version number.
version = args.version version = args.version
@@ -288,7 +260,9 @@ else:
if "beta" in version: if "beta" in version:
set_preview_version(version) set_preview_version(version)
update_lockfiles(version)
else: else:
set_stable_version(version) set_stable_version(version)
update_lockfiles(version, fallback_to_git=True)
print("Updating lockfiles...", file=sys.stderr, end="")
run_command("cargo metadata > /dev/null")
print(" done.", file=sys.stderr)

View File

@@ -1,8 +1,8 @@
# LanceDB Documentation # LanceDB Documentation
LanceDB docs are available at [lancedb.com/docs](https://lancedb.com/docs). LanceDB docs are deployed to https://lancedb.github.io/lancedb/.
The SDK docs are built and deployed automatically by [Github Actions](../.github/workflows/docs.yml) Docs is built and deployed automatically by [Github Actions](../.github/workflows/docs.yml)
whenever a commit is pushed to the `main` branch. So it is possible for the docs to show whenever a commit is pushed to the `main` branch. So it is possible for the docs to show
unreleased features. unreleased features.

View File

@@ -11,7 +11,7 @@ watch:
theme: theme:
name: "material" name: "material"
logo: assets/logo.png logo: assets/logo.png
favicon: assets/favicon.ico favicon: assets/logo.png
palette: palette:
# Palette toggle for light mode # Palette toggle for light mode
- scheme: lancedb - scheme: lancedb
@@ -32,6 +32,8 @@ theme:
- content.tooltips - content.tooltips
- toc.follow - toc.follow
- navigation.top - navigation.top
- navigation.tabs
- navigation.tabs.sticky
- navigation.footer - navigation.footer
- navigation.tracking - navigation.tracking
- navigation.instant - navigation.instant
@@ -113,16 +115,14 @@ markdown_extensions:
emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_index: !!python/name:material.extensions.emoji.twemoji
emoji_generator: !!python/name:material.extensions.emoji.to_svg emoji_generator: !!python/name:material.extensions.emoji.to_svg
- markdown.extensions.toc: - markdown.extensions.toc:
toc_depth: 3 baselevel: 1
permalink: true permalink: ""
permalink_title: Anchor link to this section
nav: nav:
- Documentation: - API reference:
- SDK Reference: index.md - Overview: index.md
- Python: python/python.md - Python: python/python.md
- Javascript/TypeScript: js/globals.md - Javascript/TypeScript: js/globals.md
- Java: java/java.md
- Rust: https://docs.rs/lancedb/latest/lancedb/index.html - Rust: https://docs.rs/lancedb/latest/lancedb/index.html
extra_css: extra_css:

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -1,111 +0,0 @@
# VoyageAI Embeddings : Multimodal
VoyageAI embeddings can also be used to embed both text and image data, only some of the models support image data and you can check the list
under [https://docs.voyageai.com/docs/multimodal-embeddings](https://docs.voyageai.com/docs/multimodal-embeddings)
Supported multimodal models:
- `voyage-multimodal-3` - 1024 dimensions (text + images)
- `voyage-multimodal-3.5` - Flexible dimensions (256, 512, 1024 default, 2048). Supports text, images, and video.
### Video Support (voyage-multimodal-3.5)
The `voyage-multimodal-3.5` model supports video input through:
- Video URLs (`.mp4`, `.webm`, `.mov`, `.avi`, `.mkv`, `.m4v`, `.gif`)
- Video file paths
Constraints: Max 20MB video size.
Supported parameters (to be passed in `create` method) are:
| Parameter | Type | Default Value | Description |
|---|---|-------------------------|-------------------------------------------|
| `name` | `str` | `"voyage-multimodal-3"` | The model ID of the VoyageAI model to use |
| `output_dimension` | `int` | `None` | Output dimension for voyage-multimodal-3.5. Valid: 256, 512, 1024, 2048 |
Usage Example:
```python
import base64
import os
from io import BytesIO
import requests
import lancedb
from lancedb.pydantic import LanceModel, Vector
from lancedb.embeddings import get_registry
import pandas as pd
os.environ['VOYAGE_API_KEY'] = 'YOUR_VOYAGE_API_KEY'
db = lancedb.connect(".lancedb")
func = get_registry().get("voyageai").create(name="voyage-multimodal-3")
def image_to_base64(image_bytes: bytes):
buffered = BytesIO(image_bytes)
img_str = base64.b64encode(buffered.getvalue())
return img_str.decode("utf-8")
class Images(LanceModel):
label: str
image_uri: str = func.SourceField() # image uri as the source
image_bytes: str = func.SourceField() # image bytes base64 encoded as the source
vector: Vector(func.ndims()) = func.VectorField() # vector column
vec_from_bytes: Vector(func.ndims()) = func.VectorField() # Another vector column
if "images" in db.table_names():
db.drop_table("images")
table = db.create_table("images", schema=Images)
labels = ["cat", "cat", "dog", "dog", "horse", "horse"]
uris = [
"http://farm1.staticflickr.com/53/167798175_7c7845bbbd_z.jpg",
"http://farm1.staticflickr.com/134/332220238_da527d8140_z.jpg",
"http://farm9.staticflickr.com/8387/8602747737_2e5c2a45d4_z.jpg",
"http://farm5.staticflickr.com/4092/5017326486_1f46057f5f_z.jpg",
"http://farm9.staticflickr.com/8216/8434969557_d37882c42d_z.jpg",
"http://farm6.staticflickr.com/5142/5835678453_4f3a4edb45_z.jpg",
]
# get each uri as bytes
images_bytes = [image_to_base64(requests.get(uri).content) for uri in uris]
table.add(
pd.DataFrame({"label": labels, "image_uri": uris, "image_bytes": images_bytes})
)
```
Now we can search using text from both the default vector column and the custom vector column
```python
# text search
actual = table.search("man's best friend", "vec_from_bytes").limit(1).to_pydantic(Images)[0]
print(actual.label) # prints "dog"
frombytes = (
table.search("man's best friend", vector_column_name="vec_from_bytes")
.limit(1)
.to_pydantic(Images)[0]
)
print(frombytes.label)
```
Because we're using a multi-modal embedding function, we can also search using images
```python
# image search
from PIL import Image

query_image_uri = "http://farm1.staticflickr.com/200/467715466_ed4a31801f_z.jpg"
image_bytes = requests.get(query_image_uri).content
query_image = Image.open(BytesIO(image_bytes))
actual = table.search(query_image, "vec_from_bytes").limit(1).to_pydantic(Images)[0]
print(actual.label == "dog")
# image search using a custom vector column
other = (
    table.search(query_image, vector_column_name="vec_from_bytes")
    .limit(1)
    .to_pydantic(Images)[0]
)
print(other.label)
```

View File

@@ -1,12 +1,7 @@
# SDK Reference # API Reference
This site contains the API reference for the client SDKs supported by [LanceDB](https://lancedb.com). This page contains the API reference for the SDKs supported by the LanceDB team.
- [Python](python/python.md) - [Python](python/python.md)
- [JavaScript/TypeScript](js/globals.md) - [JavaScript/TypeScript](js/globals.md)
- [Java](java/java.md) - [Rust](https://docs.rs/lancedb/latest/lancedb/index.html)
- [Rust](https://docs.rs/lancedb/latest/lancedb/index.html)
!!! info "LanceDB Documentation"
If you're looking for the full documentation of LanceDB, visit [docs.lancedb.com](https://docs.lancedb.com).

View File

@@ -1,499 +0,0 @@
# Java SDK
The LanceDB Java SDK provides a convenient way to interact with LanceDB Cloud and Enterprise deployments using the Lance REST Namespace API.
!!! note
The Java SDK currently only works for LanceDB remote database that connects to LanceDB Cloud and Enterprise.
Local database support is a work in progress. Check [LANCEDB-2848](https://github.com/lancedb/lancedb/issues/2848) for the latest progress.
## Installation
Add the following dependency to your `pom.xml`:
```xml
<dependency>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-core</artifactId>
<version>0.23.1</version>
</dependency>
```
## Quick Start
### Connecting to LanceDB Cloud
```java
import com.lancedb.LanceDbNamespaceClientBuilder;
import org.lance.namespace.LanceNamespace;
// If your DB url is db://example-db, then your database here is example-db
LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
.apiKey("your_lancedb_cloud_api_key")
.database("your_database_name")
.build();
```
### Connecting to LanceDB Enterprise
For LanceDB Enterprise deployments with a custom endpoint:
```java
LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
.apiKey("your_lancedb_enterprise_api_key")
.database("your_database_name")
.endpoint("<your_enterprise_endpoint>")
.build();
```
### Configuration Options
| Method | Description | Required |
|--------|-------------|----------|
| `apiKey(String)` | LanceDB API key | Yes |
| `database(String)` | Database name | Yes |
| `endpoint(String)` | Custom endpoint URL for Enterprise deployments | No |
| `region(String)` | AWS region (default: "us-east-1") | No |
| `config(String, String)` | Additional configuration parameters | No |
## Metadata Operations
### Creating a Namespace
Namespaces organize tables hierarchically. Create a namespace before creating tables within it:
```java
import org.lance.namespace.model.CreateNamespaceRequest;
import org.lance.namespace.model.CreateNamespaceResponse;
// Create a child namespace
CreateNamespaceRequest request = new CreateNamespaceRequest();
request.setId(Arrays.asList("my_namespace"));
CreateNamespaceResponse response = namespaceClient.createNamespace(request);
```
You can also create nested namespaces:
```java
// Create a nested namespace: parent/child
CreateNamespaceRequest request = new CreateNamespaceRequest();
request.setId(Arrays.asList("parent_namespace", "child_namespace"));
CreateNamespaceResponse response = namespaceClient.createNamespace(request);
```
### Describing a Namespace
```java
import org.lance.namespace.model.DescribeNamespaceRequest;
import org.lance.namespace.model.DescribeNamespaceResponse;
DescribeNamespaceRequest request = new DescribeNamespaceRequest();
request.setId(Arrays.asList("my_namespace"));
DescribeNamespaceResponse response = namespaceClient.describeNamespace(request);
System.out.println("Namespace properties: " + response.getProperties());
```
### Listing Namespaces
```java
import org.lance.namespace.model.ListNamespacesRequest;
import org.lance.namespace.model.ListNamespacesResponse;
// List all namespaces at root level
ListNamespacesRequest request = new ListNamespacesRequest();
request.setId(Arrays.asList()); // Empty for root
ListNamespacesResponse response = namespaceClient.listNamespaces(request);
for (String ns : response.getNamespaces()) {
System.out.println("Namespace: " + ns);
}
// List child namespaces under a parent
ListNamespacesRequest childRequest = new ListNamespacesRequest();
childRequest.setId(Arrays.asList("parent_namespace"));
ListNamespacesResponse childResponse = namespaceClient.listNamespaces(childRequest);
```
### Listing Tables
```java
import org.lance.namespace.model.ListTablesRequest;
import org.lance.namespace.model.ListTablesResponse;
// List tables in a namespace
ListTablesRequest request = new ListTablesRequest();
request.setId(Arrays.asList("my_namespace"));
ListTablesResponse response = namespaceClient.listTables(request);
for (String table : response.getTables()) {
System.out.println("Table: " + table);
}
```
### Dropping a Namespace
```java
import org.lance.namespace.model.DropNamespaceRequest;
import org.lance.namespace.model.DropNamespaceResponse;
DropNamespaceRequest request = new DropNamespaceRequest();
request.setId(Arrays.asList("my_namespace"));
DropNamespaceResponse response = namespaceClient.dropNamespace(request);
```
### Describing a Table
```java
import org.lance.namespace.model.DescribeTableRequest;
import org.lance.namespace.model.DescribeTableResponse;
DescribeTableRequest request = new DescribeTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
DescribeTableResponse response = namespaceClient.describeTable(request);
System.out.println("Table version: " + response.getVersion());
System.out.println("Schema fields: " + response.getSchema().getFields());
```
### Dropping a Table
```java
import org.lance.namespace.model.DropTableRequest;
import org.lance.namespace.model.DropTableResponse;
DropTableRequest request = new DropTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
DropTableResponse response = namespaceClient.dropTable(request);
```
## Writing Data
### Creating a Table
Tables are created within a namespace by providing data in Apache Arrow IPC format:
```java
import org.lance.namespace.LanceNamespace;
import org.lance.namespace.model.CreateTableRequest;
import org.lance.namespace.model.CreateTableResponse;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.IntVector;
import org.apache.arrow.vector.VarCharVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.complex.FixedSizeListVector;
import org.apache.arrow.vector.Float4Vector;
import org.apache.arrow.vector.ipc.ArrowStreamWriter;
import org.apache.arrow.vector.types.FloatingPointPrecision;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;
import java.io.ByteArrayOutputStream;
import java.nio.channels.Channels;
import java.util.Arrays;
// Create schema with id, name, and embedding fields
Schema schema = new Schema(Arrays.asList(
new Field("id", FieldType.nullable(new ArrowType.Int(32, true)), null),
new Field("name", FieldType.nullable(new ArrowType.Utf8()), null),
new Field("embedding",
FieldType.nullable(new ArrowType.FixedSizeList(128)),
Arrays.asList(new Field("item",
FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)),
null)))
));
try (BufferAllocator allocator = new RootAllocator();
VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
// Populate data
root.setRowCount(3);
IntVector idVector = (IntVector) root.getVector("id");
VarCharVector nameVector = (VarCharVector) root.getVector("name");
FixedSizeListVector embeddingVector = (FixedSizeListVector) root.getVector("embedding");
Float4Vector embeddingData = (Float4Vector) embeddingVector.getDataVector();
for (int i = 0; i < 3; i++) {
idVector.setSafe(i, i + 1);
nameVector.setSafe(i, ("item_" + i).getBytes());
embeddingVector.setNotNull(i);
for (int j = 0; j < 128; j++) {
embeddingData.setSafe(i * 128 + j, (float) i);
}
}
idVector.setValueCount(3);
nameVector.setValueCount(3);
embeddingData.setValueCount(3 * 128);
embeddingVector.setValueCount(3);
// Serialize to Arrow IPC format
ByteArrayOutputStream out = new ByteArrayOutputStream();
try (ArrowStreamWriter writer = new ArrowStreamWriter(root, null, Channels.newChannel(out))) {
writer.start();
writer.writeBatch();
writer.end();
}
byte[] tableData = out.toByteArray();
// Create table in a namespace
CreateTableRequest request = new CreateTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
CreateTableResponse response = namespaceClient.createTable(request, tableData);
}
```
### Insert
```java
import org.lance.namespace.model.InsertIntoTableRequest;
import org.lance.namespace.model.InsertIntoTableResponse;
// Prepare data in Arrow IPC format (similar to create table example)
byte[] insertData = prepareArrowData();
InsertIntoTableRequest request = new InsertIntoTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
request.setMode(InsertIntoTableRequest.ModeEnum.APPEND);
InsertIntoTableResponse response = namespaceClient.insertIntoTable(request, insertData);
System.out.println("New version: " + response.getVersion());
```
### Update
Update rows matching a predicate condition:
```java
import org.lance.namespace.model.UpdateTableRequest;
import org.lance.namespace.model.UpdateTableResponse;
UpdateTableRequest request = new UpdateTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
// Predicate to select rows to update
request.setPredicate("id = 1");
// Set new values using SQL expressions as [column_name, expression] pairs
request.setUpdates(Arrays.asList(
Arrays.asList("name", "'updated_name'")
));
UpdateTableResponse response = namespaceClient.updateTable(request);
System.out.println("Updated rows: " + response.getUpdatedRows());
```
### Delete
Delete rows matching a predicate condition:
```java
import org.lance.namespace.model.DeleteFromTableRequest;
import org.lance.namespace.model.DeleteFromTableResponse;
DeleteFromTableRequest request = new DeleteFromTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
// Predicate to select rows to delete
request.setPredicate("id > 100");
DeleteFromTableResponse response = namespaceClient.deleteFromTable(request);
System.out.println("New version: " + response.getVersion());
```
### Merge Insert (Upsert)
Merge insert allows you to update existing rows and insert new rows in a single operation based on a key column:
```java
import org.lance.namespace.model.MergeInsertIntoTableRequest;
import org.lance.namespace.model.MergeInsertIntoTableResponse;
// Prepare data with rows to update (id=2,3) and new rows (id=4)
byte[] mergeData = prepareArrowData(); // Contains rows with id=2,3,4
MergeInsertIntoTableRequest request = new MergeInsertIntoTableRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
// Match on the "id" column
request.setOn("id");
// Update all columns when a matching row is found
request.setWhenMatchedUpdateAll(true);
// Insert new rows when no match is found
request.setWhenNotMatchedInsertAll(true);
MergeInsertIntoTableResponse response = namespaceClient.mergeInsertIntoTable(request, mergeData);
System.out.println("Updated rows: " + response.getNumUpdatedRows());
System.out.println("Inserted rows: " + response.getNumInsertedRows());
```
## Querying Data
### Counting Rows
```java
import org.lance.namespace.model.CountTableRowsRequest;
CountTableRowsRequest request = new CountTableRowsRequest();
request.setId(Arrays.asList("my_namespace", "my_table"));
Long rowCount = namespaceClient.countTableRows(request);
System.out.println("Row count: " + rowCount);
```
### Vector Search
```java
import org.lance.namespace.model.QueryTableRequest;
import org.lance.namespace.model.QueryTableRequestVector;
QueryTableRequest query = new QueryTableRequest();
query.setId(Arrays.asList("my_namespace", "my_table"));
query.setK(10); // Return top 10 results
// Set the query vector
List<Float> queryVector = new ArrayList<>();
for (int i = 0; i < 128; i++) {
queryVector.add(1.0f);
}
QueryTableRequestVector vector = new QueryTableRequestVector();
vector.setSingleVector(queryVector);
query.setVector(vector);
// Specify columns to return
query.setColumns(Arrays.asList("id", "name", "embedding"));
// Execute query - returns Arrow IPC format
byte[] result = namespaceClient.queryTable(query);
```
### Full Text Search
```java
import org.lance.namespace.model.QueryTableRequest;
import org.lance.namespace.model.QueryTableRequestFullTextQuery;
import org.lance.namespace.model.StringFtsQuery;
QueryTableRequest query = new QueryTableRequest();
query.setId(Arrays.asList("my_namespace", "my_table"));
query.setK(10);
// Set full text search query
StringFtsQuery stringQuery = new StringFtsQuery();
stringQuery.setQuery("search terms");
stringQuery.setColumns(Arrays.asList("text_column"));
QueryTableRequestFullTextQuery fts = new QueryTableRequestFullTextQuery();
fts.setStringQuery(stringQuery);
query.setFullTextQuery(fts);
// Specify columns to return
query.setColumns(Arrays.asList("id", "text_column"));
byte[] result = namespaceClient.queryTable(query);
```
### Query with Filter
```java
QueryTableRequest query = new QueryTableRequest();
query.setId(Arrays.asList("my_namespace", "my_table"));
query.setK(10);
query.setFilter("id > 50");
query.setColumns(Arrays.asList("id", "name"));
byte[] result = namespaceClient.queryTable(query);
```
### Query with Prefilter
```java
QueryTableRequest query = new QueryTableRequest();
query.setId(Arrays.asList("my_namespace", "my_table"));
query.setK(5);
query.setPrefilter(true); // Apply filter before vector search
query.setFilter("category = 'electronics'");
// Set query vector
QueryTableRequestVector vector = new QueryTableRequestVector();
vector.setSingleVector(queryVector);
query.setVector(vector);
byte[] result = namespaceClient.queryTable(query);
```
### Reading Query Results
Query results are returned in Apache Arrow IPC file format. Here's how to read them:
```java
import org.apache.arrow.vector.ipc.ArrowFileReader;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
// Helper class to read Arrow data from byte array
class ByteArraySeekableByteChannel implements SeekableByteChannel {
private final byte[] data;
private long position = 0;
private boolean isOpen = true;
public ByteArraySeekableByteChannel(byte[] data) {
this.data = data;
}
@Override
public int read(ByteBuffer dst) {
int remaining = dst.remaining();
int available = (int) (data.length - position);
if (available <= 0) return -1;
int toRead = Math.min(remaining, available);
dst.put(data, (int) position, toRead);
position += toRead;
return toRead;
}
@Override public long position() { return position; }
@Override public SeekableByteChannel position(long newPosition) { position = newPosition; return this; }
@Override public long size() { return data.length; }
@Override public boolean isOpen() { return isOpen; }
@Override public void close() { isOpen = false; }
@Override public int write(ByteBuffer src) { throw new UnsupportedOperationException(); }
@Override public SeekableByteChannel truncate(long size) { throw new UnsupportedOperationException(); }
}
// Read query results
byte[] queryResult = namespaceClient.queryTable(query);
try (BufferAllocator allocator = new RootAllocator();
ArrowFileReader reader = new ArrowFileReader(
new ByteArraySeekableByteChannel(queryResult), allocator)) {
for (int i = 0; i < reader.getRecordBlocks().size(); i++) {
reader.loadRecordBatch(reader.getRecordBlocks().get(i));
VectorSchemaRoot root = reader.getVectorSchemaRoot();
// Access data
IntVector idVector = (IntVector) root.getVector("id");
VarCharVector nameVector = (VarCharVector) root.getVector("name");
for (int row = 0; row < root.getRowCount(); row++) {
int id = idVector.get(row);
            String name = new String(nameVector.get(row), java.nio.charset.StandardCharsets.UTF_8);
System.out.println("Row " + row + ": id=" + id + ", name=" + name);
}
}
}
```

View File

@@ -34,7 +34,7 @@ const results = await table.vectorSearch([0.1, 0.3]).limit(20).toArray();
console.log(results); console.log(results);
``` ```
The [quickstart](https://lancedb.com/docs/quickstart/basic-usage/) contains more complete examples. The [quickstart](https://lancedb.github.io/lancedb/basic/) contains a more complete example.
## Development ## Development

View File

@@ -1,7 +1,7 @@
# Contributing to LanceDB Typescript # Contributing to LanceDB Typescript
This document outlines the process for contributing to LanceDB Typescript. This document outlines the process for contributing to LanceDB Typescript.
For general contribution guidelines, see [CONTRIBUTING.md](../CONTRIBUTING.md). For general contribution guidelines, see [CONTRIBUTING.md](../../../../CONTRIBUTING.md).
## Project layout ## Project layout

View File

@@ -147,7 +147,7 @@ A new PermutationBuilder instance
#### Example #### Example
```ts ```ts
builder.splitCalculated({ calculation: "user_id % 3" }); builder.splitCalculated("user_id % 3");
``` ```
*** ***

View File

@@ -89,4 +89,4 @@ optional storageOptions: Record<string, string>;
(For LanceDB OSS only): configuration for object storage. (For LanceDB OSS only): configuration for object storage.
The available options are described at https://lancedb.com/docs/storage/ The available options are described at https://lancedb.github.io/lancedb/guides/storage/

View File

@@ -97,4 +97,4 @@ Configuration for object storage.
Options already set on the connection will be inherited by the table, Options already set on the connection will be inherited by the table,
but can be overridden here. but can be overridden here.
The available options are described at https://lancedb.com/docs/storage/ The available options are described at https://lancedb.github.io/lancedb/guides/storage/

View File

@@ -8,14 +8,6 @@
## Properties ## Properties
### numAttempts
```ts
numAttempts: number;
```
***
### numDeletedRows ### numDeletedRows
```ts ```ts

View File

@@ -42,4 +42,4 @@ Configuration for object storage.
Options already set on the connection will be inherited by the table, Options already set on the connection will be inherited by the table,
but can be overridden here. but can be overridden here.
The available options are described at https://lancedb.com/docs/storage/ The available options are described at https://lancedb.github.io/lancedb/guides/storage/

View File

@@ -30,12 +30,6 @@ is also an [asynchronous API client](#connections-asynchronous).
::: lancedb.table.Table ::: lancedb.table.Table
::: lancedb.table.FragmentStatistics
::: lancedb.table.FragmentSummaryStats
::: lancedb.table.Tags
## Querying (Synchronous) ## Querying (Synchronous)
::: lancedb.query.Query ::: lancedb.query.Query
@@ -64,14 +58,6 @@ is also an [asynchronous API client](#connections-asynchronous).
::: lancedb.embeddings.open_clip.OpenClipEmbeddings ::: lancedb.embeddings.open_clip.OpenClipEmbeddings
## Remote configuration
::: lancedb.remote.ClientConfig
::: lancedb.remote.TimeoutConfig
::: lancedb.remote.RetryConfig
## Context ## Context
::: lancedb.context.contextualize ::: lancedb.context.contextualize
@@ -129,8 +115,6 @@ Table hold your actual data as a collection of records / rows.
::: lancedb.table.AsyncTable ::: lancedb.table.AsyncTable
::: lancedb.table.AsyncTags
## Indices (Asynchronous) ## Indices (Asynchronous)
Indices can be created on a table to speed up queries. This section Indices can be created on a table to speed up queries. This section
@@ -152,8 +136,6 @@ lists the indices that LanceDb supports.
::: lancedb.index.IvfFlat ::: lancedb.index.IvfFlat
::: lancedb.table.IndexStatistics
## Querying (Asynchronous) ## Querying (Asynchronous)
Queries allow you to return data from your database. Basic queries can be Queries allow you to return data from your database. Basic queries can be

View File

@@ -85,26 +85,17 @@
/* Header gradient (only header area) */ /* Header gradient (only header area) */
.md-header { .md-header {
background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%); background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
box-shadow: inset 0 1px 0 rgba(255,255,255,0.08), 0 1px 0 rgba(0,0,0,0.08); box-shadow: inset 0 1px 0 rgba(255,255,255,0.08), 0 1px 0 rgba(0,0,0,0.08);
} }
/* Improve brand title contrast on the lavender side */
.md-header__title,
.md-header__topic,
.md-header__title .md-ellipsis,
.md-header__topic .md-ellipsis {
color: #2b1b3a;
text-shadow: 0 1px 0 rgba(255, 255, 255, 0.25);
}
/* Same colors as header for tabs (that hold the text) */ /* Same colors as header for tabs (that hold the text) */
.md-tabs { .md-tabs {
background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%); background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
} }
/* Dark scheme variant */ /* Dark scheme variant */
[data-md-color-scheme="slate"] .md-header, [data-md-color-scheme="slate"] .md-header,
[data-md-color-scheme="slate"] .md-tabs { [data-md-color-scheme="slate"] .md-tabs {
background: linear-gradient(90deg, #e4d8f8 0%, #F0B7C1 45%, #E55A2B 100%); background: linear-gradient(90deg, #3B2E58 0%, #F0B7C1 45%, #E55A2B 100%);
} }

View File

@@ -1,28 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
.PHONY: build-lancedb
build-lancedb:
./mvnw spotless:apply -pl lancedb-core -am
./mvnw install -pl lancedb-core -am
.PHONY: test-lancedb
test-lancedb:
# Requires LANCEDB_DB and LANCEDB_API_KEY environment variables
./mvnw test -pl lancedb-core -P integration-tests
.PHONY: clean
clean:
./mvnw clean
.PHONY: build
build: build-lancedb

View File

@@ -7,11 +7,10 @@
For LanceDB Cloud, use the simplified builder API: For LanceDB Cloud, use the simplified builder API:
```java ```java
import com.lancedb.LanceDbNamespaceClientBuilder; import com.lancedb.lance.namespace.LanceRestNamespace;
import org.lance.namespace.LanceNamespace;
// If your DB url is db://example-db, then your database here is example-db // If your DB url is db://example-db, then your database here is example-db
LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder() LanceRestNamespace namespace = LanceDBRestNamespaces.builder()
.apiKey("your_lancedb_cloud_api_key") .apiKey("your_lancedb_cloud_api_key")
.database("your_database_name") .database("your_database_name")
.build(); .build();
@@ -19,13 +18,13 @@ LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
### LanceDB Enterprise ### LanceDB Enterprise
For Enterprise deployments, use your custom endpoint: For Enterprise deployments, use your VPC endpoint:
```java ```java
LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder() LanceRestNamespace namespace = LanceDBRestNamespaces.builder()
.apiKey("your_lancedb_enterprise_api_key") .apiKey("your_lancedb_enterprise_api_key")
.database("your_database_name") .database("your-top-dir") // Your top level folder under your cloud bucket, e.g. s3://your-bucket/your-top-dir/
.endpoint("<your_enterprise_endpoint>") .hostOverride("http://<vpc_endpoint_dns_name>:80")
.build(); .build();
``` ```
@@ -34,11 +33,5 @@ LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
Build: Build:
```shell ```shell
./mvnw install -pl lancedb-core -am ./mvnw install
``` ```
Run tests:
```shell
./mvnw test -pl lancedb-core
```

View File

@@ -0,0 +1,30 @@
[package]
name = "lancedb-jni"
description = "JNI bindings for LanceDB"
# TODO modify lancedb/Cargo.toml for version and dependencies
version = "0.10.0"
edition.workspace = true
repository.workspace = true
readme.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true
publish = false
[lib]
crate-type = ["cdylib"]
[dependencies]
lancedb = { path = "../../../rust/lancedb", default-features = false }
lance = { workspace = true }
arrow = { workspace = true, features = ["ffi"] }
arrow-schema.workspace = true
tokio = "1.46"
jni = "0.21.1"
snafu.workspace = true
lazy_static.workspace = true
serde = { version = "^1" }
serde_json = { version = "1" }
[features]
default = ["lancedb/default"]

View File

@@ -0,0 +1,133 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use crate::ffi::JNIEnvExt;
use crate::traits::IntoJava;
use crate::{Error, RT};
use jni::objects::{JObject, JString, JValue};
use jni::JNIEnv;
pub const NATIVE_CONNECTION: &str = "nativeConnectionHandle";
use crate::Result;
use lancedb::connection::{connect, Connection};
/// Synchronous wrapper around the async `lancedb::Connection`.
///
/// JNI entry points are plain blocking calls, so every async LanceDB
/// operation is driven to completion on the shared tokio runtime `RT`.
#[derive(Clone)]
pub struct BlockingConnection {
    // Underlying async connection; crate-visible so sibling JNI modules
    // (e.g. future table bindings) can reuse it.
    pub(crate) inner: Connection,
}

impl BlockingConnection {
    /// Open a connection to the dataset at `dataset_uri`, blocking until
    /// the async connect completes.
    pub fn create(dataset_uri: &str) -> Result<Self> {
        let inner = RT.block_on(connect(dataset_uri).execute())?;
        Ok(Self { inner })
    }

    /// List table names, optionally paginated.
    ///
    /// * `start_after` - if set, only names sorted after this one are returned.
    /// * `limit` - maximum number of names to return. The value is cast to
    ///   `u32`, so a negative Java `Integer` would wrap — callers are expected
    ///   to pass non-negative limits (TODO confirm at the Java layer).
    pub fn table_names(
        &self,
        start_after: Option<String>,
        limit: Option<i32>,
    ) -> Result<Vec<String>> {
        let mut op = self.inner.table_names();
        if let Some(start_after) = start_after {
            op = op.start_after(start_after);
        }
        if let Some(limit) = limit {
            op = op.limit(limit as u32);
        }
        Ok(RT.block_on(op.execute())?)
    }
}
impl IntoJava for BlockingConnection {
    /// Wrap this connection in a new Java `Connection` object, storing the
    /// Rust value in the object's native-handle field.
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a> {
        attach_native_connection(env, self)
    }
}
/// Create a Java `Connection` object and attach `connection` to it as a
/// native (Rust-owned) field. On failure a `RuntimeException` is raised on
/// the JVM side and a null `JObject` is returned.
fn attach_native_connection<'local>(
    env: &mut JNIEnv<'local>,
    connection: BlockingConnection,
) -> JObject<'local> {
    let j_connection = create_java_connection_object(env);
    // This block sets a native Rust object (Connection) as a field in the Java object (j_Connection).
    // Caution: This creates a potential for memory leaks. The Rust object (Connection) is not
    // automatically garbage-collected by Java, and its memory will not be freed unless
    // explicitly handled.
    //
    // To prevent memory leaks, ensure the following:
    // 1. The Java object (`j_Connection`) should implement the `java.io.Closeable` interface.
    // 2. Users of this Java object should be instructed to always use it within a try-with-resources
    //    statement (or manually call the `close()` method) to ensure that `self.close()` is invoked.
    match unsafe { env.set_rust_field(&j_connection, NATIVE_CONNECTION, connection) } {
        Ok(_) => j_connection,
        Err(err) => {
            env.throw_new(
                "java/lang/RuntimeException",
                format!("Failed to set native handle for Connection: {}", err),
            )
            .expect("Error throwing exception");
            JObject::null()
        }
    }
}
/// Instantiate `com.lancedb.lancedb.Connection` via its no-arg constructor.
/// Panics (rather than throwing) if the class or constructor cannot be
/// found, which indicates a packaging/classpath error.
fn create_java_connection_object<'a>(env: &mut JNIEnv<'a>) -> JObject<'a> {
    env.new_object("com/lancedb/lancedb/Connection", "()V", &[])
        .expect("Failed to create Java Lance Connection instance")
}
/// JNI: `Connection.releaseNativeConnection()`.
///
/// Takes the Rust `BlockingConnection` back out of the Java object's native
/// field; dropping the taken value here is what actually frees the native
/// connection resources.
#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_releaseNativeConnection(
    mut env: JNIEnv,
    j_connection: JObject,
) {
    let _: BlockingConnection = unsafe {
        env.take_rust_field(j_connection, NATIVE_CONNECTION)
            .expect("Failed to take native Connection handle")
    };
}
/// JNI: `Connection.connect(String datasetUri)`.
///
/// Opens a blocking connection and returns a new Java `Connection` object
/// that owns it; on failure a Java exception is thrown and null returned
/// (via `ok_or_throw!`).
#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_connect<'local>(
    mut env: JNIEnv<'local>,
    _obj: JObject,
    dataset_uri_object: JString,
) -> JObject<'local> {
    let dataset_uri: String = ok_or_throw!(env, env.get_string(&dataset_uri_object)).into();
    let blocking_connection = ok_or_throw!(env, BlockingConnection::create(&dataset_uri));
    blocking_connection.into_java(&mut env)
}
/// JNI: `Connection.tableNames(Optional<String> startAfter, Optional<Integer> limit)`.
///
/// Thin throwing wrapper over `inner_table_names`; returns a
/// `java.util.ArrayList<String>` of table names.
#[no_mangle]
pub extern "system" fn Java_com_lancedb_lancedb_Connection_tableNames<'local>(
    mut env: JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> JObject<'local> {
    ok_or_throw!(
        env,
        inner_table_names(&mut env, j_connection, start_after_obj, limit_obj)
    )
}
/// Implementation of `tableNames`: unwraps the Java `Optional`s, borrows the
/// native connection from the Java object's handle field, and copies the
/// resulting names into a new `java.util.ArrayList`.
fn inner_table_names<'local>(
    env: &mut JNIEnv<'local>,
    j_connection: JObject,
    start_after_obj: JObject, // Optional<String>
    limit_obj: JObject,       // Optional<Integer>
) -> Result<JObject<'local>> {
    let start_after = env.get_string_opt(&start_after_obj)?;
    let limit = env.get_int_opt(&limit_obj)?;
    let conn =
        unsafe { env.get_rust_field::<_, _, BlockingConnection>(j_connection, NATIVE_CONNECTION) }?;
    let table_names = conn.table_names(start_after, limit)?;
    // Release the field guard before making further JNI calls below.
    drop(conn);
    let j_names = env.new_object("java/util/ArrayList", "()V", &[])?;
    for item in table_names {
        let jstr_item = env.new_string(item)?;
        let item_jobj = JObject::from(jstr_item);
        let item_gen = JValue::Object(&item_jobj);
        env.call_method(&j_names, "add", "(Ljava/lang/Object;)Z", &[item_gen])?;
    }
    Ok(j_names)
}

View File

@@ -0,0 +1,217 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use std::str::Utf8Error;
use arrow_schema::ArrowError;
use jni::errors::Error as JniError;
use serde_json::Error as JsonError;
use snafu::{Location, Snafu};
type BoxedError = Box<dyn std::error::Error + Send + Sync + 'static>;
/// The Java exception classes that native errors can be surfaced as.
pub enum JavaException {
    IllegalArgumentException,
    IOException,
    RuntimeException,
}

impl JavaException {
    /// Fully-qualified JNI class path for this exception type.
    pub fn as_str(&self) -> &str {
        match *self {
            Self::RuntimeException => "java/lang/RuntimeException",
            Self::IOException => "java/io/IOException",
            Self::IllegalArgumentException => "java/lang/IllegalArgumentException",
        }
    }
}
/// TODO(lu) change to lancedb-jni
/// Error type for the JNI bindings.
///
/// Each variant is mapped onto a Java exception class by `Error::throw`;
/// the `location` fields record the Rust call site for diagnostics.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum Error {
    // Failure inside the JNI layer itself (bad class, method id, signature, ...).
    #[snafu(display("JNI error: {message}, {location}"))]
    Jni { message: String, location: Location },
    // Caller supplied an invalid argument; surfaced as IllegalArgumentException.
    #[snafu(display("Invalid argument: {message}, {location}"))]
    InvalidArgument { message: String, location: Location },
    #[snafu(display("IO error: {source}, {location}"))]
    IO {
        source: BoxedError,
        location: Location,
    },
    #[snafu(display("Arrow error: {message}, {location}"))]
    Arrow { message: String, location: Location },
    #[snafu(display("Index error: {message}, {location}"))]
    Index { message: String, location: Location },
    #[snafu(display("JSON error: {message}, {location}"))]
    JSON { message: String, location: Location },
    #[snafu(display("Dataset at path {path} was not found, {location}"))]
    DatasetNotFound { path: String, location: Location },
    #[snafu(display("Dataset already exists: {uri}, {location}"))]
    DatasetAlreadyExists { uri: String, location: Location },
    #[snafu(display("Table '{name}' already exists"))]
    TableAlreadyExists { name: String },
    #[snafu(display("Table '{name}' was not found: {source}"))]
    TableNotFound {
        name: String,
        source: Box<dyn std::error::Error + Send + Sync>,
    },
    #[snafu(display("Invalid table name '{name}': {reason}"))]
    InvalidTableName { name: String, reason: String },
    #[snafu(display("Embedding function '{name}' was not found: {reason}, {location}"))]
    EmbeddingFunctionNotFound {
        name: String,
        reason: String,
        location: Location,
    },
    // Catch-alls for lance / lancedb errors not explicitly mapped above.
    #[snafu(display("Other Lance error: {message}, {location}"))]
    OtherLance { message: String, location: Location },
    #[snafu(display("Other LanceDB error: {message}, {location}"))]
    OtherLanceDB { message: String, location: Location },
}
impl Error {
    /// Throw this error as the matching Java exception type.
    ///
    /// Caller-input problems map to `IllegalArgumentException`, storage and
    /// index failures to `IOException`, and everything else (including JNI
    /// and serialization failures) to `RuntimeException`.
    pub fn throw(&self, env: &mut jni::JNIEnv) {
        match self {
            Self::InvalidArgument { .. }
            | Self::DatasetNotFound { .. }
            | Self::DatasetAlreadyExists { .. }
            | Self::TableAlreadyExists { .. }
            | Self::TableNotFound { .. }
            | Self::InvalidTableName { .. }
            | Self::EmbeddingFunctionNotFound { .. } => {
                self.throw_as(env, JavaException::IllegalArgumentException)
            }
            Self::IO { .. } | Self::Index { .. } => self.throw_as(env, JavaException::IOException),
            Self::Arrow { .. }
            | Self::JSON { .. }
            | Self::OtherLance { .. }
            | Self::OtherLanceDB { .. }
            | Self::Jni { .. } => self.throw_as(env, JavaException::RuntimeException),
        }
    }

    /// Throw as a concrete Java exception class.
    pub fn throw_as(&self, env: &mut jni::JNIEnv, exception: JavaException) {
        // If throwing itself fails (e.g. the exception class cannot be
        // found), panic with a message identifying both the exception class
        // and the original error.
        let message = &format!(
            "Error when throwing Java exception: {}:{}",
            exception.as_str(),
            self
        );
        env.throw_new(exception.as_str(), self.to_string())
            .expect(message);
    }
}
/// Crate-wide result alias.
pub type Result<T> = std::result::Result<T, Error>;

/// Convert a `std::panic::Location` (captured via `#[track_caller]`) into
/// snafu's `Location` so errors report the originating Rust call site.
trait ToSnafuLocation {
    fn to_snafu_location(&'static self) -> snafu::Location;
}

impl ToSnafuLocation for std::panic::Location<'static> {
    fn to_snafu_location(&'static self) -> snafu::Location {
        snafu::Location::new(self.file(), self.line(), self.column())
    }
}
// Low-level JNI failures become `Error::Jni`, tagged with the caller's
// source location via `#[track_caller]`.
impl From<JniError> for Error {
    #[track_caller]
    fn from(source: JniError) -> Self {
        Self::Jni {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

// Invalid UTF-8 coming from Java strings is treated as a caller error.
impl From<Utf8Error> for Error {
    #[track_caller]
    fn from(source: Utf8Error) -> Self {
        Self::InvalidArgument {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<ArrowError> for Error {
    #[track_caller]
    fn from(source: ArrowError) -> Self {
        Self::Arrow {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}

impl From<JsonError> for Error {
    #[track_caller]
    fn from(source: JsonError) -> Self {
        Self::JSON {
            message: source.to_string(),
            location: std::panic::Location::caller().to_snafu_location(),
        }
    }
}
impl From<lance::Error> for Error {
    /// Map low-level lance errors onto JNI error variants, preserving the
    /// structured fields (and original `location`) where the variants line
    /// up; anything unrecognized falls back to `OtherLance`.
    #[track_caller]
    fn from(source: lance::Error) -> Self {
        match source {
            lance::Error::DatasetNotFound {
                path,
                source: _,
                location,
            } => Self::DatasetNotFound { path, location },
            lance::Error::DatasetAlreadyExists { uri, location } => {
                Self::DatasetAlreadyExists { uri, location }
            }
            lance::Error::IO { source, location } => Self::IO { source, location },
            lance::Error::Arrow { message, location } => Self::Arrow { message, location },
            lance::Error::Index { message, location } => Self::Index { message, location },
            lance::Error::InvalidInput { source, location } => Self::InvalidArgument {
                message: source.to_string(),
                location,
            },
            _ => Self::OtherLance {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}
impl From<lancedb::Error> for Error {
    /// Map high-level lancedb errors onto JNI error variants; nested lance
    /// errors are delegated to the `From<lance::Error>` conversion, and
    /// anything unrecognized falls back to `OtherLanceDB`.
    #[track_caller]
    fn from(source: lancedb::Error) -> Self {
        match source {
            lancedb::Error::InvalidTableName { name, reason } => {
                Self::InvalidTableName { name, reason }
            }
            lancedb::Error::InvalidInput { message } => Self::InvalidArgument {
                message,
                location: std::panic::Location::caller().to_snafu_location(),
            },
            lancedb::Error::TableNotFound { name, source } => Self::TableNotFound { name, source },
            lancedb::Error::TableAlreadyExists { name } => Self::TableAlreadyExists { name },
            lancedb::Error::EmbeddingFunctionNotFound { name, reason } => {
                Self::EmbeddingFunctionNotFound {
                    name,
                    reason,
                    location: std::panic::Location::caller().to_snafu_location(),
                }
            }
            lancedb::Error::Arrow { source } => Self::Arrow {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
            // Nested lance errors reuse the From<lance::Error> mapping above.
            lancedb::Error::Lance { source } => Self::from(source),
            _ => Self::OtherLanceDB {
                message: source.to_string(),
                location: std::panic::Location::caller().to_snafu_location(),
            },
        }
    }
}

View File

@@ -0,0 +1,194 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use core::slice;
use jni::objects::{JByteBuffer, JObjectArray, JString};
use jni::sys::jobjectArray;
use jni::{objects::JObject, JNIEnv};
use crate::error::{Error, Result};
/// TODO(lu) import from lance-jni without duplicate
/// Extend JNIEnv with helper functions for converting common Java
/// container and `Optional` types into Rust values.
pub trait JNIEnvExt {
    /// Get integers from Java List<Integer> object.
    fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>>;
    /// Get strings from Java List<String> object.
    #[allow(dead_code)]
    fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>>;
    /// Get strings from Java String[] object.
    /// Note that get Option<Vec<String>> from Java Optional<String[]> just doesn't work.
    #[allow(unused)]
    fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>>;
    /// Get Option<String> from Java Optional<String>.
    fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>>;
    /// Get Option<Vec<String>> from Java Optional<List<String>>.
    #[allow(unused)]
    fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>>;
    /// Get Option<i32> from Java Optional<Integer>.
    fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>>;
    /// Get Option<Vec<i32>> from Java Optional<List<Integer>>.
    fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>>;
    /// Get Option<i64> from Java Optional<Long>.
    #[allow(unused)]
    fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>>;
    /// Get Option<u64> from Java Optional<Long>.
    #[allow(unused)]
    fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>>;
    /// Get Option<&[u8]> from Java Optional<ByteBuffer>.
    /// NOTE(review): the returned slice borrows the direct buffer's memory —
    /// it is only valid while the Java ByteBuffer stays alive; confirm callers.
    #[allow(unused)]
    fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>>;
    /// Run `f` on the value held by a Java `Optional`; returns `None` for a
    /// null reference or an empty optional.
    fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
    where
        F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>;
}
impl JNIEnvExt for JNIEnv<'_> {
    fn get_integers(&mut self, obj: &JObject) -> Result<Vec<i32>> {
        // Walk the java.util.List, unboxing each Integer via intValue().
        let list = self.get_list(obj)?;
        let mut iter = list.iter(self)?;
        let mut results = Vec::with_capacity(list.size(self)? as usize);
        while let Some(elem) = iter.next(self)? {
            let int_obj = self.call_method(elem, "intValue", "()I", &[])?;
            let int_value = int_obj.i()?;
            results.push(int_value);
        }
        Ok(results)
    }

    fn get_strings(&mut self, obj: &JObject) -> Result<Vec<String>> {
        let list = self.get_list(obj)?;
        let mut iter = list.iter(self)?;
        let mut results = Vec::with_capacity(list.size(self)? as usize);
        while let Some(elem) = iter.next(self)? {
            let jstr = JString::from(elem);
            let val = self.get_string(&jstr)?;
            // to_str() validates UTF-8; invalid data surfaces as InvalidArgument.
            results.push(val.to_str()?.to_string())
        }
        Ok(results)
    }

    fn get_strings_array(&mut self, obj: jobjectArray) -> Result<Vec<String>> {
        // Safety: the caller guarantees `obj` is a live String[] reference.
        let jobject_array = unsafe { JObjectArray::from_raw(obj) };
        let array_len = self.get_array_length(&jobject_array)?;
        let mut res: Vec<String> = Vec::new();
        for i in 0..array_len {
            let item: JString = self.get_object_array_element(&jobject_array, i)?.into();
            res.push(self.get_string(&item)?.into());
        }
        Ok(res)
    }

    fn get_string_opt(&mut self, obj: &JObject) -> Result<Option<String>> {
        self.get_optional(obj, |env, inner_obj| {
            // Optional.get() returns Object; downcast to String and copy out.
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_string_obj = java_obj_gen.l()?;
            let jstr = JString::from(java_string_obj);
            let val = env.get_string(&jstr)?;
            Ok(val.to_str()?.to_string())
        })
    }

    fn get_strings_opt(&mut self, obj: &JObject) -> Result<Option<Vec<String>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_strings(&java_list_obj)
        })
    }

    fn get_int_opt(&mut self, obj: &JObject) -> Result<Option<i32>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_int_obj = java_obj_gen.l()?;
            let int_obj = env.call_method(java_int_obj, "intValue", "()I", &[])?;
            let int_value = int_obj.i()?;
            Ok(int_value)
        })
    }

    fn get_ints_opt(&mut self, obj: &JObject) -> Result<Option<Vec<i32>>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_list_obj = java_obj_gen.l()?;
            env.get_integers(&java_list_obj)
        })
    }

    fn get_long_opt(&mut self, obj: &JObject) -> Result<Option<i64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            Ok(long_value)
        })
    }

    fn get_u64_opt(&mut self, obj: &JObject) -> Result<Option<u64>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_long_obj = java_obj_gen.l()?;
            let long_obj = env.call_method(java_long_obj, "longValue", "()J", &[])?;
            let long_value = long_obj.j()?;
            // NOTE(review): negative Java longs reinterpret as large u64 values
            // here — confirm callers only pass non-negative longs.
            Ok(long_value as u64)
        })
    }

    fn get_bytes_opt(&mut self, obj: &JObject) -> Result<Option<&[u8]>> {
        self.get_optional(obj, |env, inner_obj| {
            let java_obj_gen = env.call_method(inner_obj, "get", "()Ljava/lang/Object;", &[])?;
            let java_byte_buffer_obj = java_obj_gen.l()?;
            let j_byte_buffer = JByteBuffer::from(java_byte_buffer_obj);
            // Only works for direct ByteBuffers; heap buffers have no address.
            let raw_data = env.get_direct_buffer_address(&j_byte_buffer)?;
            let capacity = env.get_direct_buffer_capacity(&j_byte_buffer)?;
            // Safety: valid only while the Java ByteBuffer is alive and unmoved.
            let data = unsafe { slice::from_raw_parts(raw_data, capacity) };
            Ok(data)
        })
    }

    fn get_optional<T, F>(&mut self, obj: &JObject, f: F) -> Result<Option<T>>
    where
        F: FnOnce(&mut JNIEnv, &JObject) -> Result<T>,
    {
        // A null reference is treated the same as Optional.empty().
        if obj.is_null() {
            return Ok(None);
        }
        let is_present = self.call_method(obj, "isPresent", "()Z", &[])?;
        if !is_present.z()? {
            // TODO(lu): put get java object into here cuz can only get java Object
            Ok(None)
        } else {
            f(self, obj).map(Some)
        }
    }
}
/// JNI test helper: `JniTestHelper.parseInts(List<Integer>)`.
/// Exercises `get_integers` from Java-side tests; throws on conversion failure.
#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseInts(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // List<Integer>
) {
    ok_or_throw_without_return!(env, env.get_integers(&list_obj));
}

/// JNI test helper: `JniTestHelper.parseIntsOpt(Optional<List<Integer>>)`.
/// Exercises `get_ints_opt` from Java-side tests.
#[no_mangle]
pub extern "system" fn Java_com_lancedb_lance_test_JniTestHelper_parseIntsOpt(
    mut env: JNIEnv,
    _obj: JObject,
    list_obj: JObject, // Optional<List<Integer>>
) {
    ok_or_throw_without_return!(env, env.get_ints_opt(&list_obj));
}

View File

@@ -0,0 +1,57 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use lazy_static::lazy_static;
// TODO import from lance-jni without duplicate

/// Evaluate `$result`; on error, convert it to `Error`, throw the matching
/// Java exception, and return `JObject::null()` from the enclosing JNI fn.
#[macro_export]
macro_rules! ok_or_throw {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return JObject::null();
            }
        }
    };
}

// Like `ok_or_throw!`, but for JNI entry points returning void.
macro_rules! ok_or_throw_without_return {
    ($env:expr, $result:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return;
            }
        }
    };
}

/// Like `ok_or_throw!`, but returns the caller-supplied `$ret` on error.
#[macro_export]
macro_rules! ok_or_throw_with_return {
    ($env:expr, $result:expr, $ret:expr) => {
        match $result {
            Ok(value) => value,
            Err(err) => {
                Error::from(err).throw(&mut $env);
                return $ret;
            }
        }
    };
}

mod connection;
pub mod error;
mod ffi;
mod traits;

pub use error::{Error, Result};

lazy_static! {
    // Shared multi-threaded tokio runtime used to drive all async LanceDB
    // operations from blocking JNI entry points.
    static ref RT: tokio::runtime::Runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .expect("Failed to create tokio runtime");
}

View File

@@ -0,0 +1,114 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use jni::objects::{JMap, JObject, JString, JValue};
use jni::JNIEnv;
use crate::Result;
/// Extract a plain Rust value from a Java object reference.
#[allow(dead_code)]
pub trait FromJObject<T> {
    fn extract(&self) -> Result<T>;
}

/// Convert a Rust type into a Java Object.
pub trait IntoJava {
    fn into_java<'a>(self, env: &mut JNIEnv<'a>) -> JObject<'a>;
}

// Primitive extractions go through JValue's typed accessors (i/j/f/d);
// a mismatched underlying type surfaces as a JNI error.
impl FromJObject<i32> for JObject<'_> {
    fn extract(&self) -> Result<i32> {
        Ok(JValue::from(self).i()?)
    }
}

impl FromJObject<i64> for JObject<'_> {
    fn extract(&self) -> Result<i64> {
        Ok(JValue::from(self).j()?)
    }
}

impl FromJObject<f32> for JObject<'_> {
    fn extract(&self) -> Result<f32> {
        Ok(JValue::from(self).f()?)
    }
}

impl FromJObject<f64> for JObject<'_> {
    fn extract(&self) -> Result<f64> {
        Ok(JValue::from(self).d()?)
    }
}
/// Copy a Java string into an owned Rust `String`.
#[allow(dead_code)]
pub trait FromJString {
    fn extract(&self, env: &mut JNIEnv) -> Result<String>;
}

impl FromJString for JString<'_> {
    fn extract(&self, env: &mut JNIEnv) -> Result<String> {
        Ok(env.get_string(self)?.into())
    }
}
pub trait JMapExt {
#[allow(dead_code)]
fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>>;
#[allow(dead_code)]
fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>>;
#[allow(dead_code)]
fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>>;
#[allow(dead_code)]
fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>>;
#[allow(dead_code)]
fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>>;
}
/// Look up `key` in a Java `Map` and convert the hit to `T`.
///
/// Returns `Ok(None)` when the key is absent or its value is Java `null`;
/// otherwise the value is converted through its `FromJObject` impl.
#[allow(dead_code)]
fn get_map_value<T>(env: &mut JNIEnv, map: &JMap, key: &str) -> Result<Option<T>>
where
    for<'a> JObject<'a>: FromJObject<T>,
{
    // Keys are Java strings, so materialize the Rust key in the JVM first.
    let jkey: JObject = env.new_string(key)?.into();
    match map.get(env, &jkey)? {
        Some(obj) if !obj.is_null() => Ok(Some(obj.extract()?)),
        _ => Ok(None),
    }
}
impl JMapExt for JMap<'_, '_, '_> {
    /// Fetch a `String` value for `key`; an absent key yields `None`.
    fn get_string(&self, env: &mut JNIEnv, key: &str) -> Result<Option<String>> {
        let jkey: JObject = env.new_string(key)?.into();
        match self.get(env, &jkey)? {
            Some(obj) => {
                // Reinterpret the map value as a Java string and copy it out.
                let jstr: JString = obj.into();
                Ok(Some(jstr.extract(env)?))
            }
            None => Ok(None),
        }
    }
    // The numeric getters all route through the shared typed lookup helper.
    fn get_i32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i32>> {
        get_map_value(env, self, key)
    }
    fn get_i64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<i64>> {
        get_map_value(env, self, key)
    }
    fn get_f32(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f32>> {
        get_map_value(env, self, key)
    }
    fn get_f64(&self, env: &mut JNIEnv, key: &str) -> Result<Option<f64>> {
        get_map_value(env, self, key)
    }
}

103
java/core/pom.xml Normal file
View File

@@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId>
<version>0.22.3-final.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>lancedb-core</artifactId>
<name>${project.artifactId}</name>
<description>LanceDB Core</description>
<packaging>jar</packaging>
<properties>
<rust.release.build>false</rust.release.build>
</properties>
<dependencies>
<dependency>
<groupId>com.lancedb</groupId>
<artifactId>lance-namespace-core</artifactId>
<version>0.0.1</version>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-vector</artifactId>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory-netty</artifactId>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-c-data</artifactId>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-dataset</artifactId>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
</dependency>
<dependency>
<groupId>org.questdb</groupId>
<artifactId>jar-jni</artifactId>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>build-jni</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.questdb</groupId>
<artifactId>rust-maven-plugin</artifactId>
<version>1.1.1</version>
<executions>
<execution>
<id>lancedb-jni</id>
<goals>
<goal>build</goal>
</goals>
<configuration>
<path>lancedb-jni</path>
<release>${rust.release.build}</release>
<!-- Copy native libraries to target/classes for runtime access -->
<copyTo>${project.build.directory}/classes/nativelib</copyTo>
<copyWithPlatformDir>true</copyWithPlatformDir>
</configuration>
</execution>
<execution>
<id>lancedb-jni-test</id>
<goals>
<goal>test</goal>
</goals>
<configuration>
<path>lancedb-jni</path>
<release>false</release>
<verbosity>-v</verbosity>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -0,0 +1,108 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lancedb.lancedb;
import io.questdb.jar.jni.JarJniLoader;
import java.io.Closeable;
import java.util.List;
import java.util.Optional;
/**
 * A LanceDB database handle backed by a native (JNI) connection.
 *
 * <p>Obtain instances via {@link #connect(String)}. Close the connection to
 * release the underlying native resources; closing twice is harmless.
 */
public class Connection implements Closeable {
  static {
    // Load the bundled JNI library before the first native call.
    JarJniLoader.loadLib(Connection.class, "/nativelib", "lancedb_jni");
  }

  // Opaque pointer to the native connection; 0 once released.
  private long nativeConnectionHandle;

  /** Connect to a LanceDB instance. */
  public static native Connection connect(String uri);

  /**
   * Get the names of all tables in the database, sorted in ascending order.
   *
   * @return the table names
   */
  public List<String> tableNames() {
    Optional<String> noStart = Optional.empty();
    Optional<Integer> noLimit = Optional.empty();
    return tableNames(noStart, noLimit);
  }

  /**
   * Get up to {@code limit} table names, sorted in ascending order.
   *
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(int limit) {
    Optional<String> noStart = Optional.empty();
    return tableNames(noStart, Optional.of(limit));
  }

  /**
   * Get the table names that sort after {@code startAfter}, in ascending order.
   *
   * @param startAfter Only return names that come lexicographically after the
   *     supplied value. Combine with a limit to implement pagination by passing
   *     the last table name from the previous page.
   * @return the table names
   */
  public List<String> tableNames(String startAfter) {
    Optional<Integer> noLimit = Optional.empty();
    return tableNames(Optional.of(startAfter), noLimit);
  }

  /**
   * Get up to {@code limit} table names that sort after {@code startAfter}.
   *
   * @param startAfter Only return names that come lexicographically after the
   *     supplied value. Combine with {@code limit} to implement pagination by
   *     passing the last table name from the previous page.
   * @param limit The number of results to return.
   * @return the table names
   */
  public List<String> tableNames(String startAfter, int limit) {
    return tableNames(Optional.of(startAfter), Optional.of(limit));
  }

  /**
   * Get the names of filtered tables in the database, sorted in ascending order.
   *
   * @param startAfter If present, only return names that come lexicographically
   *     after the supplied value; combine with {@code limit} to paginate.
   * @param limit The number of results to return.
   * @return the table names
   */
  public native List<String> tableNames(Optional<String> startAfter, Optional<Integer> limit);

  /**
   * Closes this connection and releases any native resources associated with
   * it. Invoking this method on an already-closed connection has no effect.
   */
  @Override
  public void close() {
    if (nativeConnectionHandle == 0) {
      return; // already released
    }
    releaseNativeConnection(nativeConnectionHandle);
    nativeConnectionHandle = 0;
  }

  /**
   * Native method to release the Lance connection resources associated with the given handle.
   *
   * @param handle The native handle to the connection resource.
   */
  private native void releaseNativeConnection(long handle);

  private Connection() {}
}

View File

@@ -0,0 +1,135 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lancedb.lancedb;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import java.net.URL;
import java.nio.file.Path;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ConnectionTest {
  private static final String[] TABLE_NAMES = {
    "dataset_version", "new_empty_dataset", "test", "write_stream"
  };

  @TempDir static Path tempDir; // Scratch directory for databases created by tests
  private static URL lanceDbURL;

  @BeforeAll
  static void setUp() {
    // The pre-built example database is bundled as a test resource.
    lanceDbURL = ConnectionTest.class.getClassLoader().getResource("example_db");
  }

  @Test
  void emptyDB() {
    // A freshly created database directory contains no tables.
    String databaseUri = tempDir.resolve("emptyDB").toString();
    try (Connection conn = Connection.connect(databaseUri)) {
      assertTrue(conn.tableNames().isEmpty());
    }
  }

  @Test
  void tableNames() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> names = conn.tableNames();
      assertEquals(4, names.size());
      int pos = 0;
      for (String expected : TABLE_NAMES) {
        assertEquals(expected, names.get(pos));
        pos++;
      }
    }
  }

  @Test
  void tableNamesStartAfter() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      // Starting after an existing table name skips that name itself.
      assertTableNamesStartAfter(
          conn, TABLE_NAMES[0], 3, TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[1], 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[2], 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, TABLE_NAMES[3], 0);
      // Values not present in the list act as plain lexicographic cut points.
      assertTableNamesStartAfter(
          conn, "a_dataset", 4, TABLE_NAMES[0], TABLE_NAMES[1], TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "o_dataset", 2, TABLE_NAMES[2], TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "v_dataset", 1, TABLE_NAMES[3]);
      assertTableNamesStartAfter(conn, "z_dataset", 0);
    }
  }

  // Verifies both the result count and the exact returned names.
  private void assertTableNamesStartAfter(
      Connection conn, String startAfter, int expectedSize, String... expectedNames) {
    List<String> names = conn.tableNames(startAfter);
    assertEquals(expectedSize, names.size());
    for (int pos = 0; pos < expectedNames.length; pos++) {
      assertEquals(expectedNames[pos], names.get(pos));
    }
  }

  @Test
  void tableNamesLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      // Every limit from 0 to the table count returns exactly that many names.
      for (int limit = 0; limit <= TABLE_NAMES.length; limit++) {
        List<String> names = conn.tableNames(limit);
        assertEquals(limit, names.size());
        for (int pos = 0; pos < limit; pos++) {
          assertEquals(TABLE_NAMES[pos], names.get(pos));
        }
      }
    }
  }

  @Test
  void tableNamesStartAfterLimit() {
    try (Connection conn = Connection.connect(lanceDbURL.toString())) {
      List<String> names = conn.tableNames(TABLE_NAMES[0], 2);
      assertEquals(2, names.size());
      assertEquals(TABLE_NAMES[1], names.get(0));
      assertEquals(TABLE_NAMES[2], names.get(1));

      names = conn.tableNames(TABLE_NAMES[1], 1);
      assertEquals(1, names.size());
      assertEquals(TABLE_NAMES[2], names.get(0));

      names = conn.tableNames(TABLE_NAMES[2], 2);
      assertEquals(1, names.size());
      assertEquals(TABLE_NAMES[3], names.get(0));

      names = conn.tableNames(TABLE_NAMES[3], 2);
      assertEquals(0, names.size());

      names = conn.tableNames(TABLE_NAMES[0], 0);
      assertEquals(0, names.size());

      // Limit larger than the number of remaining tables
      names = conn.tableNames(TABLE_NAMES[0], 10);
      assertEquals(3, names.size());
      assertEquals(TABLE_NAMES[1], names.get(0));
      assertEquals(TABLE_NAMES[2], names.get(1));
      assertEquals(TABLE_NAMES[3], names.get(2));

      // Start after a value not in the list
      names = conn.tableNames("non_existent_table", 2);
      assertEquals(2, names.size());
      assertEquals(TABLE_NAMES[2], names.get(0));
      assertEquals(TABLE_NAMES[3], names.get(1));

      // Start after the last table with a limit
      names = conn.tableNames(TABLE_NAMES[3], 1);
      assertEquals(0, names.size());
    }
  }
}

View File

@@ -0,0 +1 @@
$d51afd07-e3cd-4c76-9b9b-787e13fd55b0<62>=id <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*int3208name <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*string08

View File

@@ -0,0 +1 @@
$15648e72-076f-4ef1-8b90-10d305b95b3b<33>=id <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*int3208name <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*string08

View File

@@ -0,0 +1 @@
$a3689caf-4f6b-4afc-a3c7-97af75661843<34>oitem <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*string8price <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*double80vector <20><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>*fixed_size_list:float:28

View File

@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId>
<version>0.22.3-final.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>lancedb-lance-namespace</artifactId>
<name>${project.artifactId}</name>
<description>LanceDB Java Integration with Lance Namespace</description>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.lancedb</groupId>
<artifactId>lance-namespace-core</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -11,58 +11,35 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.lancedb; package com.lancedb.lancedb;
import org.lance.namespace.LanceNamespace; import com.lancedb.lance.namespace.LanceRestNamespace;
import com.lancedb.lance.namespace.client.apache.ApiClient;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
/** /** Util class to help construct a {@link LanceRestNamespace} for LanceDB. */
* Util class to help construct a {@link LanceNamespace} for LanceDB. public class LanceDbRestNamespaces {
*
* <p>For LanceDB Cloud, use the simplified builder API:
*
* <pre>{@code
* import org.lance.namespace.LanceNamespace;
*
* // If your DB url is db://example-db, then your database here is example-db
* LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
* .apiKey("your_lancedb_cloud_api_key")
* .database("your_database_name")
* .build();
* }</pre>
*
* <p>For LanceDB Enterprise deployments, use your custom endpoint:
*
* <pre>{@code
* LanceNamespace namespaceClient = LanceDbNamespaceClientBuilder.newBuilder()
* .apiKey("your_lancedb_enterprise_api_key")
* .database("your_database_name")
* .endpoint("<your_enterprise_endpoint>")
* .build();
* }</pre>
*/
public class LanceDbNamespaceClientBuilder {
private static final String DEFAULT_REGION = "us-east-1"; private static final String DEFAULT_REGION = "us-east-1";
private static final String CLOUD_URL_PATTERN = "https://%s.%s.api.lancedb.com"; private static final String CLOUD_URL_PATTERN = "https://%s.%s.api.lancedb.com";
private String apiKey; private String apiKey;
private String database; private String database;
private Optional<String> endpoint = Optional.empty(); private Optional<String> hostOverride = Optional.empty();
private Optional<String> region = Optional.empty(); private Optional<String> region = Optional.empty();
private Map<String, String> additionalConfig = new HashMap<>(); private Map<String, String> additionalConfig = new HashMap<>();
private LanceDbNamespaceClientBuilder() {} private LanceDbRestNamespaces() {}
/** /**
* Create a new builder instance. * Create a new builder instance.
* *
* @return A new LanceDbNamespaceClientBuilder * @return A new LanceRestNamespaceBuilder
*/ */
public static LanceDbNamespaceClientBuilder newBuilder() { public static LanceDbRestNamespaces builder() {
return new LanceDbNamespaceClientBuilder(); return new LanceDbRestNamespaces();
} }
/** /**
@@ -71,7 +48,7 @@ public class LanceDbNamespaceClientBuilder {
* @param apiKey The LanceDB API key * @param apiKey The LanceDB API key
* @return This builder * @return This builder
*/ */
public LanceDbNamespaceClientBuilder apiKey(String apiKey) { public LanceDbRestNamespaces apiKey(String apiKey) {
if (apiKey == null || apiKey.trim().isEmpty()) { if (apiKey == null || apiKey.trim().isEmpty()) {
throw new IllegalArgumentException("API key cannot be null or empty"); throw new IllegalArgumentException("API key cannot be null or empty");
} }
@@ -85,7 +62,7 @@ public class LanceDbNamespaceClientBuilder {
* @param database The database name * @param database The database name
* @return This builder * @return This builder
*/ */
public LanceDbNamespaceClientBuilder database(String database) { public LanceDbRestNamespaces database(String database) {
if (database == null || database.trim().isEmpty()) { if (database == null || database.trim().isEmpty()) {
throw new IllegalArgumentException("Database cannot be null or empty"); throw new IllegalArgumentException("Database cannot be null or empty");
} }
@@ -94,25 +71,25 @@ public class LanceDbNamespaceClientBuilder {
} }
/** /**
* Set a custom endpoint URL (optional). When set, this overrides the default LanceDB Cloud URL * Set a custom host override (optional). When set, this overrides the default LanceDB Cloud URL
* construction. Use this for LanceDB Enterprise deployments. * construction. Use this for LanceDB Enterprise deployments.
* *
* @param endpoint The complete base URL for your LanceDB Enterprise deployment * @param hostOverride The complete base URL (e.g., "http://your-vpc-endpoint:80")
* @return This builder * @return This builder
*/ */
public LanceDbNamespaceClientBuilder endpoint(String endpoint) { public LanceDbRestNamespaces hostOverride(String hostOverride) {
this.endpoint = Optional.ofNullable(endpoint); this.hostOverride = Optional.ofNullable(hostOverride);
return this; return this;
} }
/** /**
* Set the region for LanceDB Cloud (optional). Defaults to "us-east-1" if not specified. This is * Set the region for LanceDB Cloud (optional). Defaults to "us-east-1" if not specified. This is
* ignored when endpoint is set. * ignored when hostOverride is set.
* *
* @param region The AWS region (e.g., "us-east-1", "eu-west-1") * @param region The AWS region (e.g., "us-east-1", "eu-west-1")
* @return This builder * @return This builder
*/ */
public LanceDbNamespaceClientBuilder region(String region) { public LanceDbRestNamespaces region(String region) {
this.region = Optional.ofNullable(region); this.region = Optional.ofNullable(region);
return this; return this;
} }
@@ -124,18 +101,18 @@ public class LanceDbNamespaceClientBuilder {
* @param value The configuration value * @param value The configuration value
* @return This builder * @return This builder
*/ */
public LanceDbNamespaceClientBuilder config(String key, String value) { public LanceDbRestNamespaces config(String key, String value) {
this.additionalConfig.put(key, value); this.additionalConfig.put(key, value);
return this; return this;
} }
/** /**
* Build the LanceNamespace instance. * Build the LanceRestNamespace instance.
* *
* @return A configured LanceNamespace * @return A configured LanceRestNamespace
* @throws IllegalStateException if required parameters are missing * @throws IllegalStateException if required parameters are missing
*/ */
public LanceNamespace build() { public LanceRestNamespace build() {
// Validate required fields // Validate required fields
if (apiKey == null) { if (apiKey == null) {
throw new IllegalStateException("API key is required"); throw new IllegalStateException("API key is required");
@@ -146,19 +123,24 @@ public class LanceDbNamespaceClientBuilder {
// Build configuration map // Build configuration map
Map<String, String> config = new HashMap<>(additionalConfig); Map<String, String> config = new HashMap<>(additionalConfig);
config.put("header.x-lancedb-database", database); config.put("headers.x-lancedb-database", database);
config.put("header.x-api-key", apiKey); config.put("headers.x-api-key", apiKey);
// Determine base URL // Determine base URL
String uri; String baseUrl;
if (endpoint.isPresent()) { if (hostOverride.isPresent()) {
uri = endpoint.get(); baseUrl = hostOverride.get();
config.put("host_override", hostOverride.get());
} else { } else {
String effectiveRegion = region.orElse(DEFAULT_REGION); String effectiveRegion = region.orElse(DEFAULT_REGION);
uri = String.format(CLOUD_URL_PATTERN, database, effectiveRegion); baseUrl = String.format(CLOUD_URL_PATTERN, database, effectiveRegion);
config.put("region", effectiveRegion);
} }
config.put("uri", uri);
return LanceNamespace.connect("rest", config, null); // Create and configure ApiClient
ApiClient apiClient = new ApiClient();
apiClient.setBasePath(baseUrl);
return new LanceRestNamespace(apiClient, config);
} }
} }

View File

@@ -1,99 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId>
<version>0.23.1-final.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>lancedb-core</artifactId>
<name>${project.artifactId}</name>
<description>Utilities to work with LanceDB Cloud and Enterprise via Lance REST Namespace</description>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.lance</groupId>
<artifactId>lance-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-vector</artifactId>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory-netty</artifactId>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<version>5.18.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.16</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j2-impl</artifactId>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,96 +0,0 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lancedb;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/** Unit tests for {@code LanceDbNamespaceClientBuilder}. */
public class LanceDbNamespaceClientBuilderTest {

  @Test
  public void testBuilderRequiresApiKey() {
    // Supplying only a database must make build() reject the missing API key.
    LanceDbNamespaceClientBuilder builder =
        LanceDbNamespaceClientBuilder.newBuilder().database("test-db");
    IllegalStateException thrown =
        assertThrows(IllegalStateException.class, () -> builder.build());
    assertEquals("API key is required", thrown.getMessage());
  }

  @Test
  public void testBuilderRequiresDatabase() {
    // Supplying only an API key must make build() reject the missing database.
    LanceDbNamespaceClientBuilder builder =
        LanceDbNamespaceClientBuilder.newBuilder().apiKey("test-api-key");
    IllegalStateException thrown =
        assertThrows(IllegalStateException.class, () -> builder.build());
    assertEquals("Database is required", thrown.getMessage());
  }

  @Test
  public void testApiKeyCannotBeNull() {
    IllegalArgumentException thrown =
        assertThrows(
            IllegalArgumentException.class,
            () -> LanceDbNamespaceClientBuilder.newBuilder().apiKey(null));
    assertEquals("API key cannot be null or empty", thrown.getMessage());
  }

  @Test
  public void testApiKeyCannotBeEmpty() {
    // Whitespace-only keys are treated as empty.
    IllegalArgumentException thrown =
        assertThrows(
            IllegalArgumentException.class,
            () -> LanceDbNamespaceClientBuilder.newBuilder().apiKey(" "));
    assertEquals("API key cannot be null or empty", thrown.getMessage());
  }

  @Test
  public void testDatabaseCannotBeNull() {
    IllegalArgumentException thrown =
        assertThrows(
            IllegalArgumentException.class,
            () -> LanceDbNamespaceClientBuilder.newBuilder().database(null));
    assertEquals("Database cannot be null or empty", thrown.getMessage());
  }

  @Test
  public void testDatabaseCannotBeEmpty() {
    // Whitespace-only database names are treated as empty.
    IllegalArgumentException thrown =
        assertThrows(
            IllegalArgumentException.class,
            () -> LanceDbNamespaceClientBuilder.newBuilder().database(" "));
    assertEquals("Database cannot be null or empty", thrown.getMessage());
  }

  @Test
  public void testBuilderFluentApi() {
    // Every setter must return the same builder instance to allow chaining.
    LanceDbNamespaceClientBuilder builder = LanceDbNamespaceClientBuilder.newBuilder();
    assertSame(builder, builder.apiKey("test-key"));
    assertSame(builder, builder.database("test-db"));
    assertSame(builder, builder.endpoint("http://localhost:8080"));
    assertSame(builder, builder.region("eu-west-1"));
    assertSame(builder, builder.config("custom-key", "custom-value"));
  }

  @Test
  public void testNewBuilderCreatesNewInstance() {
    LanceDbNamespaceClientBuilder first = LanceDbNamespaceClientBuilder.newBuilder();
    LanceDbNamespaceClientBuilder second = LanceDbNamespaceClientBuilder.newBuilder();
    assertNotSame(first, second);
  }
}

View File

@@ -1,32 +0,0 @@
<?xml version='1.0' encoding='UTF-8'?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<configuration monitorInterval="30">
<appenders>
<Console name='Console' target='SYSTEM_ERR'>
<PatternLayout pattern='%d{HH:mm:ss.SSS} %p [%t] %C{1}.%M: %m%n'/>
</Console>
</appenders>
<loggers>
<logger name='com.lancedb' level='DEBUG' additivity='false'>
<appender-ref ref='Console'/>
</logger>
<root level='INFO'>
<appender-ref ref='Console'/>
</root>
</loggers>
</configuration>

View File

@@ -6,7 +6,7 @@
<groupId>com.lancedb</groupId> <groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId> <artifactId>lancedb-parent</artifactId>
<version>0.23.1-final.0</version> <version>0.22.3-final.0</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>${project.artifactId}</name> <name>${project.artifactId}</name>
<description>LanceDB Java SDK Parent POM</description> <description>LanceDB Java SDK Parent POM</description>
@@ -28,7 +28,7 @@
<properties> <properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<arrow.version>15.0.0</arrow.version> <arrow.version>15.0.0</arrow.version>
<lance-core.version>1.0.0-rc.2</lance-core.version> <lance-namespace.verison>0.0.1</lance-namespace.verison>
<spotless.skip>false</spotless.skip> <spotless.skip>false</spotless.skip>
<spotless.version>2.30.0</spotless.version> <spotless.version>2.30.0</spotless.version>
<spotless.java.googlejavaformat.version>1.7</spotless.java.googlejavaformat.version> <spotless.java.googlejavaformat.version>1.7</spotless.java.googlejavaformat.version>
@@ -51,7 +51,8 @@
</properties> </properties>
<modules> <modules>
<module>lancedb-core</module> <module>core</module>
<module>lance-namespace</module>
</modules> </modules>
<scm> <scm>
@@ -63,9 +64,9 @@
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.lance</groupId> <groupId>com.lancedb</groupId>
<artifactId>lance-core</artifactId> <artifactId>lance-namespace-core</artifactId>
<version>${lance-core.version}</version> <version>${lance-namespace.verison}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.arrow</groupId> <groupId>org.apache.arrow</groupId>
@@ -87,11 +88,21 @@
<artifactId>arrow-dataset</artifactId> <artifactId>arrow-dataset</artifactId>
<version>${arrow.version}</version> <version>${arrow.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.questdb</groupId>
<artifactId>jar-jni</artifactId>
<version>1.1.1</version>
</dependency>
<dependency> <dependency>
<groupId>org.junit.jupiter</groupId> <groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId> <artifactId>junit-jupiter</artifactId>
<version>5.10.1</version> <version>5.10.1</version>
</dependency> </dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20210307</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement> </dependencyManagement>

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "lancedb-nodejs" name = "lancedb-nodejs"
edition.workspace = true edition.workspace = true
version = "0.23.1" version = "0.22.3"
license.workspace = true license.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true

View File

@@ -30,7 +30,7 @@ const results = await table.vectorSearch([0.1, 0.3]).limit(20).toArray();
console.log(results); console.log(results);
``` ```
The [quickstart](https://lancedb.com/docs/quickstart/basic-usage/) contains more complete examples. The [quickstart](https://lancedb.github.io/lancedb/basic/) contains a more complete example.
## Development ## Development

View File

@@ -42,7 +42,7 @@ export interface CreateTableOptions {
* Options already set on the connection will be inherited by the table, * Options already set on the connection will be inherited by the table,
* but can be overridden here. * but can be overridden here.
* *
* The available options are described at https://lancedb.com/docs/storage/ * The available options are described at https://lancedb.github.io/lancedb/guides/storage/
*/ */
storageOptions?: Record<string, string>; storageOptions?: Record<string, string>;
@@ -78,7 +78,7 @@ export interface OpenTableOptions {
* Options already set on the connection will be inherited by the table, * Options already set on the connection will be inherited by the table,
* but can be overridden here. * but can be overridden here.
* *
* The available options are described at https://lancedb.com/docs/storage/ * The available options are described at https://lancedb.github.io/lancedb/guides/storage/
*/ */
storageOptions?: Record<string, string>; storageOptions?: Record<string, string>;
/** /**

View File

@@ -118,7 +118,7 @@ export class PermutationBuilder {
* @returns A new PermutationBuilder instance * @returns A new PermutationBuilder instance
* @example * @example
* ```ts * ```ts
* builder.splitCalculated({ calculation: "user_id % 3" }); * builder.splitCalculated("user_id % 3");
* ``` * ```
*/ */
splitCalculated(options: SplitCalculatedOptions): PermutationBuilder { splitCalculated(options: SplitCalculatedOptions): PermutationBuilder {

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-darwin-arm64", "name": "@lancedb/lancedb-darwin-arm64",
"version": "0.23.1", "version": "0.22.3",
"os": ["darwin"], "os": ["darwin"],
"cpu": ["arm64"], "cpu": ["arm64"],
"main": "lancedb.darwin-arm64.node", "main": "lancedb.darwin-arm64.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-darwin-x64", "name": "@lancedb/lancedb-darwin-x64",
"version": "0.23.1", "version": "0.22.3",
"os": ["darwin"], "os": ["darwin"],
"cpu": ["x64"], "cpu": ["x64"],
"main": "lancedb.darwin-x64.node", "main": "lancedb.darwin-x64.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-linux-arm64-gnu", "name": "@lancedb/lancedb-linux-arm64-gnu",
"version": "0.23.1", "version": "0.22.3",
"os": ["linux"], "os": ["linux"],
"cpu": ["arm64"], "cpu": ["arm64"],
"main": "lancedb.linux-arm64-gnu.node", "main": "lancedb.linux-arm64-gnu.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-linux-arm64-musl", "name": "@lancedb/lancedb-linux-arm64-musl",
"version": "0.23.1", "version": "0.22.3",
"os": ["linux"], "os": ["linux"],
"cpu": ["arm64"], "cpu": ["arm64"],
"main": "lancedb.linux-arm64-musl.node", "main": "lancedb.linux-arm64-musl.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-linux-x64-gnu", "name": "@lancedb/lancedb-linux-x64-gnu",
"version": "0.23.1", "version": "0.22.3",
"os": ["linux"], "os": ["linux"],
"cpu": ["x64"], "cpu": ["x64"],
"main": "lancedb.linux-x64-gnu.node", "main": "lancedb.linux-x64-gnu.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-linux-x64-musl", "name": "@lancedb/lancedb-linux-x64-musl",
"version": "0.23.1", "version": "0.22.3",
"os": ["linux"], "os": ["linux"],
"cpu": ["x64"], "cpu": ["x64"],
"main": "lancedb.linux-x64-musl.node", "main": "lancedb.linux-x64-musl.node",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-win32-arm64-msvc", "name": "@lancedb/lancedb-win32-arm64-msvc",
"version": "0.23.1", "version": "0.22.3",
"os": [ "os": [
"win32" "win32"
], ],

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lancedb/lancedb-win32-x64-msvc", "name": "@lancedb/lancedb-win32-x64-msvc",
"version": "0.23.1", "version": "0.22.3",
"os": ["win32"], "os": ["win32"],
"cpu": ["x64"], "cpu": ["x64"],
"main": "lancedb.win32-x64-msvc.node", "main": "lancedb.win32-x64-msvc.node",

View File

@@ -1,12 +1,12 @@
{ {
"name": "@lancedb/lancedb", "name": "@lancedb/lancedb",
"version": "0.23.1", "version": "0.22.3",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@lancedb/lancedb", "name": "@lancedb/lancedb",
"version": "0.23.1", "version": "0.22.3",
"cpu": [ "cpu": [
"x64", "x64",
"arm64" "arm64"

View File

@@ -11,7 +11,7 @@
"ann" "ann"
], ],
"private": false, "private": false,
"version": "0.23.1", "version": "0.22.3",
"main": "dist/index.js", "main": "dist/index.js",
"exports": { "exports": {
".": "./dist/index.js", ".": "./dist/index.js",
@@ -73,10 +73,8 @@
"scripts": { "scripts": {
"artifacts": "napi artifacts", "artifacts": "napi artifacts",
"build:debug": "napi build --platform --no-const-enum --dts ../lancedb/native.d.ts --js ../lancedb/native.js lancedb", "build:debug": "napi build --platform --no-const-enum --dts ../lancedb/native.d.ts --js ../lancedb/native.js lancedb",
"postbuild:debug": "shx mkdir -p dist && shx cp lancedb/*.node dist/",
"build:release": "napi build --platform --no-const-enum --release --dts ../lancedb/native.d.ts --js ../lancedb/native.js dist/", "build:release": "napi build --platform --no-const-enum --release --dts ../lancedb/native.d.ts --js ../lancedb/native.js dist/",
"postbuild:release": "shx mkdir -p dist && shx cp lancedb/*.node dist/", "build": "npm run build:debug && npm run tsc && shx cp lancedb/*.node dist/",
"build": "npm run build:debug && npm run tsc",
"build-release": "npm run build:release && npm run tsc", "build-release": "npm run build:release && npm run tsc",
"tsc": "tsc -b", "tsc": "tsc -b",
"posttsc": "shx cp lancedb/native.d.ts dist/native.d.ts", "posttsc": "shx cp lancedb/native.d.ts dist/native.d.ts",

View File

@@ -35,7 +35,7 @@ pub struct ConnectionOptions {
pub read_consistency_interval: Option<f64>, pub read_consistency_interval: Option<f64>,
/// (For LanceDB OSS only): configuration for object storage. /// (For LanceDB OSS only): configuration for object storage.
/// ///
/// The available options are described at https://lancedb.com/docs/storage/ /// The available options are described at https://lancedb.github.io/lancedb/guides/storage/
pub storage_options: Option<HashMap<String, String>>, pub storage_options: Option<HashMap<String, String>>,
/// (For LanceDB OSS only): the session to use for this connection. Holds /// (For LanceDB OSS only): the session to use for this connection. Holds
/// shared caches and other session-specific state. /// shared caches and other session-specific state.

View File

@@ -740,7 +740,6 @@ pub struct MergeResult {
pub num_inserted_rows: i64, pub num_inserted_rows: i64,
pub num_updated_rows: i64, pub num_updated_rows: i64,
pub num_deleted_rows: i64, pub num_deleted_rows: i64,
pub num_attempts: i64,
} }
impl From<lancedb::table::MergeResult> for MergeResult { impl From<lancedb::table::MergeResult> for MergeResult {
@@ -750,7 +749,6 @@ impl From<lancedb::table::MergeResult> for MergeResult {
num_inserted_rows: value.num_inserted_rows as i64, num_inserted_rows: value.num_inserted_rows as i64,
num_updated_rows: value.num_updated_rows as i64, num_updated_rows: value.num_updated_rows as i64,
num_deleted_rows: value.num_deleted_rows as i64, num_deleted_rows: value.num_deleted_rows as i64,
num_attempts: value.num_attempts as i64,
} }
} }
} }

View File

@@ -1,5 +1,5 @@
[tool.bumpversion] [tool.bumpversion]
current_version = "0.26.1" current_version = "0.25.3"
parse = """(?x) parse = """(?x)
(?P<major>0|[1-9]\\d*)\\. (?P<major>0|[1-9]\\d*)\\.
(?P<minor>0|[1-9]\\d*)\\. (?P<minor>0|[1-9]\\d*)\\.

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "lancedb-python" name = "lancedb-python"
version = "0.26.1" version = "0.25.3"
edition.workspace = true edition.workspace = true
description = "Python bindings for LanceDB" description = "Python bindings for LanceDB"
license.workspace = true license.workspace = true
@@ -17,9 +17,6 @@ crate-type = ["cdylib"]
arrow = { version = "56.2", features = ["pyarrow"] } arrow = { version = "56.2", features = ["pyarrow"] }
async-trait = "0.1" async-trait = "0.1"
lancedb = { path = "../rust/lancedb", default-features = false } lancedb = { path = "../rust/lancedb", default-features = false }
lance-core.workspace = true
lance-namespace.workspace = true
lance-io.workspace = true
env_logger.workspace = true env_logger.workspace = true
pyo3 = { version = "0.25", features = ["extension-module", "abi3-py39"] } pyo3 = { version = "0.25", features = ["extension-module", "abi3-py39"] }
pyo3-async-runtimes = { version = "0.25", features = [ pyo3-async-runtimes = { version = "0.25", features = [
@@ -28,7 +25,6 @@ pyo3-async-runtimes = { version = "0.25", features = [
] } ] }
pin-project = "1.1.5" pin-project = "1.1.5"
futures.workspace = true futures.workspace = true
snafu.workspace = true
tokio = { version = "1.40", features = ["sync"] } tokio = { version = "1.40", features = ["sync"] }
[build-dependencies] [build-dependencies]

View File

@@ -1,11 +1,11 @@
PIP_EXTRA_INDEX_URL ?= https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/ PIP_EXTRA_INDEX_URL ?= https://pypi.fury.io/lancedb/
help: ## Show this help. help: ## Show this help.
@sed -ne '/@sed/!s/## //p' $(MAKEFILE_LIST) @sed -ne '/@sed/!s/## //p' $(MAKEFILE_LIST)
.PHONY: develop .PHONY: develop
develop: ## Install the package in development mode. develop: ## Install the package in development mode.
PIP_EXTRA_INDEX_URL="$(PIP_EXTRA_INDEX_URL)" maturin develop --extras tests,dev,embeddings PIP_EXTRA_INDEX_URL=$(PIP_EXTRA_INDEX_URL) maturin develop --extras tests,dev,embeddings
.PHONY: format .PHONY: format
format: ## Format the code. format: ## Format the code.

View File

@@ -10,7 +10,7 @@ dependencies = [
"pyarrow>=16", "pyarrow>=16",
"pydantic>=1.10", "pydantic>=1.10",
"tqdm>=4.27.0", "tqdm>=4.27.0",
"lance-namespace>=0.3.2" "lance-namespace>=0.0.16"
] ]
description = "lancedb" description = "lancedb"
authors = [{ name = "LanceDB Devs", email = "dev@lancedb.com" }] authors = [{ name = "LanceDB Devs", email = "dev@lancedb.com" }]
@@ -45,7 +45,7 @@ repository = "https://github.com/lancedb/lancedb"
[project.optional-dependencies] [project.optional-dependencies]
pylance = [ pylance = [
"pylance>=1.0.0b14", "pylance>=0.25",
] ]
tests = [ tests = [
"aiohttp", "aiohttp",
@@ -59,7 +59,7 @@ tests = [
"polars>=0.19, <=1.3.0", "polars>=0.19, <=1.3.0",
"tantivy", "tantivy",
"pyarrow-stubs", "pyarrow-stubs",
"pylance>=1.0.0b14", "pylance>=0.25",
"requests", "requests",
"datafusion", "datafusion",
] ]

View File

@@ -13,53 +13,13 @@ __version__ = importlib.metadata.version("lancedb")
from ._lancedb import connect as lancedb_connect from ._lancedb import connect as lancedb_connect
from .common import URI, sanitize_uri from .common import URI, sanitize_uri
from urllib.parse import urlparse
from .db import AsyncConnection, DBConnection, LanceDBConnection from .db import AsyncConnection, DBConnection, LanceDBConnection
from .io import StorageOptionsProvider
from .remote import ClientConfig from .remote import ClientConfig
from .remote.db import RemoteDBConnection from .remote.db import RemoteDBConnection
from .schema import vector from .schema import vector
from .table import AsyncTable, Table from .table import AsyncTable, Table
from ._lancedb import Session from ._lancedb import Session
from .namespace import ( from .namespace import connect_namespace, LanceNamespaceDBConnection
connect_namespace,
connect_namespace_async,
LanceNamespaceDBConnection,
AsyncLanceNamespaceDBConnection,
)
def _check_s3_bucket_with_dots(
uri: str, storage_options: Optional[Dict[str, str]]
) -> None:
"""
Check if an S3 URI has a bucket name containing dots and warn if no region
is specified. S3 buckets with dots cannot use virtual-hosted-style URLs,
which breaks automatic region detection.
See: https://github.com/lancedb/lancedb/issues/1898
"""
if not isinstance(uri, str) or not uri.startswith("s3://"):
return
parsed = urlparse(uri)
bucket = parsed.netloc
if "." not in bucket:
return
# Check if region is provided in storage_options
region_keys = {"region", "aws_region"}
has_region = storage_options and any(k in storage_options for k in region_keys)
if not has_region:
raise ValueError(
f"S3 bucket name '{bucket}' contains dots, which prevents automatic "
f"region detection. Please specify the region explicitly via "
f"storage_options={{'region': '<your-region>'}} or "
f"storage_options={{'aws_region': '<your-region>'}}. "
f"See https://github.com/lancedb/lancedb/issues/1898 for details."
)
def connect( def connect(
@@ -75,7 +35,7 @@ def connect(
session: Optional[Session] = None, session: Optional[Session] = None,
**kwargs: Any, **kwargs: Any,
) -> DBConnection: ) -> DBConnection:
"""Connect to a LanceDB database. """Connect to a LanceDB database. YAY!
Parameters Parameters
---------- ----------
@@ -106,7 +66,7 @@ def connect(
default configuration is used. default configuration is used.
storage_options: dict, optional storage_options: dict, optional
Additional options for the storage backend. See available options at Additional options for the storage backend. See available options at
<https://lancedb.com/docs/storage/> <https://lancedb.github.io/lancedb/guides/storage/>
session: Session, optional session: Session, optional
(For LanceDB OSS only) (For LanceDB OSS only)
A session to use for this connection. Sessions allow you to configure A session to use for this connection. Sessions allow you to configure
@@ -155,11 +115,9 @@ def connect(
storage_options=storage_options, storage_options=storage_options,
**kwargs, **kwargs,
) )
_check_s3_bucket_with_dots(str(uri), storage_options)
if kwargs: if kwargs:
raise ValueError(f"Unknown keyword arguments: {kwargs}") raise ValueError(f"Unknown keyword arguments: {kwargs}")
return LanceDBConnection( return LanceDBConnection(
uri, uri,
read_consistency_interval=read_consistency_interval, read_consistency_interval=read_consistency_interval,
@@ -210,7 +168,7 @@ async def connect_async(
default configuration is used. default configuration is used.
storage_options: dict, optional storage_options: dict, optional
Additional options for the storage backend. See available options at Additional options for the storage backend. See available options at
<https://lancedb.com/docs/storage/> <https://lancedb.github.io/lancedb/guides/storage/>
session: Session, optional session: Session, optional
(For LanceDB OSS only) (For LanceDB OSS only)
A session to use for this connection. Sessions allow you to configure A session to use for this connection. Sessions allow you to configure
@@ -247,8 +205,6 @@ async def connect_async(
if isinstance(client_config, dict): if isinstance(client_config, dict):
client_config = ClientConfig(**client_config) client_config = ClientConfig(**client_config)
_check_s3_bucket_with_dots(str(uri), storage_options)
return AsyncConnection( return AsyncConnection(
await lancedb_connect( await lancedb_connect(
sanitize_uri(uri), sanitize_uri(uri),
@@ -267,9 +223,7 @@ __all__ = [
"connect", "connect",
"connect_async", "connect_async",
"connect_namespace", "connect_namespace",
"connect_namespace_async",
"AsyncConnection", "AsyncConnection",
"AsyncLanceNamespaceDBConnection",
"AsyncTable", "AsyncTable",
"URI", "URI",
"sanitize_uri", "sanitize_uri",
@@ -279,7 +233,6 @@ __all__ = [
"LanceNamespaceDBConnection", "LanceNamespaceDBConnection",
"RemoteDBConnection", "RemoteDBConnection",
"Session", "Session",
"StorageOptionsProvider",
"Table", "Table",
"__version__", "__version__",
] ]

View File

@@ -3,30 +3,9 @@ from typing import Dict, List, Optional, Tuple, Any, TypedDict, Union, Literal
import pyarrow as pa import pyarrow as pa
from .index import ( from .index import BTree, IvfFlat, IvfPq, Bitmap, LabelList, HnswPq, HnswSq, FTS
BTree,
IvfFlat,
IvfPq,
IvfSq,
Bitmap,
LabelList,
HnswPq,
HnswSq,
FTS,
)
from .io import StorageOptionsProvider
from lance_namespace import (
ListNamespacesResponse,
CreateNamespaceResponse,
DropNamespaceResponse,
DescribeNamespaceResponse,
ListTablesResponse,
)
from .remote import ClientConfig from .remote import ClientConfig
IvfHnswPq: type[HnswPq] = HnswPq
IvfHnswSq: type[HnswSq] = HnswSq
class Session: class Session:
def __init__( def __init__(
self, self,
@@ -45,73 +24,47 @@ class Connection(object):
async def is_open(self): ... async def is_open(self): ...
async def close(self): ... async def close(self): ...
async def list_namespaces( async def list_namespaces(
self,
namespace: Optional[List[str]] = None,
page_token: Optional[str] = None,
limit: Optional[int] = None,
) -> ListNamespacesResponse: ...
async def create_namespace(
self, self,
namespace: List[str], namespace: List[str],
mode: Optional[str] = None, page_token: Optional[str],
properties: Optional[Dict[str, str]] = None, limit: Optional[int],
) -> CreateNamespaceResponse: ... ) -> List[str]: ...
async def drop_namespace( async def create_namespace(self, namespace: List[str]) -> None: ...
self, async def drop_namespace(self, namespace: List[str]) -> None: ...
namespace: List[str],
mode: Optional[str] = None,
behavior: Optional[str] = None,
) -> DropNamespaceResponse: ...
async def describe_namespace(
self,
namespace: List[str],
) -> DescribeNamespaceResponse: ...
async def list_tables(
self,
namespace: Optional[List[str]] = None,
page_token: Optional[str] = None,
limit: Optional[int] = None,
) -> ListTablesResponse: ...
async def table_names( async def table_names(
self, self,
namespace: Optional[List[str]], namespace: List[str],
start_after: Optional[str], start_after: Optional[str],
limit: Optional[int], limit: Optional[int],
) -> list[str]: ... # Deprecated: Use list_tables instead ) -> list[str]: ...
async def create_table( async def create_table(
self, self,
name: str, name: str,
mode: str, mode: str,
data: pa.RecordBatchReader, data: pa.RecordBatchReader,
namespace: Optional[List[str]] = None, namespace: List[str] = [],
storage_options: Optional[Dict[str, str]] = None, storage_options: Optional[Dict[str, str]] = None,
storage_options_provider: Optional[StorageOptionsProvider] = None,
location: Optional[str] = None,
) -> Table: ... ) -> Table: ...
async def create_empty_table( async def create_empty_table(
self, self,
name: str, name: str,
mode: str, mode: str,
schema: pa.Schema, schema: pa.Schema,
namespace: Optional[List[str]] = None, namespace: List[str] = [],
storage_options: Optional[Dict[str, str]] = None, storage_options: Optional[Dict[str, str]] = None,
storage_options_provider: Optional[StorageOptionsProvider] = None,
location: Optional[str] = None,
) -> Table: ... ) -> Table: ...
async def open_table( async def open_table(
self, self,
name: str, name: str,
namespace: Optional[List[str]] = None, namespace: List[str] = [],
storage_options: Optional[Dict[str, str]] = None, storage_options: Optional[Dict[str, str]] = None,
storage_options_provider: Optional[StorageOptionsProvider] = None,
index_cache_size: Optional[int] = None, index_cache_size: Optional[int] = None,
location: Optional[str] = None,
) -> Table: ... ) -> Table: ...
async def clone_table( async def clone_table(
self, self,
target_table_name: str, target_table_name: str,
source_uri: str, source_uri: str,
target_namespace: Optional[List[str]] = None, target_namespace: List[str] = [],
source_version: Optional[int] = None, source_version: Optional[int] = None,
source_tag: Optional[str] = None, source_tag: Optional[str] = None,
is_shallow: bool = True, is_shallow: bool = True,
@@ -120,13 +73,11 @@ class Connection(object):
self, self,
cur_name: str, cur_name: str,
new_name: str, new_name: str,
cur_namespace: Optional[List[str]] = None, cur_namespace: List[str] = [],
new_namespace: Optional[List[str]] = None, new_namespace: List[str] = [],
) -> None: ... ) -> None: ...
async def drop_table( async def drop_table(self, name: str, namespace: List[str] = []) -> None: ...
self, name: str, namespace: Optional[List[str]] = None async def drop_all_tables(self, namespace: List[str] = []) -> None: ...
) -> None: ...
async def drop_all_tables(self, namespace: Optional[List[str]] = None) -> None: ...
class Table: class Table:
def name(self) -> str: ... def name(self) -> str: ...
@@ -144,17 +95,7 @@ class Table:
async def create_index( async def create_index(
self, self,
column: str, column: str,
index: Union[ index: Union[IvfFlat, IvfPq, HnswPq, HnswSq, BTree, Bitmap, LabelList, FTS],
IvfFlat,
IvfSq,
IvfPq,
HnswPq,
HnswSq,
BTree,
Bitmap,
LabelList,
FTS,
],
replace: Optional[bool], replace: Optional[bool],
wait_timeout: Optional[object], wait_timeout: Optional[object],
*, *,
@@ -358,7 +299,6 @@ class MergeResult:
num_updated_rows: int num_updated_rows: int
num_inserted_rows: int num_inserted_rows: int
num_deleted_rows: int num_deleted_rows: int
num_attempts: int
class AddColumnsResult: class AddColumnsResult:
version: int version: int

View File

@@ -96,7 +96,7 @@ def data_to_reader(
f"Unknown data type {type(data)}. " f"Unknown data type {type(data)}. "
"Supported types: list of dicts, pandas DataFrame, polars DataFrame, " "Supported types: list of dicts, pandas DataFrame, polars DataFrame, "
"pyarrow Table/RecordBatch, or Pydantic models. " "pyarrow Table/RecordBatch, or Pydantic models. "
"See https://lancedb.com/docs/tables/ for examples." "See https://lancedb.github.io/lancedb/guides/tables/ for examples."
) )

Some files were not shown because too many files have changed in this diff Show More