Compare commits

..

1 Commits

Author SHA1 Message Date
Lance Release
bde059b763 Bump version: 0.26.0-beta.0 → 0.26.0 2026-02-06 18:08:12 +00:00
185 changed files with 8356 additions and 69149 deletions

View File

@@ -1,5 +1,5 @@
[tool.bumpversion]
current_version = "0.27.0"
current_version = "0.26.0"
parse = """(?x)
(?P<major>0|[1-9]\\d*)\\.
(?P<minor>0|[1-9]\\d*)\\.

View File

@@ -29,7 +29,6 @@ runs:
if: ${{ inputs.arm-build == 'false' }}
uses: PyO3/maturin-action@v1
with:
maturin-version: "1.12.4"
command: build
working-directory: python
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'"
@@ -45,7 +44,6 @@ runs:
if: ${{ inputs.arm-build == 'true' }}
uses: PyO3/maturin-action@v1
with:
maturin-version: "1.12.4"
command: build
working-directory: python
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'"

View File

@@ -20,7 +20,6 @@ runs:
uses: PyO3/maturin-action@v1
with:
command: build
maturin-version: "1.12.4"
# TODO: pass through interpreter
args: ${{ inputs.args }}
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'"

View File

@@ -25,7 +25,6 @@ runs:
uses: PyO3/maturin-action@v1
with:
command: build
maturin-version: "1.12.4"
args: ${{ inputs.args }}
docker-options: "-e PIP_EXTRA_INDEX_URL='https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/'"
working-directory: python

View File

@@ -1,173 +0,0 @@
name: Codex Fix CI
on:
workflow_dispatch:
inputs:
workflow_run_url:
description: "Failing CI workflow run URL (e.g., https://github.com/lancedb/lancedb/actions/runs/12345678)"
required: true
type: string
branch:
description: "Branch to fix (e.g., main, release/v2.0, or feature-branch)"
required: true
type: string
guidelines:
description: "Additional guidelines for the fix (optional)"
required: false
type: string
permissions:
contents: write
pull-requests: write
actions: read
jobs:
fix-ci:
runs-on: warp-ubuntu-latest-x64-4x
timeout-minutes: 60
env:
CC: clang
CXX: clang++
steps:
- name: Show inputs
run: |
echo "workflow_run_url = ${{ inputs.workflow_run_url }}"
echo "branch = ${{ inputs.branch }}"
echo "guidelines = ${{ inputs.guidelines }}"
- name: Checkout Repo
uses: actions/checkout@v4
with:
ref: ${{ inputs.branch }}
fetch-depth: 0
persist-credentials: true
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install Codex CLI
run: npm install -g @openai/codex
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
components: clippy, rustfmt
- uses: Swatinem/rust-cache@v2
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y protobuf-compiler libssl-dev
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install Python dependencies
run: |
pip install maturin ruff pytest pyarrow pandas polars
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: '11'
cache: maven
- name: Install Node.js dependencies for TypeScript bindings
run: |
cd nodejs
npm ci
- name: Configure git user
run: |
git config user.name "lancedb automation"
git config user.email "robot@lancedb.com"
- name: Run Codex to fix CI failure
env:
WORKFLOW_RUN_URL: ${{ inputs.workflow_run_url }}
BRANCH: ${{ inputs.branch }}
GUIDELINES: ${{ inputs.guidelines }}
GITHUB_TOKEN: ${{ secrets.ROBOT_TOKEN }}
GH_TOKEN: ${{ secrets.ROBOT_TOKEN }}
OPENAI_API_KEY: ${{ secrets.CODEX_TOKEN }}
run: |
set -euo pipefail
cat <<EOF >/tmp/codex-prompt.txt
You are running inside the lancedb repository on a GitHub Actions runner. Your task is to fix a CI failure.
Input parameters:
- Failing workflow run URL: ${WORKFLOW_RUN_URL}
- Branch to fix: ${BRANCH}
- Additional guidelines: ${GUIDELINES:-"None provided"}
Follow these steps exactly:
1. Extract the run ID from the workflow URL. The URL format is https://github.com/lancedb/lancedb/actions/runs/<run_id>.
2. Use "gh run view <run_id> --json jobs,conclusion,name" to get information about the failed run.
3. Identify which jobs failed. For each failed job, use "gh run view <run_id> --job <job_id> --log-failed" to get the failure logs.
4. Analyze the failure logs to understand what went wrong. Common failures include:
- Compilation errors
- Test failures
- Clippy warnings treated as errors
- Formatting issues
- Dependency issues
5. Based on the analysis, fix the issues in the codebase:
- For compilation errors: Fix the code that doesn't compile
- For test failures: Fix the failing tests or the code they test
- For clippy warnings: Apply the suggested fixes
- For formatting issues: Run "cargo fmt --all"
- For other issues: Apply appropriate fixes
6. After making fixes, verify them locally:
- Run "cargo fmt --all" to ensure formatting is correct
- Run "cargo clippy --workspace --tests --all-features -- -D warnings" to check for issues
- Run ONLY the specific failing tests to confirm they pass now:
- For Rust test failures: Run the specific test with "cargo test -p <crate> <test_name>"
- For Python test failures: Build with "cd python && maturin develop" then run "pytest <specific_test_file>::<test_name>"
- For Java test failures: Run "cd java && mvn test -Dtest=<TestClass>#<testMethod>"
- For TypeScript test failures: Run "cd nodejs && npm run build && npm test -- --testNamePattern='<test_name>'"
- Do NOT run the full test suite - only run the tests that were failing
7. If the additional guidelines are provided, follow them as well.
8. Inspect "git status --short" and "git diff" to review your changes.
9. Create a fix branch: "git checkout -b codex/fix-ci-<run_id>".
10. Stage all changes with "git add -A" and commit with message "fix: resolve CI failures from run <run_id>".
11. Push the branch: "git push origin codex/fix-ci-<run_id>". If the remote branch exists, delete it first with "gh api -X DELETE repos/lancedb/lancedb/git/refs/heads/codex/fix-ci-<run_id>" then push. Do NOT use "git push --force" or "git push -f".
12. Create a pull request targeting "${BRANCH}":
- Title: "ci: <short summary describing the fix>" (e.g., "ci: fix clippy warnings in lancedb" or "ci: resolve test flakiness in vector search")
- First, write the PR body to /tmp/pr-body.md using a heredoc (cat <<'PREOF' > /tmp/pr-body.md). The body should include:
- Link to the failing workflow run
- Summary of what failed
- Description of the fixes applied
- Then run "gh pr create --base ${BRANCH} --body-file /tmp/pr-body.md".
13. Display the new PR URL, "git status --short", and a summary of what was fixed.
Constraints:
- Use bash commands for all operations.
- Do not merge the PR.
- Do not modify GitHub workflow files unless they are the cause of the failure.
- If any command fails, diagnose and attempt to fix the issue instead of aborting immediately.
- If you cannot fix the issue automatically, create the PR anyway with a clear explanation of what you tried and what remains to be fixed.
- env "GH_TOKEN" is available, use "gh" tools for GitHub-related operations.
EOF
printenv OPENAI_API_KEY | codex login --with-api-key
codex --config shell_environment_policy.ignore_default_excludes=true exec --dangerously-bypass-approvals-and-sandbox "$(cat /tmp/codex-prompt.txt)"

View File

@@ -15,7 +15,7 @@ jobs:
name: Label PR
runs-on: ubuntu-latest
steps:
- uses: srvaroa/labeler@v1
- uses: srvaroa/labeler@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
commitlint:
@@ -24,7 +24,7 @@ jobs:
name: Verify PR title / description conforms to semantic-release
runs-on: ubuntu-latest
steps:
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: "18"
# These rules are disabled because Github will always ensure there
@@ -47,7 +47,7 @@ jobs:
${{ github.event.pull_request.body }}
- if: failure()
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
script: |
const message = `**ACTION NEEDED**

View File

@@ -53,7 +53,7 @@ jobs:
python -m pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -e .
python -m pip install --extra-index-url https://pypi.fury.io/lance-format/ --extra-index-url https://pypi.fury.io/lancedb/ -r ../docs/requirements.txt
- name: Set up node
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: 20
cache: 'npm'
@@ -68,7 +68,7 @@ jobs:
run: |
PYTHONPATH=. mkdocs build
- name: Setup Pages
uses: actions/configure-pages@v5
uses: actions/configure-pages@v2
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:

View File

@@ -7,9 +7,7 @@ on:
pull_request:
paths:
- Cargo.toml
- Cargo.lock
- nodejs/**
- rust/**
- docs/src/js/**
- .github/workflows/nodejs.yml
- docker-compose.yml
@@ -38,7 +36,7 @@ jobs:
with:
fetch-depth: 0
lfs: true
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 20
cache: 'npm'
@@ -78,12 +76,9 @@ jobs:
with:
fetch-depth: 0
lfs: true
- uses: actions/setup-node@v4
name: Setup Node.js 20 for build
- uses: actions/setup-node@v3
with:
# @napi-rs/cli v3 requires Node >= 20.12 (via @inquirer/prompts@8).
# Build always on Node 20; tests run on the matrix version below.
node-version: 20
node-version: ${{ matrix.node-version }}
cache: 'npm'
cache-dependency-path: nodejs/package-lock.json
- uses: Swatinem/rust-cache@v2
@@ -91,16 +86,12 @@ jobs:
run: |
sudo apt update
sudo apt install -y protobuf-compiler libssl-dev
npm install -g @napi-rs/cli
- name: Build
run: |
npm ci --include=optional
npm run build:debug -- --profile ci
- uses: actions/setup-node@v4
name: Setup Node.js ${{ matrix.node-version }} for test
with:
node-version: ${{ matrix.node-version }}
- name: Compile TypeScript
run: npm run tsc
npm run tsc
- name: Setup localstack
working-directory: .
run: docker compose up --detach --wait
@@ -144,7 +135,7 @@ jobs:
with:
fetch-depth: 0
lfs: true
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 20
cache: 'npm'
@@ -153,6 +144,7 @@ jobs:
- name: Install dependencies
run: |
brew install protobuf
npm install -g @napi-rs/cli
- name: Build
run: |
npm ci --include=optional

View File

@@ -19,7 +19,6 @@ on:
paths:
- .github/workflows/npm-publish.yml
- Cargo.toml # Change in dependency frequently breaks builds
- Cargo.lock
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -125,22 +124,20 @@ jobs:
pre_build: |-
set -e &&
apt-get update &&
apt-get install -y protobuf-compiler pkg-config &&
# The base image (manylinux2014-cross) sets TARGET_CC to the old
# GCC 4.8 cross-compiler. aws-lc-sys checks TARGET_CC before CC,
# so it picks up GCC even though the napi-rs image sets CC=clang.
# Override to use the image's clang-18 which supports -fuse-ld=lld.
export TARGET_CC=clang TARGET_CXX=clang++
apt-get install -y protobuf-compiler pkg-config
- target: x86_64-unknown-linux-musl
# This one seems to need some extra memory
host: ubuntu-2404-8x-x64
# https://github.com/napi-rs/napi-rs/blob/main/alpine.Dockerfile
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
features: fp16kernels
pre_build: |-
set -e &&
sudo apt-get update &&
sudo apt-get install -y protobuf-compiler pkg-config &&
rustup target add x86_64-unknown-linux-musl &&
export EXTRA_ARGS="-x"
apk add protobuf-dev curl &&
ln -s /usr/lib/gcc/x86_64-alpine-linux-musl/14.2.0/crtbeginS.o /usr/lib/crtbeginS.o &&
ln -s /usr/lib/libgcc_s.so /usr/lib/libgcc.so &&
CC=gcc &&
CXX=g++
- target: aarch64-unknown-linux-gnu
host: ubuntu-2404-8x-x64
# https://github.com/napi-rs/napi-rs/blob/main/debian-aarch64.Dockerfile
@@ -150,20 +147,21 @@ jobs:
set -e &&
apt-get update &&
apt-get install -y protobuf-compiler pkg-config &&
export TARGET_CC=clang TARGET_CXX=clang++ &&
# The manylinux2014 sysroot has glibc 2.17 headers which lack
# AT_HWCAP2 (added in Linux 3.17). Define it for aws-lc-sys.
export CFLAGS="$CFLAGS -DAT_HWCAP2=26" &&
# https://github.com/aws/aws-lc-rs/issues/737#issuecomment-2725918627
ln -s /usr/aarch64-unknown-linux-gnu/lib/gcc/aarch64-unknown-linux-gnu/4.8.5/crtbeginS.o /usr/aarch64-unknown-linux-gnu/aarch64-unknown-linux-gnu/sysroot/usr/lib/crtbeginS.o &&
ln -s /usr/aarch64-unknown-linux-gnu/lib/gcc /usr/aarch64-unknown-linux-gnu/aarch64-unknown-linux-gnu/sysroot/usr/lib/gcc &&
rustup target add aarch64-unknown-linux-gnu
- target: aarch64-unknown-linux-musl
host: ubuntu-2404-8x-x64
# https://github.com/napi-rs/napi-rs/blob/main/alpine.Dockerfile
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
features: ","
pre_build: |-
set -e &&
sudo apt-get update &&
sudo apt-get install -y protobuf-compiler &&
apk add protobuf-dev &&
rustup target add aarch64-unknown-linux-musl &&
export EXTRA_ARGS="-x"
export CC_aarch64_unknown_linux_musl=aarch64-linux-musl-gcc &&
export CXX_aarch64_unknown_linux_musl=aarch64-linux-musl-g++
name: build - ${{ matrix.settings.target }}
runs-on: ${{ matrix.settings.host }}
defaults:
@@ -194,18 +192,12 @@ jobs:
.cargo-cache
target/
key: nodejs-${{ matrix.settings.target }}-cargo-${{ matrix.settings.host }}
- name: Setup toolchain
run: ${{ matrix.settings.setup }}
if: ${{ matrix.settings.setup }}
shell: bash
- name: Install dependencies
run: npm ci
- name: Install Zig
uses: mlugg/setup-zig@v2
if: ${{ contains(matrix.settings.target, 'musl') }}
with:
version: 0.14.1
- name: Install cargo-zigbuild
uses: taiki-e/install-action@v2
if: ${{ contains(matrix.settings.target, 'musl') }}
with:
tool: cargo-zigbuild
- name: Build in docker
uses: addnab/docker-run-action@v3
if: ${{ matrix.settings.docker }}
@@ -218,24 +210,24 @@ jobs:
run: |
set -e
${{ matrix.settings.pre_build }}
npx napi build --platform --release \
npx napi build --platform --release --no-const-enum \
--features ${{ matrix.settings.features }} \
--target ${{ matrix.settings.target }} \
--dts ../lancedb/native.d.ts \
--js ../lancedb/native.js \
--strip \
--output-dir dist/
dist/
- name: Build
run: |
${{ matrix.settings.pre_build }}
npx napi build --platform --release \
npx napi build --platform --release --no-const-enum \
--features ${{ matrix.settings.features }} \
--target ${{ matrix.settings.target }} \
--dts ../lancedb/native.d.ts \
--js ../lancedb/native.js \
--strip \
$EXTRA_ARGS \
--output-dir dist/
dist/
if: ${{ !matrix.settings.docker }}
shell: bash
- name: Upload artifact
@@ -273,7 +265,7 @@ jobs:
- target: x86_64-unknown-linux-gnu
host: ubuntu-latest
- target: aarch64-unknown-linux-gnu
host: ubuntu-2404-8x-arm64
host: buildjet-16vcpu-ubuntu-2204-arm
node:
- '20'
runs-on: ${{ matrix.settings.host }}
@@ -326,7 +318,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 24
node-version: 20
cache: npm
cache-dependency-path: nodejs/package-lock.json
registry-url: "https://registry.npmjs.org"
@@ -358,13 +350,11 @@ jobs:
env:
DRY_RUN: ${{ !startsWith(github.ref, 'refs/tags/v') }}
run: |
npm config set provenance true
ARGS="--access public"
if [[ $DRY_RUN == "true" ]]; then
ARGS="$ARGS --dry-run"
fi
VERSION=$(node -p "require('./package.json').version")
if [[ $VERSION == *-* ]]; then
if [[ $GITHUB_REF =~ refs/tags/v(.*)-beta.* ]]; then
ARGS="$ARGS --tag preview"
fi
npm publish $ARGS

View File

@@ -9,7 +9,6 @@ on:
paths:
- .github/workflows/pypi-publish.yml
- Cargo.toml # Change in dependency frequently breaks builds
- Cargo.lock
env:
PIP_EXTRA_INDEX_URL: "https://pypi.fury.io/lance-format/ https://pypi.fury.io/lancedb/"

View File

@@ -7,14 +7,8 @@ on:
pull_request:
paths:
- Cargo.toml
- Cargo.lock
- python/**
- rust/**
- .github/workflows/python.yml
- .github/workflows/build_linux_wheel/**
- .github/workflows/build_mac_wheel/**
- .github/workflows/build_windows_wheel/**
- .github/workflows/run_tests/**
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}

View File

@@ -7,7 +7,6 @@ on:
pull_request:
paths:
- Cargo.toml
- Cargo.lock
- rust/**
- .github/workflows/rust.yml
@@ -101,9 +100,7 @@ jobs:
lfs: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: |
sudo apt update
sudo apt install -y protobuf-compiler libssl-dev
run: sudo apt install -y protobuf-compiler libssl-dev
- uses: rui314/setup-mold@v1
- name: Make Swap
run: |
@@ -186,7 +183,7 @@ jobs:
runs-on: ubuntu-24.04
strategy:
matrix:
msrv: ["1.91.0"] # This should match up with rust-version in Cargo.toml
msrv: ["1.88.0"] # This should match up with rust-version in Cargo.toml
env:
# Need up-to-date compilers for kernels
CC: clang-18

1192
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -5,30 +5,30 @@ exclude = ["python"]
resolver = "2"
[workspace.package]
edition = "2024"
edition = "2021"
authors = ["LanceDB Devs <dev@lancedb.com>"]
license = "Apache-2.0"
repository = "https://github.com/lancedb/lancedb"
description = "Serverless, low-latency vector database for AI applications"
keywords = ["lancedb", "lance", "database", "vector", "search"]
categories = ["database-implementations"]
rust-version = "1.91.0"
rust-version = "1.88.0"
[workspace.dependencies]
lance = { version = "=3.0.1", default-features = false }
lance-core = { version = "=3.0.1" }
lance-datagen = { version = "=3.0.1" }
lance-file = { version = "=3.0.1" }
lance-io = { version = "=3.0.1", default-features = false }
lance-index = { version = "=3.0.1" }
lance-linalg = { version = "=3.0.1" }
lance-namespace = { version = "=3.0.1" }
lance-namespace-impls = { version = "=3.0.1", default-features = false }
lance-table = { version = "=3.0.1" }
lance-testing = { version = "=3.0.1" }
lance-datafusion = { version = "=3.0.1" }
lance-encoding = { version = "=3.0.1" }
lance-arrow = { version = "=3.0.1" }
lance = { "version" = "=2.0.0", default-features = false }
lance-core = "=2.0.0"
lance-datagen = "=2.0.0"
lance-file = "=2.0.0"
lance-io = { "version" = "=2.0.0", default-features = false }
lance-index = "=2.0.0"
lance-linalg = "=2.0.0"
lance-namespace = "=2.0.0"
lance-namespace-impls = { "version" = "=2.0.0", default-features = false }
lance-table = "=2.0.0"
lance-testing = "=2.0.0"
lance-datafusion = "=2.0.0"
lance-encoding = "=2.0.0"
lance-arrow = "=2.0.0"
ahash = "0.8"
# Note that this one does not include pyarrow
arrow = { version = "57.2", optional = false }
@@ -40,15 +40,13 @@ arrow-schema = "57.2"
arrow-select = "57.2"
arrow-cast = "57.2"
async-trait = "0"
datafusion = { version = "52.1", default-features = false }
datafusion-catalog = "52.1"
datafusion-common = { version = "52.1", default-features = false }
datafusion-execution = "52.1"
datafusion-expr = "52.1"
datafusion-functions = "52.1"
datafusion-physical-plan = "52.1"
datafusion-physical-expr = "52.1"
datafusion-sql = "52.1"
datafusion = { version = "51.0", default-features = false }
datafusion-catalog = "51.0"
datafusion-common = { version = "51.0", default-features = false }
datafusion-execution = "51.0"
datafusion-expr = "51.0"
datafusion-physical-plan = "51.0"
datafusion-physical-expr = "51.0"
env_logger = "0.11"
half = { "version" = "2.7.1", default-features = false, features = [
"num-traits",

View File

@@ -1,9 +0,0 @@
.PHONY: licenses
licenses:
cargo about generate about.hbs -o RUST_THIRD_PARTY_LICENSES.html -c about.toml
cd python && cargo about generate ../about.hbs -o RUST_THIRD_PARTY_LICENSES.html -c ../about.toml
cd python && uv sync --all-extras && uv tool run pip-licenses --python .venv/bin/python --format=markdown --with-urls --output-file=PYTHON_THIRD_PARTY_LICENSES.md
cd nodejs && cargo about generate ../about.hbs -o RUST_THIRD_PARTY_LICENSES.html -c ../about.toml
cd nodejs && npx license-checker --markdown --out NODEJS_THIRD_PARTY_LICENSES.md
cd java && ./mvnw license:aggregate-add-third-party -q

File diff suppressed because it is too large Load Diff

View File

@@ -1,70 +0,0 @@
<html>
<head>
<style>
@media (prefers-color-scheme: dark) {
body {
background: #333;
color: white;
}
a {
color: skyblue;
}
}
.container {
font-family: sans-serif;
max-width: 800px;
margin: 0 auto;
}
.intro {
text-align: center;
}
.licenses-list {
list-style-type: none;
margin: 0;
padding: 0;
}
.license-used-by {
margin-top: -10px;
}
.license-text {
max-height: 200px;
overflow-y: scroll;
white-space: pre-wrap;
}
</style>
</head>
<body>
<main class="container">
<div class="intro">
<h1>Third Party Licenses</h1>
<p>This page lists the licenses of the projects used in cargo-about.</p>
</div>
<h2>Overview of licenses:</h2>
<ul class="licenses-overview">
{{#each overview}}
<li><a href="#{{id}}">{{name}}</a> ({{count}})</li>
{{/each}}
</ul>
<h2>All license text:</h2>
<ul class="licenses-list">
{{#each licenses}}
<li class="license">
<h3 id="{{id}}">{{name}}</h3>
<h4>Used by:</h4>
<ul class="license-used-by">
{{#each used_by}}
<li><a href="{{#if crate.repository}} {{crate.repository}} {{else}} https://crates.io/crates/{{crate.name}} {{/if}}">{{crate.name}} {{crate.version}}</a></li>
{{/each}}
</ul>
<pre class="license-text">{{text}}</pre>
</li>
{{/each}}
</ul>
</main>
</body>
</html>

View File

@@ -1,18 +0,0 @@
accepted = [
"0BSD",
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"BSD-2-Clause",
"BSD-3-Clause",
"BSL-1.0",
"bzip2-1.0.6",
"CC0-1.0",
"CDDL-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"OpenSSL",
"Unicode-3.0",
"Zlib",
]

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import argparse
import functools
import json
import os
import re
@@ -27,7 +26,6 @@ SEMVER_RE = re.compile(
)
@functools.total_ordering
@dataclass(frozen=True)
class SemVer:
major: int
@@ -158,9 +156,7 @@ def read_current_version(repo_root: Path) -> str:
def determine_latest_tag(tags: Iterable[TagInfo]) -> TagInfo:
# Stable releases (no prerelease) are always preferred over pre-releases.
# Within each group, standard semver ordering applies.
return max(tags, key=lambda tag: (not tag.semver.prerelease, tag.semver))
return max(tags, key=lambda tag: tag.semver)
def write_outputs(args: argparse.Namespace, payload: dict) -> None:

View File

@@ -1,7 +1,7 @@
version: "3.9"
services:
localstack:
image: localstack/localstack:4.0
image: localstack/localstack:3.3
ports:
- 4566:4566
environment:

View File

@@ -1,27 +1,27 @@
# Simple base dockerfile that supports basic dependencies required to run lance with FTS and Hybrid Search
# Usage: docker build -t lancedb:latest -f Dockerfile .
FROM python:3.12-slim-bookworm
#Simple base dockerfile that supports basic dependencies required to run lance with FTS and Hybrid Search
#Usage docker build -t lancedb:latest -f Dockerfile .
FROM python:3.10-slim-buster
# Install build dependencies in a single layer
RUN apt-get update && \
apt-get install -y --no-install-recommends \
curl \
build-essential \
protobuf-compiler \
git \
ca-certificates && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Install Rust (pinned installer, non-interactive)
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable --profile minimal
# Install Rust
RUN apt-get update && apt-get install -y curl build-essential && \
curl https://sh.rustup.rs -sSf | sh -s -- -y
# Set the environment variable for Rust
ENV PATH="/root/.cargo/bin:${PATH}"
# Install protobuf compiler
RUN apt-get install -y protobuf-compiler && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN apt-get -y update &&\
apt-get -y upgrade && \
apt-get -y install git
# Verify installations
RUN python --version && \
rustc --version && \
protoc --version
RUN pip install --no-cache-dir tantivy lancedb
RUN pip install tantivy lancedb

View File

@@ -52,21 +52,14 @@ plugins:
options:
docstring_style: numpy
heading_level: 3
show_source: true
show_symbol_type_in_heading: true
show_signature_annotations: true
show_root_heading: true
show_docstring_examples: true
show_docstring_attributes: false
show_docstring_other_parameters: true
show_symbol_type_heading: true
show_labels: false
show_if_no_docstring: true
show_source: false
members_order: source
docstring_section_style: list
signature_crossrefs: true
separate_signature: true
filters:
- "!^_"
import:
# for cross references
- https://arrow.apache.org/docs/objects.inv
@@ -120,7 +113,7 @@ markdown_extensions:
emoji_index: !!python/name:material.extensions.emoji.twemoji
emoji_generator: !!python/name:material.extensions.emoji.to_svg
- markdown.extensions.toc:
toc_depth: 4
toc_depth: 3
permalink: true
permalink_title: Anchor link to this section

View File

@@ -1,9 +1,9 @@
mkdocs==1.5.3
mkdocs-jupyter==0.24.1
mkdocs-material==9.5.3
mkdocs-autorefs>=0.5,<=1.0
mkdocs-autorefs<=1.0
mkdocstrings[python]==0.25.2
griffe>=0.40,<1.0
mkdocs-render-swagger-plugin>=0.1.0
pydantic>=2.0,<3.0
mkdocs-redirects>=1.2.0
griffe
mkdocs-render-swagger-plugin
pydantic
mkdocs-redirects

View File

@@ -14,7 +14,7 @@ Add the following dependency to your `pom.xml`:
<dependency>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-core</artifactId>
<version>0.27.0</version>
<version>0.26.0</version>
</dependency>
```

View File

@@ -71,12 +71,11 @@ Add new columns with defined values.
#### Parameters
* **newColumnTransforms**: `Field`&lt;`any`&gt; \| `Field`&lt;`any`&gt;[] \| `Schema`&lt;`any`&gt; \| [`AddColumnsSql`](../interfaces/AddColumnsSql.md)[]
Either:
- An array of objects with column names and SQL expressions to calculate values
- A single Arrow Field defining one column with its data type (column will be initialized with null values)
- An array of Arrow Fields defining columns with their data types (columns will be initialized with null values)
- An Arrow Schema defining columns with their data types (columns will be initialized with null values)
* **newColumnTransforms**: [`AddColumnsSql`](../interfaces/AddColumnsSql.md)[]
pairs of column names and
the SQL expression to use to calculate the value of the new column. These
expressions will be evaluated for each row in the table, and can
reference existing columns in the table.
#### Returns
@@ -485,7 +484,19 @@ Modeled after ``VACUUM`` in PostgreSQL.
- Prune: Removes old versions of the dataset
- Index: Optimizes the indices, adding new data to existing indices
The frequency an application should call optimize is based on the frequency of
Experimental API
----------------
The optimization process is undergoing active development and may change.
Our goal with these changes is to improve the performance of optimization and
reduce the complexity.
That being said, it is essential today to run optimize if you want the best
performance. It should be stable and safe to use in production, but it is our
hope that the API may be simplified (or not even need to be called) in the
future.
The frequency an application should call optimize is based on the frequency of
data modifications. If data is frequently added, deleted, or updated then
optimize should be run frequently. A good rule of thumb is to run optimize if
you have added or modified 100,000 or more records or run more than 20 data

View File

@@ -8,14 +8,6 @@
## Properties
### numDeletedRows
```ts
numDeletedRows: number;
```
***
### version
```ts

View File

@@ -37,12 +37,3 @@ tbl.optimize({cleanupOlderThan: new Date()});
```ts
deleteUnverified: boolean;
```
Because they may be part of an in-progress transaction, files newer than
7 days old are not deleted by default. If you are sure that there are no
in-progress transactions, then you can set this to true to delete all
files older than `cleanupOlderThan`.
**WARNING**: This should only be set to true if you can guarantee that
no other process is currently working on this dataset. Otherwise the
dataset could be put into a corrupted state.

View File

@@ -1,71 +0,0 @@
List of third-party dependencies grouped by their license type.
Apache 2.0:
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.28.0 - https://errorprone.info/error_prone_annotations)
Apache License 2.0:
* JsonNullable Jackson module (org.openapitools:jackson-databind-nullable:0.2.6 - https://github.com/OpenAPITools/jackson-databind-nullable)
Apache License V2.0:
* FlatBuffers Java API (com.google.flatbuffers:flatbuffers-java:23.5.26 - https://github.com/google/flatbuffers)
Apache License, Version 2.0:
* Apache Commons Codec (commons-codec:commons-codec:1.15 - https://commons.apache.org/proper/commons-codec/)
* Apache HttpClient (org.apache.httpcomponents.client5:httpclient5:5.2.1 - https://hc.apache.org/httpcomponents-client-5.0.x/5.2.1/httpclient5/)
* Apache HttpComponents Core HTTP/1.1 (org.apache.httpcomponents.core5:httpcore5:5.2 - https://hc.apache.org/httpcomponents-core-5.2.x/5.2/httpcore5/)
* Apache HttpComponents Core HTTP/2 (org.apache.httpcomponents.core5:httpcore5-h2:5.2 - https://hc.apache.org/httpcomponents-core-5.2.x/5.2/httpcore5-h2/)
* Arrow Format (org.apache.arrow:arrow-format:15.0.0 - https://arrow.apache.org/arrow-format/)
* Arrow Java C Data Interface (org.apache.arrow:arrow-c-data:15.0.0 - https://arrow.apache.org/arrow-c-data/)
* Arrow Java Dataset (org.apache.arrow:arrow-dataset:15.0.0 - https://arrow.apache.org/arrow-dataset/)
* Arrow Memory - Core (org.apache.arrow:arrow-memory-core:15.0.0 - https://arrow.apache.org/arrow-memory/arrow-memory-core/)
* Arrow Memory - Netty (org.apache.arrow:arrow-memory-netty:15.0.0 - https://arrow.apache.org/arrow-memory/arrow-memory-netty/)
* Arrow Vectors (org.apache.arrow:arrow-vector:15.0.0 - https://arrow.apache.org/arrow-vector/)
* Guava: Google Core Libraries for Java (com.google.guava:guava:33.3.1-jre - https://github.com/google/guava)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:3.0.0 - https://github.com/google/j2objc/)
* Netty/Buffer (io.netty:netty-buffer:4.1.104.Final - https://netty.io/netty-buffer/)
* Netty/Common (io.netty:netty-common:4.1.104.Final - https://netty.io/netty-common/)
Apache-2.0:
* Apache Commons Lang (org.apache.commons:commons-lang3:3.18.0 - https://commons.apache.org/proper/commons-lang/)
* lance-namespace-apache-client (org.lance:lance-namespace-apache-client:0.4.5 - https://github.com/openapitools/openapi-generator)
* lance-namespace-core (org.lance:lance-namespace-core:0.4.5 - https://lance.org/format/namespace/lance-namespace-core/)
EDL 1.0:
* Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
Eclipse Distribution License - v 1.0:
* Eclipse Collections API (org.eclipse.collections:eclipse-collections-api:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections-api)
* Eclipse Collections Main Library (org.eclipse.collections:eclipse-collections:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
Eclipse Public License - v 1.0:
* Eclipse Collections API (org.eclipse.collections:eclipse-collections-api:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections-api)
* Eclipse Collections Main Library (org.eclipse.collections:eclipse-collections:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections)
The Apache Software License, Version 2.0:
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.2 - http://findbugs.sourceforge.net/)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.2 - https://github.com/google/guava/failureaccess)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.16.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson module: Old JAXB Annotations (javax.xml.bind) (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.17.1 - https://github.com/FasterXML/jackson-modules-base)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.16.0 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.16.0 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.15.2 - https://github.com/FasterXML/jackson)
* Jackson-JAXRS: base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.17.1 - https://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-base)
* Jackson-JAXRS: JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.17.1 - https://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-json-provider)
* JAR JNI Loader (org.questdb:jar-jni:1.1.1 - https://github.com/questdb/rust-maven-plugin)
* Lance Core (org.lance:lance-core:2.0.0 - https://lance.org/)
The MIT License:
* Checker Qual (org.checkerframework:checker-qual:3.43.0 - https://checkerframework.org/)

View File

@@ -1,4 +1,4 @@
# LanceDB Java Enterprise Client
# LanceDB Java SDK
## Configuration and Initialization

View File

@@ -1,71 +0,0 @@
List of third-party dependencies grouped by their license type.
Apache 2.0:
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.28.0 - https://errorprone.info/error_prone_annotations)
Apache License 2.0:
* JsonNullable Jackson module (org.openapitools:jackson-databind-nullable:0.2.6 - https://github.com/OpenAPITools/jackson-databind-nullable)
Apache License V2.0:
* FlatBuffers Java API (com.google.flatbuffers:flatbuffers-java:23.5.26 - https://github.com/google/flatbuffers)
Apache License, Version 2.0:
* Apache Commons Codec (commons-codec:commons-codec:1.15 - https://commons.apache.org/proper/commons-codec/)
* Apache HttpClient (org.apache.httpcomponents.client5:httpclient5:5.2.1 - https://hc.apache.org/httpcomponents-client-5.0.x/5.2.1/httpclient5/)
* Apache HttpComponents Core HTTP/1.1 (org.apache.httpcomponents.core5:httpcore5:5.2 - https://hc.apache.org/httpcomponents-core-5.2.x/5.2/httpcore5/)
* Apache HttpComponents Core HTTP/2 (org.apache.httpcomponents.core5:httpcore5-h2:5.2 - https://hc.apache.org/httpcomponents-core-5.2.x/5.2/httpcore5-h2/)
* Arrow Format (org.apache.arrow:arrow-format:15.0.0 - https://arrow.apache.org/arrow-format/)
* Arrow Java C Data Interface (org.apache.arrow:arrow-c-data:15.0.0 - https://arrow.apache.org/arrow-c-data/)
* Arrow Java Dataset (org.apache.arrow:arrow-dataset:15.0.0 - https://arrow.apache.org/arrow-dataset/)
* Arrow Memory - Core (org.apache.arrow:arrow-memory-core:15.0.0 - https://arrow.apache.org/arrow-memory/arrow-memory-core/)
* Arrow Memory - Netty (org.apache.arrow:arrow-memory-netty:15.0.0 - https://arrow.apache.org/arrow-memory/arrow-memory-netty/)
* Arrow Vectors (org.apache.arrow:arrow-vector:15.0.0 - https://arrow.apache.org/arrow-vector/)
* Guava: Google Core Libraries for Java (com.google.guava:guava:33.3.1-jre - https://github.com/google/guava)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:3.0.0 - https://github.com/google/j2objc/)
* Netty/Buffer (io.netty:netty-buffer:4.1.104.Final - https://netty.io/netty-buffer/)
* Netty/Common (io.netty:netty-common:4.1.104.Final - https://netty.io/netty-common/)
Apache-2.0:
* Apache Commons Lang (org.apache.commons:commons-lang3:3.18.0 - https://commons.apache.org/proper/commons-lang/)
* lance-namespace-apache-client (org.lance:lance-namespace-apache-client:0.4.5 - https://github.com/openapitools/openapi-generator)
* lance-namespace-core (org.lance:lance-namespace-core:0.4.5 - https://lance.org/format/namespace/lance-namespace-core/)
EDL 1.0:
* Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
Eclipse Distribution License - v 1.0:
* Eclipse Collections API (org.eclipse.collections:eclipse-collections-api:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections-api)
* Eclipse Collections Main Library (org.eclipse.collections:eclipse-collections:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
Eclipse Public License - v 1.0:
* Eclipse Collections API (org.eclipse.collections:eclipse-collections-api:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections-api)
* Eclipse Collections Main Library (org.eclipse.collections:eclipse-collections:11.1.0 - https://github.com/eclipse/eclipse-collections/eclipse-collections)
The Apache Software License, Version 2.0:
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.2 - http://findbugs.sourceforge.net/)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.2 - https://github.com/google/guava/failureaccess)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.16.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson module: Old JAXB Annotations (javax.xml.bind) (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.17.1 - https://github.com/FasterXML/jackson-modules-base)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.16.0 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.16.0 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.15.2 - https://github.com/FasterXML/jackson)
* Jackson-JAXRS: base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.17.1 - https://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-base)
* Jackson-JAXRS: JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.17.1 - https://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-json-provider)
* JAR JNI Loader (org.questdb:jar-jni:1.1.1 - https://github.com/questdb/rust-maven-plugin)
* Lance Core (org.lance:lance-core:2.0.0 - https://lance.org/)
The MIT License:
* Checker Qual (org.checkerframework:checker-qual:3.43.0 - https://checkerframework.org/)

View File

@@ -8,7 +8,7 @@
<parent>
<groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId>
<version>0.27.0-final.0</version>
<version>0.26.0-final.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -56,21 +56,21 @@
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j2-impl</artifactId>
<version>2.25.3</version>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.25.3</version>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.25.3</version>
<version>2.24.3</version>
<scope>test</scope>
</dependency>
</dependencies>

View File

@@ -6,7 +6,7 @@
<groupId>com.lancedb</groupId>
<artifactId>lancedb-parent</artifactId>
<version>0.27.0-final.0</version>
<version>0.26.0-final.0</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<description>LanceDB Java SDK Parent POM</description>
@@ -28,7 +28,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<arrow.version>15.0.0</arrow.version>
<lance-core.version>3.1.0-beta.2</lance-core.version>
<lance-core.version>2.0.0</lance-core.version>
<spotless.skip>false</spotless.skip>
<spotless.version>2.30.0</spotless.version>
<spotless.java.googlejavaformat.version>1.7</spotless.java.googlejavaformat.version>
@@ -111,7 +111,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.1</version>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
@@ -124,7 +124,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.11.2</version>
<version>2.9.1</version>
<executions>
<execution>
<id>attach-javadocs</id>
@@ -160,33 +160,20 @@
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>license-maven-plugin</artifactId>
<version>2.4.0</version>
<configuration>
<outputDirectory>${project.basedir}</outputDirectory>
<thirdPartyFilename>JAVA_THIRD_PARTY_LICENSES.md</thirdPartyFilename>
<fileTemplate>/org/codehaus/mojo/license/third-party-file-groupByLicense.ftl</fileTemplate>
<includedScopes>compile,runtime</includedScopes>
<excludedScopes>test,provided</excludedScopes>
<sortArtifactByName>true</sortArtifactByName>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>3.4.1</version>
<version>3.1.0</version>
</plugin>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<version>3.3.1</version>
<version>3.0.2</version>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.14.0</version>
<version>3.8.1</version>
<configuration>
<compilerArgs>
<arg>-h</arg>
@@ -205,11 +192,11 @@
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>3.4.2</version>
<version>3.0.2</version>
</plugin>
<plugin>
<artifactId>maven-install-plugin</artifactId>
<version>3.1.3</version>
<version>2.5.2</version>
</plugin>
<plugin>
<groupId>com.diffplug.spotless</groupId>
@@ -327,7 +314,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>3.2.7</version>
<version>1.5</version>
<executions>
<execution>
<id>sign-artifacts</id>

View File

@@ -1,7 +1,7 @@
[package]
name = "lancedb-nodejs"
edition.workspace = true
version = "0.27.0"
version = "0.26.0"
license.workspace = true
description.workspace = true
repository.workspace = true
@@ -19,21 +19,21 @@ arrow-schema.workspace = true
env_logger.workspace = true
futures.workspace = true
lancedb = { path = "../rust/lancedb", default-features = false }
napi = { version = "3.8.3", default-features = false, features = [
napi = { version = "2.16.8", default-features = false, features = [
"napi9",
"async"
] }
napi-derive = "3.5.2"
napi-derive = "2.16.4"
# Prevent dynamic linking of lzma, which comes from datafusion
lzma-sys = { version = "0.1", features = ["static"] }
lzma-sys = { version = "*", features = ["static"] }
log.workspace = true
# Pin to resolve build failures; update periodically for security patches.
aws-lc-sys = "=0.38.0"
aws-lc-rs = "=1.16.1"
# Workaround for build failure until we can fix it.
aws-lc-sys = "=0.28.0"
aws-lc-rs = "=1.13.0"
[build-dependencies]
napi-build = "2.3.1"
napi-build = "2.1"
[features]
default = ["remote", "lancedb/aws", "lancedb/gcs", "lancedb/azure", "lancedb/dynamodb", "lancedb/oss", "lancedb/huggingface"]

View File

@@ -1,668 +0,0 @@
[@75lb/deep-merge@1.1.2](https://github.com/75lb/deep-merge) - MIT
[@aashutoshrathi/word-wrap@1.2.6](https://github.com/aashutoshrathi/word-wrap) - MIT
[@ampproject/remapping@2.2.1](https://github.com/ampproject/remapping) - Apache-2.0
[@aws-crypto/crc32@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/crc32c@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/ie11-detection@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/sha1-browser@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/sha256-browser@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/sha256-browser@5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/sha256-js@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/sha256-js@5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/supports-web-crypto@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/supports-web-crypto@5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/util@3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-crypto/util@5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers) - Apache-2.0
[@aws-sdk/client-dynamodb@3.602.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-kms@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-s3@3.550.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sso-oidc@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sso-oidc@3.600.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sso@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sso@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sts@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/client-sts@3.600.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/core@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/core@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-env@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-env@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-http@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-http@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-ini@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-ini@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-node@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-node@3.600.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-process@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-process@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-sso@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-sso@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-web-identity@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/credential-provider-web-identity@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/endpoint-cache@3.572.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-bucket-endpoint@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-endpoint-discovery@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-expect-continue@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-flexible-checksums@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-host-header@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-host-header@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-location-constraint@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-logger@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-logger@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-recursion-detection@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-recursion-detection@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-sdk-s3@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-signing@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-ssec@3.537.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-user-agent@3.540.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/middleware-user-agent@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/region-config-resolver@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/region-config-resolver@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/signature-v4-multi-region@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/token-providers@3.549.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/token-providers@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/types@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/types@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-arn-parser@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-endpoints@3.540.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-endpoints@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-locate-window@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-user-agent-browser@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-user-agent-browser@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-user-agent-node@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-user-agent-node@3.598.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/util-utf8-browser@3.259.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@aws-sdk/xml-builder@3.535.0](https://github.com/aws/aws-sdk-js-v3) - Apache-2.0
[@babel/code-frame@7.26.2](https://github.com/babel/babel) - MIT
[@babel/compat-data@7.23.5](https://github.com/babel/babel) - MIT
[@babel/core@7.23.7](https://github.com/babel/babel) - MIT
[@babel/generator@7.23.6](https://github.com/babel/babel) - MIT
[@babel/helper-compilation-targets@7.23.6](https://github.com/babel/babel) - MIT
[@babel/helper-environment-visitor@7.22.20](https://github.com/babel/babel) - MIT
[@babel/helper-function-name@7.23.0](https://github.com/babel/babel) - MIT
[@babel/helper-hoist-variables@7.22.5](https://github.com/babel/babel) - MIT
[@babel/helper-module-imports@7.22.15](https://github.com/babel/babel) - MIT
[@babel/helper-module-transforms@7.23.3](https://github.com/babel/babel) - MIT
[@babel/helper-plugin-utils@7.22.5](https://github.com/babel/babel) - MIT
[@babel/helper-simple-access@7.22.5](https://github.com/babel/babel) - MIT
[@babel/helper-split-export-declaration@7.22.6](https://github.com/babel/babel) - MIT
[@babel/helper-string-parser@7.25.9](https://github.com/babel/babel) - MIT
[@babel/helper-validator-identifier@7.25.9](https://github.com/babel/babel) - MIT
[@babel/helper-validator-option@7.23.5](https://github.com/babel/babel) - MIT
[@babel/helpers@7.27.0](https://github.com/babel/babel) - MIT
[@babel/parser@7.27.0](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-async-generators@7.8.4](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-async-generators) - MIT
[@babel/plugin-syntax-bigint@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-bigint) - MIT
[@babel/plugin-syntax-class-properties@7.12.13](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-import-meta@7.10.4](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-json-strings@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-json-strings) - MIT
[@babel/plugin-syntax-jsx@7.23.3](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-logical-assignment-operators@7.10.4](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-nullish-coalescing-operator@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-nullish-coalescing-operator) - MIT
[@babel/plugin-syntax-numeric-separator@7.10.4](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-object-rest-spread@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-object-rest-spread) - MIT
[@babel/plugin-syntax-optional-catch-binding@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-optional-catch-binding) - MIT
[@babel/plugin-syntax-optional-chaining@7.8.3](https://github.com/babel/babel/tree/master/packages/babel-plugin-syntax-optional-chaining) - MIT
[@babel/plugin-syntax-top-level-await@7.14.5](https://github.com/babel/babel) - MIT
[@babel/plugin-syntax-typescript@7.23.3](https://github.com/babel/babel) - MIT
[@babel/template@7.27.0](https://github.com/babel/babel) - MIT
[@babel/traverse@7.23.7](https://github.com/babel/babel) - MIT
[@babel/types@7.27.0](https://github.com/babel/babel) - MIT
[@bcoe/v8-coverage@0.2.3](https://github.com/demurgos/v8-coverage) - MIT
[@biomejs/biome@1.8.3](https://github.com/biomejs/biome) - MIT OR Apache-2.0
[@biomejs/cli-darwin-arm64@1.8.3](https://github.com/biomejs/biome) - MIT OR Apache-2.0
[@eslint-community/eslint-utils@4.4.0](https://github.com/eslint-community/eslint-utils) - MIT
[@eslint-community/regexpp@4.10.0](https://github.com/eslint-community/regexpp) - MIT
[@eslint/eslintrc@2.1.4](https://github.com/eslint/eslintrc) - MIT
[@eslint/js@8.57.0](https://github.com/eslint/eslint) - MIT
[@huggingface/jinja@0.3.2](https://github.com/huggingface/huggingface.js) - MIT
[@huggingface/transformers@3.0.2](https://github.com/huggingface/transformers.js) - Apache-2.0
[@humanwhocodes/config-array@0.11.14](https://github.com/humanwhocodes/config-array) - Apache-2.0
[@humanwhocodes/module-importer@1.0.1](https://github.com/humanwhocodes/module-importer) - Apache-2.0
[@humanwhocodes/object-schema@2.0.2](https://github.com/humanwhocodes/object-schema) - BSD-3-Clause
[@img/sharp-darwin-arm64@0.33.5](https://github.com/lovell/sharp) - Apache-2.0
[@img/sharp-libvips-darwin-arm64@1.0.4](https://github.com/lovell/sharp-libvips) - LGPL-3.0-or-later
[@isaacs/cliui@8.0.2](https://github.com/yargs/cliui) - ISC
[@isaacs/fs-minipass@4.0.1](https://github.com/npm/fs-minipass) - ISC
[@istanbuljs/load-nyc-config@1.1.0](https://github.com/istanbuljs/load-nyc-config) - ISC
[@istanbuljs/schema@0.1.3](https://github.com/istanbuljs/schema) - MIT
[@jest/console@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/core@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/environment@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/expect-utils@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/expect@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/fake-timers@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/globals@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/reporters@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/schemas@29.6.3](https://github.com/jestjs/jest) - MIT
[@jest/source-map@29.6.3](https://github.com/jestjs/jest) - MIT
[@jest/test-result@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/test-sequencer@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/transform@29.7.0](https://github.com/jestjs/jest) - MIT
[@jest/types@29.6.3](https://github.com/jestjs/jest) - MIT
[@jridgewell/gen-mapping@0.3.3](https://github.com/jridgewell/gen-mapping) - MIT
[@jridgewell/resolve-uri@3.1.1](https://github.com/jridgewell/resolve-uri) - MIT
[@jridgewell/set-array@1.1.2](https://github.com/jridgewell/set-array) - MIT
[@jridgewell/sourcemap-codec@1.4.15](https://github.com/jridgewell/sourcemap-codec) - MIT
[@jridgewell/trace-mapping@0.3.22](https://github.com/jridgewell/trace-mapping) - MIT
[@lancedb/lancedb@0.26.2](https://github.com/lancedb/lancedb) - Apache-2.0
[@napi-rs/cli@2.18.3](https://github.com/napi-rs/napi-rs) - MIT
[@nodelib/fs.scandir@2.1.5](https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir) - MIT
[@nodelib/fs.stat@2.0.5](https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat) - MIT
[@nodelib/fs.walk@1.2.8](https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk) - MIT
[@pkgjs/parseargs@0.11.0](https://github.com/pkgjs/parseargs) - MIT
[@protobufjs/aspromise@1.1.2](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/base64@1.1.2](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/codegen@2.0.4](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/eventemitter@1.1.0](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/fetch@1.1.0](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/float@1.0.2](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/inquire@1.1.0](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/path@1.1.2](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/pool@1.1.0](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@protobufjs/utf8@1.1.0](https://github.com/dcodeIO/protobuf.js) - BSD-3-Clause
[@shikijs/core@1.10.3](https://github.com/shikijs/shiki) - MIT
[@sinclair/typebox@0.27.8](https://github.com/sinclairzx81/typebox) - MIT
[@sinonjs/commons@3.0.1](https://github.com/sinonjs/commons) - BSD-3-Clause
[@sinonjs/fake-timers@10.3.0](https://github.com/sinonjs/fake-timers) - BSD-3-Clause
[@smithy/abort-controller@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/abort-controller@3.1.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/chunked-blob-reader-native@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/chunked-blob-reader@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/config-resolver@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/config-resolver@3.0.3](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/core@1.4.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/core@2.2.3](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/credential-provider-imds@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/credential-provider-imds@3.1.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/eventstream-codec@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/eventstream-serde-browser@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/eventstream-serde-config-resolver@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/eventstream-serde-node@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/eventstream-serde-universal@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/fetch-http-handler@2.5.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/fetch-http-handler@3.1.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/hash-blob-browser@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/hash-node@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/hash-node@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/hash-stream-node@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/invalid-dependency@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/invalid-dependency@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/is-array-buffer@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/is-array-buffer@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/md5-js@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-content-length@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-content-length@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-endpoint@2.5.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-endpoint@3.0.3](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-retry@2.3.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-retry@3.0.6](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-serde@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-serde@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-stack@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/middleware-stack@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/node-config-provider@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/node-config-provider@3.1.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/node-http-handler@2.5.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/node-http-handler@3.1.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/property-provider@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/property-provider@3.1.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/protocol-http@3.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/protocol-http@4.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/querystring-builder@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/querystring-builder@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/querystring-parser@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/querystring-parser@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/service-error-classification@2.1.5](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/service-error-classification@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/shared-ini-file-loader@2.4.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/shared-ini-file-loader@3.1.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/signature-v4@2.2.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/signature-v4@3.1.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/smithy-client@2.5.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/smithy-client@3.1.4](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/types@2.12.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/types@3.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/url-parser@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/url-parser@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-base64@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-base64@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-body-length-browser@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-body-length-browser@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-body-length-node@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-body-length-node@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-buffer-from@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-buffer-from@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-config-provider@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-config-provider@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-defaults-mode-browser@2.2.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-defaults-mode-browser@3.0.6](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-defaults-mode-node@2.3.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-defaults-mode-node@3.0.6](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-endpoints@1.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-endpoints@2.0.3](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-hex-encoding@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-hex-encoding@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-middleware@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-middleware@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-retry@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-retry@3.0.2](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-stream@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-stream@3.0.4](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-uri-escape@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-uri-escape@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-utf8@2.3.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-utf8@3.0.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-waiter@2.2.0](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@smithy/util-waiter@3.1.1](https://github.com/awslabs/smithy-typescript) - Apache-2.0
[@swc/helpers@0.5.12](https://github.com/swc-project/swc) - Apache-2.0
[@types/axios@0.14.0](https://github.com/mzabriskie/axios) - MIT
[@types/babel__core@7.20.5](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/babel__generator@7.6.8](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/babel__template@7.4.4](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/babel__traverse@7.20.5](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/command-line-args@5.2.3](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/command-line-usage@5.0.2](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/command-line-usage@5.0.4](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/graceful-fs@4.1.9](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/hast@3.0.4](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/istanbul-lib-coverage@2.0.6](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/istanbul-lib-report@3.0.3](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/istanbul-reports@3.0.4](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/jest@29.5.12](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/json-schema@7.0.15](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/node-fetch@2.6.11](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/node@18.19.26](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/node@20.16.10](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/node@20.17.9](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/node@22.7.4](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/semver@7.5.6](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/stack-utils@2.0.3](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/tmp@0.2.6](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/unist@3.0.2](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/yargs-parser@21.0.3](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@types/yargs@17.0.32](https://github.com/DefinitelyTyped/DefinitelyTyped) - MIT
[@typescript-eslint/eslint-plugin@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@typescript-eslint/parser@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - BSD-2-Clause
[@typescript-eslint/scope-manager@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@typescript-eslint/type-utils@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@typescript-eslint/types@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@typescript-eslint/typescript-estree@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - BSD-2-Clause
[@typescript-eslint/utils@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@typescript-eslint/visitor-keys@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[@ungap/structured-clone@1.2.0](https://github.com/ungap/structured-clone) - ISC
[abort-controller@3.0.0](https://github.com/mysticatea/abort-controller) - MIT
[acorn-jsx@5.3.2](https://github.com/acornjs/acorn-jsx) - MIT
[acorn@8.11.3](https://github.com/acornjs/acorn) - MIT
[agentkeepalive@4.5.0](https://github.com/node-modules/agentkeepalive) - MIT
[ajv@6.12.6](https://github.com/ajv-validator/ajv) - MIT
[ansi-escapes@4.3.2](https://github.com/sindresorhus/ansi-escapes) - MIT
[ansi-regex@5.0.1](https://github.com/chalk/ansi-regex) - MIT
[ansi-regex@6.1.0](https://github.com/chalk/ansi-regex) - MIT
[ansi-styles@4.3.0](https://github.com/chalk/ansi-styles) - MIT
[ansi-styles@5.2.0](https://github.com/chalk/ansi-styles) - MIT
[ansi-styles@6.2.1](https://github.com/chalk/ansi-styles) - MIT
[anymatch@3.1.3](https://github.com/micromatch/anymatch) - ISC
[apache-arrow@15.0.0](https://github.com/apache/arrow) - Apache-2.0
[apache-arrow@16.0.0](https://github.com/apache/arrow) - Apache-2.0
[apache-arrow@17.0.0](https://github.com/apache/arrow) - Apache-2.0
[apache-arrow@18.0.0](https://github.com/apache/arrow) - Apache-2.0
[argparse@1.0.10](https://github.com/nodeca/argparse) - MIT
[argparse@2.0.1](https://github.com/nodeca/argparse) - Python-2.0
[array-back@3.1.0](https://github.com/75lb/array-back) - MIT
[array-back@6.2.2](https://github.com/75lb/array-back) - MIT
[array-union@2.1.0](https://github.com/sindresorhus/array-union) - MIT
[asynckit@0.4.0](https://github.com/alexindigo/asynckit) - MIT
[axios@1.8.4](https://github.com/axios/axios) - MIT
[babel-jest@29.7.0](https://github.com/jestjs/jest) - MIT
[babel-plugin-istanbul@6.1.1](https://github.com/istanbuljs/babel-plugin-istanbul) - BSD-3-Clause
[babel-plugin-jest-hoist@29.6.3](https://github.com/jestjs/jest) - MIT
[babel-preset-current-node-syntax@1.0.1](https://github.com/nicolo-ribaudo/babel-preset-current-node-syntax) - MIT
[babel-preset-jest@29.6.3](https://github.com/jestjs/jest) - MIT
[balanced-match@1.0.2](https://github.com/juliangruber/balanced-match) - MIT
[base-64@0.1.0](https://github.com/mathiasbynens/base64) - MIT
[bowser@2.11.0](https://github.com/lancedikson/bowser) - MIT
[brace-expansion@1.1.11](https://github.com/juliangruber/brace-expansion) - MIT
[brace-expansion@2.0.1](https://github.com/juliangruber/brace-expansion) - MIT
[braces@3.0.3](https://github.com/micromatch/braces) - MIT
[browserslist@4.22.2](https://github.com/browserslist/browserslist) - MIT
[bs-logger@0.2.6](https://github.com/huafu/bs-logger) - MIT
[bser@2.1.1](https://github.com/facebook/watchman) - Apache-2.0
[buffer-from@1.1.2](https://github.com/LinusU/buffer-from) - MIT
[callsites@3.1.0](https://github.com/sindresorhus/callsites) - MIT
[camelcase@5.3.1](https://github.com/sindresorhus/camelcase) - MIT
[camelcase@6.3.0](https://github.com/sindresorhus/camelcase) - MIT
[caniuse-lite@1.0.30001579](https://github.com/browserslist/caniuse-lite) - CC-BY-4.0
[chalk-template@0.4.0](https://github.com/chalk/chalk-template) - MIT
[chalk@4.1.2](https://github.com/chalk/chalk) - MIT
[char-regex@1.0.2](https://github.com/Richienb/char-regex) - MIT
[charenc@0.0.2](https://github.com/pvorb/node-charenc) - BSD-3-Clause
[chownr@3.0.0](https://github.com/isaacs/chownr) - BlueOak-1.0.0
[ci-info@3.9.0](https://github.com/watson/ci-info) - MIT
[cjs-module-lexer@1.2.3](https://github.com/nodejs/cjs-module-lexer) - MIT
[cliui@8.0.1](https://github.com/yargs/cliui) - ISC
[co@4.6.0](https://github.com/tj/co) - MIT
[collect-v8-coverage@1.0.2](https://github.com/SimenB/collect-v8-coverage) - MIT
[color-convert@2.0.1](https://github.com/Qix-/color-convert) - MIT
[color-name@1.1.4](https://github.com/colorjs/color-name) - MIT
[color-string@1.9.1](https://github.com/Qix-/color-string) - MIT
[color@4.2.3](https://github.com/Qix-/color) - MIT
[combined-stream@1.0.8](https://github.com/felixge/node-combined-stream) - MIT
[command-line-args@5.2.1](https://github.com/75lb/command-line-args) - MIT
[command-line-usage@7.0.1](https://github.com/75lb/command-line-usage) - MIT
[concat-map@0.0.1](https://github.com/substack/node-concat-map) - MIT
[convert-source-map@2.0.0](https://github.com/thlorenz/convert-source-map) - MIT
[create-jest@29.7.0](https://github.com/jestjs/jest) - MIT
[cross-spawn@7.0.6](https://github.com/moxystudio/node-cross-spawn) - MIT
[crypt@0.0.2](https://github.com/pvorb/node-crypt) - BSD-3-Clause
[debug@4.3.4](https://github.com/debug-js/debug) - MIT
[dedent@1.5.1](https://github.com/dmnd/dedent) - MIT
[deep-is@0.1.4](https://github.com/thlorenz/deep-is) - MIT
[deepmerge@4.3.1](https://github.com/TehShrike/deepmerge) - MIT
[delayed-stream@1.0.0](https://github.com/felixge/node-delayed-stream) - MIT
[detect-libc@2.0.3](https://github.com/lovell/detect-libc) - Apache-2.0
[detect-newline@3.1.0](https://github.com/sindresorhus/detect-newline) - MIT
[diff-sequences@29.6.3](https://github.com/jestjs/jest) - MIT
[digest-fetch@1.3.0](https://github.com/devfans/digest-fetch) - ISC
[dir-glob@3.0.1](https://github.com/kevva/dir-glob) - MIT
[doctrine@3.0.0](https://github.com/eslint/doctrine) - Apache-2.0
[eastasianwidth@0.2.0](https://github.com/komagata/eastasianwidth) - MIT
[electron-to-chromium@1.4.642](https://github.com/kilian/electron-to-chromium) - ISC
[emittery@0.13.1](https://github.com/sindresorhus/emittery) - MIT
[emoji-regex@8.0.0](https://github.com/mathiasbynens/emoji-regex) - MIT
[emoji-regex@9.2.2](https://github.com/mathiasbynens/emoji-regex) - MIT
[entities@4.5.0](https://github.com/fb55/entities) - BSD-2-Clause
[error-ex@1.3.2](https://github.com/qix-/node-error-ex) - MIT
[escalade@3.1.1](https://github.com/lukeed/escalade) - MIT
[escape-string-regexp@2.0.0](https://github.com/sindresorhus/escape-string-regexp) - MIT
[escape-string-regexp@4.0.0](https://github.com/sindresorhus/escape-string-regexp) - MIT
[eslint-scope@7.2.2](https://github.com/eslint/eslint-scope) - BSD-2-Clause
[eslint-visitor-keys@3.4.3](https://github.com/eslint/eslint-visitor-keys) - Apache-2.0
[eslint@8.57.0](https://github.com/eslint/eslint) - MIT
[espree@9.6.1](https://github.com/eslint/espree) - BSD-2-Clause
[esprima@4.0.1](https://github.com/jquery/esprima) - BSD-2-Clause
[esquery@1.5.0](https://github.com/estools/esquery) - BSD-3-Clause
[esrecurse@4.3.0](https://github.com/estools/esrecurse) - BSD-2-Clause
[estraverse@5.3.0](https://github.com/estools/estraverse) - BSD-2-Clause
[esutils@2.0.3](https://github.com/estools/esutils) - BSD-2-Clause
[event-target-shim@5.0.1](https://github.com/mysticatea/event-target-shim) - MIT
[execa@5.1.1](https://github.com/sindresorhus/execa) - MIT
[exit@0.1.2](https://github.com/cowboy/node-exit) - MIT
[expect@29.7.0](https://github.com/jestjs/jest) - MIT
[fast-deep-equal@3.1.3](https://github.com/epoberezkin/fast-deep-equal) - MIT
[fast-glob@3.3.2](https://github.com/mrmlnc/fast-glob) - MIT
[fast-json-stable-stringify@2.1.0](https://github.com/epoberezkin/fast-json-stable-stringify) - MIT
[fast-levenshtein@2.0.6](https://github.com/hiddentao/fast-levenshtein) - MIT
[fast-xml-parser@4.2.5](https://github.com/NaturalIntelligence/fast-xml-parser) - MIT
[fastq@1.16.0](https://github.com/mcollina/fastq) - ISC
[fb-watchman@2.0.2](https://github.com/facebook/watchman) - Apache-2.0
[file-entry-cache@6.0.1](https://github.com/royriojas/file-entry-cache) - MIT
[fill-range@7.1.1](https://github.com/jonschlinkert/fill-range) - MIT
[find-replace@3.0.0](https://github.com/75lb/find-replace) - MIT
[find-up@4.1.0](https://github.com/sindresorhus/find-up) - MIT
[find-up@5.0.0](https://github.com/sindresorhus/find-up) - MIT
[flat-cache@3.2.0](https://github.com/jaredwray/flat-cache) - MIT
[flatbuffers@1.12.0](https://github.com/google/flatbuffers) - Apache*
[flatbuffers@23.5.26](https://github.com/google/flatbuffers) - Apache*
[flatbuffers@24.3.25](https://github.com/google/flatbuffers) - Apache-2.0
[flatted@3.2.9](https://github.com/WebReflection/flatted) - ISC
[follow-redirects@1.15.6](https://github.com/follow-redirects/follow-redirects) - MIT
[foreground-child@3.3.0](https://github.com/tapjs/foreground-child) - ISC
[form-data-encoder@1.7.2](https://github.com/octet-stream/form-data-encoder) - MIT
[form-data@4.0.0](https://github.com/form-data/form-data) - MIT
[formdata-node@4.4.1](https://github.com/octet-stream/form-data) - MIT
[fs.realpath@1.0.0](https://github.com/isaacs/fs.realpath) - ISC
[fsevents@2.3.3](https://github.com/fsevents/fsevents) - MIT
[function-bind@1.1.2](https://github.com/Raynos/function-bind) - MIT
[gensync@1.0.0-beta.2](https://github.com/loganfsmyth/gensync) - MIT
[get-caller-file@2.0.5](https://github.com/stefanpenner/get-caller-file) - ISC
[get-package-type@0.1.0](https://github.com/cfware/get-package-type) - MIT
[get-stream@6.0.1](https://github.com/sindresorhus/get-stream) - MIT
[glob-parent@5.1.2](https://github.com/gulpjs/glob-parent) - ISC
[glob-parent@6.0.2](https://github.com/gulpjs/glob-parent) - ISC
[glob@10.4.5](https://github.com/isaacs/node-glob) - ISC
[glob@7.2.3](https://github.com/isaacs/node-glob) - ISC
[globals@11.12.0](https://github.com/sindresorhus/globals) - MIT
[globals@13.24.0](https://github.com/sindresorhus/globals) - MIT
[globby@11.1.0](https://github.com/sindresorhus/globby) - MIT
[graceful-fs@4.2.11](https://github.com/isaacs/node-graceful-fs) - ISC
[graphemer@1.4.0](https://github.com/flmnt/graphemer) - MIT
[guid-typescript@1.0.9](https://github.com/NicolasDeveloper/guid-typescript) - ISC
[has-flag@4.0.0](https://github.com/sindresorhus/has-flag) - MIT
[hasown@2.0.0](https://github.com/inspect-js/hasOwn) - MIT
[html-escaper@2.0.2](https://github.com/WebReflection/html-escaper) - MIT
[human-signals@2.1.0](https://github.com/ehmicky/human-signals) - Apache-2.0
[humanize-ms@1.2.1](https://github.com/node-modules/humanize-ms) - MIT
[ignore@5.3.0](https://github.com/kaelzhang/node-ignore) - MIT
[import-fresh@3.3.0](https://github.com/sindresorhus/import-fresh) - MIT
[import-local@3.1.0](https://github.com/sindresorhus/import-local) - MIT
[imurmurhash@0.1.4](https://github.com/jensyt/imurmurhash-js) - MIT
[inflight@1.0.6](https://github.com/npm/inflight) - ISC
[inherits@2.0.4](https://github.com/isaacs/inherits) - ISC
[interpret@1.4.0](https://github.com/gulpjs/interpret) - MIT
[is-arrayish@0.2.1](https://github.com/qix-/node-is-arrayish) - MIT
[is-arrayish@0.3.2](https://github.com/qix-/node-is-arrayish) - MIT
[is-buffer@1.1.6](https://github.com/feross/is-buffer) - MIT
[is-core-module@2.13.1](https://github.com/inspect-js/is-core-module) - MIT
[is-extglob@2.1.1](https://github.com/jonschlinkert/is-extglob) - MIT
[is-fullwidth-code-point@3.0.0](https://github.com/sindresorhus/is-fullwidth-code-point) - MIT
[is-generator-fn@2.1.0](https://github.com/sindresorhus/is-generator-fn) - MIT
[is-glob@4.0.3](https://github.com/micromatch/is-glob) - MIT
[is-number@7.0.0](https://github.com/jonschlinkert/is-number) - MIT
[is-path-inside@3.0.3](https://github.com/sindresorhus/is-path-inside) - MIT
[is-stream@2.0.1](https://github.com/sindresorhus/is-stream) - MIT
[isexe@2.0.0](https://github.com/isaacs/isexe) - ISC
[istanbul-lib-coverage@3.2.2](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[istanbul-lib-instrument@5.2.1](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[istanbul-lib-instrument@6.0.1](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[istanbul-lib-report@3.0.1](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[istanbul-lib-source-maps@4.0.1](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[istanbul-reports@3.1.6](https://github.com/istanbuljs/istanbuljs) - BSD-3-Clause
[jackspeak@3.4.3](https://github.com/isaacs/jackspeak) - BlueOak-1.0.0
[jest-changed-files@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-circus@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-cli@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-config@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-diff@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-docblock@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-each@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-environment-node@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-get-type@29.6.3](https://github.com/jestjs/jest) - MIT
[jest-haste-map@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-leak-detector@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-matcher-utils@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-message-util@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-mock@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-pnp-resolver@1.2.3](https://github.com/arcanis/jest-pnp-resolver) - MIT
[jest-regex-util@29.6.3](https://github.com/jestjs/jest) - MIT
[jest-resolve-dependencies@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-resolve@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-runner@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-runtime@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-snapshot@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-util@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-validate@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-watcher@29.7.0](https://github.com/jestjs/jest) - MIT
[jest-worker@29.7.0](https://github.com/jestjs/jest) - MIT
[jest@29.7.0](https://github.com/jestjs/jest) - MIT
[js-tokens@4.0.0](https://github.com/lydell/js-tokens) - MIT
[js-yaml@3.14.1](https://github.com/nodeca/js-yaml) - MIT
[js-yaml@4.1.0](https://github.com/nodeca/js-yaml) - MIT
[jsesc@2.5.2](https://github.com/mathiasbynens/jsesc) - MIT
[json-bignum@0.0.3](https://github.com/datalanche/json-bignum) - MIT
[json-buffer@3.0.1](https://github.com/dominictarr/json-buffer) - MIT
[json-parse-even-better-errors@2.3.1](https://github.com/npm/json-parse-even-better-errors) - MIT
[json-schema-traverse@0.4.1](https://github.com/epoberezkin/json-schema-traverse) - MIT
[json-stable-stringify-without-jsonify@1.0.1](https://github.com/samn/json-stable-stringify) - MIT
[json5@2.2.3](https://github.com/json5/json5) - MIT
[keyv@4.5.4](https://github.com/jaredwray/keyv) - MIT
[kleur@3.0.3](https://github.com/lukeed/kleur) - MIT
[leven@3.1.0](https://github.com/sindresorhus/leven) - MIT
[levn@0.4.1](https://github.com/gkz/levn) - MIT
[lines-and-columns@1.2.4](https://github.com/eventualbuddha/lines-and-columns) - MIT
[linkify-it@5.0.0](https://github.com/markdown-it/linkify-it) - MIT
[locate-path@5.0.0](https://github.com/sindresorhus/locate-path) - MIT
[locate-path@6.0.0](https://github.com/sindresorhus/locate-path) - MIT
[lodash.camelcase@4.3.0](https://github.com/lodash/lodash) - MIT
[lodash.memoize@4.1.2](https://github.com/lodash/lodash) - MIT
[lodash.merge@4.6.2](https://github.com/lodash/lodash) - MIT
[lodash@4.17.21](https://github.com/lodash/lodash) - MIT
[long@5.2.3](https://github.com/dcodeIO/long.js) - Apache-2.0
[lru-cache@10.4.3](https://github.com/isaacs/node-lru-cache) - ISC
[lru-cache@5.1.1](https://github.com/isaacs/node-lru-cache) - ISC
[lunr@2.3.9](https://github.com/olivernn/lunr.js) - MIT
[make-dir@4.0.0](https://github.com/sindresorhus/make-dir) - MIT
[make-error@1.3.6](https://github.com/JsCommunity/make-error) - ISC
[makeerror@1.0.12](https://github.com/daaku/nodejs-makeerror) - BSD-3-Clause
[markdown-it@14.1.0](https://github.com/markdown-it/markdown-it) - MIT
[md5@2.3.0](https://github.com/pvorb/node-md5) - BSD-3-Clause
[mdurl@2.0.0](https://github.com/markdown-it/mdurl) - MIT
[merge-stream@2.0.0](https://github.com/grncdr/merge-stream) - MIT
[merge2@1.4.1](https://github.com/teambition/merge2) - MIT
[micromatch@4.0.8](https://github.com/micromatch/micromatch) - MIT
[mime-db@1.52.0](https://github.com/jshttp/mime-db) - MIT
[mime-types@2.1.35](https://github.com/jshttp/mime-types) - MIT
[mimic-fn@2.1.0](https://github.com/sindresorhus/mimic-fn) - MIT
[minimatch@3.1.2](https://github.com/isaacs/minimatch) - ISC
[minimatch@9.0.3](https://github.com/isaacs/minimatch) - ISC
[minimatch@9.0.5](https://github.com/isaacs/minimatch) - ISC
[minimist@1.2.8](https://github.com/minimistjs/minimist) - MIT
[minipass@7.1.2](https://github.com/isaacs/minipass) - ISC
[minizlib@3.0.1](https://github.com/isaacs/minizlib) - MIT
[mkdirp@3.0.1](https://github.com/isaacs/node-mkdirp) - MIT
[mnemonist@0.38.3](https://github.com/yomguithereal/mnemonist) - MIT
[ms@2.1.2](https://github.com/zeit/ms) - MIT
[ms@2.1.3](https://github.com/vercel/ms) - MIT
[natural-compare@1.4.0](https://github.com/litejs/natural-compare-lite) - MIT
[node-domexception@1.0.0](https://github.com/jimmywarting/node-domexception) - MIT
[node-fetch@2.7.0](https://github.com/bitinn/node-fetch) - MIT
[node-int64@0.4.0](https://github.com/broofa/node-int64) - MIT
[node-releases@2.0.14](https://github.com/chicoxyzzy/node-releases) - MIT
[normalize-path@3.0.0](https://github.com/jonschlinkert/normalize-path) - MIT
[npm-run-path@4.0.1](https://github.com/sindresorhus/npm-run-path) - MIT
[obliterator@1.6.1](https://github.com/yomguithereal/obliterator) - MIT
[once@1.4.0](https://github.com/isaacs/once) - ISC
[onetime@5.1.2](https://github.com/sindresorhus/onetime) - MIT
[onnxruntime-common@1.19.2](https://github.com/Microsoft/onnxruntime) - MIT
[onnxruntime-common@1.20.0-dev.20241016-2b8fc5529b](https://github.com/Microsoft/onnxruntime) - MIT
[onnxruntime-node@1.19.2](https://github.com/Microsoft/onnxruntime) - MIT
[onnxruntime-web@1.21.0-dev.20241024-d9ca84ef96](https://github.com/Microsoft/onnxruntime) - MIT
[openai@4.29.2](https://github.com/openai/openai-node) - Apache-2.0
[optionator@0.9.3](https://github.com/gkz/optionator) - MIT
[p-limit@2.3.0](https://github.com/sindresorhus/p-limit) - MIT
[p-limit@3.1.0](https://github.com/sindresorhus/p-limit) - MIT
[p-locate@4.1.0](https://github.com/sindresorhus/p-locate) - MIT
[p-locate@5.0.0](https://github.com/sindresorhus/p-locate) - MIT
[p-try@2.2.0](https://github.com/sindresorhus/p-try) - MIT
[package-json-from-dist@1.0.1](https://github.com/isaacs/package-json-from-dist) - BlueOak-1.0.0
[parent-module@1.0.1](https://github.com/sindresorhus/parent-module) - MIT
[parse-json@5.2.0](https://github.com/sindresorhus/parse-json) - MIT
[path-exists@4.0.0](https://github.com/sindresorhus/path-exists) - MIT
[path-is-absolute@1.0.1](https://github.com/sindresorhus/path-is-absolute) - MIT
[path-key@3.1.1](https://github.com/sindresorhus/path-key) - MIT
[path-parse@1.0.7](https://github.com/jbgutierrez/path-parse) - MIT
[path-scurry@1.11.1](https://github.com/isaacs/path-scurry) - BlueOak-1.0.0
[path-type@4.0.0](https://github.com/sindresorhus/path-type) - MIT
[picocolors@1.0.0](https://github.com/alexeyraspopov/picocolors) - ISC
[picomatch@2.3.1](https://github.com/micromatch/picomatch) - MIT
[pirates@4.0.6](https://github.com/danez/pirates) - MIT
[pkg-dir@4.2.0](https://github.com/sindresorhus/pkg-dir) - MIT
[platform@1.3.6](https://github.com/bestiejs/platform.js) - MIT
[prelude-ls@1.2.1](https://github.com/gkz/prelude-ls) - MIT
[pretty-format@29.7.0](https://github.com/jestjs/jest) - MIT
[prompts@2.4.2](https://github.com/terkelg/prompts) - MIT
[protobufjs@7.4.0](https://github.com/protobufjs/protobuf.js) - BSD-3-Clause
[proxy-from-env@1.1.0](https://github.com/Rob--W/proxy-from-env) - MIT
[punycode.js@2.3.1](https://github.com/mathiasbynens/punycode.js) - MIT
[punycode@2.3.1](https://github.com/mathiasbynens/punycode.js) - MIT
[pure-rand@6.0.4](https://github.com/dubzzz/pure-rand) - MIT
[queue-microtask@1.2.3](https://github.com/feross/queue-microtask) - MIT
[react-is@18.2.0](https://github.com/facebook/react) - MIT
[rechoir@0.6.2](https://github.com/tkellen/node-rechoir) - MIT
[reflect-metadata@0.2.2](https://github.com/rbuckton/reflect-metadata) - Apache-2.0
[require-directory@2.1.1](https://github.com/troygoode/node-require-directory) - MIT
[resolve-cwd@3.0.0](https://github.com/sindresorhus/resolve-cwd) - MIT
[resolve-from@4.0.0](https://github.com/sindresorhus/resolve-from) - MIT
[resolve-from@5.0.0](https://github.com/sindresorhus/resolve-from) - MIT
[resolve.exports@2.0.2](https://github.com/lukeed/resolve.exports) - MIT
[resolve@1.22.8](https://github.com/browserify/resolve) - MIT
[reusify@1.0.4](https://github.com/mcollina/reusify) - MIT
[rimraf@3.0.2](https://github.com/isaacs/rimraf) - ISC
[rimraf@5.0.10](https://github.com/isaacs/rimraf) - ISC
[run-parallel@1.2.0](https://github.com/feross/run-parallel) - MIT
[semver@6.3.1](https://github.com/npm/node-semver) - ISC
[semver@7.6.3](https://github.com/npm/node-semver) - ISC
[sharp@0.33.5](https://github.com/lovell/sharp) - Apache-2.0
[shebang-command@2.0.0](https://github.com/kevva/shebang-command) - MIT
[shebang-regex@3.0.0](https://github.com/sindresorhus/shebang-regex) - MIT
[shelljs@0.8.5](https://github.com/shelljs/shelljs) - BSD-3-Clause
[shiki@1.10.3](https://github.com/shikijs/shiki) - MIT
[shx@0.3.4](https://github.com/shelljs/shx) - MIT
[signal-exit@3.0.7](https://github.com/tapjs/signal-exit) - ISC
[signal-exit@4.1.0](https://github.com/tapjs/signal-exit) - ISC
[simple-swizzle@0.2.2](https://github.com/qix-/node-simple-swizzle) - MIT
[sisteransi@1.0.5](https://github.com/terkelg/sisteransi) - MIT
[slash@3.0.0](https://github.com/sindresorhus/slash) - MIT
[source-map-support@0.5.13](https://github.com/evanw/node-source-map-support) - MIT
[source-map@0.6.1](https://github.com/mozilla/source-map) - BSD-3-Clause
[sprintf-js@1.0.3](https://github.com/alexei/sprintf.js) - BSD-3-Clause
[stack-utils@2.0.6](https://github.com/tapjs/stack-utils) - MIT
[stream-read-all@3.0.1](https://github.com/75lb/stream-read-all) - MIT
[string-length@4.0.2](https://github.com/sindresorhus/string-length) - MIT
[string-width@4.2.3](https://github.com/sindresorhus/string-width) - MIT
[string-width@5.1.2](https://github.com/sindresorhus/string-width) - MIT
[strip-ansi@6.0.1](https://github.com/chalk/strip-ansi) - MIT
[strip-ansi@7.1.0](https://github.com/chalk/strip-ansi) - MIT
[strip-bom@4.0.0](https://github.com/sindresorhus/strip-bom) - MIT
[strip-final-newline@2.0.0](https://github.com/sindresorhus/strip-final-newline) - MIT
[strip-json-comments@3.1.1](https://github.com/sindresorhus/strip-json-comments) - MIT
[strnum@1.0.5](https://github.com/NaturalIntelligence/strnum) - MIT
[supports-color@7.2.0](https://github.com/chalk/supports-color) - MIT
[supports-color@8.1.1](https://github.com/chalk/supports-color) - MIT
[supports-preserve-symlinks-flag@1.0.0](https://github.com/inspect-js/node-supports-preserve-symlinks-flag) - MIT
[table-layout@3.0.2](https://github.com/75lb/table-layout) - MIT
[tar@7.4.3](https://github.com/isaacs/node-tar) - ISC
[test-exclude@6.0.0](https://github.com/istanbuljs/test-exclude) - ISC
[text-table@0.2.0](https://github.com/substack/text-table) - MIT
[tmp@0.2.3](https://github.com/raszi/node-tmp) - MIT
[tmpl@1.0.5](https://github.com/daaku/nodejs-tmpl) - BSD-3-Clause
[to-regex-range@5.0.1](https://github.com/micromatch/to-regex-range) - MIT
[tr46@0.0.3](https://github.com/Sebmaster/tr46.js) - MIT
[ts-api-utils@1.0.3](https://github.com/JoshuaKGoldberg/ts-api-utils) - MIT
[ts-jest@29.1.2](https://github.com/kulshekhar/ts-jest) - MIT
[tslib@1.14.1](https://github.com/Microsoft/tslib) - 0BSD
[tslib@2.6.2](https://github.com/Microsoft/tslib) - 0BSD
[type-check@0.4.0](https://github.com/gkz/type-check) - MIT
[type-detect@4.0.8](https://github.com/chaijs/type-detect) - MIT
[type-fest@0.20.2](https://github.com/sindresorhus/type-fest) - (MIT OR CC0-1.0)
[type-fest@0.21.3](https://github.com/sindresorhus/type-fest) - (MIT OR CC0-1.0)
[typedoc-plugin-markdown@4.2.1](https://github.com/typedoc2md/typedoc-plugin-markdown) - MIT
[typedoc@0.26.4](https://github.com/TypeStrong/TypeDoc) - Apache-2.0
[typescript-eslint@7.1.0](https://github.com/typescript-eslint/typescript-eslint) - MIT
[typescript@5.5.4](https://github.com/Microsoft/TypeScript) - Apache-2.0
[typical@4.0.0](https://github.com/75lb/typical) - MIT
[typical@7.1.1](https://github.com/75lb/typical) - MIT
[uc.micro@2.1.0](https://github.com/markdown-it/uc.micro) - MIT
[undici-types@5.26.5](https://github.com/nodejs/undici) - MIT
[undici-types@6.19.8](https://github.com/nodejs/undici) - MIT
[update-browserslist-db@1.0.13](https://github.com/browserslist/update-db) - MIT
[uri-js@4.4.1](https://github.com/garycourt/uri-js) - BSD-2-Clause
[uuid@9.0.1](https://github.com/uuidjs/uuid) - MIT
[v8-to-istanbul@9.2.0](https://github.com/istanbuljs/v8-to-istanbul) - ISC
[walker@1.0.8](https://github.com/daaku/nodejs-walker) - Apache-2.0
[web-streams-polyfill@3.3.3](https://github.com/MattiasBuelens/web-streams-polyfill) - MIT
[web-streams-polyfill@4.0.0-beta.3](https://github.com/MattiasBuelens/web-streams-polyfill) - MIT
[webidl-conversions@3.0.1](https://github.com/jsdom/webidl-conversions) - BSD-2-Clause
[whatwg-url@5.0.0](https://github.com/jsdom/whatwg-url) - MIT
[which@2.0.2](https://github.com/isaacs/node-which) - ISC
[wordwrapjs@5.1.0](https://github.com/75lb/wordwrapjs) - MIT
[wrap-ansi@7.0.0](https://github.com/chalk/wrap-ansi) - MIT
[wrap-ansi@8.1.0](https://github.com/chalk/wrap-ansi) - MIT
[wrappy@1.0.2](https://github.com/npm/wrappy) - ISC
[write-file-atomic@4.0.2](https://github.com/npm/write-file-atomic) - ISC
[y18n@5.0.8](https://github.com/yargs/y18n) - ISC
[yallist@3.1.1](https://github.com/isaacs/yallist) - ISC
[yallist@5.0.0](https://github.com/isaacs/yallist) - BlueOak-1.0.0
[yaml@2.4.5](https://github.com/eemeli/yaml) - ISC
[yargs-parser@21.1.1](https://github.com/yargs/yargs-parser) - ISC
[yargs@17.7.2](https://github.com/yargs/yargs) - MIT
[yocto-queue@0.1.0](https://github.com/sindresorhus/yocto-queue) - MIT

File diff suppressed because it is too large Load Diff

View File

@@ -63,7 +63,6 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
tableFromIPC,
DataType,
Dictionary,
Uint8: ArrowUint8,
// biome-ignore lint/suspicious/noExplicitAny: <explanation>
} = <any>arrow;
type Schema = ApacheArrow["Schema"];
@@ -363,38 +362,6 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
).toEqual(new Float64().toString());
});
it("will infer FixedSizeList<Float32> from Float32Array values", async function () {
const table = makeArrowTable([
{ id: "a", vector: new Float32Array([0.1, 0.2, 0.3]) },
{ id: "b", vector: new Float32Array([0.4, 0.5, 0.6]) },
]);
expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(
true,
);
const vectorType = table.getChild("vector")?.type;
expect(vectorType.listSize).toBe(3);
expect(vectorType.children[0].type.toString()).toEqual(
new Float32().toString(),
);
});
it("will infer FixedSizeList<Uint8> from Uint8Array values", async function () {
const table = makeArrowTable([
{ id: "a", vector: new Uint8Array([1, 2, 3]) },
{ id: "b", vector: new Uint8Array([4, 5, 6]) },
]);
expect(DataType.isFixedSizeList(table.getChild("vector")?.type)).toBe(
true,
);
const vectorType = table.getChild("vector")?.type;
expect(vectorType.listSize).toBe(3);
expect(vectorType.children[0].type.toString()).toEqual(
new ArrowUint8().toString(),
);
});
it("will use dictionary encoded strings if asked", async function () {
const table = makeArrowTable([{ str: "hello" }]);
expect(DataType.isUtf8(table.getChild("str")?.type)).toBe(true);

View File

@@ -1259,98 +1259,6 @@ describe("schema evolution", function () {
expect(await table.schema()).toEqual(expectedSchema);
});
// Verifies addColumns() accepts an Arrow Schema describing several new
// columns with explicit data types, commits a new table version, and
// backfills the new columns with nulls for existing rows.
it("can add columns with schema for explicit data types", async function () {
  const con = await connect(tmpDir.name);
  const table = await con.createTable("vectors", [
    { id: 1n, vector: [0.1, 0.2] },
  ]);
  // Define schema for new columns with explicit data types
  // Note: All columns must be nullable when using addColumns with Schema
  // because they are initially populated with null values
  const newColumnsSchema = new Schema([
    new Field("price", new Float64(), true),
    new Field("category", new Utf8(), true),
    new Field("rating", new Int32(), true),
  ]);
  const result = await table.addColumns(newColumnsSchema);
  // Adding columns commits a new version; the initial create was version 1.
  expect(result).toHaveProperty("version");
  expect(result.version).toBe(2);
  // Resulting schema: original fields followed by the new columns in the
  // order they appeared in newColumnsSchema.
  const expectedSchema = new Schema([
    new Field("id", new Int64(), true),
    new Field(
      "vector",
      new FixedSizeList(2, new Field("item", new Float32(), true)),
      true,
    ),
    new Field("price", new Float64(), true),
    new Field("category", new Utf8(), true),
    new Field("rating", new Int32(), true),
  ]);
  expect(await table.schema()).toEqual(expectedSchema);
  // Verify that new columns are populated with null values
  const results = await table.query().toArray();
  expect(results).toHaveLength(1);
  expect(results[0].price).toBeNull();
  expect(results[0].category).toBeNull();
  expect(results[0].rating).toBeNull();
});
// addColumns() should also accept a single Arrow Field (not only a full
// Schema), appending that one nullable column to the existing schema.
it("can add a single column using Field", async function () {
  const con = await connect(tmpDir.name);
  const table = await con.createTable("vectors", [
    { id: 1n, vector: [0.1, 0.2] },
  ]);
  // Append one nullable float column supplied as a bare Field.
  const priceField = new Field("price", new Float64(), true);
  const result = await table.addColumns(priceField);
  // The add commits a new version on top of the initial create (v1).
  expect(result).toHaveProperty("version");
  expect(result.version).toBe(2);
  // The schema is now the two original fields plus the appended "price".
  const expectedSchema = new Schema([
    new Field("id", new Int64(), true),
    new Field(
      "vector",
      new FixedSizeList(2, new Field("item", new Float32(), true)),
      true,
    ),
    new Field("price", new Float64(), true),
  ]);
  const actualSchema = await table.schema();
  expect(actualSchema).toEqual(expectedSchema);
});
// Verifies addColumns() accepts a plain array of Arrow Fields, appending
// each as a nullable column and committing a new table version.
it("can add multiple columns using array of Fields", async function () {
  const con = await connect(tmpDir.name);
  const table = await con.createTable("vectors", [
    { id: 1n, vector: [0.1, 0.2] },
  ]);
  // Add multiple fields as array
  const fields = [
    new Field("price", new Float64(), true),
    new Field("category", new Utf8(), true),
  ];
  const result = await table.addColumns(fields);
  // Both fields are added in a single commit: version goes 1 -> 2.
  expect(result).toHaveProperty("version");
  expect(result.version).toBe(2);
  // New columns are appended after the original fields, in array order.
  const expectedSchema = new Schema([
    new Field("id", new Int64(), true),
    new Field(
      "vector",
      new FixedSizeList(2, new Field("item", new Float32(), true)),
      true,
    ),
    new Field("price", new Float64(), true),
    new Field("category", new Utf8(), true),
  ]);
  expect(await table.schema()).toEqual(expectedSchema);
});
it("can alter the columns in the schema", async function () {
const con = await connect(tmpDir.name);
const schema = new Schema([
@@ -1789,65 +1697,6 @@ describe.each([arrow15, arrow16, arrow17, arrow18])(
expect(results2[0].text).toBe(data[1].text);
});
// Exercises fastSearch() semantics for full-text search: as the assertions
// below show, fastSearch limits the query to already-indexed data, so rows
// added after index creation return no hits until table.optimize() rebuilds
// the index — whereas a plain (flat) search still finds them.
test("full text search fast search", async () => {
  const db = await connect(tmpDir.name);
  const data = [{ text: "hello world", vector: [0.1, 0.2, 0.3], id: 1 }];
  const table = await db.createTable("test", data);
  await table.createIndex("text", {
    config: Index.fts(),
  });
  // Insert unindexed data after creating the index.
  await table.add([{ text: "xyz", vector: [0.4, 0.5, 0.6], id: 2 }]);
  // Flat (non-fast) search scans unindexed rows too, so "xyz" is found.
  const withFlatSearch = await table
    .search("xyz", "fts")
    .limit(10)
    .toArray();
  expect(withFlatSearch.length).toBeGreaterThan(0);
  // fastSearch only consults the index, which does not yet contain "xyz".
  const fastSearchResults = await table
    .search("xyz", "fts")
    .fastSearch()
    .limit(10)
    .toArray();
  expect(fastSearchResults.length).toBe(0);
  // Same behavior through the query().nearestToText() entry point.
  const nearestToTextFastSearch = await table
    .query()
    .nearestToText("xyz")
    .fastSearch()
    .limit(10)
    .toArray();
  expect(nearestToTextFastSearch.length).toBe(0);
  // fastSearch should be chainable with other methods.
  const chainedFastSearch = await table
    .search("xyz", "fts")
    .fastSearch()
    .select(["text"])
    .limit(5)
    .toArray();
  expect(chainedFastSearch.length).toBe(0);
  // optimize() brings the index up to date with the appended row ...
  await table.optimize();
  // ... after which fastSearch finds "xyz" via both entry points.
  const indexedFastSearch = await table
    .search("xyz", "fts")
    .fastSearch()
    .limit(10)
    .toArray();
  expect(indexedFastSearch.length).toBeGreaterThan(0);
  const indexedNearestToTextFastSearch = await table
    .query()
    .nearestToText("xyz")
    .fastSearch()
    .limit(10)
    .toArray();
  expect(indexedNearestToTextFastSearch.length).toBeGreaterThan(0);
});
test("prewarm full text search index", async () => {
const db = await connect(tmpDir.name);
const data = [
@@ -2296,36 +2145,3 @@ describe("when creating an empty table", () => {
expect((actualSchema.fields[1].type as Float64).precision).toBe(2);
});
});
// Ensure we can create float32 arrays without using Arrow
// by utilizing native JS TypedArray support
//
// https://github.com/lancedb/lancedb/issues/3115
describe("when creating a table with Float32Array vectors", () => {
  let tmpDir: tmp.DirResult;
  beforeEach(() => {
    tmpDir = tmp.dirSync({ unsafeCleanup: true });
  });
  afterEach(() => {
    tmpDir.removeCallback();
  });
  it("should persist Float32Array as FixedSizeList<Float32> in the LanceDB schema", async () => {
    const db = await connect(tmpDir.name);
    const rows = [
      { id: "a", vector: new Float32Array([0.1, 0.2, 0.3]) },
      { id: "b", vector: new Float32Array([0.4, 0.5, 0.6]) },
    ];
    const table = await db.createTable("test", rows);
    const schema = await table.schema();
    // The "vector" column must exist and be a fixed-size list of length 3.
    const vectorField = schema.fields.find((f) => f.name === "vector");
    expect(vectorField).toBeDefined();
    expect(vectorField!.type).toBeInstanceOf(FixedSizeList);
    const fsl = vectorField!.type as FixedSizeList;
    expect(fsl.listSize).toBe(3);
    // The list element type must be single-precision float.
    const child = fsl.children[0].type;
    expect(child.typeId).toBe(Type.Float);
    // precision: HALF=0, SINGLE=1, DOUBLE=2
    expect((child as Float32).precision).toBe(1);
  });
});

View File

@@ -30,15 +30,12 @@
"x64",
"arm64"
],
"dev": true,
"license": "Apache-2.0",
"optional": true,
"os": [
"darwin",
"linux",
"win32"
],
"peer": true,
"dependencies": {
"reflect-metadata": "^0.2.2"
},
@@ -94,15 +91,14 @@
}
},
"node_modules/@babel/code-frame": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz",
"integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==",
"version": "7.26.2",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
"integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.28.5",
"@babel/helper-validator-identifier": "^7.25.9",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
"picocolors": "^1.0.0"
},
"engines": {
"node": ">=6.9.0"
@@ -237,21 +233,19 @@
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz",
"integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz",
"integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
@@ -266,27 +260,25 @@
}
},
"node_modules/@babel/helpers": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz",
"integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
"version": "7.26.0",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz",
"integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.28.6",
"@babel/types": "^7.28.6"
"@babel/template": "^7.25.9",
"@babel/types": "^7.26.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz",
"integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==",
"version": "7.26.2",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz",
"integrity": "sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.29.0"
"@babel/types": "^7.26.0"
},
"bin": {
"parser": "bin/babel-parser.js"
@@ -518,15 +510,14 @@
}
},
"node_modules/@babel/template": {
"version": "7.28.6",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz",
"integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz",
"integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.28.6",
"@babel/parser": "^7.28.6",
"@babel/types": "^7.28.6"
"@babel/code-frame": "^7.25.9",
"@babel/parser": "^7.25.9",
"@babel/types": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
@@ -551,14 +542,13 @@
}
},
"node_modules/@babel/types": {
"version": "7.29.0",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz",
"integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==",
"version": "7.26.0",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz",
"integrity": "sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5"
"@babel/helper-string-parser": "^7.25.9",
"@babel/helper-validator-identifier": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
@@ -1161,6 +1151,95 @@
"url": "https://opencollective.com/libvips"
}
},
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
"dependencies": {
"string-width": "^5.1.2",
"string-width-cjs": "npm:string-width@^4.2.0",
"strip-ansi": "^7.0.1",
"strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
"wrap-ansi": "^8.1.0",
"wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@isaacs/cliui/node_modules/ansi-regex": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
"integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/@isaacs/cliui/node_modules/ansi-styles": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/@isaacs/cliui/node_modules/emoji-regex": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="
},
"node_modules/@isaacs/cliui/node_modules/string-width": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
"dependencies": {
"eastasianwidth": "^0.2.0",
"emoji-regex": "^9.2.2",
"strip-ansi": "^7.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@isaacs/cliui/node_modules/strip-ansi": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
"integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/@isaacs/cliui/node_modules/wrap-ansi": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
"dependencies": {
"ansi-styles": "^6.1.0",
"string-width": "^5.0.1",
"strip-ansi": "^7.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/@isaacs/fs-minipass": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
@@ -1527,6 +1606,15 @@
"resolved": "../dist",
"link": true
},
"node_modules/@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
"optional": true,
"engines": {
"node": ">=14"
}
},
"node_modules/@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
@@ -1758,7 +1846,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"engines": {
"node": ">=8"
}
@@ -1767,7 +1854,6 @@
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"dependencies": {
"color-convert": "^2.0.1"
},
@@ -1933,15 +2019,13 @@
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@@ -2018,19 +2102,6 @@
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
"dev": true
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -2227,11 +2298,9 @@
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dev": true,
"license": "MIT",
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"dependencies": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
@@ -2315,19 +2384,10 @@
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
"node_modules/eastasianwidth": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
},
"node_modules/ejs": {
"version": "3.1.10",
@@ -2365,8 +2425,7 @@
"node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"dev": true
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"node_modules/error-ex": {
"version": "1.3.2",
@@ -2383,51 +2442,6 @@
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
"dev": true
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@@ -2540,21 +2554,19 @@
}
},
"node_modules/filelist/node_modules/brace-expansion": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/filelist/node_modules/minimatch": {
"version": "5.1.9",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz",
"integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==",
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
@@ -2592,16 +2604,39 @@
"resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-1.12.0.tgz",
"integrity": "sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ=="
},
"node_modules/foreground-child": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz",
"integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==",
"dependencies": {
"cross-spawn": "^7.0.0",
"signal-exit": "^4.0.1"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/foreground-child/node_modules/signal-exit": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/form-data": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
"integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
"license": "MIT",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
@@ -2649,6 +2684,7 @@
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -2671,30 +2707,6 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-package-type": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
@@ -2704,19 +2716,6 @@
"node": ">=8.0.0"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/get-stream": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
@@ -2759,18 +2758,6 @@
"node": ">=4"
}
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
@@ -2791,37 +2778,11 @@
"node": ">=8"
}
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"dependencies": {
"function-bind": "^1.1.2"
},
@@ -2921,7 +2882,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"dev": true,
"engines": {
"node": ">=8"
}
@@ -2959,8 +2919,7 @@
"node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
"dev": true
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"node_modules/istanbul-lib-coverage": {
"version": "3.2.2",
@@ -3028,6 +2987,20 @@
"node": ">=8"
}
},
"node_modules/jackspeak": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
"integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
"dependencies": {
"@isaacs/cliui": "^8.0.2"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
},
"optionalDependencies": {
"@pkgjs/parseargs": "^0.11.0"
}
},
"node_modules/jake": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz",
@@ -3632,11 +3605,10 @@
"dev": true
},
"node_modules/js-yaml": {
"version": "3.14.2",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
"integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"version": "3.14.1",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
"dev": true,
"license": "MIT",
"dependencies": {
"argparse": "^1.0.7",
"esprima": "^4.0.0"
@@ -3756,15 +3728,6 @@
"tmpl": "1.0.5"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/merge-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
@@ -3813,11 +3776,10 @@
}
},
"node_modules/minimatch": {
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
"integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -3834,17 +3796,31 @@
}
},
"node_modules/minizlib": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz",
"integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==",
"license": "MIT",
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz",
"integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==",
"dependencies": {
"minipass": "^7.1.2"
"minipass": "^7.0.4",
"rimraf": "^5.0.5"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/mkdirp": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
"integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
"bin": {
"mkdirp": "dist/cjs/src/bin.js"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -4034,6 +4010,11 @@
"node": ">=6"
}
},
"node_modules/package-json-from-dist": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="
},
"node_modules/parse-json": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
@@ -4074,7 +4055,6 @@
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
"dev": true,
"engines": {
"node": ">=8"
}
@@ -4085,6 +4065,26 @@
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true
},
"node_modules/path-scurry": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
"integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
"dependencies": {
"lru-cache": "^10.2.0",
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
},
"engines": {
"node": ">=16 || 14 >=14.18"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/path-scurry/node_modules/lru-cache": {
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@@ -4246,6 +4246,61 @@
"node": ">=10"
}
},
"node_modules/rimraf": {
"version": "5.0.10",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
"integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
"dependencies": {
"glob": "^10.3.7"
},
"bin": {
"rimraf": "dist/esm/bin.mjs"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/rimraf/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/rimraf/node_modules/glob": {
"version": "10.4.5",
"resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
"integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
"dependencies": {
"foreground-child": "^3.1.0",
"jackspeak": "^3.1.2",
"minimatch": "^9.0.4",
"minipass": "^7.1.2",
"package-json-from-dist": "^1.0.0",
"path-scurry": "^1.11.1"
},
"bin": {
"glob": "dist/esm/bin.mjs"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/rimraf/node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/semver": {
"version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
@@ -4299,7 +4354,6 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dev": true,
"dependencies": {
"shebang-regex": "^3.0.0"
},
@@ -4311,7 +4365,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true,
"engines": {
"node": ">=8"
}
@@ -4399,7 +4452,20 @@
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dev": true,
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/string-width-cjs": {
"name": "string-width",
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
@@ -4413,7 +4479,18 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-ansi-cjs": {
"name": "strip-ansi",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": {
"ansi-regex": "^5.0.1"
},
@@ -4464,15 +4541,15 @@
}
},
"node_modules/tar": {
"version": "7.5.10",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz",
"integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==",
"license": "BlueOak-1.0.0",
"version": "7.4.3",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
"integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
"minizlib": "^3.1.0",
"minizlib": "^3.0.1",
"mkdirp": "^3.0.1",
"yallist": "^5.0.0"
},
"engines": {
@@ -4705,7 +4782,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"dependencies": {
"isexe": "^2.0.0"
},
@@ -4733,6 +4809,23 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/wrap-ansi-cjs": {
"name": "wrap-ansi",
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",

View File

@@ -20,8 +20,6 @@ import {
Float32,
Float64,
Int,
Int8,
Int16,
Int32,
Int64,
LargeBinary,
@@ -37,8 +35,6 @@ import {
Timestamp,
Type,
Uint8,
Uint16,
Uint32,
Utf8,
Vector,
makeVector as arrowMakeVector,
@@ -533,8 +529,7 @@ function isObject(value: unknown): value is Record<string, unknown> {
!(value instanceof Date) &&
!(value instanceof Set) &&
!(value instanceof Map) &&
!(value instanceof Buffer) &&
!ArrayBuffer.isView(value)
!(value instanceof Buffer)
);
}
@@ -593,13 +588,6 @@ function inferType(
return new Bool();
} else if (value instanceof Buffer) {
return new Binary();
} else if (ArrayBuffer.isView(value) && !(value instanceof DataView)) {
const info = typedArrayToArrowType(value);
if (info !== undefined) {
const child = new Field("item", info.elementType, true);
return new FixedSizeList(info.length, child);
}
return undefined;
} else if (Array.isArray(value)) {
if (value.length === 0) {
return undefined; // Without any values we can't infer the type
@@ -758,32 +746,6 @@ function makeListVector(lists: unknown[][]): Vector<unknown> {
return listBuilder.finish().toVector();
}
/**
* Map a JS TypedArray instance to the corresponding Arrow element DataType
* and its length. Returns undefined if the value is not a recognized TypedArray.
*/
function typedArrayToArrowType(
value: ArrayBufferView,
): { elementType: DataType; length: number } | undefined {
if (value instanceof Float32Array)
return { elementType: new Float32(), length: value.length };
if (value instanceof Float64Array)
return { elementType: new Float64(), length: value.length };
if (value instanceof Uint8Array)
return { elementType: new Uint8(), length: value.length };
if (value instanceof Uint16Array)
return { elementType: new Uint16(), length: value.length };
if (value instanceof Uint32Array)
return { elementType: new Uint32(), length: value.length };
if (value instanceof Int8Array)
return { elementType: new Int8(), length: value.length };
if (value instanceof Int16Array)
return { elementType: new Int16(), length: value.length };
if (value instanceof Int32Array)
return { elementType: new Int32(), length: value.length };
return undefined;
}
/** Helper function to convert an Array of JS values to an Arrow Vector */
function makeVector(
values: unknown[],
@@ -852,16 +814,6 @@ function makeVector(
"makeVector cannot infer the type if all values are null or undefined",
);
}
if (ArrayBuffer.isView(sampleValue) && !(sampleValue instanceof DataView)) {
const info = typedArrayToArrowType(sampleValue);
if (info !== undefined) {
const fslType = new FixedSizeList(
info.length,
new Field("item", info.elementType, true),
);
return vectorFromArray(values, fslType);
}
}
if (Array.isArray(sampleValue)) {
// Default Arrow inference doesn't handle list types
return makeListVector(values as unknown[][]);

View File

@@ -273,9 +273,7 @@ export async function connect(
let nativeProvider: NativeJsHeaderProvider | undefined;
if (finalHeaderProvider) {
if (typeof finalHeaderProvider === "function") {
nativeProvider = new NativeJsHeaderProvider(async () =>
finalHeaderProvider(),
);
nativeProvider = new NativeJsHeaderProvider(finalHeaderProvider);
} else if (
finalHeaderProvider &&
typeof finalHeaderProvider.getHeaders === "function"

View File

@@ -684,17 +684,19 @@ export class VectorQuery extends StandardQueryBase<NativeVectorQuery> {
rerank(reranker: Reranker): VectorQuery {
super.doCall((inner) =>
inner.rerank(async (args) => {
const vecResults = await fromBufferToRecordBatch(args.vecResults);
const ftsResults = await fromBufferToRecordBatch(args.ftsResults);
const result = await reranker.rerankHybrid(
args.query,
vecResults as RecordBatch,
ftsResults as RecordBatch,
);
inner.rerank({
rerankHybrid: async (_, args) => {
const vecResults = await fromBufferToRecordBatch(args.vecResults);
const ftsResults = await fromBufferToRecordBatch(args.ftsResults);
const result = await reranker.rerankHybrid(
args.query,
vecResults as RecordBatch,
ftsResults as RecordBatch,
);
const buffer = fromRecordBatchToBuffer(result);
return buffer;
const buffer = fromRecordBatchToBuffer(result);
return buffer;
},
}),
);

View File

@@ -5,15 +5,12 @@ import {
Table as ArrowTable,
Data,
DataType,
Field,
IntoVector,
MultiVector,
Schema,
dataTypeToJson,
fromDataToBuffer,
fromTableToBuffer,
isMultiVector,
makeEmptyTable,
tableFromIPC,
} from "./arrow";
@@ -87,16 +84,6 @@ export interface OptimizeOptions {
* tbl.optimize({cleanupOlderThan: new Date()});
*/
cleanupOlderThan: Date;
/**
* Because they may be part of an in-progress transaction, files newer than
* 7 days old are not deleted by default. If you are sure that there are no
* in-progress transactions, then you can set this to true to delete all
* files older than `cleanupOlderThan`.
*
* **WARNING**: This should only be set to true if you can guarantee that
* no other process is currently working on this dataset. Otherwise the
* dataset could be put into a corrupted state.
*/
deleteUnverified: boolean;
}
@@ -394,16 +381,15 @@ export abstract class Table {
abstract vectorSearch(vector: IntoVector | MultiVector): VectorQuery;
/**
* Add new columns with defined values.
* @param {AddColumnsSql[] | Field | Field[] | Schema} newColumnTransforms Either:
* - An array of objects with column names and SQL expressions to calculate values
* - A single Arrow Field defining one column with its data type (column will be initialized with null values)
* - An array of Arrow Fields defining columns with their data types (columns will be initialized with null values)
* - An Arrow Schema defining columns with their data types (columns will be initialized with null values)
* @param {AddColumnsSql[]} newColumnTransforms pairs of column names and
* the SQL expression to use to calculate the value of the new column. These
* expressions will be evaluated for each row in the table, and can
* reference existing columns in the table.
* @returns {Promise<AddColumnsResult>} A promise that resolves to an object
* containing the new version number of the table after adding the columns.
*/
abstract addColumns(
newColumnTransforms: AddColumnsSql[] | Field | Field[] | Schema,
newColumnTransforms: AddColumnsSql[],
): Promise<AddColumnsResult>;
/**
@@ -515,7 +501,19 @@ export abstract class Table {
* - Index: Optimizes the indices, adding new data to existing indices
*
*
* The frequency an application should call optimize is based on the frequency of
* Experimental API
* ----------------
*
* The optimization process is undergoing active development and may change.
* Our goal with these changes is to improve the performance of optimization and
* reduce the complexity.
*
* That being said, it is essential today to run optimize if you want the best
performance. It should be stable and safe to use in production, but it is our
* hope that the API may be simplified (or not even need to be called) in the
* future.
*
The frequency an application should call optimize is based on the frequency of
* data modifications. If data is frequently added, deleted, or updated then
* optimize should be run frequently. A good rule of thumb is to run optimize if
* you have added or modified 100,000 or more records or run more than 20 data
@@ -808,40 +806,9 @@ export class LocalTable extends Table {
// TODO: Support BatchUDF
async addColumns(
newColumnTransforms: AddColumnsSql[] | Field | Field[] | Schema,
newColumnTransforms: AddColumnsSql[],
): Promise<AddColumnsResult> {
// Handle single Field -> convert to array of Fields
if (newColumnTransforms instanceof Field) {
newColumnTransforms = [newColumnTransforms];
}
// Handle array of Fields -> convert to Schema
if (
Array.isArray(newColumnTransforms) &&
newColumnTransforms.length > 0 &&
newColumnTransforms[0] instanceof Field
) {
const fields = newColumnTransforms as Field[];
newColumnTransforms = new Schema(fields);
}
// Handle Schema -> use schema-based approach
if (newColumnTransforms instanceof Schema) {
const schema = newColumnTransforms;
// Convert schema to buffer using Arrow IPC format
const emptyTable = makeEmptyTable(schema);
const schemaBuf = await fromTableToBuffer(emptyTable);
return await this.inner.addColumnsWithSchema(schemaBuf);
}
// Handle SQL expressions (existing functionality)
if (Array.isArray(newColumnTransforms)) {
return await this.inner.addColumns(
newColumnTransforms as AddColumnsSql[],
);
}
throw new Error("Invalid input type for addColumns");
return await this.inner.addColumns(newColumnTransforms);
}
async alterColumns(

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-darwin-arm64",
"version": "0.27.0",
"version": "0.26.0",
"os": ["darwin"],
"cpu": ["arm64"],
"main": "lancedb.darwin-arm64.node",
@@ -8,9 +8,5 @@
"license": "Apache-2.0",
"engines": {
"node": ">= 18"
},
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-linux-arm64-gnu",
"version": "0.27.0",
"version": "0.26.0",
"os": ["linux"],
"cpu": ["arm64"],
"main": "lancedb.linux-arm64-gnu.node",
@@ -9,9 +9,5 @@
"engines": {
"node": ">= 18"
},
"libc": ["glibc"],
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
"libc": ["glibc"]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-linux-arm64-musl",
"version": "0.27.0",
"version": "0.26.0",
"os": ["linux"],
"cpu": ["arm64"],
"main": "lancedb.linux-arm64-musl.node",
@@ -9,9 +9,5 @@
"engines": {
"node": ">= 18"
},
"libc": ["musl"],
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
"libc": ["musl"]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-linux-x64-gnu",
"version": "0.27.0",
"version": "0.26.0",
"os": ["linux"],
"cpu": ["x64"],
"main": "lancedb.linux-x64-gnu.node",
@@ -9,9 +9,5 @@
"engines": {
"node": ">= 18"
},
"libc": ["glibc"],
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
"libc": ["glibc"]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-linux-x64-musl",
"version": "0.27.0",
"version": "0.26.0",
"os": ["linux"],
"cpu": ["x64"],
"main": "lancedb.linux-x64-musl.node",
@@ -9,9 +9,5 @@
"engines": {
"node": ">= 18"
},
"libc": ["musl"],
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
"libc": ["musl"]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-win32-arm64-msvc",
"version": "0.27.0",
"version": "0.26.0",
"os": [
"win32"
],
@@ -14,9 +14,5 @@
"license": "Apache-2.0",
"engines": {
"node": ">= 18"
},
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@lancedb/lancedb-win32-x64-msvc",
"version": "0.27.0",
"version": "0.26.0",
"os": ["win32"],
"cpu": ["x64"],
"main": "lancedb.win32-x64-msvc.node",
@@ -8,9 +8,5 @@
"license": "Apache-2.0",
"engines": {
"node": ">= 18"
},
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
}
}

6312
nodejs/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -11,7 +11,7 @@
"ann"
],
"private": false,
"version": "0.27.0",
"version": "0.26.0",
"main": "dist/index.js",
"exports": {
".": "./dist/index.js",
@@ -21,29 +21,28 @@
},
"types": "dist/index.d.ts",
"napi": {
"binaryName": "lancedb",
"targets": [
"aarch64-apple-darwin",
"x86_64-unknown-linux-gnu",
"aarch64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-unknown-linux-musl",
"x86_64-pc-windows-msvc",
"aarch64-pc-windows-msvc"
]
"name": "lancedb",
"triples": {
"defaults": false,
"additional": [
"aarch64-apple-darwin",
"x86_64-unknown-linux-gnu",
"aarch64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-unknown-linux-musl",
"x86_64-pc-windows-msvc",
"aarch64-pc-windows-msvc"
]
}
},
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/lancedb/lancedb"
},
"devDependencies": {
"@aws-sdk/client-dynamodb": "^3.33.0",
"@aws-sdk/client-kms": "^3.33.0",
"@aws-sdk/client-s3": "^3.33.0",
"@biomejs/biome": "^1.7.3",
"@jest/globals": "^29.7.0",
"@napi-rs/cli": "^3.5.1",
"@napi-rs/cli": "^2.18.3",
"@types/axios": "^0.14.0",
"@types/jest": "^29.1.2",
"@types/node": "^22.7.4",
@@ -72,9 +71,9 @@
"os": ["darwin", "linux", "win32"],
"scripts": {
"artifacts": "napi artifacts",
"build:debug": "napi build --platform --dts ../lancedb/native.d.ts --js ../lancedb/native.js --output-dir lancedb",
"build:debug": "napi build --platform --no-const-enum --dts ../lancedb/native.d.ts --js ../lancedb/native.js lancedb",
"postbuild:debug": "shx mkdir -p dist && shx cp lancedb/*.node dist/",
"build:release": "napi build --platform --release --dts ../lancedb/native.d.ts --js ../lancedb/native.js --output-dir dist",
"build:release": "napi build --platform --no-const-enum --release --dts ../lancedb/native.d.ts --js ../lancedb/native.js dist/",
"postbuild:release": "shx mkdir -p dist && shx cp lancedb/*.node dist/",
"build": "npm run build:debug && npm run tsc",
"build-release": "npm run build:release && npm run tsc",
@@ -88,7 +87,7 @@
"prepublishOnly": "napi prepublish -t npm",
"test": "jest --verbose",
"integration": "S3_TEST=1 npm run test",
"universal": "napi universalize",
"universal": "napi universal",
"version": "napi version"
},
"dependencies": {

View File

@@ -8,12 +8,11 @@ use lancedb::database::{CreateTableMode, Database};
use napi::bindgen_prelude::*;
use napi_derive::*;
use crate::ConnectionOptions;
use crate::error::NapiErrorExt;
use crate::header::JsHeaderProvider;
use crate::table::Table;
use crate::ConnectionOptions;
use lancedb::connection::{ConnectBuilder, Connection as LanceDBConnection};
use lancedb::ipc::{ipc_file_to_batches, ipc_file_to_schema};
#[napi]

View File

@@ -1,19 +1,20 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-FileCopyrightText: Copyright The LanceDB Authors
use napi::{bindgen_prelude::*, threadsafe_function::ThreadsafeFunction};
use napi::{
bindgen_prelude::*,
threadsafe_function::{ErrorStrategy, ThreadsafeFunction},
};
use napi_derive::napi;
use std::collections::HashMap;
use std::sync::Arc;
type GetHeadersFn = ThreadsafeFunction<(), Promise<HashMap<String, String>>, (), Status, false>;
/// JavaScript HeaderProvider implementation that wraps a JavaScript callback.
/// This is the only native header provider - all header provider implementations
/// should provide a JavaScript function that returns headers.
#[napi]
pub struct JsHeaderProvider {
get_headers_fn: Arc<GetHeadersFn>,
get_headers_fn: Arc<ThreadsafeFunction<(), ErrorStrategy::CalleeHandled>>,
}
impl Clone for JsHeaderProvider {
@@ -28,12 +29,9 @@ impl Clone for JsHeaderProvider {
impl JsHeaderProvider {
/// Create a new JsHeaderProvider from a JavaScript callback
#[napi(constructor)]
pub fn new(
get_headers_callback: Function<(), Promise<HashMap<String, String>>>,
) -> Result<Self> {
pub fn new(get_headers_callback: JsFunction) -> Result<Self> {
let get_headers_fn = get_headers_callback
.build_threadsafe_function()
.build()
.create_threadsafe_function(0, |ctx| Ok(vec![ctx.value]))
.map_err(|e| {
Error::new(
Status::GenericFailure,
@@ -53,7 +51,7 @@ impl lancedb::remote::HeaderProvider for JsHeaderProvider {
async fn get_headers(&self) -> lancedb::error::Result<HashMap<String, String>> {
// Call the JavaScript function asynchronously
let promise: Promise<HashMap<String, String>> =
self.get_headers_fn.call_async(()).await.map_err(|e| {
self.get_headers_fn.call_async(Ok(())).await.map_err(|e| {
lancedb::error::Error::Runtime {
message: format!("Failed to call JavaScript get_headers: {}", e),
}

View File

@@ -3,12 +3,12 @@
use std::sync::Mutex;
use lancedb::index::Index as LanceDbIndex;
use lancedb::index::scalar::{BTreeIndexBuilder, FtsIndexBuilder};
use lancedb::index::vector::{
IvfFlatIndexBuilder, IvfHnswPqIndexBuilder, IvfHnswSqIndexBuilder, IvfPqIndexBuilder,
IvfRqIndexBuilder,
};
use lancedb::index::Index as LanceDbIndex;
use napi_derive::napi;
use crate::util::parse_distance_type;

View File

@@ -60,7 +60,7 @@ pub struct OpenTableOptions {
pub storage_options: Option<HashMap<String, String>>,
}
#[napi_derive::module_init]
#[napi::module_init]
fn init() {
let env = Env::new()
.filter_or("LANCEDB_LOG", "warn")

View File

@@ -17,11 +17,11 @@ use lancedb::query::VectorQuery as LanceDbVectorQuery;
use napi::bindgen_prelude::*;
use napi_derive::napi;
use crate::error::NapiErrorExt;
use crate::error::convert_error;
use crate::error::NapiErrorExt;
use crate::iterator::RecordBatchIterator;
use crate::rerankers::RerankHybridCallbackArgs;
use crate::rerankers::Reranker;
use crate::rerankers::RerankerCallbacks;
use crate::util::{parse_distance_type, schema_to_buffer};
#[napi]
@@ -42,7 +42,7 @@ impl Query {
}
#[napi]
pub fn full_text_search(&mut self, query: Object) -> napi::Result<()> {
pub fn full_text_search(&mut self, query: napi::JsObject) -> napi::Result<()> {
let query = parse_fts_query(query)?;
self.inner = self.inner.clone().full_text_search(query);
Ok(())
@@ -235,7 +235,7 @@ impl VectorQuery {
}
#[napi]
pub fn full_text_search(&mut self, query: Object) -> napi::Result<()> {
pub fn full_text_search(&mut self, query: napi::JsObject) -> napi::Result<()> {
let query = parse_fts_query(query)?;
self.inner = self.inner.clone().full_text_search(query);
Ok(())
@@ -272,13 +272,11 @@ impl VectorQuery {
}
#[napi]
pub fn rerank(
&mut self,
rerank_hybrid: Function<RerankHybridCallbackArgs, Promise<Buffer>>,
) -> napi::Result<()> {
let reranker = Reranker::new(rerank_hybrid)?;
self.inner = self.inner.clone().rerank(Arc::new(reranker));
Ok(())
pub fn rerank(&mut self, callbacks: RerankerCallbacks) {
self.inner = self
.inner
.clone()
.rerank(Arc::new(Reranker::new(callbacks)));
}
#[napi(catch_unwind)]
@@ -525,12 +523,12 @@ impl JsFullTextQuery {
}
}
fn parse_fts_query(query: Object) -> napi::Result<FullTextSearchQuery> {
if let Ok(Some(query)) = query.get::<&JsFullTextQuery>("query") {
fn parse_fts_query(query: napi::JsObject) -> napi::Result<FullTextSearchQuery> {
if let Ok(Some(query)) = query.get::<_, &JsFullTextQuery>("query") {
Ok(FullTextSearchQuery::new_query(query.inner.clone()))
} else if let Ok(Some(query_text)) = query.get::<String>("query") {
} else if let Ok(Some(query_text)) = query.get::<_, String>("query") {
let mut query_text = query_text;
let columns = query.get::<Option<Vec<String>>>("columns")?.flatten();
let columns = query.get::<_, Option<Vec<String>>>("columns")?.flatten();
let is_phrase =
query_text.len() >= 2 && query_text.starts_with('"') && query_text.ends_with('"');
@@ -551,12 +549,15 @@ fn parse_fts_query(query: Object) -> napi::Result<FullTextSearchQuery> {
}
};
let mut query = FullTextSearchQuery::new_query(query);
if let Some(cols) = columns
&& !cols.is_empty()
{
query = query.with_columns(&cols).map_err(|e| {
napi::Error::from_reason(format!("Failed to set full text search columns: {}", e))
})?;
if let Some(cols) = columns {
if !cols.is_empty() {
query = query.with_columns(&cols).map_err(|e| {
napi::Error::from_reason(format!(
"Failed to set full text search columns: {}",
e
))
})?;
}
}
Ok(query)
} else {

View File

@@ -3,7 +3,10 @@
use arrow_array::RecordBatch;
use async_trait::async_trait;
use napi::{bindgen_prelude::*, threadsafe_function::ThreadsafeFunction};
use napi::{
bindgen_prelude::*,
threadsafe_function::{ErrorStrategy, ThreadsafeFunction},
};
use napi_derive::napi;
use lancedb::ipc::batches_to_ipc_file;
@@ -12,28 +15,27 @@ use lancedb::{error::Error, ipc::ipc_file_to_batches};
use crate::error::NapiErrorExt;
type RerankHybridFn = ThreadsafeFunction<
RerankHybridCallbackArgs,
Promise<Buffer>,
RerankHybridCallbackArgs,
Status,
false,
>;
/// Reranker implementation that "wraps" a NodeJS Reranker implementation.
/// This contains references to the callbacks that can be used to invoke the
/// reranking methods on the NodeJS implementation and handles serializing the
/// record batches to Arrow IPC buffers.
#[napi]
pub struct Reranker {
rerank_hybrid: RerankHybridFn,
/// callback to the Javascript which will call the rerankHybrid method of
/// some Reranker implementation
rerank_hybrid: ThreadsafeFunction<RerankHybridCallbackArgs, ErrorStrategy::CalleeHandled>,
}
#[napi]
impl Reranker {
pub fn new(
rerank_hybrid: Function<RerankHybridCallbackArgs, Promise<Buffer>>,
) -> napi::Result<Self> {
let rerank_hybrid = rerank_hybrid.build_threadsafe_function().build()?;
Ok(Self { rerank_hybrid })
#[napi]
pub fn new(callbacks: RerankerCallbacks) -> Self {
let rerank_hybrid = callbacks
.rerank_hybrid
.create_threadsafe_function(0, move |ctx| Ok(vec![ctx.value]))
.unwrap();
Self { rerank_hybrid }
}
}
@@ -47,16 +49,16 @@ impl lancedb::rerankers::Reranker for Reranker {
) -> lancedb::error::Result<RecordBatch> {
let callback_args = RerankHybridCallbackArgs {
query: query.to_string(),
vec_results: Buffer::from(batches_to_ipc_file(&[vector_results])?.as_ref()),
fts_results: Buffer::from(batches_to_ipc_file(&[fts_results])?.as_ref()),
vec_results: batches_to_ipc_file(&[vector_results])?,
fts_results: batches_to_ipc_file(&[fts_results])?,
};
let promised_buffer: Promise<Buffer> = self
.rerank_hybrid
.call_async(callback_args)
.call_async(Ok(callback_args))
.await
.map_err(|e| Error::Runtime {
message: format!("napi error status={}, reason={}", e.status, e.reason),
})?;
message: format!("napi error status={}, reason={}", e.status, e.reason),
})?;
let buffer = promised_buffer.await.map_err(|e| Error::Runtime {
message: format!("napi error status={}, reason={}", e.status, e.reason),
})?;
@@ -75,11 +77,16 @@ impl std::fmt::Debug for Reranker {
}
}
#[napi(object)]
pub struct RerankerCallbacks {
pub rerank_hybrid: JsFunction,
}
#[napi(object)]
pub struct RerankHybridCallbackArgs {
pub query: String,
pub vec_results: Buffer,
pub fts_results: Buffer,
pub vec_results: Vec<u8>,
pub fts_results: Vec<u8>,
}
fn buffer_to_record_batch(buffer: Buffer) -> Result<RecordBatch> {

View File

@@ -95,7 +95,8 @@ impl napi::bindgen_prelude::FromNapiValue for Session {
napi_val: napi::sys::napi_value,
) -> napi::Result<Self> {
let object: napi::bindgen_prelude::ClassInstance<Self> =
unsafe { napi::bindgen_prelude::ClassInstance::from_napi_value(env, napi_val)? };
Ok((*object).clone())
napi::bindgen_prelude::ClassInstance::from_napi_value(env, napi_val)?;
let copy = object.clone();
Ok(copy)
}
}

View File

@@ -3,7 +3,7 @@
use std::collections::HashMap;
use lancedb::ipc::{ipc_file_to_batches, ipc_file_to_schema};
use lancedb::ipc::ipc_file_to_batches;
use lancedb::table::{
AddDataMode, ColumnAlteration as LanceColumnAlteration, Duration, NewColumnTransform,
OptimizeAction, OptimizeOptions, Table as LanceDbTable,
@@ -71,17 +71,6 @@ impl Table {
pub async fn add(&self, buf: Buffer, mode: String) -> napi::Result<AddResult> {
let batches = ipc_file_to_batches(buf.to_vec())
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC file: {}", e)))?;
let batches = batches
.into_iter()
.map(|batch| {
batch.map_err(|e| {
napi::Error::from_reason(format!(
"Failed to read record batch from IPC file: {}",
e
))
})
})
.collect::<Result<Vec<_>>>()?;
let mut op = self.inner_ref()?.add(batches);
op = if mode == "append" {
@@ -279,23 +268,6 @@ impl Table {
Ok(res.into())
}
#[napi(catch_unwind)]
pub async fn add_columns_with_schema(
&self,
schema_buf: Buffer,
) -> napi::Result<AddColumnsResult> {
let schema = ipc_file_to_schema(schema_buf.to_vec())
.map_err(|e| napi::Error::from_reason(format!("Failed to read IPC schema: {}", e)))?;
let transforms = NewColumnTransform::AllNulls(schema);
let res = self
.inner_ref()?
.add_columns(transforms, None)
.await
.default_error()?;
Ok(res.into())
}
#[napi(catch_unwind)]
pub async fn alter_columns(
&self,
@@ -770,14 +742,12 @@ impl From<lancedb::table::AddResult> for AddResult {
#[napi(object)]
pub struct DeleteResult {
pub num_deleted_rows: i64,
pub version: i64,
}
impl From<lancedb::table::DeleteResult> for DeleteResult {
fn from(value: lancedb::table::DeleteResult) -> Self {
Self {
num_deleted_rows: value.num_deleted_rows as i64,
version: value.version as i64,
}
}

View File

@@ -1,5 +1,5 @@
[tool.bumpversion]
current_version = "0.30.1"
current_version = "0.29.0"
parse = """(?x)
(?P<major>0|[1-9]\\d*)\\.
(?P<minor>0|[1-9]\\d*)\\.

View File

@@ -1,13 +1,13 @@
[package]
name = "lancedb-python"
version = "0.30.1"
version = "0.29.0"
edition.workspace = true
description = "Python bindings for LanceDB"
license.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true
rust-version = "1.91.0"
rust-version = "1.88.0"
[lib]
name = "_lancedb"
@@ -16,11 +16,9 @@ crate-type = ["cdylib"]
[dependencies]
arrow = { version = "57.2", features = ["pyarrow"] }
async-trait = "0.1"
bytes = "1"
lancedb = { path = "../rust/lancedb", default-features = false }
lance-core.workspace = true
lance-namespace.workspace = true
lance-namespace-impls.workspace = true
lance-io.workspace = true
env_logger.workspace = true
pyo3 = { version = "0.26", features = ["extension-module", "abi3-py39"] }
@@ -30,8 +28,6 @@ pyo3-async-runtimes = { version = "0.26", features = [
] }
pin-project = "1.1.5"
futures.workspace = true
serde = "1"
serde_json = "1"
snafu.workspace = true
tokio = { version = "1.40", features = ["sync"] }

View File

@@ -1,206 +0,0 @@
| Name | Version | License | URL |
|--------------------------------|-----------------|--------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------|
| InstructorEmbedding | 1.0.1 | Apache License 2.0 | https://github.com/HKUNLP/instructor-embedding |
| Jinja2 | 3.1.6 | BSD License | https://github.com/pallets/jinja/ |
| Markdown | 3.10.2 | BSD-3-Clause | https://Python-Markdown.github.io/ |
| MarkupSafe | 3.0.3 | BSD-3-Clause | https://github.com/pallets/markupsafe/ |
| PyJWT | 2.11.0 | MIT | https://github.com/jpadilla/pyjwt |
| PyYAML | 6.0.3 | MIT License | https://pyyaml.org/ |
| Pygments | 2.19.2 | BSD License | https://pygments.org |
| accelerate | 1.12.0 | Apache Software License | https://github.com/huggingface/accelerate |
| adlfs | 2026.2.0 | BSD License | UNKNOWN |
| aiohappyeyeballs | 2.6.1 | Python Software Foundation License | https://github.com/aio-libs/aiohappyeyeballs |
| aiohttp | 3.13.3 | Apache-2.0 AND MIT | https://github.com/aio-libs/aiohttp |
| aiosignal | 1.4.0 | Apache Software License | https://github.com/aio-libs/aiosignal |
| annotated-types | 0.7.0 | MIT License | https://github.com/annotated-types/annotated-types |
| anyio | 4.12.1 | MIT | https://anyio.readthedocs.io/en/stable/versionhistory.html |
| appnope | 0.1.4 | BSD License | http://github.com/minrk/appnope |
| asttokens | 3.0.1 | Apache 2.0 | https://github.com/gristlabs/asttokens |
| attrs | 25.4.0 | MIT | https://www.attrs.org/en/stable/changelog.html |
| awscli | 1.44.35 | Apache Software License | http://aws.amazon.com/cli/ |
| azure-core | 1.38.0 | MIT License | https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/core/azure-core |
| azure-datalake-store | 0.0.53 | MIT License | https://github.com/Azure/azure-data-lake-store-python |
| azure-identity | 1.25.1 | MIT | https://github.com/Azure/azure-sdk-for-python |
| azure-storage-blob | 12.28.0 | MIT License | https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob |
| babel | 2.18.0 | BSD License | https://babel.pocoo.org/ |
| backrefs | 6.1 | MIT | https://github.com/facelessuser/backrefs |
| beautifulsoup4 | 4.14.3 | MIT License | https://www.crummy.com/software/BeautifulSoup/bs4/ |
| bleach | 6.3.0 | Apache Software License | https://github.com/mozilla/bleach |
| boto3 | 1.42.45 | Apache-2.0 | https://github.com/boto/boto3 |
| botocore | 1.42.45 | Apache-2.0 | https://github.com/boto/botocore |
| cachetools | 7.0.0 | MIT | https://github.com/tkem/cachetools/ |
| certifi | 2026.1.4 | Mozilla Public License 2.0 (MPL 2.0) | https://github.com/certifi/python-certifi |
| cffi | 2.0.0 | MIT | https://cffi.readthedocs.io/en/latest/whatsnew.html |
| cfgv | 3.5.0 | MIT | https://github.com/asottile/cfgv |
| charset-normalizer | 3.4.4 | MIT | https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md |
| click | 8.3.1 | BSD-3-Clause | https://github.com/pallets/click/ |
| cohere | 5.20.4 | MIT License | https://github.com/cohere-ai/cohere-python |
| colorama | 0.4.6 | BSD License | https://github.com/tartley/colorama |
| colpali_engine | 0.3.13 | MIT License | https://github.com/illuin-tech/colpali |
| comm | 0.2.3 | BSD License | https://github.com/ipython/comm |
| cryptography | 46.0.4 | Apache-2.0 OR BSD-3-Clause | https://github.com/pyca/cryptography |
| datafusion | 51.0.0 | Apache Software License | https://datafusion.apache.org/python |
| debugpy | 1.8.20 | MIT License | https://aka.ms/debugpy |
| decorator | 5.2.1 | BSD License | UNKNOWN |
| defusedxml | 0.7.1 | Python Software Foundation License | https://github.com/tiran/defusedxml |
| deprecation | 2.1.0 | Apache Software License | http://deprecation.readthedocs.io/ |
| distlib | 0.4.0 | Python Software Foundation License | https://github.com/pypa/distlib |
| distro | 1.9.0 | Apache Software License | https://github.com/python-distro/distro |
| docutils | 0.19 | BSD License; GNU General Public License (GPL); Public Domain; Python Software Foundation License | https://docutils.sourceforge.io/ |
| duckdb | 1.4.4 | MIT License | https://github.com/duckdb/duckdb-python |
| executing | 2.2.1 | MIT License | https://github.com/alexmojaki/executing |
| fastavro | 1.12.1 | MIT | https://github.com/fastavro/fastavro |
| fastjsonschema | 2.21.2 | BSD License | https://github.com/horejsek/python-fastjsonschema |
| filelock | 3.20.3 | Unlicense | https://github.com/tox-dev/py-filelock |
| frozenlist | 1.8.0 | Apache-2.0 | https://github.com/aio-libs/frozenlist |
| fsspec | 2026.2.0 | BSD-3-Clause | https://github.com/fsspec/filesystem_spec |
| ftfy | 6.3.1 | Apache-2.0 | https://ftfy.readthedocs.io/en/latest/ |
| ghp-import | 2.1.0 | Apache Software License | https://github.com/c-w/ghp-import |
| google-ai-generativelanguage | 0.6.15 | Apache Software License | https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage |
| google-api-core | 2.25.2 | Apache Software License | https://github.com/googleapis/python-api-core |
| google-api-python-client | 2.189.0 | Apache Software License | https://github.com/googleapis/google-api-python-client/ |
| google-auth | 2.48.0 | Apache Software License | https://github.com/googleapis/google-auth-library-python |
| google-auth-httplib2 | 0.3.0 | Apache Software License | https://github.com/GoogleCloudPlatform/google-auth-library-python-httplib2 |
| google-generativeai | 0.8.6 | Apache Software License | https://github.com/google/generative-ai-python |
| googleapis-common-protos | 1.72.0 | Apache Software License | https://github.com/googleapis/google-cloud-python/tree/main/packages/googleapis-common-protos |
| griffe | 2.0.0 | ISC | https://mkdocstrings.github.io/griffe |
| griffecli | 2.0.0 | ISC | UNKNOWN |
| griffelib | 2.0.0 | ISC | UNKNOWN |
| grpcio | 1.78.0 | Apache-2.0 | https://grpc.io |
| grpcio-status | 1.71.2 | Apache Software License | https://grpc.io |
| h11 | 0.16.0 | MIT License | https://github.com/python-hyper/h11 |
| hf-xet | 1.2.0 | Apache-2.0 | https://github.com/huggingface/xet-core |
| httpcore | 1.0.9 | BSD-3-Clause | https://www.encode.io/httpcore/ |
| httplib2 | 0.31.2 | MIT License | https://github.com/httplib2/httplib2 |
| httpx | 0.28.1 | BSD License | https://github.com/encode/httpx |
| huggingface_hub | 0.36.2 | Apache Software License | https://github.com/huggingface/huggingface_hub |
| ibm-cos-sdk | 2.14.3 | Apache Software License | https://github.com/ibm/ibm-cos-sdk-python |
| ibm-cos-sdk-core | 2.14.3 | Apache Software License | https://github.com/ibm/ibm-cos-sdk-python-core |
| ibm-cos-sdk-s3transfer | 2.14.3 | Apache Software License | https://github.com/IBM/ibm-cos-sdk-python-s3transfer |
| ibm_watsonx_ai | 1.5.1 | BSD License | https://ibm.github.io/watsonx-ai-python-sdk/changelog.html |
| identify | 2.6.16 | MIT | https://github.com/pre-commit/identify |
| idna | 3.11 | BSD-3-Clause | https://github.com/kjd/idna |
| iniconfig | 2.3.0 | MIT | https://github.com/pytest-dev/iniconfig |
| ipykernel | 6.31.0 | BSD-3-Clause | https://ipython.org |
| ipython | 9.10.0 | BSD-3-Clause | https://ipython.org |
| ipython_pygments_lexers | 1.1.1 | BSD License | https://github.com/ipython/ipython-pygments-lexers |
| isodate | 0.7.2 | BSD License | https://github.com/gweis/isodate/ |
| jedi | 0.19.2 | MIT License | https://github.com/davidhalter/jedi |
| jiter | 0.13.0 | MIT License | https://github.com/pydantic/jiter/ |
| jmespath | 1.0.1 | MIT License | https://github.com/jmespath/jmespath.py |
| joblib | 1.5.3 | BSD-3-Clause | https://joblib.readthedocs.io |
| jsonschema | 4.26.0 | MIT | https://github.com/python-jsonschema/jsonschema |
| jsonschema-specifications | 2025.9.1 | MIT | https://github.com/python-jsonschema/jsonschema-specifications |
| jupyter_client | 8.8.0 | BSD License | https://jupyter.org |
| jupyter_core | 5.9.1 | BSD-3-Clause | https://jupyter.org |
| jupyterlab_pygments | 0.3.0 | BSD License | https://github.com/jupyterlab/jupyterlab_pygments |
| jupytext | 1.19.1 | MIT License | https://github.com/mwouts/jupytext |
| lance-namespace | 0.4.5 | Apache-2.0 | https://github.com/lance-format/lance-namespace |
| lance-namespace-urllib3-client | 0.4.5 | Apache-2.0 | https://github.com/lance-format/lance-namespace |
| lancedb | 0.29.2 | Apache Software License | https://github.com/lancedb/lancedb |
| lomond | 0.3.3 | BSD License | https://github.com/wildfoundry/dataplicity-lomond |
| markdown-it-py | 4.0.0 | MIT License | https://github.com/executablebooks/markdown-it-py |
| matplotlib-inline | 0.2.1 | UNKNOWN | https://github.com/ipython/matplotlib-inline |
| mdit-py-plugins | 0.5.0 | MIT License | https://github.com/executablebooks/mdit-py-plugins |
| mdurl | 0.1.2 | MIT License | https://github.com/executablebooks/mdurl |
| mergedeep | 1.3.4 | MIT License | https://github.com/clarketm/mergedeep |
| mistune | 3.2.0 | BSD License | https://github.com/lepture/mistune |
| mkdocs | 1.6.1 | BSD-2-Clause | https://github.com/mkdocs/mkdocs |
| mkdocs-autorefs | 1.4.3 | ISC | https://mkdocstrings.github.io/autorefs |
| mkdocs-get-deps | 0.2.0 | MIT | https://github.com/mkdocs/get-deps |
| mkdocs-jupyter | 0.25.1 | Apache-2.0 | https://github.com/danielfrg/mkdocs-jupyter |
| mkdocs-material | 9.7.1 | MIT | https://github.com/squidfunk/mkdocs-material |
| mkdocs-material-extensions | 1.3.1 | MIT | https://github.com/facelessuser/mkdocs-material-extensions |
| mkdocstrings | 1.0.3 | ISC | https://mkdocstrings.github.io |
| mkdocstrings-python | 2.0.2 | ISC | https://mkdocstrings.github.io/python |
| mpmath | 1.3.0 | BSD License | http://mpmath.org/ |
| msal | 1.34.0 | MIT License | https://github.com/AzureAD/microsoft-authentication-library-for-python |
| msal-extensions | 1.3.1 | MIT License | https://github.com/AzureAD/microsoft-authentication-extensions-for-python/releases |
| multidict | 6.7.1 | Apache License 2.0 | https://github.com/aio-libs/multidict |
| nbclient | 0.10.4 | BSD License | https://jupyter.org |
| nbconvert | 7.17.0 | BSD License | https://jupyter.org |
| nbformat | 5.10.4 | BSD License | https://jupyter.org |
| nest-asyncio | 1.6.0 | BSD License | https://github.com/erdewit/nest_asyncio |
| networkx | 3.6.1 | BSD-3-Clause | https://networkx.org/ |
| nodeenv | 1.10.0 | BSD License | https://github.com/ekalinin/nodeenv |
| numpy | 2.4.2 | BSD-3-Clause AND 0BSD AND MIT AND Zlib AND CC0-1.0 | https://numpy.org |
| ollama | 0.6.1 | MIT | https://ollama.com |
| open_clip_torch | 3.2.0 | MIT License | https://github.com/mlfoundations/open_clip |
| openai | 2.18.0 | Apache Software License | https://github.com/openai/openai-python |
| packaging | 26.0 | Apache-2.0 OR BSD-2-Clause | https://github.com/pypa/packaging |
| paginate | 0.5.7 | MIT License | https://github.com/Signum/paginate |
| pandas | 2.3.3 | BSD License | https://pandas.pydata.org |
| pandocfilters | 1.5.1 | BSD License | http://github.com/jgm/pandocfilters |
| parso | 0.8.6 | MIT License | https://github.com/davidhalter/parso |
| pathspec | 1.0.4 | Mozilla Public License 2.0 (MPL 2.0) | UNKNOWN |
| peft | 0.17.1 | Apache Software License | https://github.com/huggingface/peft |
| pexpect | 4.9.0 | ISC License (ISCL) | https://pexpect.readthedocs.io/ |
| pillow | 12.1.0 | MIT-CMU | https://python-pillow.github.io |
| platformdirs | 4.5.1 | MIT | https://github.com/tox-dev/platformdirs |
| pluggy | 1.6.0 | MIT License | UNKNOWN |
| polars | 1.3.0 | MIT License | https://www.pola.rs/ |
| pre_commit | 4.5.1 | MIT | https://github.com/pre-commit/pre-commit |
| prompt_toolkit | 3.0.52 | BSD License | https://github.com/prompt-toolkit/python-prompt-toolkit |
| propcache | 0.4.1 | Apache Software License | https://github.com/aio-libs/propcache |
| proto-plus | 1.27.1 | Apache Software License | https://github.com/googleapis/proto-plus-python |
| protobuf | 5.29.6 | 3-Clause BSD License | https://developers.google.com/protocol-buffers/ |
| psutil | 7.2.2 | BSD-3-Clause | https://github.com/giampaolo/psutil |
| ptyprocess | 0.7.0 | ISC License (ISCL) | https://github.com/pexpect/ptyprocess |
| pure_eval | 0.2.3 | MIT License | http://github.com/alexmojaki/pure_eval |
| pyarrow | 23.0.0 | Apache-2.0 | https://arrow.apache.org/ |
| pyarrow-stubs | 20.0.0.20251215 | BSD-2-Clause | https://github.com/zen-xu/pyarrow-stubs |
| pyasn1 | 0.6.2 | BSD-2-Clause | https://github.com/pyasn1/pyasn1 |
| pyasn1_modules | 0.4.2 | BSD License | https://github.com/pyasn1/pyasn1-modules |
| pycparser | 3.0 | BSD-3-Clause | https://github.com/eliben/pycparser |
| pydantic | 2.12.5 | MIT | https://github.com/pydantic/pydantic |
| pydantic_core | 2.41.5 | MIT | https://github.com/pydantic/pydantic-core |
| pylance | 2.0.0 | Apache Software License | UNKNOWN |
| pymdown-extensions | 10.20.1 | MIT | https://github.com/facelessuser/pymdown-extensions |
| pyparsing | 3.3.2 | MIT | https://github.com/pyparsing/pyparsing/ |
| pyright | 1.1.408 | MIT | https://github.com/RobertCraigie/pyright-python |
| pytest | 9.0.2 | MIT | https://docs.pytest.org/en/latest/ |
| pytest-asyncio | 1.3.0 | Apache-2.0 | https://github.com/pytest-dev/pytest-asyncio |
| pytest-mock | 3.15.1 | MIT License | https://github.com/pytest-dev/pytest-mock/ |
| python-dateutil | 2.9.0.post0 | Apache Software License; BSD License | https://github.com/dateutil/dateutil |
| pytz | 2025.2 | MIT License | http://pythonhosted.org/pytz |
| pyyaml_env_tag | 1.1 | MIT | https://github.com/waylan/pyyaml-env-tag |
| pyzmq | 27.1.0 | BSD License | https://pyzmq.readthedocs.org |
| referencing | 0.37.0 | MIT | https://github.com/python-jsonschema/referencing |
| regex | 2026.1.15 | Apache-2.0 AND CNRI-Python | https://github.com/mrabarnett/mrab-regex |
| requests | 2.32.5 | Apache Software License | https://requests.readthedocs.io |
| rpds-py | 0.30.0 | MIT | https://github.com/crate-py/rpds |
| rsa | 4.7.2 | Apache Software License | https://stuvel.eu/rsa |
| ruff | 0.15.0 | MIT License | https://docs.astral.sh/ruff |
| s3transfer | 0.16.0 | Apache Software License | https://github.com/boto/s3transfer |
| safetensors | 0.7.0 | Apache Software License | https://github.com/huggingface/safetensors |
| scikit-learn | 1.8.0 | BSD-3-Clause | https://scikit-learn.org |
| scipy | 1.17.0 | BSD License | https://scipy.org/ |
| sentence-transformers | 5.2.2 | Apache Software License | https://www.SBERT.net |
| sentencepiece | 0.2.1 | UNKNOWN | https://github.com/google/sentencepiece |
| six | 1.17.0 | MIT License | https://github.com/benjaminp/six |
| sniffio | 1.3.1 | Apache Software License; MIT License | https://github.com/python-trio/sniffio |
| soupsieve | 2.8.3 | MIT | https://github.com/facelessuser/soupsieve |
| stack-data | 0.6.3 | MIT License | http://github.com/alexmojaki/stack_data |
| sympy | 1.14.0 | BSD License | https://sympy.org |
| tabulate | 0.9.0 | MIT License | https://github.com/astanin/python-tabulate |
| tantivy | 0.25.1 | UNKNOWN | UNKNOWN |
| threadpoolctl | 3.6.0 | BSD License | https://github.com/joblib/threadpoolctl |
| timm | 1.0.24 | Apache Software License | https://github.com/huggingface/pytorch-image-models |
| tinycss2 | 1.4.0 | BSD License | https://www.courtbouillon.org/tinycss2 |
| tokenizers | 0.22.2 | Apache Software License | https://github.com/huggingface/tokenizers |
| torch | 2.8.0 | BSD License | https://pytorch.org/ |
| torchvision | 0.23.0 | BSD | https://github.com/pytorch/vision |
| tornado | 6.5.4 | Apache Software License | http://www.tornadoweb.org/ |
| tqdm | 4.67.3 | MPL-2.0 AND MIT | https://tqdm.github.io |
| traitlets | 5.14.3 | BSD License | https://github.com/ipython/traitlets |
| transformers | 4.57.6 | Apache Software License | https://github.com/huggingface/transformers |
| types-requests | 2.32.4.20260107 | Apache-2.0 | https://github.com/python/typeshed |
| typing-inspection | 0.4.2 | MIT | https://github.com/pydantic/typing-inspection |
| typing_extensions | 4.15.0 | PSF-2.0 | https://github.com/python/typing_extensions |
| tzdata | 2025.3 | Apache-2.0 | https://github.com/python/tzdata |
| uritemplate | 4.2.0 | BSD 3-Clause OR Apache-2.0 | https://uritemplate.readthedocs.org |
| urllib3 | 2.6.3 | MIT | https://github.com/urllib3/urllib3/blob/main/CHANGES.rst |
| virtualenv | 20.36.1 | MIT | https://github.com/pypa/virtualenv |
| watchdog | 6.0.0 | Apache Software License | https://github.com/gorakhargosh/watchdog |
| webencodings | 0.5.1 | BSD License | https://github.com/SimonSapin/python-webencodings |
| yarl | 1.22.0 | Apache Software License | https://github.com/aio-libs/yarl |

View File

@@ -1,4 +1,4 @@
# LanceDB Python SDK
# LanceDB
A Python library for [LanceDB](https://github.com/lancedb/lancedb).

File diff suppressed because it is too large Load Diff

View File

@@ -3,10 +3,10 @@ name = "lancedb"
# version in Cargo.toml
dynamic = ["version"]
dependencies = [
"deprecation>=2.1.0",
"numpy>=1.24.0",
"deprecation",
"numpy",
"overrides>=0.7; python_version<'3.12'",
"packaging>=23.0",
"packaging",
"pyarrow>=16",
"pydantic>=1.10",
"tqdm>=4.27.0",
@@ -45,51 +45,51 @@ repository = "https://github.com/lancedb/lancedb"
[project.optional-dependencies]
pylance = [
"pylance>=4.0.0b7",
"pylance>=1.0.0b14",
]
tests = [
"aiohttp>=3.9.0",
"boto3>=1.28.57",
"aiohttp",
"boto3",
"pandas>=1.4",
"pytest>=7.0",
"pytest-mock>=3.10",
"pytest-asyncio>=0.21",
"duckdb>=0.9.0",
"pytz>=2023.3",
"pytest",
"pytest-mock",
"pytest-asyncio",
"duckdb",
"pytz",
"polars>=0.19, <=1.3.0",
"tantivy>=0.20.0",
"pyarrow-stubs>=16.0",
"pylance>=4.0.0b7",
"requests>=2.31.0",
"datafusion>=52,<53",
"tantivy",
"pyarrow-stubs",
"pylance>=1.0.0b14",
"requests",
"datafusion",
]
dev = [
"ruff>=0.3.0",
"pre-commit>=3.5.0",
"pyright>=1.1.350",
"ruff",
"pre-commit",
"pyright",
'typing-extensions>=4.0.0; python_version < "3.11"',
]
docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings-python"]
clip = ["torch", "pillow>=12.1.1", "open-clip-torch"]
siglip = ["torch", "pillow>=12.1.1", "transformers>=4.41.0","sentencepiece"]
clip = ["torch", "pillow", "open-clip-torch"]
siglip = ["torch", "pillow", "transformers>=4.41.0","sentencepiece"]
embeddings = [
"requests>=2.31.0",
"openai>=1.6.1",
"sentence-transformers>=2.2.0",
"torch>=2.0.0",
"pillow>=12.1.1",
"open-clip-torch>=2.20.0",
"cohere>=4.0",
"sentence-transformers",
"torch",
"pillow",
"open-clip-torch",
"cohere",
"colpali-engine>=0.3.10",
"huggingface_hub>=0.19.0",
"InstructorEmbedding>=1.0.1",
"google.generativeai>=0.3.0",
"huggingface_hub",
"InstructorEmbedding",
"google.generativeai",
"boto3>=1.28.57",
"awscli>=1.44.38",
"awscli>=1.29.57",
"botocore>=1.31.57",
'ibm-watsonx-ai>=1.1.2; python_version >= "3.10"',
"ollama>=0.3.0",
"sentencepiece>=0.1.99"
"sentencepiece"
]
azure = ["adlfs>=2024.2.0"]

View File

@@ -166,8 +166,6 @@ class Table:
async def checkout(self, version: Union[int, str]): ...
async def checkout_latest(self): ...
async def restore(self, version: Optional[Union[int, str]] = None): ...
async def prewarm_index(self, index_name: str) -> None: ...
async def prewarm_data(self, columns: Optional[List[str]] = None) -> None: ...
async def list_indices(self) -> list[IndexConfig]: ...
async def delete(self, filter: str) -> DeleteResult: ...
async def add_columns(self, columns: list[tuple[str, str]]) -> AddColumnsResult: ...

View File

@@ -1,10 +1,8 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright The LanceDB Authors
from functools import singledispatch
from typing import List, Optional, Tuple, Union
from lancedb.pydantic import LanceModel, model_to_dict
import pyarrow as pa
from ._lancedb import RecordBatchStream
@@ -82,32 +80,3 @@ def peek_reader(
yield from reader
return batch, pa.RecordBatchReader.from_batches(batch.schema, all_batches())
@singledispatch
def to_arrow(data) -> pa.Table:
"""Convert a single data object to a pa.Table."""
raise NotImplementedError(f"to_arrow not implemented for type {type(data)}")
@to_arrow.register(pa.RecordBatch)
def _arrow_from_batch(data: pa.RecordBatch) -> pa.Table:
return pa.Table.from_batches([data])
@to_arrow.register(pa.Table)
def _arrow_from_table(data: pa.Table) -> pa.Table:
return data
@to_arrow.register(list)
def _arrow_from_list(data: list) -> pa.Table:
if not data:
raise ValueError("Cannot create table from empty list without a schema")
if isinstance(data[0], LanceModel):
schema = data[0].__class__.to_arrow_schema()
dicts = [model_to_dict(d) for d in data]
return pa.Table.from_pylist(dicts, schema=schema)
return pa.Table.from_pylist(data)

View File

@@ -8,7 +8,7 @@ from abc import abstractmethod
from datetime import timedelta
from pathlib import Path
import sys
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Literal, Optional, Union
from typing import TYPE_CHECKING, Dict, Iterable, List, Literal, Optional, Union
if sys.version_info >= (3, 12):
from typing import override
@@ -1541,8 +1541,6 @@ class AsyncConnection(object):
storage_options_provider: Optional["StorageOptionsProvider"] = None,
index_cache_size: Optional[int] = None,
location: Optional[str] = None,
namespace_client: Optional[Any] = None,
managed_versioning: Optional[bool] = None,
) -> AsyncTable:
"""Open a Lance Table in the database.
@@ -1575,9 +1573,6 @@ class AsyncConnection(object):
The explicit location (URI) of the table. If provided, the table will be
opened from this location instead of deriving it from the database URI
and table name.
managed_versioning: bool, optional
Whether managed versioning is enabled for this table. If provided,
avoids a redundant describe_table call when namespace_client is set.
Returns
-------
@@ -1592,8 +1587,6 @@ class AsyncConnection(object):
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
location=location,
namespace_client=namespace_client,
managed_versioning=managed_versioning,
)
return AsyncTable(table)

View File

@@ -2,7 +2,6 @@
# SPDX-FileCopyrightText: Copyright The LanceDB Authors
import warnings
from typing import List, Union
import numpy as np
@@ -16,8 +15,6 @@ from .utils import weak_lru
@register("gte-text")
class GteEmbeddings(TextEmbeddingFunction):
"""
Deprecated: GTE embeddings should be used through sentence-transformers.
An embedding function that uses GTE-LARGE MLX format(for Apple silicon devices only)
as well as the standard cpu/gpu version from: https://huggingface.co/thenlper/gte-large.
@@ -64,13 +61,6 @@ class GteEmbeddings(TextEmbeddingFunction):
def __init__(self, **kwargs):
super().__init__(**kwargs)
warnings.warn(
"GTE embeddings as a standalone embedding function are deprecated. "
"Use the 'sentence-transformers' embedding function with a GTE model "
"instead.",
DeprecationWarning,
stacklevel=3,
)
self._ndims = None
if kwargs:
self.mlx = kwargs.get("mlx", False)

View File

@@ -110,9 +110,6 @@ class OpenAIEmbeddings(TextEmbeddingFunction):
valid_embeddings = {
idx: v.embedding for v, idx in zip(rs.data, valid_indices)
}
except openai.AuthenticationError:
logging.error("Authentication failed: Invalid API key provided")
raise
except openai.BadRequestError:
logging.exception("Bad request: %s", texts)
return [None] * len(texts)

View File

@@ -6,7 +6,6 @@ import io
import os
from typing import TYPE_CHECKING, List, Union
import urllib.parse as urlparse
import warnings
import numpy as np
import pyarrow as pa
@@ -25,7 +24,6 @@ if TYPE_CHECKING:
@register("siglip")
class SigLipEmbeddings(EmbeddingFunction):
# Deprecated: prefer CLIP embeddings via `open-clip`.
model_name: str = "google/siglip-base-patch16-224"
device: str = "cpu"
batch_size: int = 64
@@ -38,12 +36,6 @@ class SigLipEmbeddings(EmbeddingFunction):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
warnings.warn(
"SigLip embeddings are deprecated. Use CLIP embeddings via the "
"'open-clip' embedding function instead.",
DeprecationWarning,
stacklevel=3,
)
transformers = attempt_import_or_raise("transformers")
self._torch = attempt_import_or_raise("torch")

View File

@@ -269,11 +269,6 @@ def retry_with_exponential_backoff(
# and say that it is assumed that if this portion errors out, it's due
# to rate limit but the user should check the error message to be sure.
except Exception as e: # noqa: PERF203
# Don't retry on authentication errors (e.g., OpenAI 401)
# These are permanent failures that won't be fixed by retrying
if _is_non_retryable_error(e):
raise
num_retries += 1
if num_retries > max_retries:
@@ -294,29 +289,6 @@ def retry_with_exponential_backoff(
return wrapper
def _is_non_retryable_error(error: Exception) -> bool:
"""Check if an error should not be retried.
Args:
error: The exception to check
Returns:
True if the error should not be retried, False otherwise
"""
# Check for OpenAI authentication errors
error_type = type(error).__name__
if error_type == "AuthenticationError":
return True
# Check for other common non-retryable HTTP status codes
# 401 Unauthorized, 403 Forbidden
if hasattr(error, "status_code"):
if error.status_code in (401, 403):
return True
return False
def url_retrieve(url: str):
"""
Parameters

View File

@@ -12,7 +12,7 @@ from __future__ import annotations
import asyncio
import sys
from typing import Any, Dict, Iterable, List, Optional, Union
from typing import Dict, Iterable, List, Optional, Union
if sys.version_info >= (3, 12):
from typing import override
@@ -44,7 +44,7 @@ from lance_namespace import (
ListNamespacesRequest,
CreateNamespaceRequest,
DropNamespaceRequest,
DeclareTableRequest,
CreateEmptyTableRequest,
)
from lancedb.table import AsyncTable, LanceTable, Table
from lancedb.util import validate_table_name
@@ -240,7 +240,7 @@ class LanceNamespaceDBConnection(DBConnection):
session : Optional[Session]
A session to use for this connection
"""
self._namespace_client = namespace
self._ns = namespace
self.read_consistency_interval = read_consistency_interval
self.storage_options = storage_options or {}
self.session = session
@@ -269,7 +269,7 @@ class LanceNamespaceDBConnection(DBConnection):
if namespace is None:
namespace = []
request = ListTablesRequest(id=namespace, page_token=page_token, limit=limit)
response = self._namespace_client.list_tables(request)
response = self._ns.list_tables(request)
return response.tables if response.tables else []
@override
@@ -309,9 +309,7 @@ class LanceNamespaceDBConnection(DBConnection):
# Try to describe the table first to see if it exists
try:
describe_request = DescribeTableRequest(id=table_id)
describe_response = self._namespace_client.describe_table(
describe_request
)
describe_response = self._ns.describe_table(describe_request)
location = describe_response.location
namespace_storage_options = describe_response.storage_options
except Exception:
@@ -320,20 +318,20 @@ class LanceNamespaceDBConnection(DBConnection):
if location is None:
# Table doesn't exist or mode is "create", reserve a new location
declare_request = DeclareTableRequest(
create_empty_request = CreateEmptyTableRequest(
id=table_id,
location=None,
properties=self.storage_options if self.storage_options else None,
)
declare_response = self._namespace_client.declare_table(declare_request)
create_empty_response = self._ns.create_empty_table(create_empty_request)
if not declare_response.location:
if not create_empty_response.location:
raise ValueError(
"Table location is missing from declare_table response"
"Table location is missing from create_empty_table response"
)
location = declare_response.location
namespace_storage_options = declare_response.storage_options
location = create_empty_response.location
namespace_storage_options = create_empty_response.storage_options
# Merge storage options: self.storage_options < user options < namespace options
merged_storage_options = dict(self.storage_options)
@@ -355,7 +353,7 @@ class LanceNamespaceDBConnection(DBConnection):
# Only create if namespace returned storage_options (not None)
if storage_options_provider is None and namespace_storage_options is not None:
storage_options_provider = LanceNamespaceStorageOptionsProvider(
namespace=self._namespace_client,
namespace=self._ns,
table_id=table_id,
)
@@ -373,7 +371,6 @@ class LanceNamespaceDBConnection(DBConnection):
storage_options=merged_storage_options,
storage_options_provider=storage_options_provider,
location=location,
namespace_client=self._namespace_client,
)
return tbl
@@ -392,7 +389,7 @@ class LanceNamespaceDBConnection(DBConnection):
namespace = []
table_id = namespace + [name]
request = DescribeTableRequest(id=table_id)
response = self._namespace_client.describe_table(request)
response = self._ns.describe_table(request)
# Merge storage options: self.storage_options < user options < namespace options
merged_storage_options = dict(self.storage_options)
@@ -405,14 +402,10 @@ class LanceNamespaceDBConnection(DBConnection):
# Only create if namespace returned storage_options (not None)
if storage_options_provider is None and response.storage_options is not None:
storage_options_provider = LanceNamespaceStorageOptionsProvider(
namespace=self._namespace_client,
namespace=self._ns,
table_id=table_id,
)
# Pass managed_versioning to avoid redundant describe_table call in Rust.
# Convert None to False since we already have the answer from describe_table.
managed_versioning = response.managed_versioning is True
return self._lance_table_from_uri(
name,
response.location,
@@ -420,8 +413,6 @@ class LanceNamespaceDBConnection(DBConnection):
storage_options=merged_storage_options,
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
namespace_client=self._namespace_client,
managed_versioning=managed_versioning,
)
@override
@@ -431,7 +422,7 @@ class LanceNamespaceDBConnection(DBConnection):
namespace = []
table_id = namespace + [name]
request = DropTableRequest(id=table_id)
self._namespace_client.drop_table(request)
self._ns.drop_table(request)
@override
def rename_table(
@@ -493,7 +484,7 @@ class LanceNamespaceDBConnection(DBConnection):
request = ListNamespacesRequest(
id=namespace, page_token=page_token, limit=limit
)
response = self._namespace_client.list_namespaces(request)
response = self._ns.list_namespaces(request)
return ListNamespacesResponse(
namespaces=response.namespaces if response.namespaces else [],
page_token=response.page_token,
@@ -529,7 +520,7 @@ class LanceNamespaceDBConnection(DBConnection):
mode=_normalize_create_namespace_mode(mode),
properties=properties,
)
response = self._namespace_client.create_namespace(request)
response = self._ns.create_namespace(request)
return CreateNamespaceResponse(
properties=response.properties if hasattr(response, "properties") else None
)
@@ -564,7 +555,7 @@ class LanceNamespaceDBConnection(DBConnection):
mode=_normalize_drop_namespace_mode(mode),
behavior=_normalize_drop_namespace_behavior(behavior),
)
response = self._namespace_client.drop_namespace(request)
response = self._ns.drop_namespace(request)
return DropNamespaceResponse(
properties=(
response.properties if hasattr(response, "properties") else None
@@ -590,7 +581,7 @@ class LanceNamespaceDBConnection(DBConnection):
Response containing the namespace properties.
"""
request = DescribeNamespaceRequest(id=namespace)
response = self._namespace_client.describe_namespace(request)
response = self._ns.describe_namespace(request)
return DescribeNamespaceResponse(
properties=response.properties if hasattr(response, "properties") else None
)
@@ -624,7 +615,7 @@ class LanceNamespaceDBConnection(DBConnection):
if namespace is None:
namespace = []
request = ListTablesRequest(id=namespace, page_token=page_token, limit=limit)
response = self._namespace_client.list_tables(request)
response = self._ns.list_tables(request)
return ListTablesResponse(
tables=response.tables if response.tables else [],
page_token=response.page_token,
@@ -639,8 +630,6 @@ class LanceNamespaceDBConnection(DBConnection):
storage_options: Optional[Dict[str, str]] = None,
storage_options_provider: Optional[StorageOptionsProvider] = None,
index_cache_size: Optional[int] = None,
namespace_client: Optional[Any] = None,
managed_versioning: Optional[bool] = None,
) -> LanceTable:
# Open a table directly from a URI using the location parameter
# Note: storage_options should already be merged by the caller
@@ -654,8 +643,6 @@ class LanceNamespaceDBConnection(DBConnection):
)
# Open the table using the temporary connection with the location parameter
# Pass namespace_client to enable managed versioning support
# Pass managed_versioning to avoid redundant describe_table call
return LanceTable.open(
temp_conn,
name,
@@ -664,8 +651,6 @@ class LanceNamespaceDBConnection(DBConnection):
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
location=table_uri,
namespace_client=namespace_client,
managed_versioning=managed_versioning,
)
@@ -700,7 +685,7 @@ class AsyncLanceNamespaceDBConnection:
session : Optional[Session]
A session to use for this connection
"""
self._namespace_client = namespace
self._ns = namespace
self.read_consistency_interval = read_consistency_interval
self.storage_options = storage_options or {}
self.session = session
@@ -728,7 +713,7 @@ class AsyncLanceNamespaceDBConnection:
if namespace is None:
namespace = []
request = ListTablesRequest(id=namespace, page_token=page_token, limit=limit)
response = self._namespace_client.list_tables(request)
response = self._ns.list_tables(request)
return response.tables if response.tables else []
async def create_table(
@@ -765,9 +750,7 @@ class AsyncLanceNamespaceDBConnection:
# Try to describe the table first to see if it exists
try:
describe_request = DescribeTableRequest(id=table_id)
describe_response = self._namespace_client.describe_table(
describe_request
)
describe_response = self._ns.describe_table(describe_request)
location = describe_response.location
namespace_storage_options = describe_response.storage_options
except Exception:
@@ -776,20 +759,20 @@ class AsyncLanceNamespaceDBConnection:
if location is None:
# Table doesn't exist or mode is "create", reserve a new location
declare_request = DeclareTableRequest(
create_empty_request = CreateEmptyTableRequest(
id=table_id,
location=None,
properties=self.storage_options if self.storage_options else None,
)
declare_response = self._namespace_client.declare_table(declare_request)
create_empty_response = self._ns.create_empty_table(create_empty_request)
if not declare_response.location:
if not create_empty_response.location:
raise ValueError(
"Table location is missing from declare_table response"
"Table location is missing from create_empty_table response"
)
location = declare_response.location
namespace_storage_options = declare_response.storage_options
location = create_empty_response.location
namespace_storage_options = create_empty_response.storage_options
# Merge storage options: self.storage_options < user options < namespace options
merged_storage_options = dict(self.storage_options)
@@ -814,7 +797,7 @@ class AsyncLanceNamespaceDBConnection:
and namespace_storage_options is not None
):
provider = LanceNamespaceStorageOptionsProvider(
namespace=self._namespace_client,
namespace=self._ns,
table_id=table_id,
)
else:
@@ -834,7 +817,6 @@ class AsyncLanceNamespaceDBConnection:
storage_options=merged_storage_options,
storage_options_provider=provider,
location=location,
namespace_client=self._namespace_client,
)
lance_table = await asyncio.to_thread(_create_table)
@@ -855,7 +837,7 @@ class AsyncLanceNamespaceDBConnection:
namespace = []
table_id = namespace + [name]
request = DescribeTableRequest(id=table_id)
response = self._namespace_client.describe_table(request)
response = self._ns.describe_table(request)
# Merge storage options: self.storage_options < user options < namespace options
merged_storage_options = dict(self.storage_options)
@@ -867,14 +849,10 @@ class AsyncLanceNamespaceDBConnection:
# Create a storage options provider if not provided by user
if storage_options_provider is None and response.storage_options is not None:
storage_options_provider = LanceNamespaceStorageOptionsProvider(
namespace=self._namespace_client,
namespace=self._ns,
table_id=table_id,
)
# Capture managed_versioning from describe response.
# Convert None to False since we already have the answer from describe_table.
managed_versioning = response.managed_versioning is True
# Open table in a thread
def _open_table():
temp_conn = LanceDBConnection(
@@ -892,8 +870,6 @@ class AsyncLanceNamespaceDBConnection:
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
location=response.location,
namespace_client=self._namespace_client,
managed_versioning=managed_versioning,
)
lance_table = await asyncio.to_thread(_open_table)
@@ -905,7 +881,7 @@ class AsyncLanceNamespaceDBConnection:
namespace = []
table_id = namespace + [name]
request = DropTableRequest(id=table_id)
self._namespace_client.drop_table(request)
self._ns.drop_table(request)
async def rename_table(
self,
@@ -967,7 +943,7 @@ class AsyncLanceNamespaceDBConnection:
request = ListNamespacesRequest(
id=namespace, page_token=page_token, limit=limit
)
response = self._namespace_client.list_namespaces(request)
response = self._ns.list_namespaces(request)
return ListNamespacesResponse(
namespaces=response.namespaces if response.namespaces else [],
page_token=response.page_token,
@@ -1002,7 +978,7 @@ class AsyncLanceNamespaceDBConnection:
mode=_normalize_create_namespace_mode(mode),
properties=properties,
)
response = self._namespace_client.create_namespace(request)
response = self._ns.create_namespace(request)
return CreateNamespaceResponse(
properties=response.properties if hasattr(response, "properties") else None
)
@@ -1036,7 +1012,7 @@ class AsyncLanceNamespaceDBConnection:
mode=_normalize_drop_namespace_mode(mode),
behavior=_normalize_drop_namespace_behavior(behavior),
)
response = self._namespace_client.drop_namespace(request)
response = self._ns.drop_namespace(request)
return DropNamespaceResponse(
properties=(
response.properties if hasattr(response, "properties") else None
@@ -1063,7 +1039,7 @@ class AsyncLanceNamespaceDBConnection:
Response containing the namespace properties.
"""
request = DescribeNamespaceRequest(id=namespace)
response = self._namespace_client.describe_namespace(request)
response = self._ns.describe_namespace(request)
return DescribeNamespaceResponse(
properties=response.properties if hasattr(response, "properties") else None
)
@@ -1096,7 +1072,7 @@ class AsyncLanceNamespaceDBConnection:
if namespace is None:
namespace = []
request = ListTablesRequest(id=namespace, page_token=page_token, limit=limit)
response = self._namespace_client.list_tables(request)
response = self._ns.list_tables(request)
return ListTablesResponse(
tables=response.tables if response.tables else [],
page_token=response.page_token,

View File

@@ -9,7 +9,7 @@ import json
from ._lancedb import async_permutation_builder, PermutationReader
from .table import LanceTable
from .background_loop import LOOP
from .util import batch_to_tensor, batch_to_tensor_rows
from .util import batch_to_tensor
from typing import Any, Callable, Iterator, Literal, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
@@ -333,11 +333,7 @@ class Transforms:
"""
@staticmethod
def arrow2python(batch: pa.RecordBatch) -> list[dict[str, Any]]:
return batch.to_pylist()
@staticmethod
def arrow2pythoncol(batch: pa.RecordBatch) -> dict[str, list[Any]]:
def arrow2python(batch: pa.RecordBatch) -> dict[str, list[Any]]:
return batch.to_pydict()
@staticmethod
@@ -691,17 +687,7 @@ class Permutation:
return
def with_format(
self,
format: Literal[
"numpy",
"python",
"python_col",
"pandas",
"arrow",
"torch",
"torch_col",
"polars",
],
self, format: Literal["numpy", "python", "pandas", "arrow", "torch", "polars"]
) -> "Permutation":
"""
Set the format for batches
@@ -710,18 +696,16 @@ class Permutation:
The format can be one of:
- "numpy" - the batch will be a dict of numpy arrays (one per column)
- "python" - the batch will be a list of dicts (one per row)
- "python_col" - the batch will be a dict of lists (one entry per column)
- "python" - the batch will be a dict of lists (one per column)
- "pandas" - the batch will be a pandas DataFrame
- "arrow" - the batch will be a pyarrow RecordBatch
- "torch" - the batch will be a list of tensors, one per row
- "torch_col" - the batch will be a 2D torch tensor (first dim indexes columns)
- "torch" - the batch will be a two dimensional torch tensor
- "polars" - the batch will be a polars DataFrame
Conversion may or may not involve a data copy. Lance uses Arrow internally
and so it is able to zero-copy to the arrow and polars formats.
and so it is able to zero-copy to the arrow and polars.
Conversion to torch_col will be zero-copy but will only support a subset of data
Conversion to torch will be zero-copy but will only support a subset of data
types (numeric types).
Conversion to numpy and/or pandas will typically be zero-copy for numeric
@@ -734,8 +718,6 @@ class Permutation:
assert format is not None, "format is required"
if format == "python":
return self.with_transform(Transforms.arrow2python)
if format == "python_col":
return self.with_transform(Transforms.arrow2pythoncol)
elif format == "numpy":
return self.with_transform(Transforms.arrow2numpy)
elif format == "pandas":
@@ -743,8 +725,6 @@ class Permutation:
elif format == "arrow":
return self.with_transform(Transforms.arrow2arrow)
elif format == "torch":
return self.with_transform(batch_to_tensor_rows)
elif format == "torch_col":
return self.with_transform(batch_to_tensor)
elif format == "polars":
return self.with_transform(Transforms.arrow2polars())
@@ -766,20 +746,15 @@ class Permutation:
def __getitem__(self, index: int) -> Any:
"""
Returns a single row from the permutation by offset
"""
return self.__getitems__([index])
Return a single row from the permutation
def __getitems__(self, indices: list[int]) -> Any:
"""
Returns rows from the permutation by offset
"""
The output will always be a python dictionary regardless of the format.
async def do_getitems():
return await self.reader.take_offsets(indices, selection=self.selection)
batch = LOOP.run(do_getitems())
return self.transform_fn(batch)
This method is mostly useful for debugging and exploration. For actual
processing use [iter](#iter) or a torch data loader to perform batched
processing.
"""
pass
@deprecated(details="Use with_skip instead")
def skip(self, skip: int) -> "Permutation":

View File

@@ -606,7 +606,6 @@ class LanceQueryBuilder(ABC):
query,
ordering_field_name=ordering_field_name,
fts_columns=fts_columns,
fast_search=fast_search,
)
if isinstance(query, list):
@@ -1457,14 +1456,12 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
query: str | FullTextQuery,
ordering_field_name: Optional[str] = None,
fts_columns: Optional[Union[str, List[str]]] = None,
fast_search: bool = None,
):
super().__init__(table)
self._query = query
self._phrase_query = False
self.ordering_field_name = ordering_field_name
self._reranker = None
self._fast_search = fast_search
if isinstance(fts_columns, str):
fts_columns = [fts_columns]
self._fts_columns = fts_columns
@@ -1486,19 +1483,6 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
self._phrase_query = phrase_query
return self
def fast_search(self) -> LanceFtsQueryBuilder:
"""
Skip a flat search of unindexed data. This will improve
search performance but search results will not include unindexed data.
Returns
-------
LanceFtsQueryBuilder
The LanceFtsQueryBuilder object.
"""
self._fast_search = True
return self
def to_query_object(self) -> Query:
return Query(
columns=self._columns,
@@ -1510,7 +1494,6 @@ class LanceFtsQueryBuilder(LanceQueryBuilder):
query=self._query, columns=self._fts_columns
),
offset=self._offset,
fast_search=self._fast_search,
)
def output_schema(self) -> pa.Schema:
@@ -1799,26 +1782,6 @@ class LanceHybridQueryBuilder(LanceQueryBuilder):
vector_results = LanceHybridQueryBuilder._rank(vector_results, "_distance")
fts_results = LanceHybridQueryBuilder._rank(fts_results, "_score")
# If both result sets are empty (e.g. after hard filtering),
# return early to avoid errors in reranking or score restoration.
if vector_results.num_rows == 0 and fts_results.num_rows == 0:
# Build a minimal empty table with the _relevance_score column
combined_schema = pa.unify_schemas(
[vector_results.schema, fts_results.schema],
)
empty = pa.table(
{
col: pa.array([], type=combined_schema.field(col).type)
for col in combined_schema.names
}
)
empty = empty.append_column(
"_relevance_score", pa.array([], type=pa.float32())
)
if not with_row_ids and "_rowid" in empty.column_names:
empty = empty.drop(["_rowid"])
return empty
original_distances = None
original_scores = None
original_distance_row_ids = None
@@ -2155,17 +2118,19 @@ class LanceHybridQueryBuilder(LanceQueryBuilder):
""" # noqa: E501
self._create_query_builders()
reranker_label = str(self._reranker) if self._reranker else "No reranker"
vector_plan = self._table._explain_plan(
self._vector_query.to_query_object(), verbose=verbose
results = ["Vector Search Plan:"]
results.append(
self._table._explain_plan(
self._vector_query.to_query_object(), verbose=verbose
)
)
fts_plan = self._table._explain_plan(
self._fts_query.to_query_object(), verbose=verbose
results.append("FTS Search Plan:")
results.append(
self._table._explain_plan(
self._fts_query.to_query_object(), verbose=verbose
)
)
# Indent sub-plans under the reranker
indented_vector = "\n".join(" " + line for line in vector_plan.splitlines())
indented_fts = "\n".join(" " + line for line in fts_plan.splitlines())
return f"{reranker_label}\n {indented_vector}\n {indented_fts}"
return "\n".join(results)
def analyze_plan(self):
"""Execute the query and display with runtime metrics.
@@ -2205,8 +2170,8 @@ class LanceHybridQueryBuilder(LanceQueryBuilder):
self._vector_query.select(self._columns)
self._fts_query.select(self._columns)
if self._where:
self._vector_query.where(self._where, not self._postfilter)
self._fts_query.where(self._where, not self._postfilter)
self._vector_query.where(self._where, self._postfilter)
self._fts_query.where(self._where, self._postfilter)
if self._with_row_id:
self._vector_query.with_row_id(True)
self._fts_query.with_row_id(True)
@@ -3199,20 +3164,23 @@ class AsyncHybridQuery(AsyncStandardQuery, AsyncVectorQueryBase):
... plan = await table.query().nearest_to([1.0, 2.0]).nearest_to_text("hello").explain_plan(True)
... print(plan)
>>> asyncio.run(doctest_example()) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
RRFReranker(K=60)
ProjectionExec: expr=[vector@0 as vector, text@3 as text, _distance@2 as _distance]
Take: columns="vector, _rowid, _distance, (text)"
CoalesceBatchesExec: target_batch_size=1024
GlobalLimitExec: skip=0, fetch=10
FilterExec: _distance@2 IS NOT NULL
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST, _rowid@1 ASC NULLS LAST], preserve_partitioning=[false]
KNNVectorDistance: metric=l2
LanceRead: uri=..., projection=[vector], ...
ProjectionExec: expr=[vector@2 as vector, text@3 as text, _score@1 as _score]
Take: columns="_rowid, _score, (vector), (text)"
CoalesceBatchesExec: target_batch_size=1024
GlobalLimitExec: skip=0, fetch=10
MatchQuery: column=text, query=hello
Vector Search Plan:
ProjectionExec: expr=[vector@0 as vector, text@3 as text, _distance@2 as _distance]
Take: columns="vector, _rowid, _distance, (text)"
CoalesceBatchesExec: target_batch_size=1024
GlobalLimitExec: skip=0, fetch=10
FilterExec: _distance@2 IS NOT NULL
SortExec: TopK(fetch=10), expr=[_distance@2 ASC NULLS LAST, _rowid@1 ASC NULLS LAST], preserve_partitioning=[false]
KNNVectorDistance: metric=l2
LanceRead: uri=..., projection=[vector], ...
<BLANKLINE>
FTS Search Plan:
ProjectionExec: expr=[vector@2 as vector, text@3 as text, _score@1 as _score]
Take: columns="_rowid, _score, (vector), (text)"
CoalesceBatchesExec: target_batch_size=1024
GlobalLimitExec: skip=0, fetch=10
MatchQuery: column=text, query=hello
<BLANKLINE>
Parameters
----------
@@ -3224,12 +3192,12 @@ class AsyncHybridQuery(AsyncStandardQuery, AsyncVectorQueryBase):
plan : str
""" # noqa: E501
vector_plan = await self._inner.to_vector_query().explain_plan(verbose)
fts_plan = await self._inner.to_fts_query().explain_plan(verbose)
# Indent sub-plans under the reranker
indented_vector = "\n".join(" " + line for line in vector_plan.splitlines())
indented_fts = "\n".join(" " + line for line in fts_plan.splitlines())
return f"{self._reranker}\n {indented_vector}\n {indented_fts}"
results = ["Vector Search Plan:"]
results.append(await self._inner.to_vector_query().explain_plan(verbose))
results.append("FTS Search Plan:")
results.append(await self._inner.to_fts_query().explain_plan(verbose))
return "\n".join(results)
async def analyze_plan(self):
"""

View File

@@ -218,6 +218,8 @@ class RemoteTable(Table):
train: bool = True,
):
"""Create an index on the table.
Currently, the only parameters that matter are
the metric and the vector column name.
Parameters
----------
@@ -248,6 +250,11 @@ class RemoteTable(Table):
>>> table.create_index("l2", "vector") # doctest: +SKIP
"""
if num_sub_vectors is not None:
logging.warning(
"num_sub_vectors is not supported on LanceDB cloud."
"This parameter will be tuned automatically."
)
if accelerator is not None:
logging.warning(
"GPU accelerator is not yet supported on LanceDB cloud."
@@ -640,45 +647,6 @@ class RemoteTable(Table):
def drop_index(self, index_name: str):
return LOOP.run(self._table.drop_index(index_name))
def prewarm_index(self, name: str) -> None:
"""Prewarm an index in the table.
This is a hint to the database that the index will be accessed in the
future and should be loaded into memory if possible. This can reduce
cold-start latency for subsequent queries.
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
Parameters
----------
name: str
The name of the index to prewarm
"""
return LOOP.run(self._table.prewarm_index(name))
def prewarm_data(self, columns: Optional[List[str]] = None) -> None:
"""Prewarm data for the table.
This is a hint to the database that the given columns will be accessed
in the future and the database should prefetch the data if possible.
Currently only supported on remote tables.
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
This operation has a large upfront cost but can speed up future queries
that need to fetch the given columns. Large columns such as embeddings
or binary data may not be practical to prewarm. This feature is intended
for workloads that issue many queries against the same columns.
Parameters
----------
columns: list of str, optional
The columns to prewarm. If None, all columns are prewarmed.
"""
return LOOP.run(self._table.prewarm_data(columns))
def wait_for_index(
self, index_names: Iterable[str], timeout: timedelta = timedelta(seconds=300)
):

View File

@@ -42,18 +42,10 @@ class AnswerdotaiRerankers(Reranker):
rerankers = attempt_import_or_raise(
"rerankers"
) # import here for faster ops later
self.model_name = model_name
self.model_type = model_type
self.reranker = rerankers.Reranker(
model_name=model_name, model_type=model_type, **kwargs
)
def __str__(self):
return (
f"AnswerdotaiRerankers(model_type={self.model_type}, "
f"model_name={self.model_name})"
)
def _rerank(self, result_set: pa.Table, query: str):
result_set = self._handle_empty_results(result_set)
if len(result_set) == 0:

View File

@@ -40,9 +40,6 @@ class Reranker(ABC):
if ARROW_VERSION.major <= 13:
self._concat_tables_args = {"promote": True}
def __str__(self):
return self.__class__.__name__
def rerank_vector(
self,
query: str,

View File

@@ -44,9 +44,6 @@ class CohereReranker(Reranker):
self.top_n = top_n
self.api_key = api_key
def __str__(self):
return f"CohereReranker(model_name={self.model_name})"
@cached_property
def _client(self):
cohere = attempt_import_or_raise("cohere")

View File

@@ -50,9 +50,6 @@ class CrossEncoderReranker(Reranker):
if self.device is None:
self.device = "cuda" if torch.cuda.is_available() else "cpu"
def __str__(self):
return f"CrossEncoderReranker(model_name={self.model_name})"
@cached_property
def model(self):
sbert = attempt_import_or_raise("sentence_transformers")

View File

@@ -45,9 +45,6 @@ class JinaReranker(Reranker):
self.top_n = top_n
self.api_key = api_key
def __str__(self):
return f"JinaReranker(model_name={self.model_name})"
@cached_property
def _client(self):
import requests

View File

@@ -38,9 +38,6 @@ class LinearCombinationReranker(Reranker):
self.weight = weight
self.fill = fill
def __str__(self):
return f"LinearCombinationReranker(weight={self.weight}, fill={self.fill})"
def rerank_hybrid(
self,
query: str, # noqa: F821

View File

@@ -54,12 +54,6 @@ class MRRReranker(Reranker):
self.weight_vector = weight_vector
self.weight_fts = weight_fts
def __str__(self):
return (
f"MRRReranker(weight_vector={self.weight_vector}, "
f"weight_fts={self.weight_fts})"
)
def rerank_hybrid(
self,
query: str, # noqa: F821

View File

@@ -43,9 +43,6 @@ class OpenaiReranker(Reranker):
self.column = column
self.api_key = api_key
def __str__(self):
return f"OpenaiReranker(model_name={self.model_name})"
def _rerank(self, result_set: pa.Table, query: str):
result_set = self._handle_empty_results(result_set)
if len(result_set) == 0:

View File

@@ -36,9 +36,6 @@ class RRFReranker(Reranker):
super().__init__(return_score)
self.K = K
def __str__(self):
return f"RRFReranker(K={self.K})"
def rerank_hybrid(
self,
query: str, # noqa: F821

View File

@@ -52,9 +52,6 @@ class VoyageAIReranker(Reranker):
self.api_key = api_key
self.truncation = truncation
def __str__(self):
return f"VoyageAIReranker(model_name={self.model_name})"
@cached_property
def _client(self):
voyageai = attempt_import_or_raise("voyageai")

View File

@@ -1,214 +0,0 @@
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright The LanceDB Authors
from dataclasses import dataclass
from functools import singledispatch
import sys
from typing import Callable, Iterator, Optional
from lancedb.arrow import to_arrow
import pyarrow as pa
import pyarrow.dataset as ds
from .pydantic import LanceModel
@dataclass
class Scannable:
schema: pa.Schema
num_rows: Optional[int]
# Factory function to create a new reader each time (supports re-scanning)
reader: Callable[[], pa.RecordBatchReader]
# Whether reader can be called more than once. For example, an iterator can
# only be consumed once, while a DataFrame can be converted to a new reader
# each time.
rescannable: bool = True
@singledispatch
def to_scannable(data) -> Scannable:
# Fallback: try iterable protocol
if hasattr(data, "__iter__"):
return _from_iterable(iter(data))
raise NotImplementedError(f"to_scannable not implemented for type {type(data)}")
@to_scannable.register(pa.RecordBatchReader)
def _from_reader(data: pa.RecordBatchReader) -> Scannable:
# RecordBatchReader can only be consumed once - not rescannable
return Scannable(
schema=data.schema, num_rows=None, reader=lambda: data, rescannable=False
)
@to_scannable.register(pa.RecordBatch)
def _from_batch(data: pa.RecordBatch) -> Scannable:
return Scannable(
schema=data.schema,
num_rows=data.num_rows,
reader=lambda: pa.RecordBatchReader.from_batches(data.schema, [data]),
)
@to_scannable.register(pa.Table)
def _from_table(data: pa.Table) -> Scannable:
return Scannable(schema=data.schema, num_rows=data.num_rows, reader=data.to_reader)
@to_scannable.register(ds.Dataset)
def _from_dataset(data: ds.Dataset) -> Scannable:
return Scannable(
schema=data.schema,
num_rows=data.count_rows(),
reader=lambda: data.scanner().to_reader(),
)
@to_scannable.register(ds.Scanner)
def _from_scanner(data: ds.Scanner) -> Scannable:
# Scanner can only be consumed once - not rescannable
return Scannable(
schema=data.projected_schema,
num_rows=None,
reader=data.to_reader,
rescannable=False,
)
@to_scannable.register(list)
def _from_list(data: list) -> Scannable:
if not data:
raise ValueError("Cannot create table from empty list without a schema")
table = to_arrow(data)
return Scannable(
schema=table.schema, num_rows=table.num_rows, reader=table.to_reader
)
@to_scannable.register(dict)
def _from_dict(data: dict) -> Scannable:
raise ValueError("Cannot add a single dictionary to a table. Use a list.")
@to_scannable.register(LanceModel)
def _from_lance_model(data: LanceModel) -> Scannable:
raise ValueError("Cannot add a single LanceModel to a table. Use a list.")
def _from_iterable(data: Iterator) -> Scannable:
first_item = next(data, None)
if first_item is None:
raise ValueError("Cannot create table from empty iterator")
first = to_arrow(first_item)
schema = first.schema
def iter():
yield from first.to_batches()
for item in data:
batch = to_arrow(item)
if batch.schema != schema:
try:
batch = batch.cast(schema)
except pa.lib.ArrowInvalid:
raise ValueError(
f"Input iterator yielded a batch with schema that "
f"does not match the schema of other batches.\n"
f"Expected:\n{schema}\nGot:\n{batch.schema}"
)
yield from batch.to_batches()
reader = pa.RecordBatchReader.from_batches(schema, iter())
return to_scannable(reader)
_registered_modules: set[str] = set()
def _register_optional_converters():
"""Register converters for optional dependencies that are already imported."""
if "pandas" in sys.modules and "pandas" not in _registered_modules:
_registered_modules.add("pandas")
import pandas as pd
@to_arrow.register(pd.DataFrame)
def _arrow_from_pandas(data: pd.DataFrame) -> pa.Table:
table = pa.Table.from_pandas(data, preserve_index=False)
return table.replace_schema_metadata(None)
@to_scannable.register(pd.DataFrame)
def _from_pandas(data: pd.DataFrame) -> Scannable:
return to_scannable(_arrow_from_pandas(data))
if "polars" in sys.modules and "polars" not in _registered_modules:
_registered_modules.add("polars")
import polars as pl
@to_arrow.register(pl.DataFrame)
def _arrow_from_polars(data: pl.DataFrame) -> pa.Table:
return data.to_arrow()
@to_scannable.register(pl.DataFrame)
def _from_polars(data: pl.DataFrame) -> Scannable:
arrow = data.to_arrow()
return Scannable(
schema=arrow.schema, num_rows=len(data), reader=arrow.to_reader
)
@to_scannable.register(pl.LazyFrame)
def _from_polars_lazy(data: pl.LazyFrame) -> Scannable:
arrow = data.collect().to_arrow()
return Scannable(
schema=arrow.schema, num_rows=arrow.num_rows, reader=arrow.to_reader
)
if "datasets" in sys.modules and "datasets" not in _registered_modules:
_registered_modules.add("datasets")
from datasets import Dataset as HFDataset
from datasets import DatasetDict as HFDatasetDict
@to_scannable.register(HFDataset)
def _from_hf_dataset(data: HFDataset) -> Scannable:
table = data.data.table # Access underlying Arrow table
return Scannable(
schema=table.schema, num_rows=len(data), reader=table.to_reader
)
@to_scannable.register(HFDatasetDict)
def _from_hf_dataset_dict(data: HFDatasetDict) -> Scannable:
# HuggingFace DatasetDict: combine all splits with a 'split' column
schema = data[list(data.keys())[0]].features.arrow_schema
if "split" not in schema.names:
schema = schema.append(pa.field("split", pa.string()))
def gen():
for split_name, dataset in data.items():
for batch in dataset.data.to_batches():
split_arr = pa.array(
[split_name] * len(batch), type=pa.string()
)
yield pa.RecordBatch.from_arrays(
list(batch.columns) + [split_arr], schema=schema
)
total_rows = sum(len(dataset) for dataset in data.values())
return Scannable(
schema=schema,
num_rows=total_rows,
reader=lambda: pa.RecordBatchReader.from_batches(schema, gen()),
)
if "lance" in sys.modules and "lance" not in _registered_modules:
_registered_modules.add("lance")
import lance
@to_scannable.register(lance.LanceDataset)
def _from_lance(data: lance.LanceDataset) -> Scannable:
return Scannable(
schema=data.schema,
num_rows=data.count_rows(),
reader=lambda: data.scanner().to_reader(),
)
# Register on module load
_register_optional_converters()

View File

@@ -25,8 +25,6 @@ from typing import (
)
from urllib.parse import urlparse
from lancedb.scannable import _register_optional_converters, to_scannable
from . import __version__
from lancedb.arrow import peek_reader
from lancedb.background_loop import LOOP
@@ -906,9 +904,7 @@ class Table(ABC):
----------
field_names: str or list of str
The name(s) of the field to index.
If ``use_tantivy`` is False (default), only a single field name
(str) is supported. To index multiple fields, create a separate
FTS index for each field.
can be only str if use_tantivy=True for now.
replace: bool, default False
If True, replace the existing index if it exists. Note that this is
not yet an atomic operation; the index will be temporarily
@@ -1331,7 +1327,7 @@ class Table(ABC):
1 2 [3.0, 4.0]
2 3 [5.0, 6.0]
>>> table.delete("x = 2")
DeleteResult(num_deleted_rows=1, version=2)
DeleteResult(version=2)
>>> table.to_pandas()
x vector
0 1 [1.0, 2.0]
@@ -1345,7 +1341,7 @@ class Table(ABC):
>>> to_remove
'1, 5'
>>> table.delete(f"x IN ({to_remove})")
DeleteResult(num_deleted_rows=1, version=3)
DeleteResult(version=3)
>>> table.to_pandas()
x vector
0 3 [5.0, 6.0]
@@ -1506,17 +1502,22 @@ class Table(ABC):
in-progress operation (e.g. appending new data) and these files will not
be deleted unless they are at least 7 days old. If delete_unverified is True
then these files will be deleted regardless of their age.
.. warning::
This should only be set to True if you can guarantee that no other
process is currently working on this dataset. Otherwise the dataset
could be put into a corrupted state.
retrain: bool, default False
This parameter is no longer used and is deprecated.
The frequency an application should call optimize is based on the frequency of
Experimental API
----------------
The optimization process is undergoing active development and may change.
Our goal with these changes is to improve the performance of optimization and
reduce the complexity.
That being said, it is essential today to run optimize if you want the best
performance. It should be stable and safe to use in production, but it our
hope that the API may be simplified (or not even need to be called) in the
future.
The frequency an application shoudl call optimize is based on the frequency of
data modifications. If data is frequently added, deleted, or updated then
optimize should be run frequently. A good rule of thumb is to run optimize if
you have added or modified 100,000 or more records or run more than 20 data
@@ -1741,8 +1742,6 @@ class LanceTable(Table):
storage_options_provider: Optional["StorageOptionsProvider"] = None,
index_cache_size: Optional[int] = None,
location: Optional[str] = None,
namespace_client: Optional[Any] = None,
managed_versioning: Optional[bool] = None,
_async: AsyncTable = None,
):
if namespace is None:
@@ -1750,7 +1749,6 @@ class LanceTable(Table):
self._conn = connection
self._namespace = namespace
self._location = location # Store location for use in _dataset_path
self._namespace_client = namespace_client
if _async is not None:
self._table = _async
else:
@@ -1762,8 +1760,6 @@ class LanceTable(Table):
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
location=location,
namespace_client=namespace_client,
managed_versioning=managed_versioning,
)
)
@@ -1806,8 +1802,6 @@ class LanceTable(Table):
storage_options_provider: Optional["StorageOptionsProvider"] = None,
index_cache_size: Optional[int] = None,
location: Optional[str] = None,
namespace_client: Optional[Any] = None,
managed_versioning: Optional[bool] = None,
):
if namespace is None:
namespace = []
@@ -1819,8 +1813,6 @@ class LanceTable(Table):
storage_options_provider=storage_options_provider,
index_cache_size=index_cache_size,
location=location,
namespace_client=namespace_client,
managed_versioning=managed_versioning,
)
# check the dataset exists
@@ -1852,16 +1844,6 @@ class LanceTable(Table):
"Please install with `pip install pylance`."
)
if self._namespace_client is not None:
table_id = self._namespace + [self.name]
return lance.dataset(
version=self.version,
storage_options=self._conn.storage_options,
namespace=self._namespace_client,
table_id=table_id,
**kwargs,
)
return lance.dataset(
self._dataset_path,
version=self.version,
@@ -2214,18 +2196,12 @@ class LanceTable(Table):
def prewarm_index(self, name: str) -> None:
"""
Prewarm an index in the table.
Prewarms an index in the table
This is a hint to the database that the index will be accessed in the
future and should be loaded into memory if possible. This can reduce
cold-start latency for subsequent queries.
This loads the entire index into memory
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
It is generally wasteful to call this if the index does not fit into the
available cache. Not all index types support prewarming; unsupported
indices will silently ignore the request.
If the index does not fit into the available cache this call
may be wasteful
Parameters
----------
@@ -2234,29 +2210,6 @@ class LanceTable(Table):
"""
return LOOP.run(self._table.prewarm_index(name))
def prewarm_data(self, columns: Optional[List[str]] = None) -> None:
"""
Prewarm data for the table.
This is a hint to the database that the given columns will be accessed
in the future and the database should prefetch the data if possible.
Currently only supported on remote tables.
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
This operation has a large upfront cost but can speed up future queries
that need to fetch the given columns. Large columns such as embeddings
or binary data may not be practical to prewarm. This feature is intended
for workloads that issue many queries against the same columns.
Parameters
----------
columns: list of str, optional
The columns to prewarm. If None, all columns are prewarmed.
"""
return LOOP.run(self._table.prewarm_data(columns))
def wait_for_index(
self, index_names: Iterable[str], timeout: timedelta = timedelta(seconds=300)
) -> None:
@@ -2345,11 +2298,7 @@ class LanceTable(Table):
):
if not use_tantivy:
if not isinstance(field_names, str):
raise ValueError(
"Native FTS indexes can only be created on a single field "
"at a time. To search over multiple text fields, create a "
"separate FTS index for each field."
)
raise ValueError("field_names must be a string when use_tantivy=False")
if tokenizer_name is None:
tokenizer_configs = {
@@ -2756,7 +2705,6 @@ class LanceTable(Table):
data_storage_version: Optional[str] = None,
enable_v2_manifest_paths: Optional[bool] = None,
location: Optional[str] = None,
namespace_client: Optional[Any] = None,
):
"""
Create a new table.
@@ -2817,7 +2765,6 @@ class LanceTable(Table):
self._conn = db
self._namespace = namespace
self._location = location
self._namespace_client = namespace_client
if data_storage_version is not None:
warnings.warn(
@@ -3042,17 +2989,22 @@ class LanceTable(Table):
in-progress operation (e.g. appending new data) and these files will not
be deleted unless they are at least 7 days old. If delete_unverified is True
then these files will be deleted regardless of their age.
.. warning::
This should only be set to True if you can guarantee that no other
process is currently working on this dataset. Otherwise the dataset
could be put into a corrupted state.
retrain: bool, default False
This parameter is no longer used and is deprecated.
The frequency an application should call optimize is based on the frequency of
Experimental API
----------------
The optimization process is undergoing active development and may change.
Our goal with these changes is to improve the performance of optimization and
reduce the complexity.
That being said, it is essential today to run optimize if you want the best
performance. It should be stable and safe to use in production, but it is our
hope that the API may be simplified (or not even need to be called) in the
future.
The frequency an application should call optimize is based on the frequency of
data modifications. If data is frequently added, deleted, or updated then
optimize should be run frequently. A good rule of thumb is to run optimize if
you have added or modified 100,000 or more records or run more than 20 data
@@ -3653,47 +3605,19 @@ class AsyncTable:
"""
Prewarm an index in the table.
This is a hint to the database that the index will be accessed in the
future and should be loaded into memory if possible. This can reduce
cold-start latency for subsequent queries.
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
It is generally wasteful to call this if the index does not fit into the
available cache. Not all index types support prewarming; unsupported
indices will silently ignore the request.
Parameters
----------
name: str
The name of the index to prewarm
Notes
-----
This will load the index into memory. This may reduce the cold-start time for
future queries. If the index does not fit in the cache then this call may be
wasteful.
"""
await self._inner.prewarm_index(name)
async def prewarm_data(self, columns: Optional[List[str]] = None) -> None:
"""
Prewarm data for the table.
This is a hint to the database that the given columns will be accessed
in the future and the database should prefetch the data if possible.
Currently only supported on remote tables.
This call initiates prewarming and returns once the request is accepted.
It is idempotent and safe to call from multiple clients concurrently.
This operation has a large upfront cost but can speed up future queries
that need to fetch the given columns. Large columns such as embeddings
or binary data may not be practical to prewarm. This feature is intended
for workloads that issue many queries against the same columns.
Parameters
----------
columns: list of str, optional
The columns to prewarm. If None, all columns are prewarmed.
"""
await self._inner.prewarm_data(columns)
async def wait_for_index(
self, index_names: Iterable[str], timeout: timedelta = timedelta(seconds=300)
) -> None:
@@ -3797,31 +3721,18 @@ class AsyncTable:
on_bad_vectors = "error"
if fill_value is None:
fill_value = 0.0
data = _sanitize_data(
data,
schema,
metadata=schema.metadata,
on_bad_vectors=on_bad_vectors,
fill_value=fill_value,
allow_subschema=True,
)
if isinstance(data, pa.Table):
data = data.to_reader()
# _santitize_data is an old code path, but we will use it until the
# new code path is ready.
if on_bad_vectors != "error" or (
schema.metadata is not None and b"embedding_functions" in schema.metadata
):
data = _sanitize_data(
data,
schema,
metadata=schema.metadata,
on_bad_vectors=on_bad_vectors,
fill_value=fill_value,
allow_subschema=True,
)
_register_optional_converters()
data = to_scannable(data)
try:
return await self._inner.add(data, mode or "append")
except RuntimeError as e:
if "Cast error" in str(e):
raise ValueError(e)
elif "Vector column contains NaN" in str(e):
raise ValueError(e)
else:
raise
return await self._inner.add(data, mode or "append")
def merge_insert(self, on: Union[str, Iterable[str]]) -> LanceMergeInsertBuilder:
"""
@@ -4283,7 +4194,7 @@ class AsyncTable:
1 2 [3.0, 4.0]
2 3 [5.0, 6.0]
>>> table.delete("x = 2")
DeleteResult(num_deleted_rows=1, version=2)
DeleteResult(version=2)
>>> table.to_pandas()
x vector
0 1 [1.0, 2.0]
@@ -4297,7 +4208,7 @@ class AsyncTable:
>>> to_remove
'1, 5'
>>> table.delete(f"x IN ({to_remove})")
DeleteResult(num_deleted_rows=1, version=3)
DeleteResult(version=3)
>>> table.to_pandas()
x vector
0 3 [5.0, 6.0]
@@ -4620,17 +4531,22 @@ class AsyncTable:
in-progress operation (e.g. appending new data) and these files will not
be deleted unless they are at least 7 days old. If delete_unverified is True
then these files will be deleted regardless of their age.
.. warning::
This should only be set to True if you can guarantee that no other
process is currently working on this dataset. Otherwise the dataset
could be put into a corrupted state.
retrain: bool, default False
This parameter is no longer used and is deprecated.
The frequency an application should call optimize is based on the frequency of
Experimental API
----------------
The optimization process is undergoing active development and may change.
Our goal with these changes is to improve the performance of optimization and
reduce the complexity.
That being said, it is essential today to run optimize if you want the best
performance. It should be stable and safe to use in production, but it is our
hope that the API may be simplified (or not even need to be called) in the
future.
The frequency an application should call optimize is based on the frequency of
data modifications. If data is frequently added, deleted, or updated then
optimize should be run frequently. A good rule of thumb is to run optimize if
you have added or modified 100,000 or more records or run more than 20 data

View File

@@ -324,16 +324,6 @@ def _(value: list):
return "[" + ", ".join(map(value_to_sql, value)) + "]"
@value_to_sql.register(dict)
def _(value: dict):
# https://datafusion.apache.org/user-guide/sql/scalar_functions.html#named-struct
return (
"named_struct("
+ ", ".join(f"'{k}', {value_to_sql(v)}" for k, v in value.items())
+ ")"
)
@value_to_sql.register(np.ndarray)
def _(value: np.ndarray):
return value_to_sql(value.tolist())
@@ -429,22 +419,3 @@ def batch_to_tensor(batch: pa.RecordBatch):
"""
torch = attempt_import_or_raise("torch", "torch")
return torch.stack([torch.from_dlpack(col) for col in batch.columns])
def batch_to_tensor_rows(batch: pa.RecordBatch):
    """
    Convert a PyArrow RecordBatch to a list of PyTorch Tensors, one per row.

    Each column is materialized as a NumPy array with ``zero_copy_only=False``
    (so a copy may be made; this path does NOT go through DLPack), the columns
    are stacked into a single 2D ``(num_rows, num_columns)`` tensor, and that
    tensor is split into a list of 1D row tensors.

    Fails if torch or numpy is not installed.
    Fails if a column's data type is not supported by PyTorch.
    """
    torch = attempt_import_or_raise("torch", "torch")
    numpy = attempt_import_or_raise("numpy", "numpy")
    # Copies are explicitly allowed here so non-zero-copyable Arrow types work.
    columns = [col.to_numpy(zero_copy_only=False) for col in batch.columns]
    # column_stack -> shape (num_rows, num_columns); torch.tensor copies the data.
    stacked = torch.tensor(numpy.column_stack(columns))
    # unbind(dim=0) splits the 2D tensor into one 1D tensor per row.
    rows = list(stacked.unbind(dim=0))
    return rows

View File

@@ -515,34 +515,3 @@ def test_openai_propagates_api_key(monkeypatch):
query = "greetings"
actual = table.search(query).limit(1).to_pydantic(Words)[0]
assert len(actual.text) > 0
@patch("time.sleep")
def test_openai_no_retry_on_401(mock_sleep):
    """
    Test that OpenAI embedding function does not retry on 401 authentication
    errors.
    """
    from lancedb.embeddings.utils import retry_with_exponential_backoff

    # Create a mock that raises an AuthenticationError
    class MockAuthenticationError(Exception):
        """Mock OpenAI AuthenticationError"""

        pass

    # Renamed so it looks like openai.AuthenticationError — presumably the
    # retry helper matches the exception by class name (verify against
    # retry_with_exponential_backoff's implementation).
    MockAuthenticationError.__name__ = "AuthenticationError"

    mock_func = MagicMock(side_effect=MockAuthenticationError("Invalid API key"))

    # Wrap the function with retry logic
    wrapped_func = retry_with_exponential_backoff(mock_func, max_retries=3)

    # Should raise without retrying
    with pytest.raises(MockAuthenticationError):
        wrapped_func()

    # Verify that the function was only called once (no retries)
    assert mock_func.call_count == 1
    # Verify that sleep was never called (no retries)
    assert mock_sleep.call_count == 0

View File

@@ -27,7 +27,6 @@ from lancedb.query import (
PhraseQuery,
BooleanQuery,
Occur,
LanceFtsQueryBuilder,
)
import numpy as np
import pyarrow as pa
@@ -883,109 +882,3 @@ def test_fts_query_to_json():
'"must_not":[]}}'
)
assert json_str == expected
def test_fts_fast_search(table):
    """fast_search() sets the query flag and skips rows added after the FTS
    index was built, until table.optimize() refreshes the index."""
    table.create_fts_index("text", use_tantivy=False)

    # Insert some unindexed data
    table.add(
        [
            {
                "text": "xyz",
                "vector": [0 for _ in range(128)],
                "id": 101,
                "text2": "xyz",
                "nested": {"text": "xyz"},
                "count": 10,
            }
        ]
    )

    # Without fast_search, the query object should not have fast_search set
    builder = table.search("xyz", query_type="fts").limit(10)
    query = builder.to_query_object()
    assert query.fast_search is None

    # With fast_search, the query object should have fast_search=True
    builder = table.search("xyz", query_type="fts").fast_search().limit(10)
    query = builder.to_query_object()
    assert query.fast_search is True

    # fast_search should be chainable with other methods
    builder = (
        table.search("xyz", query_type="fts").fast_search().select(["text"]).limit(5)
    )
    query = builder.to_query_object()
    assert query.fast_search is True
    assert query.limit == 5
    assert query.columns == ["text"]

    # fast_search should be enabled by keyword argument too
    query = LanceFtsQueryBuilder(table, "xyz", fast_search=True).to_query_object()
    assert query.fast_search is True

    # Verify it executes without error and skips unindexed data
    results = table.search("xyz", query_type="fts").fast_search().limit(5).to_list()
    assert len(results) == 0

    # Update index and verify it returns results
    table.optimize()
    results = table.search("xyz", query_type="fts").fast_search().limit(5).to_list()
    assert len(results) > 0
@pytest.mark.asyncio
async def test_fts_fast_search_async(async_table):
    """Async counterpart of the fast_search test: unindexed rows are skipped
    with fast_search() until optimize() refreshes the FTS index."""
    await async_table.create_index("text", config=FTS())

    # Insert some unindexed data
    await async_table.add(
        [
            {
                "text": "xyz",
                "vector": [0 for _ in range(128)],
                "id": 101,
                "text2": "xyz",
                "nested": {"text": "xyz"},
                "count": 10,
            }
        ]
    )

    # Without fast_search, should return results
    results = await async_table.query().nearest_to_text("xyz").limit(5).to_list()
    assert len(results) > 0

    # With fast_search, should return no results since the new data is unindexed
    fast_results = (
        await async_table.query()
        .nearest_to_text("xyz")
        .fast_search()
        .limit(5)
        .to_list()
    )
    assert len(fast_results) == 0

    # Update index and verify it returns results
    await async_table.optimize()
    fast_results = (
        await async_table.query()
        .nearest_to_text("xyz")
        .fast_search()
        .limit(5)
        .to_list()
    )
    assert len(fast_results) > 0

    # fast_search should be chainable with other methods
    results = (
        await async_table.query()
        .nearest_to_text("xyz")
        .fast_search()
        .select(["text"])
        .limit(5)
        .to_list()
    )
    assert len(results) > 0

View File

@@ -163,7 +163,9 @@ async def test_explain_plan(table: AsyncTable):
table.query().nearest_to_text("dog").nearest_to([0.1, 0.1]).explain_plan(True)
)
assert "Vector Search Plan" in plan
assert "KNNVectorDistance" in plan
assert "FTS Search Plan" in plan
assert "LanceRead" in plan
@@ -177,60 +179,6 @@ async def test_analyze_plan(table: AsyncTable):
assert "metrics=" in res
@pytest.fixture
def table_with_id(tmpdir_factory) -> Table:
    """Four-row table with id/text/vector columns and a native (non-tantivy)
    FTS index on "text", backed by a temporary on-disk database."""
    tmp_path = str(tmpdir_factory.mktemp("data"))
    db = lancedb.connect(tmp_path)
    data = pa.table(
        {
            "id": pa.array([1, 2, 3, 4], type=pa.int64()),
            "text": pa.array(["a", "b", "cat", "dog"]),
            "vector": pa.array(
                [[0.1, 0.1], [2, 2], [-0.1, -0.1], [0.5, -0.5]],
                type=pa.list_(pa.float32(), list_size=2),
            ),
        }
    )
    table = db.create_table("test_with_id", data)
    # Positions are not needed by the consuming tests, so skip storing them.
    table.create_fts_index("text", with_position=False, use_tantivy=False)
    return table
def test_hybrid_prefilter_explain_plan(table_with_id: Table):
    """
    Verify that the prefilter logic is not inverted in LanceHybridQueryBuilder.

    Checks the explain_plan output: prefilter=True must embed the filter in the
    scan, prefilter=False must not.
    """
    plan_prefilter = (
        table_with_id.search(query_type="hybrid")
        .vector([0.0, 0.0])
        .text("dog")
        .where("id = 1", prefilter=True)
        .limit(2)
        .explain_plan(verbose=True)
    )
    plan_postfilter = (
        table_with_id.search(query_type="hybrid")
        .vector([0.0, 0.0])
        .text("dog")
        .where("id = 1", prefilter=False)
        .limit(2)
        .explain_plan(verbose=True)
    )

    # prefilter=True: filter is pushed into the LanceRead scan.
    # The FTS sub-plan exposes this as "full_filter=id = Int64(1)" inside LanceRead.
    assert "full_filter=id = Int64(1)" in plan_prefilter, (
        f"Should push the filter into the scan.\nPlan:\n{plan_prefilter}"
    )

    # prefilter=False: filter is applied as a separate FilterExec after the search.
    # The filter must NOT be embedded in the scan.
    assert "full_filter=id = Int64(1)" not in plan_postfilter, (
        f"Should NOT push the filter into the scan.\nPlan:\n{plan_postfilter}"
    )
def test_normalize_scores():
cases = [
(pa.array([0.1, 0.4]), pa.array([0.0, 1.0])),

View File

@@ -147,12 +147,7 @@ class TrackingNamespace(LanceNamespace):
This simulates a credential rotation system where each call returns
new credentials that expire after credential_expires_in_seconds.
"""
# Start from base storage options (endpoint, region, allow_http, etc.)
# because DirectoryNamespace returns None for storage_options from
# describe_table/declare_table when no credential vendor is configured.
modified = copy.deepcopy(self.base_storage_options)
if storage_options:
modified.update(storage_options)
modified = copy.deepcopy(storage_options) if storage_options else {}
# Increment credentials to simulate rotation
modified["aws_access_key_id"] = f"AKID_{count}"

View File

@@ -664,20 +664,23 @@ def test_iter_basic(some_permutation: Permutation):
expected_batches = (950 + batch_size - 1) // batch_size # ceiling division
assert len(batches) == expected_batches
# Check that all batches are lists of dicts (default python format)
assert all(isinstance(batch, list) for batch in batches)
# Check that all batches are dicts (default python format)
assert all(isinstance(batch, dict) for batch in batches)
# Check that batches have the correct structure
for batch in batches:
assert "id" in batch[0]
assert "value" in batch[0]
assert "id" in batch
assert "value" in batch
assert isinstance(batch["id"], list)
assert isinstance(batch["value"], list)
# Check that all batches except the last have the correct size
for batch in batches[:-1]:
assert len(batch) == batch_size
assert len(batch["id"]) == batch_size
assert len(batch["value"]) == batch_size
# Last batch might be smaller
assert len(batches[-1]) <= batch_size
assert len(batches[-1]["id"]) <= batch_size
def test_iter_skip_last_batch(some_permutation: Permutation):
@@ -696,11 +699,11 @@ def test_iter_skip_last_batch(some_permutation: Permutation):
if 950 % batch_size != 0:
assert len(batches_without_skip) == num_full_batches + 1
# Last batch should be smaller
assert len(batches_without_skip[-1]) == 950 % batch_size
assert len(batches_without_skip[-1]["id"]) == 950 % batch_size
# All batches with skip_last_batch should be full size
for batch in batches_with_skip:
assert len(batch) == batch_size
assert len(batch["id"]) == batch_size
def test_iter_different_batch_sizes(some_permutation: Permutation):
@@ -717,12 +720,12 @@ def test_iter_different_batch_sizes(some_permutation: Permutation):
# Test with batch size equal to total rows
single_batch = list(some_permutation.iter(950, skip_last_batch=False))
assert len(single_batch) == 1
assert len(single_batch[0]) == 950
assert len(single_batch[0]["id"]) == 950
# Test with batch size larger than total rows
oversized_batch = list(some_permutation.iter(10000, skip_last_batch=False))
assert len(oversized_batch) == 1
assert len(oversized_batch[0]) == 950
assert len(oversized_batch[0]["id"]) == 950
def test_dunder_iter(some_permutation: Permutation):
@@ -735,13 +738,15 @@ def test_dunder_iter(some_permutation: Permutation):
# All batches should be full size
for batch in batches:
assert len(batch) == 100
assert len(batch["id"]) == 100
assert len(batch["value"]) == 100
some_permutation = some_permutation.with_batch_size(400)
batches = list(some_permutation)
assert len(batches) == 2 # floor(950 / 400) since skip_last_batch=True
for batch in batches:
assert len(batch) == 400
assert len(batch["id"]) == 400
assert len(batch["value"]) == 400
def test_iter_with_different_formats(some_permutation: Permutation):
@@ -756,7 +761,7 @@ def test_iter_with_different_formats(some_permutation: Permutation):
# Test with python format (default)
python_perm = some_permutation.with_format("python")
python_batches = list(python_perm.iter(batch_size, skip_last_batch=False))
assert all(isinstance(batch, list) for batch in python_batches)
assert all(isinstance(batch, dict) for batch in python_batches)
# Test with pandas format
pandas_perm = some_permutation.with_format("pandas")
@@ -775,8 +780,8 @@ def test_iter_with_column_selection(some_permutation: Permutation):
# Check that batches only contain the id column
for batch in batches:
assert "id" in batch[0]
assert "value" not in batch[0]
assert "id" in batch
assert "value" not in batch
def test_iter_with_column_rename(some_permutation: Permutation):
@@ -786,9 +791,9 @@ def test_iter_with_column_rename(some_permutation: Permutation):
# Check that batches have the renamed column
for batch in batches:
assert "id" in batch[0]
assert "data" in batch[0]
assert "value" not in batch[0]
assert "id" in batch
assert "data" in batch
assert "value" not in batch
def test_iter_with_limit_offset(some_permutation: Permutation):
@@ -807,14 +812,14 @@ def test_iter_with_limit_offset(some_permutation: Permutation):
assert len(limit_batches) == 5
no_skip = some_permutation.iter(101, skip_last_batch=False)
row_100 = next(no_skip)[100]["id"]
row_100 = next(no_skip)["id"][100]
# Test with both limit and offset
limited_perm = some_permutation.with_skip(100).with_take(300)
limited_batches = list(limited_perm.iter(100, skip_last_batch=False))
# Should have 3 batches (300 / 100)
assert len(limited_batches) == 3
assert limited_batches[0][0]["id"] == row_100
assert limited_batches[0]["id"][0] == row_100
def test_iter_empty_permutation(mem_db):
@@ -837,7 +842,7 @@ def test_iter_single_row(mem_db):
# With skip_last_batch=False, should get one batch
batches = list(perm.iter(10, skip_last_batch=False))
assert len(batches) == 1
assert len(batches[0]) == 1
assert len(batches[0]["id"]) == 1
# With skip_last_batch=True, should skip the single row (since it's < batch_size)
batches_skip = list(perm.iter(10, skip_last_batch=True))
@@ -855,7 +860,8 @@ def test_identity_permutation(mem_db):
batches = list(permutation.iter(10, skip_last_batch=False))
assert len(batches) == 1
assert len(batches[0]) == 10
assert len(batches[0]["id"]) == 10
assert len(batches[0]["value"]) == 10
permutation = permutation.remove_columns(["value"])
assert permutation.num_columns == 1
@@ -898,10 +904,10 @@ def test_transform_fn(mem_db):
py_result = list(permutation.with_format("python").iter(10, skip_last_batch=False))[
0
]
assert len(py_result) == 10
assert "id" in py_result[0]
assert "value" in py_result[0]
assert isinstance(py_result, list)
assert len(py_result) == 2
assert len(py_result["id"]) == 10
assert len(py_result["value"]) == 10
assert isinstance(py_result, dict)
try:
import torch
@@ -909,11 +915,9 @@ def test_transform_fn(mem_db):
torch_result = list(
permutation.with_format("torch").iter(10, skip_last_batch=False)
)[0]
assert isinstance(torch_result, list)
assert len(torch_result) == 10
assert isinstance(torch_result[0], torch.Tensor)
assert torch_result[0].shape == (2,)
assert torch_result[0].dtype == torch.int64
assert torch_result.shape == (2, 10)
assert torch_result.dtype == torch.int64
assert isinstance(torch_result, torch.Tensor)
except ImportError:
# Skip check if torch is not installed
pass
@@ -941,113 +945,3 @@ def test_custom_transform(mem_db):
batch = batches[0]
assert batch == pa.record_batch([range(10)], ["id"])
def test_getitems_basic(some_permutation: Permutation):
    """Test __getitems__ returns correct rows by offset."""
    result = some_permutation.__getitems__([0, 1, 2])
    # Default (python) format: a list of row dicts containing every column.
    assert isinstance(result, list)
    assert "id" in result[0]
    assert "value" in result[0]
    assert len(result) == 3
def test_getitems_single_index(some_permutation: Permutation):
    """Test __getitems__ with a single index."""
    result = some_permutation.__getitems__([0])
    # A one-element index list yields exactly one row.
    assert len(result) == 1
def test_getitems_preserves_order(some_permutation: Permutation):
    """Test __getitems__ returns rows in the requested order."""
    # Get rows in forward order
    forward = some_permutation.__getitems__([0, 1, 2, 3, 4])
    # Get the same rows in reverse order
    reverse = some_permutation.__getitems__([4, 3, 2, 1, 0])
    # Reversing the reversed fetch must reproduce the forward fetch exactly.
    assert [r["id"] for r in forward] == list(reversed([r["id"] for r in reverse]))
    assert [r["value"] for r in forward] == list(
        reversed([r["value"] for r in reverse])
    )
def test_getitems_non_contiguous(some_permutation: Permutation):
    """Test __getitems__ with non-contiguous indices."""
    result = some_permutation.__getitems__([0, 10, 50, 100, 500])
    assert len(result) == 5
    # Each id/value pair should match what we'd get individually
    for i, offset in enumerate([0, 10, 50, 100, 500]):
        single = some_permutation.__getitems__([offset])
        assert result[i]["id"] == single[0]["id"]
        assert result[i]["value"] == single[0]["value"]
def test_getitems_with_column_selection(some_permutation: Permutation):
    """Test __getitems__ respects column selection."""
    id_only = some_permutation.select_columns(["id"])
    result = id_only.__getitems__([0, 1, 2])
    # Only the selected column survives; the other is dropped.
    assert "id" in result[0]
    assert "value" not in result[0]
    assert len(result) == 3
def test_getitems_with_column_rename(some_permutation: Permutation):
    """Test __getitems__ respects column renames."""
    renamed = some_permutation.rename_column("value", "data")
    result = renamed.__getitems__([0, 1])
    # New name is present; the old name must be gone.
    assert "data" in result[0]
    assert "value" not in result[0]
    assert len(result) == 2
def test_getitems_with_format(some_permutation: Permutation):
    """Test __getitems__ applies the transform function."""
    arrow_perm = some_permutation.with_format("arrow")
    result = arrow_perm.__getitems__([0, 1, 2])
    # "arrow" format yields a RecordBatch rather than a list of dicts.
    assert isinstance(result, pa.RecordBatch)
    assert result.num_rows == 3
def test_getitems_with_custom_transform(some_permutation: Permutation):
    """Test __getitems__ with a custom transform."""

    # Transform collapses the batch to just the "id" column as a Python list.
    def transform(batch: pa.RecordBatch) -> list:
        return batch.column("id").to_pylist()

    custom = some_permutation.with_transform(transform)
    result = custom.__getitems__([0, 1, 2])
    assert isinstance(result, list)
    assert len(result) == 3
def test_getitems_identity_permutation(mem_db):
    """Test __getitems__ on an identity permutation."""
    tbl = mem_db.create_table(
        "test_table", pa.table({"id": range(10), "value": range(10)})
    )
    perm = Permutation.identity(tbl)
    result = perm.__getitems__([0, 5, 9])
    # Identity permutation: offsets map directly to the underlying row values.
    assert [r["id"] for r in result] == [0, 5, 9]
    assert [r["value"] for r in result] == [0, 5, 9]
def test_getitems_with_limit_offset(some_permutation: Permutation):
    """Test __getitems__ on a permutation with skip/take applied."""
    limited = some_permutation.with_skip(100).with_take(200)
    # Should be able to access offsets within the limited range
    result = limited.__getitems__([0, 1, 199])
    assert len(result) == 3
    # The first item of the limited permutation should match offset 100 of original
    full_result = some_permutation.__getitems__([100])
    limited_result = limited.__getitems__([0])
    assert limited_result[0]["id"] == full_result[0]["id"]
def test_getitems_invalid_offset(some_permutation: Permutation):
    """Test __getitems__ with an out-of-range offset raises an error."""
    # NOTE(review): any exception type is accepted here — consider narrowing
    # once the error contract of __getitems__ is settled.
    with pytest.raises(Exception):
        some_permutation.__getitems__([999999])

View File

@@ -531,78 +531,6 @@ def test_empty_result_reranker():
)
def test_empty_hybrid_result_reranker():
    """Test that hybrid search with empty results after filtering doesn't crash.

    Regression test for https://github.com/lancedb/lancedb/issues/2425
    """
    from lancedb.query import LanceHybridQueryBuilder

    # Simulate empty vector and FTS results with the expected schema
    vector_schema = pa.schema(
        [
            ("text", pa.string()),
            ("vector", pa.list_(pa.float32(), 4)),
            ("_rowid", pa.uint64()),
            ("_distance", pa.float32()),
        ]
    )
    fts_schema = pa.schema(
        [
            ("text", pa.string()),
            ("vector", pa.list_(pa.float32(), 4)),
            ("_rowid", pa.uint64()),
            ("_score", pa.float32()),
        ]
    )
    empty_vector = pa.table(
        {
            "text": pa.array([], type=pa.string()),
            "vector": pa.array([], type=pa.list_(pa.float32(), 4)),
            "_rowid": pa.array([], type=pa.uint64()),
            "_distance": pa.array([], type=pa.float32()),
        },
        schema=vector_schema,
    )
    empty_fts = pa.table(
        {
            "text": pa.array([], type=pa.string()),
            "vector": pa.array([], type=pa.list_(pa.float32(), 4)),
            "_rowid": pa.array([], type=pa.uint64()),
            "_score": pa.array([], type=pa.float32()),
        },
        schema=fts_schema,
    )

    # Both reranker families must tolerate fully-empty inputs without raising.
    for reranker in [LinearCombinationReranker(), RRFReranker()]:
        result = LanceHybridQueryBuilder._combine_hybrid_results(
            fts_results=empty_fts,
            vector_results=empty_vector,
            norm="score",
            fts_query="nonexistent query",
            reranker=reranker,
            limit=10,
            with_row_ids=False,
        )
        assert len(result) == 0
        assert "_relevance_score" in result.column_names
        # Row ids must be stripped when with_row_ids=False.
        assert "_rowid" not in result.column_names

    # Also test with with_row_ids=True
    result = LanceHybridQueryBuilder._combine_hybrid_results(
        fts_results=empty_fts,
        vector_results=empty_vector,
        norm="score",
        fts_query="nonexistent query",
        reranker=LinearCombinationReranker(),
        limit=10,
        with_row_ids=True,
    )
    assert len(result) == 0
    assert "_relevance_score" in result.column_names
    assert "_rowid" in result.column_names
@pytest.mark.parametrize("use_tantivy", [True, False])
def test_cross_encoder_reranker_return_all(tmp_path, use_tantivy):
pytest.importorskip("sentence_transformers")

View File

@@ -326,24 +326,6 @@ def test_add_struct(mem_db: DBConnection):
table = mem_db.create_table("test2", schema=schema)
table.add(data)
struct_type = pa.struct(
[
("b", pa.int64()),
("a", pa.int64()),
]
)
expected = pa.table(
{
"s_list": [
[
pa.scalar({"b": 1, "a": 2}, type=struct_type),
pa.scalar({"b": 4, "a": None}, type=struct_type),
]
],
}
)
assert table.to_arrow() == expected
def test_add_subschema(mem_db: DBConnection):
schema = pa.schema(
@@ -828,7 +810,7 @@ def test_create_index_name_and_train_parameters(
)
def test_create_with_nans(mem_db: DBConnection):
def test_add_with_nans(mem_db: DBConnection):
# by default we raise an error on bad input vectors
bad_data = [
{"vector": [np.nan], "item": "bar", "price": 20.0},
@@ -872,57 +854,6 @@ def test_create_with_nans(mem_db: DBConnection):
assert np.allclose(v, np.array([0.0, 0.0]))
def test_add_with_nans(mem_db: DBConnection):
    """add() against an explicit schema must reject bad vectors by default,
    and support the opt-in on_bad_vectors="drop" / "fill" handling."""
    schema = pa.schema(
        [
            pa.field("vector", pa.list_(pa.float32(), 2), nullable=True),
            pa.field("item", pa.string(), nullable=True),
            pa.field("price", pa.float64(), nullable=False),
        ],
    )
    table = mem_db.create_table("test", schema=schema)

    # by default we raise an error on bad input vectors
    # (NaN values or a length that doesn't match the fixed-size list of 2)
    bad_data = [
        {"vector": [np.nan], "item": "bar", "price": 20.0},
        {"vector": [5], "item": "bar", "price": 20.0},
        {"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
        {"vector": [np.nan, 5.0], "item": "bar", "price": 20.0},
    ]
    for row in bad_data:
        with pytest.raises(ValueError):
            table.add(
                data=[row],
            )

    # on_bad_vectors="drop" silently discards the three bad rows, keeping 2.
    table.add(
        [
            {"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
            {"vector": [2.1, 4.1], "item": "foo", "price": 9.0},
            {"vector": [np.nan], "item": "bar", "price": 20.0},
            {"vector": [5], "item": "bar", "price": 20.0},
            {"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
        ],
        on_bad_vectors="drop",
    )
    assert len(table) == 2
    # Clear the table before testing the "fill" path.
    table.delete("true")

    # We can fill bad input with some value
    table.add(
        data=[
            {"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
            {"vector": [np.nan], "item": "bar", "price": 20.0},
            {"vector": [np.nan, np.nan], "item": "bar", "price": 20.0},
        ],
        on_bad_vectors="fill",
        fill_value=0.0,
    )
    assert len(table) == 3
    arrow_tbl = table.search().where("item == 'bar'").to_arrow()
    v = arrow_tbl["vector"].to_pylist()[0]
    assert np.allclose(v, np.array([0.0, 0.0]))
def test_restore(mem_db: DBConnection):
table = mem_db.create_table(
"my_table",

View File

@@ -4,7 +4,6 @@
import pyarrow as pa
import pytest
from lancedb.util import tbl_to_tensor
from lancedb.permutation import Permutation
torch = pytest.importorskip("torch")
@@ -17,26 +16,3 @@ def test_table_dataloader(mem_db):
for batch in dataloader:
assert batch.size(0) == 1
assert batch.size(1) == 10
def test_permutation_dataloader(mem_db):
table = mem_db.create_table("test_table", pa.table({"a": range(1000)}))
permutation = Permutation.identity(table)
dataloader = torch.utils.data.DataLoader(permutation, batch_size=10, shuffle=True)
for batch in dataloader:
assert batch["a"].size(0) == 10
permutation = permutation.with_format("torch")
dataloader = torch.utils.data.DataLoader(permutation, batch_size=10, shuffle=True)
for batch in dataloader:
assert batch.size(0) == 10
assert batch.size(1) == 1
permutation = permutation.with_format("torch_col")
dataloader = torch.utils.data.DataLoader(
permutation, collate_fn=lambda x: x, batch_size=10, shuffle=True
)
for batch in dataloader:
assert batch.size(0) == 1
assert batch.size(1) == 10

View File

@@ -121,32 +121,6 @@ def test_value_to_sql_string(tmp_path):
assert table.to_pandas().query("search == @value")["replace"].item() == value
def test_value_to_sql_dict():
# Simple flat struct
assert value_to_sql({"a": 1, "b": "hello"}) == "named_struct('a', 1, 'b', 'hello')"
# Nested struct
assert (
value_to_sql({"outer": {"inner": 1}})
== "named_struct('outer', named_struct('inner', 1))"
)
# List inside struct
assert value_to_sql({"a": [1, 2]}) == "named_struct('a', [1, 2])"
# Mixed types
assert (
value_to_sql({"name": "test", "count": 42, "rate": 3.14, "active": True})
== "named_struct('name', 'test', 'count', 42, 'rate', 3.14, 'active', TRUE)"
)
# Null value inside struct
assert value_to_sql({"a": None}) == "named_struct('a', NULL)"
# Empty dict
assert value_to_sql({}) == "named_struct()"
def test_append_vector_columns():
registry = EmbeddingFunctionRegistry.get_instance()
registry.register("test")(MockTextEmbeddingFunction)
@@ -318,14 +292,18 @@ class TestModel(lancedb.pydantic.LanceModel):
lambda: pa.table({"a": [1], "b": [2]}),
lambda: pa.table({"a": [1], "b": [2]}).to_reader(),
lambda: iter(pa.table({"a": [1], "b": [2]}).to_batches()),
lambda: lance.write_dataset(
pa.table({"a": [1], "b": [2]}),
"memory://test",
lambda: (
lance.write_dataset(
pa.table({"a": [1], "b": [2]}),
"memory://test",
)
),
lambda: (
lance.write_dataset(
pa.table({"a": [1], "b": [2]}),
"memory://test",
).scanner()
),
lambda: lance.write_dataset(
pa.table({"a": [1], "b": [2]}),
"memory://test",
).scanner(),
lambda: pd.DataFrame({"a": [1], "b": [2]}),
lambda: pl.DataFrame({"a": [1], "b": [2]}),
lambda: pl.LazyFrame({"a": [1], "b": [2]}),

View File

@@ -10,7 +10,7 @@ use arrow::{
use futures::stream::StreamExt;
use lancedb::arrow::SendableRecordBatchStream;
use pyo3::{
Bound, Py, PyAny, PyRef, PyResult, Python, exceptions::PyStopAsyncIteration, pyclass, pymethods,
exceptions::PyStopAsyncIteration, pyclass, pymethods, Bound, Py, PyAny, PyRef, PyResult, Python,
};
use pyo3_async_runtimes::tokio::future_into_py;

View File

@@ -9,16 +9,15 @@ use lancedb::{
database::{CreateTableMode, Database, ReadConsistency},
};
use pyo3::{
Bound, FromPyObject, Py, PyAny, PyRef, PyResult, Python,
exceptions::{PyRuntimeError, PyValueError},
pyclass, pyfunction, pymethods,
types::{PyDict, PyDictMethods},
Bound, FromPyObject, Py, PyAny, PyRef, PyResult, Python,
};
use pyo3_async_runtimes::tokio::future_into_py;
use crate::{
error::PythonErrorExt, namespace::extract_namespace_arc,
storage_options::py_object_to_storage_options_provider, table::Table,
error::PythonErrorExt, storage_options::py_object_to_storage_options_provider, table::Table,
};
#[pyclass]
@@ -122,8 +121,7 @@ impl Connection {
let mode = Self::parse_create_mode_str(mode)?;
let batches: Box<dyn arrow::array::RecordBatchReader + Send> =
Box::new(ArrowArrayStreamReader::from_pyarrow_bound(&data)?);
let batches = ArrowArrayStreamReader::from_pyarrow_bound(&data)?;
let mut builder = inner.create_table(name, batches).mode(mode);
@@ -183,8 +181,7 @@ impl Connection {
})
}
#[allow(clippy::too_many_arguments)]
#[pyo3(signature = (name, namespace=vec![], storage_options = None, storage_options_provider=None, index_cache_size = None, location=None, namespace_client=None, managed_versioning=None))]
#[pyo3(signature = (name, namespace=vec![], storage_options = None, storage_options_provider=None, index_cache_size = None, location=None))]
pub fn open_table(
self_: PyRef<'_, Self>,
name: String,
@@ -193,13 +190,11 @@ impl Connection {
storage_options_provider: Option<Py<PyAny>>,
index_cache_size: Option<u32>,
location: Option<String>,
namespace_client: Option<Py<PyAny>>,
managed_versioning: Option<bool>,
) -> PyResult<Bound<'_, PyAny>> {
let inner = self_.get_inner()?.clone();
let mut builder = inner.open_table(name);
builder = builder.namespace(namespace.clone());
builder = builder.namespace(namespace);
if let Some(storage_options) = storage_options {
builder = builder.storage_options(storage_options);
}
@@ -213,20 +208,6 @@ impl Connection {
if let Some(location) = location {
builder = builder.location(location);
}
// Extract namespace client from Python object if provided
let ns_client = if let Some(ns_obj) = namespace_client {
let py = self_.py();
Some(extract_namespace_arc(py, ns_obj)?)
} else {
None
};
if let Some(ns_client) = ns_client {
builder = builder.namespace_client(ns_client);
}
// Pass managed_versioning if provided to avoid redundant describe_table call
if let Some(enabled) = managed_versioning {
builder = builder.managed_versioning(enabled);
}
future_into_py(self_.py(), async move {
let table = builder.execute().await.infer_error()?;

Some files were not shown because too many files have changed in this diff Show More