Mirror of https://github.com/lancedb/lancedb.git, synced 2025-12-23 13:29:57 +00:00.

Compare commits: 53 commits, v0.1.2-dev ... python-v0.
| Author | SHA1 | Date |
|---|---|---|
|  | 5210f40a33 |  |
|  | 5ec4a5d730 |  |
|  | e4f64fca7b |  |
|  | 4744640bd2 |  |
|  | 094b5e643c |  |
|  | a318778d2a |  |
|  | 9b83ce3d2a |  |
|  | 7bad676f30 |  |
|  | 0e981e782b |  |
|  | e18cdfc7cf |  |
|  | fed33a51d5 |  |
|  | a56b65db84 |  |
|  | f21caebeda |  |
|  | 12da77a9f7 |  |
|  | 131b2dc57b |  |
|  | 3798f56a9b |  |
|  | 50cdb16b45 |  |
|  | d803482588 |  |
|  | f37994b72a |  |
|  | 2418de0a3c |  |
|  | d0c47e3838 |  |
|  | 41cca31f48 |  |
|  | b621009d39 |  |
|  | 6a9cde22de |  |
|  | bfa90b35ee |  |
|  | 12ec29f55b |  |
|  | cdd08ef35c |  |
|  | adcb2a1387 |  |
|  | 9d52a32668 |  |
|  | 11b2e63eea |  |
|  | daedf1396b |  |
|  | 8af5f19cc1 |  |
|  | fbd0bc7740 |  |
|  | f765a453cf |  |
|  | 45b3a14f26 |  |
|  | 9965b4564d |  |
|  | 0719e4b3fb |  |
|  | 091fb9b665 |  |
|  | 03013a4434 |  |
|  | 3e14b357e7 |  |
|  | 99cbda8b07 |  |
|  | e50b642d80 |  |
|  | 6d8cf52e01 |  |
|  | 53f3882d6e |  |
|  | 2b26775ed1 |  |
|  | 306ada5cb8 |  |
|  | d3aa8bfbc5 |  |
|  | 04d97347d7 |  |
|  | 22aa8a93c2 |  |
|  | f485378ea4 |  |
|  | f923cfe47f |  |
|  | 06cb7b6458 |  |
|  | bdef634954 |  |
.github/workflows/pypi-publish.yml (vendored, new file, 31 lines)

@@ -0,0 +1,31 @@

```yaml
name: PyPI Publish

on:
  release:
    types: [ published ]
  tags:
    - 'python-v*' # Push events that matches the python-make-release action

jobs:
  publish:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
        working-directory: python
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"
      - name: Build distribution
        run: |
          ls -la
          pip install wheel setuptools --upgrade
          python setup.py sdist bdist_wheel
      - name: Publish
        uses: pypa/gh-action-pypi-publish@v1.8.5
        with:
          password: ${{ secrets.LANCEDB_PYPI_API_TOKEN }}
          packages-dir: python/dist
```
.github/workflows/python-make-release-commit.yml (vendored, new file, 56 lines)

@@ -0,0 +1,56 @@

```yaml
name: Python - Create release commit

on:
  workflow_dispatch:
    inputs:
      dry_run:
        description: 'Dry run (create the local commit/tags but do not push it)'
        required: true
        default: "false"
        type: choice
        options:
          - "true"
          - "false"
      part:
        description: 'What kind of release is this?'
        required: true
        default: 'patch'
        type: choice
        options:
          - patch
          - minor
          - major

jobs:
  bump-version:
    runs-on: ubuntu-latest
    steps:
      - name: Check out main
        uses: actions/checkout@v3
        with:
          ref: main
          persist-credentials: false
          fetch-depth: 0
          lfs: true
      - name: Set git configs for bumpversion
        shell: bash
        run: |
          git config user.name 'Lance Release'
          git config user.email 'lance-dev@lancedb.com'
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Bump version, create tag and commit
        working-directory: python
        run: |
          pip install bump2version
          bumpversion --verbose ${{ inputs.part }}
      - name: Push new version and tag
        if: ${{ inputs.dry_run }} == "false"
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.LANCEDB_RELEASE_TOKEN }}
          branch: main
          tags: true
```
.github/workflows/python.yml (vendored, 12 changed lines)

```diff
@@ -31,9 +31,14 @@ jobs:
       - name: Install lancedb
         run: |
           pip install -e .
-          pip install pytest
+          pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
+          pip install pytest pytest-mock black
+      - name: Black
+        run: black --check --diff --no-color --quiet .
       - name: Run tests
         run: pytest -x -v --durations=30 tests
+      - name: doctest
+        run: pytest --doctest-modules lancedb
   mac:
     timeout-minutes: 30
     runs-on: "macos-12"
@@ -49,10 +54,11 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.11"
       - name: Install lancedb
         run: |
           pip install -e .
-          pip install pytest
+          pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
+          pip install pytest pytest-mock
       - name: Run tests
         run: pytest -x -v --durations=30 tests
```
.gitignore (vendored, 2 changed lines)

```diff
@@ -15,7 +15,7 @@ site
 python/build
 python/dist

-notebooks/.ipynb_checkpoints
+**/.ipynb_checkpoints

 **/.hypothesis

```
Cargo.lock (generated, 10 changed lines)

```diff
@@ -1052,6 +1052,7 @@ dependencies = [
  "paste",
  "petgraph",
  "rand",
+ "regex",
  "uuid",
 ]

@@ -1645,9 +1646,9 @@ dependencies = [

 [[package]]
 name = "lance"
-version = "0.4.12"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc96cf89139af6f439a0e28ccd04ddf81be795b79fda3105b7a8952fadeb778e"
+checksum = "86dda8185bd1ffae7b910c1f68035af23be9b717c52e9cc4de176cd30b47f772"
 dependencies = [
  "accelerate-src",
  "arrow",
@@ -1684,6 +1685,7 @@ dependencies = [
  "rand",
  "reqwest",
  "shellexpand",
+ "snafu",
  "sqlparser-lance",
  "tokio",
  "url",
@@ -3359,8 +3361,12 @@ name = "vectordb"
 version = "0.0.1"
 dependencies = [
  "arrow-array",
+ "arrow-data",
  "arrow-schema",
  "lance",
+ "object_store",
+ "rand",
+ "snafu",
  "tempfile",
  "tokio",
 ]
```
README.md (10 changed lines)

```diff
@@ -10,6 +10,10 @@
   <a href="https://discord.gg/zMM32dvNtd">Discord</a> •
   <a href="https://twitter.com/lancedb">Twitter</a>
+
+</p>
+
+<img max-width="750px" alt="LanceDB Multimodal Search" src="https://github.com/lancedb/lancedb/assets/917119/09c5afc5-7816-4687-bae4-f2ca194426ec">
+
 </p>
 </div>

@@ -23,13 +27,15 @@ The key features of LanceDB include:

 * Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).

+* Support for vector similarity search, full-text search and SQL.
+
 * Native Python and Javascript/Typescript support.

 * Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.

 * Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.

-LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/eto-ai/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.
+LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.

 ## Quick Start

@@ -69,4 +75,4 @@ result = table.search([100, 100]).limit(2).to_df()

 ## Blogs, Tutorials & Videos
 * 📈 <a href="https://blog.eto.ai/benchmarking-random-access-in-lance-ed690757a826">2000x better performance with Lance over Parquet</a>
-* 🤖 <a href="https://github.com/lancedb/lancedb/blob/main/notebooks/youtube_transcript_search.ipynb">Build a question and answer bot with LanceDB</a>
+* 🤖 <a href="https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb">Build a question and answer bot with LanceDB</a>
```
docs/mkdocs.yml

```diff
@@ -1,28 +1,43 @@
-site_name: LanceDB Documentation
+site_name: LanceDB Docs
+repo_url: https://github.com/lancedb/lancedb
+repo_name: lancedb/lancedb
 docs_dir: src

 theme:
   name: "material"
+  logo: assets/logo.png
   features:
     - content.code.copy
+    - content.tabs.link
+  icon:
+    repo: fontawesome/brands/github

 plugins:
   - search
+  - autorefs
   - mkdocstrings:
       handlers:
         python:
           paths: [../python]
+          selection:
+            docstring_style: numpy
+          rendering:
+            heading_level: 4
+            show_source: false
+            show_symbol_type_in_heading: true
+            show_signature_annotations: true
+            show_root_heading: true
+            members_order: source
+          import:
+            # for cross references
+            - https://arrow.apache.org/docs/objects.inv
+            - https://pandas.pydata.org/docs/objects.inv
   - mkdocs-jupyter

-nav:
-  - Home: index.md
-  - Basics: basic.md
-  - Embeddings: embedding.md
-  - Indexing: ann_indexes.md
-  - Integrations: integrations.md
-  - Python API: python.md
-
 markdown_extensions:
+  - admonition
+  - pymdownx.superfences
+  - pymdownx.details
   - pymdownx.highlight:
       anchor_linenums: true
       line_spans: __span
@@ -30,3 +45,29 @@ markdown_extensions:
   - pymdownx.inlinehilite
   - pymdownx.snippets
   - pymdownx.superfences
+  - pymdownx.tabbed:
+      alternate_style: true
+
+nav:
+  - Home: index.md
+  - Basics: basic.md
+  - Embeddings: embedding.md
+  - Python full-text search: fts.md
+  - Python integrations: integrations.md
+  - Python examples:
+      - YouTube Transcript Search: notebooks/youtube_transcript_search.ipynb
+      - Documentation QA Bot using LangChain: notebooks/code_qa_bot.ipynb
+      - Multimodal search using CLIP: notebooks/multimodal_search.ipynb
+      - Serverless QA Bot with S3 and Lambda: examples/serverless_lancedb_with_s3_and_lambda.md
+      - Serverless QA Bot with Modal: examples/serverless_qa_bot_with_modal_and_langchain.md
+  - Javascript examples:
+      - YouTube Transcript Search: examples/youtube_transcript_bot_with_nodejs.md
+  - References:
+      - Vector Search: search.md
+      - Indexing: ann_indexes.md
+      - API references:
+          - Python API: python/python.md
+          - Javascript API: javascript/modules.md
+
+extra_css:
+  - styles/global.css
```
docs/src/ann_indexes.md

````diff
@@ -12,12 +12,13 @@ In the future we will look to automatically create and configure the ANN index.

 ## Creating an ANN Index

+=== "Python"
 Creating indexes is done via the [create_index](https://lancedb.github.io/lancedb/python/#lancedb.table.LanceTable.create_index) method.

 ```python
 import lancedb
 import numpy as np
-uri = "~/.lancedb"
+uri = "data/sample-lancedb"
 db = lancedb.connect(uri)

 # Create 10,000 sample vectors
@@ -31,10 +32,23 @@ tbl = db.create_table("my_vectors", data=data)
 tbl.create_index(num_partitions=256, num_sub_vectors=96)
 ```

+=== "Javascript"
+    ```javascript
+    const vectordb = require('vectordb')
+    const db = await vectordb.connect('data/sample-lancedb')
+
+    let data = []
+    for (let i = 0; i < 10_000; i++) {
+        data.push({vector: Array(1536).fill(i), id: `${i}`, content: "", longId: `${i}`},)
+    }
+    const table = await db.createTable('vectors', data)
+    await table.create_index({ type: 'ivf_pq', column: 'vector', num_partitions: 256, num_sub_vectors: 96 })
+    ```
+
 Since `create_index` has a training step, it can take a few minutes to finish for large tables. You can control the index
 creation by providing the following parameters:

-- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support cosine distance.
+- **metric** (default: "L2"): The distance metric to use. By default we use euclidean distance. We also support "cosine" distance.
 - **num_partitions** (default: 256): The number of partitions of the index. The number of partitions should be configured so each partition has 3-5K vectors. For example, a table
   with ~1M vectors should use 256 partitions. You can specify an arbitrary number of partitions, but powers of 2 are most conventional.
   A higher number leads to faster queries, but it makes index generation slower.
@@ -57,7 +71,7 @@ There are a couple of parameters that can be used to fine-tune the search:
 e.g., for 1M vectors divided into 256 partitions, if you're looking for the top 20, then refine_factor=200 reranks the whole partition.<br/>
 Note: refine_factor is only applicable if an ANN index is present. If specified on a table without an ANN index, it is ignored.

+=== "Python"
 ```python
 tbl.search(np.random.random((768))) \
     .limit(2) \
@@ -70,6 +84,16 @@ tbl.search(np.random.random((768))) \
 1  [0.48587373, 0.269207, 0.15095535, 0.65531915,...  item 3953  108.393867
 ```

+=== "Javascript"
+    ```javascript
+    const results = await table
+      .search(Array(768).fill(1.2))
+      .limit(2)
+      .nprobes(20)
+      .refineFactor(10)
+      .execute()
+    ```
+
 The search will return the data requested in addition to the score of each item.

 **Note:** The score is the distance between the query vector and the element. A lower number means that the result is more relevant.
@@ -78,14 +102,24 @@ The search will return the data requested in addition to the score of each item.

 You can further filter the elements returned by a search using a where clause.

+=== "Python"
 ```python
 tbl.search(np.random.random((768))).where("item != 'item 1141'").to_df()
 ```

+=== "Javascript"
+    ```javascript
+    const results = await table
+      .search(Array(1536).fill(1.2))
+      .where("item != 'item 1141'")
+      .execute()
+    ```
+
 ### Projections (select clause)

 You can select the columns returned by the query using a select clause.

+=== "Python"
 ```python
 tbl.search(np.random.random((768))).select(["vector"]).to_df()
 vector score
@@ -93,3 +127,11 @@ tbl.search(np.random.random((768))).select(["vector"]).to_df()
 1  [0.2525465, 0.01723831, 0.261568, 0.002007689,...  95.173485
 ...
 ```

+=== "Javascript"
+    ```javascript
+    const results = await table
+      .search(Array(1536).fill(1.2))
+      .select(["id"])
+      .execute()
+    ```
````
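The tuning knobs documented in this file compose on a single query. Below is a minimal sketch, assuming the `tbl` of 768-dimensional vectors created in the Python snippets above, and assuming the Python query builder spells the re-ranking parameter `refine_factor` (mirroring the Javascript `refineFactor` shown above):

```python
import numpy as np

# One ANN query combining the documented knobs: result size, partitions
# probed, re-ranking factor, a SQL filter, and a column projection.
# `tbl` is assumed to come from the create_table snippet earlier.
df = (
    tbl.search(np.random.random((768)))
    .limit(2)                       # top-k results
    .nprobes(20)                    # probe more partitions for better recall
    .refine_factor(10)              # re-rank 10x the requested results
    .where("item != 'item 1141'")   # SQL filter on metadata
    .select(["vector", "item"])     # projection
    .to_df()
)
print(df)
```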
BIN docs/src/assets/lancedb_embedded_explanation.png (new file, 190 KiB): binary file not shown
BIN docs/src/assets/lancedb_local_data_explanation.png (new file, 101 KiB): binary file not shown
BIN docs/src/assets/logo.png (new file, 6.7 KiB): binary file not shown
docs/src/basic.md

````diff
@@ -1,8 +1,26 @@
 # Basic LanceDB Functionality

+We'll cover the basics of using LanceDB on your local machine in this section.
+
+??? info "LanceDB runs embedded on your backend application, so there is no need to run a separate server."
+
+    <img src="../assets/lancedb_embedded_explanation.png" width="650px" />
+
+## Installation
+
+=== "Python"
+    ```shell
+    pip install lancedb
+    ```
+
+=== "Javascript"
+    ```shell
+    npm install vectordb
+    ```
+
 ## How to connect to a database

-In local mode, LanceDB stores data in a directory on your local machine. To connect to a local database, you can use the following code:
+=== "Python"
 ```python
 import lancedb
 uri = "~/.lancedb"
@@ -13,18 +31,27 @@ LanceDB will create the directory if it doesn't exist (including parent director

 If you need a reminder of the uri, use the `db.uri` property.

+=== "Javascript"
+    ```javascript
+    const lancedb = require("vectordb");
+
+    const uri = "~/.lancedb";
+    const db = await lancedb.connect(uri);
+    ```
+
+    LanceDB will create the directory if it doesn't exist (including parent directories).
+
+    If you need a reminder of the uri, you can call `db.uri()`.
+
 ## How to create a table

-To create a table, you can use the following code:
+=== "Python"
 ```python
 tbl = db.create_table("my_table",
                       data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
                             {"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
 ```

-Under the hood, LanceDB is converting the input data into an Apache Arrow table
-and persisting it to disk in [Lance format](github.com/eto-ai/lance).
-
 If the table already exists, LanceDB will raise an error by default.
 If you want to overwrite the table, you can pass in `mode="overwrite"`
 to the `create_table` method.
@@ -37,9 +64,26 @@ df = pd.DataFrame([{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
 tbl = db.create_table("table_from_df", data=df)
 ```

+=== "Javascript"
+    ```javascript
+    const tb = await db.createTable("my_table",
+          data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
+                {"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
+    ```
+
+    !!! warning
+
+        If the table already exists, LanceDB will raise an error by default.
+        If you want to overwrite the table, you can pass in `mode="overwrite"`
+        to the `createTable` function.
+
+??? info "Under the hood, LanceDB is converting the input data into an Apache Arrow table and persisting it to disk in [Lance format](https://www.github.com/lancedb/lance)."
+
 ## How to open an existing table

 Once created, you can open a table using the following code:

+=== "Python"
 ```python
 tbl = db.open_table("my_table")
 ```
@@ -47,29 +91,53 @@
 If you forget the name of your table, you can always get a listing of all table names:

 ```python
-db.table_names()
+print(db.table_names())
+```
+
+=== "Javascript"
+    ```javascript
+    const tbl = await db.openTable("my_table");
+    ```
+
+    If you forget the name of your table, you can always get a listing of all table names:
+
+    ```javascript
+    console.log(db.tableNames());
 ```

 ## How to add data to a table

 After a table has been created, you can always add more data to it using

+=== "Python"
 ```python
 df = pd.DataFrame([{"vector": [1.3, 1.4], "item": "fizz", "price": 100.0},
                    {"vector": [9.5, 56.2], "item": "buzz", "price": 200.0}])
 tbl.add(df)
 ```

+=== "Javascript"
+    ```javascript
+    await tbl.add([{vector: [1.3, 1.4], item: "fizz", price: 100.0},
+                   {vector: [9.5, 56.2], item: "buzz", price: 200.0}])
+    ```
+
 ## How to search for (approximate) nearest neighbors

 Once you've embedded the query, you can find its nearest neighbors using the following code:

+=== "Python"
 ```python
 tbl.search([100, 100]).limit(2).to_df()
 ```

 This returns a pandas DataFrame with the results.

+=== "Javascript"
+    ```javascript
+    const query = await tbl.search([100, 100]).limit(2).execute();
+    ```
+
 ## What's next

 This section covered the very basics of the LanceDB API.
````
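Taken together, the Python snippets in this file form a complete round trip. A minimal end-to-end sketch, using only the calls shown above:

```python
import lancedb
import pandas as pd

# Connect; the directory is created if it doesn't exist
db = lancedb.connect("~/.lancedb")

# Create a table from a list of records, overwriting any previous version
tbl = db.create_table(
    "my_table",
    data=[
        {"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
        {"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
    ],
    mode="overwrite",
)

# Append more rows from a pandas DataFrame
tbl.add(
    pd.DataFrame(
        [
            {"vector": [1.3, 1.4], "item": "fizz", "price": 100.0},
            {"vector": [9.5, 56.2], "item": "buzz", "price": 200.0},
        ]
    )
)

# Nearest-neighbor search returns a pandas DataFrame
print(tbl.search([100, 100]).limit(2).to_df())
print(db.table_names())
```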
docs/src/embedding.md

````diff
@@ -25,10 +25,13 @@ def embed_func(batch):
     return [model.encode(sentence) for sentence in batch]
 ```

+Please note that currently HuggingFace is only supported in the Python SDK.
+
 ### OpenAI example

 You can also use an external API like OpenAI to generate embeddings

+=== "Python"
 ```python
 import openai
 import os
@@ -46,8 +49,19 @@ def embed_func(c):
     return [record["embedding"] for record in rs["data"]]
 ```

+=== "Javascript"
+    ```javascript
+    const lancedb = require("vectordb");
+
+    // You need to provide an OpenAI API key
+    const apiKey = "sk-..."
+    // The embedding function will create embeddings for the 'text' column
+    const embedding = new lancedb.OpenAIEmbeddingFunction('text', apiKey)
+    ```
+
 ## Applying an embedding function

+=== "Python"
 Using an embedding function, you can apply it to raw data
 to generate embeddings for each row.

@@ -55,6 +69,7 @@ Say if you have a pandas DataFrame with a `text` column that you want to be embe
 you can use the [with_embeddings](https://lancedb.github.io/lancedb/python/#lancedb.embeddings.with_embeddings)
 function to generate embeddings and create a combined pyarrow table:

+
 ```python
 import pandas as pd
 from lancedb.embeddings import with_embeddings
@@ -75,12 +90,31 @@ using the `batch_size` parameter to `with_embeddings`.
 LanceDB automatically wraps the function with retry and rate-limit logic to ensure the OpenAI
 API call is reliable.

+=== "Javascript"
+    Using an embedding function, you can apply it to raw data
+    to generate embeddings for each row.
+
+    You can just pass the embedding function created previously and LanceDB will automatically generate
+    embeddings for your data.
+
+    ```javascript
+    const db = await lancedb.connect("/tmp/lancedb");
+    const data = [
+      { text: 'pepperoni' },
+      { text: 'pineapple' }
+    ]
+
+    const table = await db.createTable('vectors', data, embedding)
+    ```
+
+
 ## Searching with an embedding function

 At inference time, you also need the same embedding function to embed your query text.
 It's important that you use the same model / function, otherwise the embedding vectors don't
 belong in the same latent space and your results will be nonsensical.

+=== "Python"
 ```python
 query = "What's the best pizza topping?"
 query_vector = embed_func([query])[0]
@@ -89,6 +123,17 @@ tbl.search(query_vector).limit(10).to_df()

 The above snippet returns a pandas DataFrame with the 10 closest vectors to the query.

+=== "Javascript"
+    ```javascript
+    const results = await table
+      .search("What's the best pizza topping?")
+      .limit(10)
+      .execute()
+    ```
+
+    The above snippet returns an array of records with the 10 closest vectors to the query.
+
+
 ## Roadmap

 In the near future, we'll be integrating the embedding functions deeper into LanceDB.
````
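Read end to end, the Python snippets in this file form a full embed-store-query loop. A minimal sketch, assuming the `embed_func` and `db` objects defined in the snippets above, and assuming `with_embeddings` takes the function first and the DataFrame second, as in the documented usage:

```python
import pandas as pd
from lancedb.embeddings import with_embeddings

# Embed a text column and build a combined table with a vector column
df = pd.DataFrame({"text": ["pepperoni", "pineapple"]})
data = with_embeddings(embed_func, df)  # embed_func from the snippets above
tbl = db.create_table("pizza", data=data, mode="overwrite")

# Query with the SAME embedding function so query and data vectors
# live in the same latent space
query_vector = embed_func(["What's the best pizza topping?"])[0]
print(tbl.search(query_vector).limit(10).to_df())
```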
```diff
@@ -4,4 +4,4 @@

 <img id="splash" width="400" alt="langchain" src="https://user-images.githubusercontent.com/917119/236580868-61a246a9-e587-4c2b-8ae5-6fe5f7b7e81e.png">

-This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/notebooks/code_qa_bot.ipynb)
+This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/code_qa_bot.ipynb)
```
docs/src/examples/modal_langchain.py (new file, 119 lines)

@@ -0,0 +1,119 @@

```python
import sys
from modal import Secret, Stub, Image, web_endpoint
import lancedb
import re
import pickle
import requests
import zipfile
from pathlib import Path

from langchain.document_loaders import UnstructuredHTMLLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import LanceDB
from langchain.llms import OpenAI
from langchain.chains import RetrievalQA

lancedb_image = Image.debian_slim().pip_install(
    "lancedb", "langchain", "openai", "pandas", "tiktoken", "unstructured", "tabulate"
)

stub = Stub(
    name="example-langchain-lancedb",
    image=lancedb_image,
    secrets=[Secret.from_name("my-openai-secret")],
)

docsearch = None
docs_path = Path("docs.pkl")
db_path = Path("lancedb")


def get_document_title(document):
    m = str(document.metadata["source"])
    title = re.findall("pandas.documentation(.*).html", m)
    if title[0] is not None:
        return title[0]
    return ""


def download_docs():
    pandas_docs = requests.get(
        "https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip"
    )
    with open(Path("pandas.documentation.zip"), "wb") as f:
        f.write(pandas_docs.content)

    file = zipfile.ZipFile(Path("pandas.documentation.zip"))
    file.extractall(path=Path("pandas_docs"))


def store_docs():
    docs = []

    if not docs_path.exists():
        for p in Path("pandas_docs/pandas.documentation").rglob("*.html"):
            if p.is_dir():
                continue
            loader = UnstructuredHTMLLoader(p)
            raw_document = loader.load()

            m = {}
            m["title"] = get_document_title(raw_document[0])
            m["version"] = "2.0rc0"
            raw_document[0].metadata = raw_document[0].metadata | m
            raw_document[0].metadata["source"] = str(raw_document[0].metadata["source"])
            docs = docs + raw_document

        with docs_path.open("wb") as fh:
            pickle.dump(docs, fh)
    else:
        with docs_path.open("rb") as fh:
            docs = pickle.load(fh)

    return docs


def qanda_langchain(query):
    download_docs()
    docs = store_docs()

    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
    )
    documents = text_splitter.split_documents(docs)
    embeddings = OpenAIEmbeddings()

    db = lancedb.connect(db_path)
    table = db.create_table(
        "pandas_docs",
        data=[
            {
                "vector": embeddings.embed_query("Hello World"),
                "text": "Hello World",
                "id": "1",
            }
        ],
        mode="overwrite",
    )
    docsearch = LanceDB.from_documents(documents, embeddings, connection=table)
    qa = RetrievalQA.from_chain_type(
        llm=OpenAI(), chain_type="stuff", retriever=docsearch.as_retriever()
    )
    return qa.run(query)


@stub.function()
@web_endpoint(method="GET")
def web(query: str):
    answer = qanda_langchain(query)
    return {
        "answer": answer,
    }


@stub.function()
def cli(query: str):
    answer = qanda_langchain(query)
    print(answer)
```
docs/src/examples/multimodal_search.md (new file, 7 lines)

@@ -0,0 +1,7 @@

```markdown
# Image multimodal search

## Search through an image dataset using natural language, full text and SQL

<img id="splash" width="400" alt="multimodal search" src="https://github.com/lancedb/lancedb/assets/917119/993a7c9f-be01-449d-942e-1ce1d4ed63af">

This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/multimodal_search.ipynb)
```
@@ -1,99 +0,0 @@ (deleted file)

````markdown
# YouTube transcript QA bot with NodeJS

## use LanceDB's Javascript API and OpenAI to build a QA bot for YouTube transcripts

<img id="splash" width="400" alt="nodejs" src="https://github.com/lancedb/lancedb/assets/917119/3a140e75-bf8e-438a-a1e4-af14a72bcf98">

This Q&A bot will allow you to search through youtube transcripts using natural language! We'll introduce how you can use LanceDB's Javascript API to store and manage your data easily.

For this example we're using a HuggingFace dataset that contains YouTube transcriptions: `jamescalam/youtube-transcriptions`. To make it easier, we've converted it to a LanceDB `db` already, which you can download and put in a working directory:

```wget -c https://eto-public.s3.us-west-2.amazonaws.com/lancedb_demo.tar.gz -O - | tar -xz -C .```

Now, we'll create a simple app that can:
1. Take a text based query and search for contexts in our corpus, using embeddings generated from the OpenAI Embedding API.
2. Create a prompt with the contexts, and call the OpenAI Completion API to answer the text based query.

Dependencies and setup of OpenAI API:

```javascript
const lancedb = require("vectordb");
const { Configuration, OpenAIApi } = require("openai");

const configuration = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(configuration);
```

First, let's set our question and the context amount. The context amount will be used to query similar documents in our corpus.

```javascript
const QUESTION = "who was the 12th person on the moon and when did they land?";
const CONTEXT_AMOUNT = 3;
```

Now, let's generate an embedding from this question:

```javascript
const embeddingResponse = await openai.createEmbedding({
  model: "text-embedding-ada-002",
  input: QUESTION,
});

const embedding = embeddingResponse.data["data"][0]["embedding"];
```

Once we have the embedding, we can connect to LanceDB (using the database we downloaded earlier), and search through the chatbot table.
We'll extract the 3 most similar documents found.

```javascript
const db = await lancedb.connect('./lancedb');
const tbl = await db.openTable('chatbot');
const query = tbl.search(embedding);
query.limit = CONTEXT_AMOUNT;
const context = await query.execute();
```

Let's combine the context together so we can pass it into our prompt:

```javascript
for (let i = 1; i < context.length; i++) {
  context[0]["text"] += " " + context[i]["text"];
}
```

Lastly, let's construct the prompt. You could play around with this to create more accurate/better prompts to yield results.

```javascript
const prompt = "Answer the question based on the context below.\n\n" +
  "Context:\n" +
  `${context[0]["text"]}\n` +
  `\n\nQuestion: ${QUESTION}\nAnswer:`;
```

We pass the prompt, along with the context, to the completion API.

```javascript
const completion = await openai.createCompletion({
  model: "text-davinci-003",
  prompt,
  temperature: 0,
  max_tokens: 400,
  top_p: 1,
  frequency_penalty: 0,
  presence_penalty: 0,
});
```

And that's it!

```javascript
console.log(completion.data.choices[0].text);
```

The response (which is non-deterministic) is:

```
The 12th person on the moon was Harrison Schmitt and he landed on December 11, 1972.
```
````
docs/src/examples/serverless_qa_bot_with_modal_and_langchain.md (new file, 166 lines)

@@ -0,0 +1,166 @@

````markdown
# Serverless QA Bot with Modal and LangChain

## use LanceDB's LangChain integration with Modal to run a serverless app

<img id="splash" width="400" alt="modal" src="https://github.com/lancedb/lancedb/assets/917119/7d80a40f-60d7-48a6-972f-dab05000eccf">

We're going to build a QA bot for your documentation using LanceDB's LangChain integration and use Modal for deployment.

Modal is an end-to-end compute platform for model inference, batch jobs, task queues, web apps and more. It's a great way to deploy your LanceDB models and apps.

To get started, ensure that you have created an account and logged into [Modal](https://modal.com/). To follow along, the full source code is available on Github [here](https://github.com/lancedb/lancedb/blob/main/docs/src/examples/modal_langchain.py).

### Setting up Modal

We'll start by specifying our dependencies and creating a new Modal `Stub`:

```python
lancedb_image = Image.debian_slim().pip_install(
    "lancedb",
    "langchain",
    "openai",
    "pandas",
    "tiktoken",
    "unstructured",
    "tabulate"
)

stub = Stub(
    name="example-langchain-lancedb",
    image=lancedb_image,
    secrets=[Secret.from_name("my-openai-secret")],
)
```

We're using Modal's Secrets injection to secure our OpenAI key. To set your own, you can access the Modal UI and enter your key.

### Setting up caches for LanceDB and LangChain

Next, we can set up some globals to cache our LanceDB database, as well as our LangChain docsource:

```python
docsearch = None
docs_path = Path("docs.pkl")
db_path = Path("lancedb")
```

### Downloading our dataset

We're going to use a pregenerated dataset, which stores HTML files of the Pandas 2.0 documentation.
You could switch this out for your own dataset.

```python
def download_docs():
    pandas_docs = requests.get("https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip")
    with open(Path("pandas.documentation.zip"), "wb") as f:
        f.write(pandas_docs.content)

    file = zipfile.ZipFile(Path("pandas.documentation.zip"))
    file.extractall(path=Path("pandas_docs"))
```

### Pre-processing the dataset and generating metadata

Once we've downloaded the files, we want to parse and pre-process them using LangChain, then vectorize them and store them in LanceDB.
Let's first create a function that uses LangChain's `UnstructuredHTMLLoader` to parse them.
We can then add our own metadata and store it alongside the data; we'll later be able to use this for metadata filtering.

```python
def store_docs():
    docs = []

    if not docs_path.exists():
        for p in Path("pandas_docs/pandas.documentation").rglob("*.html"):
            if p.is_dir():
                continue
            loader = UnstructuredHTMLLoader(p)
            raw_document = loader.load()

            m = {}
            m["title"] = get_document_title(raw_document[0])
            m["version"] = "2.0rc0"
            raw_document[0].metadata = raw_document[0].metadata | m
            raw_document[0].metadata["source"] = str(raw_document[0].metadata["source"])
            docs = docs + raw_document

        with docs_path.open("wb") as fh:
            pickle.dump(docs, fh)
    else:
        with docs_path.open("rb") as fh:
            docs = pickle.load(fh)

    return docs
```

### Simple LangChain chain for a QA bot

Now we can create a simple LangChain chain for our QA bot. We'll use the `RecursiveCharacterTextSplitter` to split our documents into chunks, and then use the `OpenAIEmbeddings` to vectorize them.

Lastly, we'll create a LanceDB table and store the vectorized documents in it, then create a `RetrievalQA` model from the chain and return it.

```python
def qanda_langchain(query):
    download_docs()
    docs = store_docs()

    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
    )
    documents = text_splitter.split_documents(docs)
    embeddings = OpenAIEmbeddings()

    db = lancedb.connect(db_path)
    table = db.create_table("pandas_docs", data=[
        {"vector": embeddings.embed_query("Hello World"), "text": "Hello World", "id": "1"}
    ], mode="overwrite")
    docsearch = LanceDB.from_documents(documents, embeddings, connection=table)
    qa = RetrievalQA.from_chain_type(llm=OpenAI(), chain_type="stuff", retriever=docsearch.as_retriever())
    return qa.run(query)
```

### Creating our Modal entry points

Now we can create our Modal entry points for our CLI and web endpoint:

```python
@stub.function()
@web_endpoint(method="GET")
def web(query: str):
    answer = qanda_langchain(query)
    return {
        "answer": answer,
    }

@stub.function()
def cli(query: str):
    answer = qanda_langchain(query)
    print(answer)
```

### Testing it out!

Testing the CLI:

```bash
modal run modal_langchain.py --query "What are the major differences in pandas 2.0?"
```

Testing the web endpoint:

```bash
modal serve modal_langchain.py
```

In the CLI, Modal will provide you with a web endpoint. Copy this endpoint URI for the next step.
Once this is served, we can hit it with `curl`.

Note, the first time this runs, it will take a few minutes to download the dataset and vectorize it.
An actual production example would pre-cache/load the dataset and vectorized documents prior to serving requests.

```bash
curl --get --data-urlencode "query=What are the major differences in pandas 2.0?" https://your-modal-endpoint-app.modal.run

{"answer":" The major differences in pandas 2.0 include the ability to use any numpy numeric dtype in a Index, installing optional dependencies with pip extras, and enhancements, bug fixes, and performance improvements."}
```
````
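The same smoke test can be written with Python's `requests` instead of `curl`; the URL below is the placeholder from the example above, so substitute the endpoint Modal prints when serving:

```python
import requests

# GET the Modal web endpoint with a URL-encoded query parameter,
# mirroring the curl example above.
resp = requests.get(
    "https://your-modal-endpoint-app.modal.run",  # placeholder endpoint
    params={"query": "What are the major differences in pandas 2.0?"},
)
print(resp.json()["answer"])
```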
```diff
@@ -4,4 +4,4 @@

 <img id="splash" width="400" alt="youtube transcript search" src="https://user-images.githubusercontent.com/917119/236965568-def7394d-171c-45f2-939d-8edfeaadd88c.png">

-This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/notebooks/youtube_transcript_search.ipynb)
+This example is in a [notebook](https://github.com/lancedb/lancedb/blob/main/docs/src/notebooks/youtube_transcript_search.ipynb)
```
docs/src/examples/youtube_transcript_bot_with_nodejs.md (new file, 139 lines)

@@ -0,0 +1,139 @@

````markdown
# YouTube transcript QA bot with NodeJS

## use LanceDB's Javascript API and OpenAI to build a QA bot for YouTube transcripts

<img id="splash" width="400" alt="nodejs" src="https://github.com/lancedb/lancedb/assets/917119/3a140e75-bf8e-438a-a1e4-af14a72bcf98">

This Q&A bot will allow you to search through youtube transcripts using natural language! We'll introduce how to use LanceDB's Javascript API to store and manage your data easily.

```bash
npm install vectordb
```

## Download the data

For this example, we're using a sample of a HuggingFace dataset that contains YouTube transcriptions: `jamescalam/youtube-transcriptions`. Download and extract this file under the `data` folder:

```bash
wget -c https://eto-public.s3.us-west-2.amazonaws.com/datasets/youtube_transcript/youtube-transcriptions_sample.jsonl
```

## Prepare Context

Each item in the dataset contains just a short chunk of text. We'll need to merge a bunch of these chunks together on a rolling basis. For this demo, we'll look back 20 records to create a more complete context for each sentence.

First, we need to read and parse the input file.

```javascript
const lines = (await fs.readFile(INPUT_FILE_NAME, 'utf-8'))
  .toString()
  .split('\n')
  .filter(line => line.length > 0)
  .map(line => JSON.parse(line))

const data = contextualize(lines, 20, 'video_id')
```

The contextualize function groups the transcripts by video_id and then creates the expanded context for each item.

```javascript
function contextualize (rows, contextSize, groupColumn) {
  const grouped = []
  rows.forEach(row => {
    if (!grouped[row[groupColumn]]) {
      grouped[row[groupColumn]] = []
    }
    grouped[row[groupColumn]].push(row)
  })

  const data = []
  Object.keys(grouped).forEach(key => {
    for (let i = 0; i < grouped[key].length; i++) {
      const start = i - contextSize > 0 ? i - contextSize : 0
      grouped[key][i].context = grouped[key].slice(start, i + 1).map(r => r.text).join(' ')
    }
    data.push(...grouped[key])
  })
  return data
}
```

## Create the LanceDB Table

To load our data into LanceDB, we need to create embeddings (vectors) for each item. For this example, we will use the OpenAI embedding functions, which have a native integration with LanceDB.

```javascript
// You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
const apiKey = process.env.OPENAI_API_KEY
// The embedding function will create embeddings for the 'context' column
const embedFunction = new lancedb.OpenAIEmbeddingFunction('context', apiKey)
// Connects to LanceDB
const db = await lancedb.connect('data/youtube-lancedb')
const tbl = await db.createTable('vectors', data, embedFunction)
```

## Create and answer the prompt

We will accept questions in natural language and use our corpus stored in LanceDB to answer them. First, we need to set up the OpenAI client:

```javascript
const configuration = new Configuration({ apiKey })
const openai = new OpenAIApi(configuration)
```

Then we can prompt questions and use LanceDB to retrieve the three most relevant transcripts for this prompt.

```javascript
const query = await rl.question('Prompt: ')
const results = await tbl
  .search(query)
  .select(['title', 'text', 'context'])
  .limit(3)
  .execute()
```

The query and the transcripts' context are appended together in a single prompt:

```javascript
function createPrompt (query, context) {
  let prompt =
    'Answer the question based on the context below.\n\n' +
    'Context:\n'

  // need to make sure our prompt is not larger than max size
  prompt = prompt + context.map(c => c.context).join('\n\n---\n\n').substring(0, 3750)
  prompt = prompt + `\n\nQuestion: ${query}\nAnswer:`
  return prompt
}
```

We can now use the OpenAI Completion API to process our custom prompt and give us an answer.

```javascript
const response = await openai.createCompletion({
  model: 'text-davinci-003',
  prompt: createPrompt(query, results),
  max_tokens: 400,
  temperature: 0,
  top_p: 1,
  frequency_penalty: 0,
  presence_penalty: 0
})
console.log(response.data.choices[0].text)
```

## Let's put it all together now

Now we can provide queries and have them answered based on your local LanceDB data.

```bash
Prompt: who was the 12th person on the moon and when did they land?
The 12th person on the moon was Harrison Schmitt and he landed on December 11, 1972.
Prompt: Which training method should I use for sentence transformers when I only have pairs of related sentences?
NLI with multiple negative ranking loss.
```

## That's a wrap

In this example, you learned how to use LanceDB to store and query embedding representations of your local data. The complete example code is on [GitHub](https://github.com/lancedb/lancedb/tree/main/node/examples), and you can also download the LanceDB dataset using [this link](https://eto-public.s3.us-west-2.amazonaws.com/datasets/youtube_transcript/youtube-lancedb.zip).
````
51
docs/src/fts.md
Normal file
@@ -0,0 +1,51 @@
# [EXPERIMENTAL] Full text search

LanceDB now provides experimental support for full text search.
This is currently Python only. We plan to push the integration down to Rust in the future
to make this available for JS as well.

## Installation

To use full text search, you must install the optional dependency `tantivy-py`:

```shell
# tantivy 0.19.2
pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985
```
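
A quick way to verify the install is simply to import the module:

```python
# Fails with ImportError if the optional tantivy dependency is missing or mis-built.
import tantivy  # noqa: F401
```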

## Quickstart

Assume:

1. `table` is a LanceDB Table
2. `text` is the name of the Table column that we want to index

To create the index:

```python
table.create_fts_index("text")
```

To search:

```python
df = table.search("puppy").limit(10).select(["text"]).to_df()
```

LanceDB automatically looks for an FTS index if the query input is a `str`.
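
Because `search()` dispatches on the query type, both modes can be used side by side. A minimal sketch, assuming the `table` from above also has a vector column (the 2-d query vector here is a placeholder):

```python
# str query -> routed to the full text search index
text_hits = table.search("puppy").limit(5).to_df()

# list-of-floats query -> routed to vector similarity search
vector_hits = table.search([0.1, 0.2]).limit(5).to_df()
```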

## Multiple text columns

If you have multiple columns to index, pass them all as a list to `create_fts_index`:

```python
table.create_fts_index(["text1", "text2"])
```

Note that the search API call does not change - you can search over all indexed columns at once.

## Current limitations

1. We do not yet support incremental writes.
   If you add data after FTS index creation, it won't be reflected
   in search results until you do a full reindex.

2. We currently only support local filesystem paths for the FTS index.
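
Until incremental indexing lands, the workaround for limitation 1 is a full rebuild after writes. A hedged sketch, assuming that re-running `create_fts_index` on the same column performs the full reindex (the docs above do not spell this out):

```python
# Newly added rows are invisible to FTS until the index is rebuilt.
table.add([{"text": "a brand new document"}])

# Assumption: calling create_fts_index again rebuilds the index from scratch.
table.create_fts_index("text")
```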
@@ -1,6 +1,6 @@
 # Welcome to LanceDB's Documentation

-LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrivial, filtering and management of embeddings.
+LanceDB is an open-source database for vector-search built with persistent storage, which greatly simplifies retrieval, filtering and management of embeddings.

 The key features of LanceDB include:

@@ -8,42 +8,65 @@ The key features of LanceDB include:

 * Store, query and filter vectors, metadata and multi-modal data (text, images, videos, point clouds, and more).

-* Native Python and Javascript/Typescript support (coming soon).
+* Support for vector similarity search, full-text search and SQL.
+
+* Native Python and Javascript/Typescript support.

 * Zero-copy, automatic versioning, manage versions of your data without needing extra infrastructure.

 * Ecosystem integrations with [LangChain 🦜️🔗](https://python.langchain.com/en/latest/modules/indexes/vectorstores/examples/lanecdb.html), [LlamaIndex 🦙](https://gpt-index.readthedocs.io/en/latest/examples/vector_stores/LanceDBIndexDemo.html), Apache-Arrow, Pandas, Polars, DuckDB and more on the way.

-LanceDB's core is written in Rust 🦀 and is built using Lance, an open-source columnar format designed for performant ML workloads.
+LanceDB's core is written in Rust 🦀 and is built using <a href="https://github.com/lancedb/lance">Lance</a>, an open-source columnar format designed for performant ML workloads.

+## Quick Start
+
-## Installation
+=== "Python"

 ```shell
 pip install lancedb
 ```

-## Quickstart
-
 ```python
 import lancedb

-db = lancedb.connect(".")
+uri = "/tmp/lancedb"
+db = lancedb.connect(uri)
 table = db.create_table("my_table",
          data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
                {"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
 result = table.search([100, 100]).limit(2).to_df()
 ```

-## Complete Demos
+=== "Javascript"
+
+```shell
+npm install vectordb
+```

-We will be adding completed demo apps built using LanceDB.
-- [YouTube Transcript Search](../notebooks/youtube_transcript_search.ipynb)
+```javascript
+const lancedb = require("vectordb");
+
+const uri = "/tmp/lancedb";
+const db = await lancedb.connect(uri);
+const table = await db.createTable("my_table",
+      [{ id: 1, vector: [3.1, 4.1], item: "foo", price: 10.0 },
+       { id: 2, vector: [5.9, 26.5], item: "bar", price: 20.0 }])
+const results = await table.search([100, 100]).limit(2).execute();
+```
+
+## Complete Demos (Python)
+
+- [YouTube Transcript Search](notebooks/youtube_transcript_search.ipynb)
+- [Documentation QA Bot using LangChain](notebooks/code_qa_bot.ipynb)
+- [Multimodal search using CLIP](notebooks/multimodal_search.ipynb)
+- [Serverless QA Bot with S3 and Lambda](examples/serverless_lancedb_with_s3_and_lambda.md)
+- [Serverless QA Bot with Modal](examples/serverless_qa_bot_with_modal_and_langchain.md)
+
+## Complete Demos (JavaScript)
+
+- [YouTube Transcript Search](examples/youtube_transcript_bot_with_nodejs.md)

 ## Documentation Quick Links
 * [`Basic Operations`](basic.md) - basic functionality of LanceDB.
 * [`Embedding Functions`](embedding.md) - functions for working with embeddings.
 * [`Indexing`](ann_indexes.md) - create vector indexes to speed up queries.
+* [`Full text search`](fts.md) - [EXPERIMENTAL] full-text search API
 * [`Ecosystem Integrations`](integrations.md) - integrating LanceDB with python data tooling ecosystem.
-* [`API Reference`](python.md) - detailed documentation for the LanceDB Python SDK.
+* [`Python API Reference`](python/python.md) - detailed documentation for the LanceDB Python SDK.
+* [`Node API Reference`](javascript/modules.md) - detailed documentation for the LanceDB Node SDK.

@@ -24,9 +24,6 @@ data = pd.DataFrame({
     "price": [10.0, 20.0]
 })
 table = db.create_table("pd_table", data=data)

-# Optionally, create a IVF_PQ index
-table.create_index(num_partitions=256, num_sub_vectors=96)
-
 ```

 You will find detailed instructions of creating dataset and index in [Basic Operations](basic.md) and [Indexing](indexing.md)
1
docs/src/javascript/.nojekyll
Normal file
@@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
51
docs/src/javascript/README.md
Normal file
@@ -0,0 +1,51 @@
vectordb / [Exports](modules.md)

# LanceDB

A JavaScript / Node.js library for [LanceDB](https://github.com/lancedb/lancedb).

## Installation

```bash
npm install vectordb
```

## Usage

### Basic Example

```javascript
const lancedb = require('vectordb');

// connect() is async, so the connection must be awaited
const db = await lancedb.connect('<PATH_TO_LANCEDB_DATASET>');
const table = await db.openTable('my_table');
const results = await table.search([0.1, 0.3]).limit(20).execute();
console.log(results);
```

The [examples](./examples) folder contains complete examples.

## Development

The LanceDB JavaScript library is built with npm:

```bash
npm run tsc
```

Run the tests with

```bash
npm test
```

To run the linter and have it automatically fix all errors

```bash
npm run lint -- --fix
```

To build documentation

```bash
npx typedoc --plugin typedoc-plugin-markdown --out ../docs/src/javascript src/index.ts
```
211
docs/src/javascript/classes/Connection.md
Normal file
@@ -0,0 +1,211 @@
[vectordb](../README.md) / [Exports](../modules.md) / Connection

# Class: Connection

A connection to a LanceDB database.

## Table of contents

### Constructors

- [constructor](Connection.md#constructor)

### Properties

- [\_db](Connection.md#_db)
- [\_uri](Connection.md#_uri)

### Accessors

- [uri](Connection.md#uri)

### Methods

- [createTable](Connection.md#createtable)
- [createTableArrow](Connection.md#createtablearrow)
- [openTable](Connection.md#opentable)
- [tableNames](Connection.md#tablenames)

## Constructors

### constructor

• **new Connection**(`db`, `uri`)

#### Parameters

| Name | Type |
| :------ | :------ |
| `db` | `any` |
| `uri` | `string` |

#### Defined in

[index.ts:46](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L46)

## Properties

### \_db

• `Private` `Readonly` **\_db**: `any`

#### Defined in

[index.ts:44](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L44)

___

### \_uri

• `Private` `Readonly` **\_uri**: `string`

#### Defined in

[index.ts:43](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L43)

## Accessors

### uri

• `get` **uri**(): `string`

#### Returns

`string`

#### Defined in

[index.ts:51](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L51)

## Methods

### createTable

▸ **createTable**(`name`, `data`): `Promise`<[`Table`](Table.md)<`number`[]\>\>

Creates a new Table and initializes it with new data.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the table. |
| `data` | `Record`<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the Table |

#### Returns

`Promise`<[`Table`](Table.md)<`number`[]\>\>

#### Defined in

[index.ts:91](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L91)

▸ **createTable**<`T`\>(`name`, `data`, `embeddings`): `Promise`<[`Table`](Table.md)<`T`\>\>

Creates a new Table and initializes it with new data.

#### Type parameters

| Name |
| :------ |
| `T` |

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the table. |
| `data` | `Record`<`string`, `unknown`\>[] | Non-empty Array of Records to be inserted into the Table |
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use on this Table |

#### Returns

`Promise`<[`Table`](Table.md)<`T`\>\>

#### Defined in

[index.ts:99](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L99)

___

### createTableArrow

▸ **createTableArrow**(`name`, `table`): `Promise`<[`Table`](Table.md)<`number`[]\>\>

#### Parameters

| Name | Type |
| :------ | :------ |
| `name` | `string` |
| `table` | `Table`<`any`\> |

#### Returns

`Promise`<[`Table`](Table.md)<`number`[]\>\>

#### Defined in

[index.ts:109](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L109)

___

### openTable

▸ **openTable**(`name`): `Promise`<[`Table`](Table.md)<`number`[]\>\>

Open a table in the database.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the table. |

#### Returns

`Promise`<[`Table`](Table.md)<`number`[]\>\>

#### Defined in

[index.ts:67](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L67)

▸ **openTable**<`T`\>(`name`, `embeddings`): `Promise`<[`Table`](Table.md)<`T`\>\>

Open a table in the database.

#### Type parameters

| Name |
| :------ |
| `T` |

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the table. |
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use on this Table |

#### Returns

`Promise`<[`Table`](Table.md)<`T`\>\>

#### Defined in

[index.ts:74](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L74)

___

### tableNames

▸ **tableNames**(): `Promise`<`string`[]\>

Get the names of all tables in the database.

#### Returns

`Promise`<`string`[]\>

#### Defined in

[index.ts:58](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L58)
105
docs/src/javascript/classes/OpenAIEmbeddingFunction.md
Normal file
@@ -0,0 +1,105 @@
[vectordb](../README.md) / [Exports](../modules.md) / OpenAIEmbeddingFunction

# Class: OpenAIEmbeddingFunction

An embedding function that automatically creates vector representations for a given column.

## Implements

- [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`string`\>

## Table of contents

### Constructors

- [constructor](OpenAIEmbeddingFunction.md#constructor)

### Properties

- [\_modelName](OpenAIEmbeddingFunction.md#_modelname)
- [\_openai](OpenAIEmbeddingFunction.md#_openai)
- [sourceColumn](OpenAIEmbeddingFunction.md#sourcecolumn)

### Methods

- [embed](OpenAIEmbeddingFunction.md#embed)

## Constructors

### constructor

• **new OpenAIEmbeddingFunction**(`sourceColumn`, `openAIKey`, `modelName?`)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `sourceColumn` | `string` | `undefined` |
| `openAIKey` | `string` | `undefined` |
| `modelName` | `string` | `'text-embedding-ada-002'` |

#### Defined in

[embedding/openai.ts:21](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L21)

## Properties

### \_modelName

• `Private` `Readonly` **\_modelName**: `string`

#### Defined in

[embedding/openai.ts:19](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L19)

___

### \_openai

• `Private` `Readonly` **\_openai**: `any`

#### Defined in

[embedding/openai.ts:18](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L18)

___

### sourceColumn

• **sourceColumn**: `string`

The name of the column that will be used as input for the Embedding Function.

#### Implementation of

[EmbeddingFunction](../interfaces/EmbeddingFunction.md).[sourceColumn](../interfaces/EmbeddingFunction.md#sourcecolumn)

#### Defined in

[embedding/openai.ts:50](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L50)

## Methods

### embed

▸ **embed**(`data`): `Promise`<`number`[][]\>

Creates a vector representation for the given values.

#### Parameters

| Name | Type |
| :------ | :------ |
| `data` | `string`[] |

#### Returns

`Promise`<`number`[][]\>

#### Implementation of

[EmbeddingFunction](../interfaces/EmbeddingFunction.md).[embed](../interfaces/EmbeddingFunction.md#embed)

#### Defined in

[embedding/openai.ts:38](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/openai.ts#L38)
299
docs/src/javascript/classes/Query.md
Normal file
@@ -0,0 +1,299 @@
[vectordb](../README.md) / [Exports](../modules.md) / Query

# Class: Query<T\>

A builder for nearest neighbor queries for LanceDB.

## Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `number`[] |

## Table of contents

### Constructors

- [constructor](Query.md#constructor)

### Properties

- [\_columns](Query.md#_columns)
- [\_embeddings](Query.md#_embeddings)
- [\_filter](Query.md#_filter)
- [\_limit](Query.md#_limit)
- [\_metricType](Query.md#_metrictype)
- [\_nprobes](Query.md#_nprobes)
- [\_query](Query.md#_query)
- [\_queryVector](Query.md#_queryvector)
- [\_refineFactor](Query.md#_refinefactor)
- [\_tbl](Query.md#_tbl)

### Methods

- [execute](Query.md#execute)
- [filter](Query.md#filter)
- [limit](Query.md#limit)
- [metricType](Query.md#metrictype)
- [nprobes](Query.md#nprobes)
- [refineFactor](Query.md#refinefactor)

## Constructors

### constructor

• **new Query**<`T`\>(`tbl`, `query`, `embeddings?`)

#### Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `number`[] |

#### Parameters

| Name | Type |
| :------ | :------ |
| `tbl` | `any` |
| `query` | `T` |
| `embeddings?` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> |

#### Defined in

[index.ts:241](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L241)

## Properties

### \_columns

• `Private` `Optional` `Readonly` **\_columns**: `string`[]

#### Defined in

[index.ts:236](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L236)

___

### \_embeddings

• `Private` `Optional` `Readonly` **\_embeddings**: [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\>

#### Defined in

[index.ts:239](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L239)

___

### \_filter

• `Private` `Optional` **\_filter**: `string`

#### Defined in

[index.ts:237](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L237)

___

### \_limit

• `Private` **\_limit**: `number`

#### Defined in

[index.ts:233](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L233)

___

### \_metricType

• `Private` `Optional` **\_metricType**: [`MetricType`](../enums/MetricType.md)

#### Defined in

[index.ts:238](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L238)

___

### \_nprobes

• `Private` **\_nprobes**: `number`

#### Defined in

[index.ts:235](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L235)

___

### \_query

• `Private` `Readonly` **\_query**: `T`

#### Defined in

[index.ts:231](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L231)

___

### \_queryVector

• `Private` `Optional` **\_queryVector**: `number`[]

#### Defined in

[index.ts:232](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L232)

___

### \_refineFactor

• `Private` `Optional` **\_refineFactor**: `number`

#### Defined in

[index.ts:234](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L234)

___

### \_tbl

• `Private` `Readonly` **\_tbl**: `any`

#### Defined in

[index.ts:230](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L230)

## Methods

### execute

▸ **execute**<`T`\>(): `Promise`<`T`[]\>

Execute the query and return the results as an Array of Objects

#### Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `Record`<`string`, `unknown`\> |

#### Returns

`Promise`<`T`[]\>

#### Defined in

[index.ts:301](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L301)

___

### filter

▸ **filter**(`value`): [`Query`](Query.md)<`T`\>

A filter statement to be applied to this query.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `value` | `string` | A filter in the same format used by a sql WHERE clause. |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:284](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L284)

___

### limit

▸ **limit**(`value`): [`Query`](Query.md)<`T`\>

Sets the number of results that will be returned

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `value` | `number` | number of results |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:257](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L257)

___

### metricType

▸ **metricType**(`value`): [`Query`](Query.md)<`T`\>

The MetricType used for this Query.

**`See`**

MetricType for the different options

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `value` | [`MetricType`](../enums/MetricType.md) | The metric to use. |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:293](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L293)

___

### nprobes

▸ **nprobes**(`value`): [`Query`](Query.md)<`T`\>

The number of probes used. A higher number makes search more accurate but also slower.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `value` | `number` | The number of probes used. |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:275](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L275)

___

### refineFactor

▸ **refineFactor**(`value`): [`Query`](Query.md)<`T`\>

Refine the results by reading extra elements and re-ranking them in memory.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `value` | `number` | refine factor to use in this query. |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:266](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L266)
215
docs/src/javascript/classes/Table.md
Normal file
@@ -0,0 +1,215 @@
[vectordb](../README.md) / [Exports](../modules.md) / Table

# Class: Table<T\>

## Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `number`[] |

## Table of contents

### Constructors

- [constructor](Table.md#constructor)

### Properties

- [\_embeddings](Table.md#_embeddings)
- [\_name](Table.md#_name)
- [\_tbl](Table.md#_tbl)

### Accessors

- [name](Table.md#name)

### Methods

- [add](Table.md#add)
- [create\_index](Table.md#create_index)
- [overwrite](Table.md#overwrite)
- [search](Table.md#search)

## Constructors

### constructor

• **new Table**<`T`\>(`tbl`, `name`)

#### Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `number`[] |

#### Parameters

| Name | Type |
| :------ | :------ |
| `tbl` | `any` |
| `name` | `string` |

#### Defined in

[index.ts:121](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L121)

• **new Table**<`T`\>(`tbl`, `name`, `embeddings`)

#### Type parameters

| Name | Type |
| :------ | :------ |
| `T` | `number`[] |

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `tbl` | `any` | |
| `name` | `string` | |
| `embeddings` | [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\> | An embedding function to use when interacting with this table |

#### Defined in

[index.ts:127](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L127)

## Properties

### \_embeddings

• `Private` `Optional` `Readonly` **\_embeddings**: [`EmbeddingFunction`](../interfaces/EmbeddingFunction.md)<`T`\>

#### Defined in

[index.ts:119](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L119)

___

### \_name

• `Private` `Readonly` **\_name**: `string`

#### Defined in

[index.ts:118](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L118)

___

### \_tbl

• `Private` `Readonly` **\_tbl**: `any`

#### Defined in

[index.ts:117](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L117)

## Accessors

### name

• `get` **name**(): `string`

#### Returns

`string`

#### Defined in

[index.ts:134](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L134)

## Methods

### add

▸ **add**(`data`): `Promise`<`number`\>

Insert records into this Table.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `data` | `Record`<`string`, `unknown`\>[] | Records to be inserted into the Table |

#### Returns

`Promise`<`number`\>

The number of rows added to the table

#### Defined in

[index.ts:152](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L152)

___

### create\_index

▸ **create_index**(`indexParams`): `Promise`<`any`\>

Create an ANN index on this Table's vector column.

**`See`**

VectorIndexParams.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `indexParams` | `IvfPQIndexConfig` | The parameters of this Index. |

#### Returns

`Promise`<`any`\>

#### Defined in

[index.ts:171](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L171)

___

### overwrite

▸ **overwrite**(`data`): `Promise`<`number`\>

Insert records into this Table, replacing its contents.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `data` | `Record`<`string`, `unknown`\>[] | Records to be inserted into the Table |

#### Returns

`Promise`<`number`\>

The number of rows added to the table

#### Defined in

[index.ts:162](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L162)

___

### search

▸ **search**(`query`): [`Query`](Query.md)<`T`\>

Creates a search query to find the nearest neighbors of the given search term

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `query` | `T` | The query search term |

#### Returns

[`Query`](Query.md)<`T`\>

#### Defined in

[index.ts:142](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L142)
36
docs/src/javascript/enums/MetricType.md
Normal file
@@ -0,0 +1,36 @@
[vectordb](../README.md) / [Exports](../modules.md) / MetricType

# Enumeration: MetricType

Distance metrics type.

## Table of contents

### Enumeration Members

- [Cosine](MetricType.md#cosine)
- [L2](MetricType.md#l2)

## Enumeration Members

### Cosine

• **Cosine** = ``"cosine"``

Cosine distance

#### Defined in

[index.ts:341](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L341)

___

### L2

• **L2** = ``"l2"``

Euclidean distance

#### Defined in

[index.ts:336](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L336)
30
docs/src/javascript/enums/WriteMode.md
Normal file
@@ -0,0 +1,30 @@
[vectordb](../README.md) / [Exports](../modules.md) / WriteMode

# Enumeration: WriteMode

## Table of contents

### Enumeration Members

- [Append](WriteMode.md#append)
- [Overwrite](WriteMode.md#overwrite)

## Enumeration Members

### Append

• **Append** = ``"append"``

#### Defined in

[index.ts:326](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L326)

___

### Overwrite

• **Overwrite** = ``"overwrite"``

#### Defined in

[index.ts:325](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L325)
60
docs/src/javascript/interfaces/EmbeddingFunction.md
Normal file
@@ -0,0 +1,60 @@
[vectordb](../README.md) / [Exports](../modules.md) / EmbeddingFunction

# Interface: EmbeddingFunction<T\>

An embedding function that automatically creates vector representations for a given column.

## Type parameters

| Name |
| :------ |
| `T` |

## Implemented by

- [`OpenAIEmbeddingFunction`](../classes/OpenAIEmbeddingFunction.md)

## Table of contents

### Properties

- [embed](EmbeddingFunction.md#embed)
- [sourceColumn](EmbeddingFunction.md#sourcecolumn)

## Properties

### embed

• **embed**: (`data`: `T`[]) => `Promise`<`number`[][]\>

#### Type declaration

▸ (`data`): `Promise`<`number`[][]\>

Creates a vector representation for the given values.

##### Parameters

| Name | Type |
| :------ | :------ |
| `data` | `T`[] |

##### Returns

`Promise`<`number`[][]\>

#### Defined in

[embedding/embedding_function.ts:27](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/embedding_function.ts#L27)

___

### sourceColumn

• **sourceColumn**: `string`

The name of the column that will be used as input for the Embedding Function.

#### Defined in

[embedding/embedding_function.ts:22](https://github.com/lancedb/lancedb/blob/31dab97/node/src/embedding/embedding_function.ts#L22)
61
docs/src/javascript/modules.md
Normal file
@@ -0,0 +1,61 @@
[vectordb](README.md) / Exports

# vectordb

## Table of contents

### Enumerations

- [MetricType](enums/MetricType.md)
- [WriteMode](enums/WriteMode.md)

### Classes

- [Connection](classes/Connection.md)
- [OpenAIEmbeddingFunction](classes/OpenAIEmbeddingFunction.md)
- [Query](classes/Query.md)
- [Table](classes/Table.md)

### Interfaces

- [EmbeddingFunction](interfaces/EmbeddingFunction.md)

### Type Aliases

- [VectorIndexParams](modules.md#vectorindexparams)

### Functions

- [connect](modules.md#connect)

## Type Aliases

### VectorIndexParams

Ƭ **VectorIndexParams**: `IvfPQIndexConfig`

#### Defined in

[index.ts:224](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L224)

## Functions

### connect

▸ **connect**(`uri`): `Promise`<[`Connection`](classes/Connection.md)\>

Connect to a LanceDB instance at the given URI

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `uri` | `string` | The uri of the database. |

#### Returns

`Promise`<[`Connection`](classes/Connection.md)\>

#### Defined in

[index.ts:34](https://github.com/lancedb/lancedb/blob/31dab97/node/src/index.ts#L34)
@@ -72,6 +72,8 @@
     "import lancedb\n",
     "import re\n",
     "import pickle\n",
+    "import requests\n",
+    "import zipfile\n",
     "from pathlib import Path\n",
     "\n",
     "from langchain.document_loaders import UnstructuredHTMLLoader\n",
@@ -85,10 +87,25 @@
   {
    "attachments": {},
    "cell_type": "markdown",
-   "id": "6ccf9b2b",
+   "id": "56cc6d50",
    "metadata": {},
    "source": [
-    "You can download the Pandas documentation from https://pandas.pydata.org/docs/. To make sure we're not littering our repo with docs, we won't include it in the LanceDB repo, so download this and store it locally first."
+    "To make this easier, we've downloaded Pandas documentation and stored the raw HTML files for you to download. We'll download them and then use LangChain's HTML document readers to parse them and store them in LanceDB as a vector store, along with relevant metadata."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7da77e75",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pandas_docs = requests.get(\"https://eto-public.s3.us-west-2.amazonaws.com/datasets/pandas_docs/pandas.documentation.zip\")\n",
+    "with open('/tmp/pandas.documentation.zip', 'wb') as f:\n",
+    "    f.write(pandas_docs.content)\n",
+    "\n",
+    "file = zipfile.ZipFile(\"/tmp/pandas.documentation.zip\")\n",
+    "file.extractall(path=\"/tmp/pandas_docs\")"
    ]
   },
   {
@@ -137,7 +154,8 @@
     "docs = []\n",
     "\n",
     "if not docs_path.exists():\n",
-    "    for p in Path(\"./pandas.documentation\").rglob(\"*.html\"):\n",
+    "    for p in Path(\"/tmp/pandas_docs/pandas.documentation\").rglob(\"*.html\"):\n",
+    "        print(p)\n",
     "        if p.is_dir():\n",
     "            continue\n",
     "        loader = UnstructuredHTMLLoader(p)\n",
108
docs/src/notebooks/diffusiondb/datagen.py
Executable file
@@ -0,0 +1,108 @@
#!/usr/bin/env python
#
# Copyright 2023 LanceDB Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Dataset hf://poloclub/diffusiondb
"""

import io
from argparse import ArgumentParser
from multiprocessing import Pool

import lance
import lancedb
import pyarrow as pa
from datasets import load_dataset
from PIL import Image
from transformers import CLIPModel, CLIPProcessor, CLIPTokenizerFast

MODEL_ID = "openai/clip-vit-base-patch32"

device = "cuda"

tokenizer = CLIPTokenizerFast.from_pretrained(MODEL_ID)
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32").to(device)
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

schema = pa.schema(
    [
        pa.field("prompt", pa.string()),
        pa.field("seed", pa.uint32()),
        pa.field("step", pa.uint16()),
        pa.field("cfg", pa.float32()),
        pa.field("sampler", pa.string()),
        pa.field("width", pa.uint16()),
        pa.field("height", pa.uint16()),
        pa.field("timestamp", pa.timestamp("s")),
        pa.field("image_nsfw", pa.float32()),
        pa.field("prompt_nsfw", pa.float32()),
        pa.field("vector", pa.list_(pa.float32(), 512)),
        pa.field("image", pa.binary()),
    ]
)


def pil_to_bytes(img) -> bytes:
    # Serialize a PIL image to PNG bytes for storage in the binary column.
    buf = io.BytesIO()
    img.save(buf, format="PNG")
    return buf.getvalue()


def generate_clip_embeddings(batch) -> dict:
    # Embed a batch of images with CLIP and attach the vectors to the batch.
    image = processor(text=None, images=batch["image"], return_tensors="pt")[
        "pixel_values"
    ].to(device)
    img_emb = model.get_image_features(image)
    batch["vector"] = img_emb.cpu().tolist()

    with Pool() as p:
        batch["image_bytes"] = p.map(pil_to_bytes, batch["image"])
    return batch


def datagen(args):
    """Generate DiffusionDB dataset, and use CLIP model to generate image embeddings."""
    dataset = load_dataset("poloclub/diffusiondb", args.subset)
    data = []
    for b in dataset.map(
        generate_clip_embeddings, batched=True, batch_size=256, remove_columns=["image"]
    )["train"]:
        b["image"] = b["image_bytes"]
        del b["image_bytes"]
        data.append(b)
    tbl = pa.Table.from_pylist(data, schema=schema)
    return tbl


def main():
    parser = ArgumentParser()
    parser.add_argument(
        "-o", "--output", metavar="DIR", help="Output lance directory", required=True
    )
    parser.add_argument(
        "-s",
        "--subset",
        choices=["2m_all", "2m_first_10k", "2m_first_100k"],
        default="2m_first_10k",
        help="subset of the hf dataset",
    )

    args = parser.parse_args()

    batches = datagen(args)
    lance.write_dataset(batches, args.output)


if __name__ == "__main__":
    main()
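
Once datagen.py has produced the Lance dataset, it can be queried for nearest neighbors directly with the `lance` Python API. A minimal sketch, assuming the script above was run with a hypothetical `--output diffusiondb.lance`; the query vector is a placeholder and would normally come from the same CLIP model:

```python
import lance

ds = lance.dataset("diffusiondb.lance")  # hypothetical path passed via --output

q = [0.0] * 512  # placeholder; use a real 512-d CLIP embedding in practice
hits = ds.to_table(nearest={"column": "vector", "q": q, "k": 5})
print(hits.to_pandas()[["prompt", "seed"]])
```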
9
docs/src/notebooks/diffusiondb/requirements.txt
Normal file
@@ -0,0 +1,9 @@
datasets
Pillow
lancedb
isort
black
transformers
--index-url https://download.pytorch.org/whl/cu118
torch
torchvision
269
docs/src/notebooks/multimodal_search.ipynb
Normal file
@@ -0,0 +1,269 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.1.2\u001b[0m\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n",
      "\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.1.2\u001b[0m\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "!pip install --quiet -U lancedb\n",
    "!pip install --quiet gradio transformers torch torchvision"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import io\n",
    "import PIL\n",
    "import duckdb\n",
    "import lancedb"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## First run setup: Download data and pre-process"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<lance.dataset.LanceDataset at 0x3045db590>"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# remove null prompts\n",
    "import lance\n",
    "import pyarrow.compute as pc\n",
    "\n",
    "# download s3://eto-public/datasets/diffusiondb/small_10k.lance to this uri\n",
    "data = lance.dataset(\"~/datasets/rawdata.lance\").to_table()\n",
    "\n",
    "# First data processing and full-text-search index\n",
    "db = lancedb.connect(\"~/datasets/demo\")\n",
    "tbl = db.create_table(\"diffusiondb\", data.filter(~pc.field(\"prompt\").is_null()))\n",
    "tbl = tbl.create_fts_index([\"prompt\"])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Create / Open LanceDB Table"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "db = lancedb.connect(\"~/datasets/demo\")\n",
    "tbl = db.open_table(\"diffusiondb\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Create CLIP embedding function for the text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import CLIPModel, CLIPProcessor, CLIPTokenizerFast\n",
    "\n",
    "MODEL_ID = \"openai/clip-vit-base-patch32\"\n",
    "\n",
    "tokenizer = CLIPTokenizerFast.from_pretrained(MODEL_ID)\n",
    "model = CLIPModel.from_pretrained(MODEL_ID)\n",
    "processor = CLIPProcessor.from_pretrained(MODEL_ID)\n",
    "\n",
    "def embed_func(query):\n",
    "    inputs = tokenizer([query], padding=True, return_tensors=\"pt\")\n",
    "    text_features = model.get_text_features(**inputs)\n",
    "    return text_features.detach().numpy()[0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Search functions for Gradio"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def find_image_vectors(query):\n",
    "    emb = embed_func(query)\n",
    "    code = (\n",
    "        \"import lancedb\\n\"\n",
    "        \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
    "        \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
    "        f\"embedding = embed_func('{query}')\\n\"\n",
    "        \"tbl.search(embedding).limit(9).to_df()\"\n",
    "    )\n",
    "    return (_extract(tbl.search(emb).limit(9).to_df()), code)\n",
    "\n",
    "def find_image_keywords(query):\n",
    "    code = (\n",
    "        \"import lancedb\\n\"\n",
    "        \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
    "        \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
    "        f\"tbl.search('{query}').limit(9).to_df()\"\n",
    "    )\n",
    "    return (_extract(tbl.search(query).limit(9).to_df()), code)\n",
    "\n",
    "def find_image_sql(query):\n",
    "    code = (\n",
    "        \"import lancedb\\n\"\n",
    "        \"import duckdb\\n\"\n",
    "        \"db = lancedb.connect('~/datasets/demo')\\n\"\n",
    "        \"tbl = db.open_table('diffusiondb')\\n\\n\"\n",
    "        \"diffusiondb = tbl.to_lance()\\n\"\n",
    "        f\"duckdb.sql('{query}').to_df()\"\n",
    "    )\n",
    "    diffusiondb = tbl.to_lance()\n",
    "    return (_extract(duckdb.sql(query).to_df()), code)\n",
    "\n",
    "def _extract(df):\n",
    "    image_col = \"image\"\n",
    "    return [(PIL.Image.open(io.BytesIO(row[image_col])), row[\"prompt\"]) for _, row in df.iterrows()]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Setup Gradio interface"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Running on local URL: http://127.0.0.1:7881\n",
      "\n",
      "To create a public link, set `share=True` in `launch()`.\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div><iframe src=\"http://127.0.0.1:7881/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": []
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import gradio as gr\n",
    "\n",
    "\n",
    "with gr.Blocks() as demo:\n",
    "    with gr.Row():\n",
    "        with gr.Tab(\"Embeddings\"):\n",
    "            vector_query = gr.Textbox(value=\"portraits of a person\", show_label=False)\n",
    "            b1 = gr.Button(\"Submit\")\n",
    "        with gr.Tab(\"Keywords\"):\n",
    "            keyword_query = gr.Textbox(value=\"ninja turtle\", show_label=False)\n",
    "            b2 = gr.Button(\"Submit\")\n",
    "        with gr.Tab(\"SQL\"):\n",
    "            sql_query = gr.Textbox(value=\"SELECT * from diffusiondb WHERE image_nsfw >= 2 LIMIT 9\", show_label=False)\n",
    "            b3 = gr.Button(\"Submit\")\n",
    "    with gr.Row():\n",
    "        code = gr.Code(label=\"Code\", language=\"python\")\n",
    "    with gr.Row():\n",
    "        gallery = gr.Gallery(\n",
    "            label=\"Found images\", show_label=False, elem_id=\"gallery\"\n",
    "        ).style(columns=[3], rows=[3], object_fit=\"contain\", height=\"auto\")\n",
    "\n",
    "    b1.click(find_image_vectors, inputs=vector_query, outputs=[gallery, code])\n",
    "    b2.click(find_image_keywords, inputs=keyword_query, outputs=[gallery, code])\n",
    "    b3.click(find_image_sql, inputs=sql_query, outputs=[gallery, code])\n",
    "\n",
    "demo.launch()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
@@ -1,11 +1,12 @@
 {
  "cells": [
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "42bf01fb",
    "metadata": {},
    "source": [
-    "# We're going to build question and answer bot\n",
+    "# Youtube Transcript Search QA Bot\n",
     "\n",
     "This Q&A bot will allow you to search through youtube transcripts using natural language! By going through this notebook, we'll introduce how you can use LanceDB to store and manage your data easily."
    ]
@@ -35,6 +36,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "22e570f4",
    "metadata": {},
@@ -87,6 +89,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "5ac2b6a3",
    "metadata": {},
@@ -181,6 +184,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "3044e0b0",
    "metadata": {},
@@ -209,6 +213,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "db586267",
    "metadata": {},
@@ -229,6 +234,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "2106b5bb",
    "metadata": {},
@@ -338,6 +344,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "53e4bff1",
    "metadata": {},
@@ -371,6 +378,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "8ef34fca",
    "metadata": {},
@@ -459,6 +467,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "23afc2f9",
    "metadata": {},
@@ -541,6 +550,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "28705959",
    "metadata": {},
@@ -571,6 +581,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "559a095b",
    "metadata": {},
@@ -1,14 +0,0 @@
-# LanceDB Python API Reference
-
-## Installation
-
-```shell
-pip install lancedb
-```
-
-## ::: lancedb
-## ::: lancedb.db
-## ::: lancedb.table
-## ::: lancedb.query
-## ::: lancedb.embeddings
-## ::: lancedb.context
43
docs/src/python/python.md
Normal file
@@ -0,0 +1,43 @@
# LanceDB Python API Reference

## Installation

```shell
pip install lancedb
```

## Connection

::: lancedb.connect

::: lancedb.LanceDBConnection

## Table

::: lancedb.table.LanceTable

## Querying

::: lancedb.query.LanceQueryBuilder

::: lancedb.query.LanceFtsQueryBuilder

## Embeddings

::: lancedb.embeddings.with_embeddings

::: lancedb.embeddings.EmbeddingFunction

## Context

::: lancedb.context.contextualize

::: lancedb.context.Contextualizer

## Full text search

::: lancedb.fts.create_index

::: lancedb.fts.populate_index

::: lancedb.fts.search_index
85
docs/src/search.md
Normal file
@@ -0,0 +1,85 @@
# Vector Search

`Vector Search` finds the nearest vectors to a query vector in the database.
In a recommendation system or search engine, this lets you find products similar
to the one you searched for.
In LLM and other AI applications, each data point can be
[represented by an embedding generated from a model](embedding.md), and vector
search returns the data points most relevant to the query.

A search in a high-dimensional vector space is the task of finding the `K-Nearest-Neighbors (KNN)` of the query vector.

## Metric

In LanceDB, a `Metric` describes the distance between a pair of vectors.
Currently, we support the following metrics:

| Metric      | Description                           |
| ----------- | ------------------------------------- |
| `L2`        | [Euclidean / L2 distance](https://en.wikipedia.org/wiki/Euclidean_distance) |
| `Cosine`    | [Cosine Similarity](https://en.wikipedia.org/wiki/Cosine_similarity) |

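For intuition, here is a minimal sketch (plain TypeScript, not part of the LanceDB API) of the math behind these two metrics; when cosine is used as a *distance*, it is commonly reported as `1 - similarity`:

```typescript
// Illustrative only: what the two supported metrics compute for a vector pair.
function l2(a: number[], b: number[]): number {
  // Euclidean distance: sqrt(sum_i (a_i - b_i)^2)
  return Math.sqrt(a.reduce((acc, ai, i) => acc + (ai - b[i]) ** 2, 0))
}

function cosineSimilarity(a: number[], b: number[]): number {
  // Cosine similarity: (a . b) / (||a|| * ||b||)
  const dot = a.reduce((acc, ai, i) => acc + ai * b[i], 0)
  const norm = (v: number[]): number =>
    Math.sqrt(v.reduce((acc, x) => acc + x * x, 0))
  return dot / (norm(a) * norm(b))
}
```
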
## Search

### Flat Search

If no [vector index has been created](ann_indexes.md), LanceDB will brute-force scan
the vector column and compute the distance to every vector.

=== "Python"

    ```python
    import lancedb
    import numpy as np

    db = lancedb.connect("data/sample-lancedb")

    tbl = db.open_table("my_vectors")

    df = tbl.search(np.random.random((768))) \
        .limit(10) \
        .to_df()
    ```

=== "JavaScript"

    ```javascript
    const vectordb = require('vectordb')
    const db = await vectordb.connect('data/sample-lancedb')

    const tbl = await db.openTable('my_vectors')

    const results = await tbl.search(Array(768))
        .limit(20)
        .execute()
    ```

By default, `l2` is used as the `Metric` type. You can customize the metric type
as well.

=== "Python"

    ```python
    df = tbl.search(np.random.random((768))) \
        .metric("cosine") \
        .limit(10) \
        .to_df()
    ```

=== "JavaScript"

    ```javascript
    const vectordb = require('vectordb')
    const db = await vectordb.connect('data/sample-lancedb')

    const tbl = await db.openTable('my_vectors')

    const results = await tbl.search(Array(768))
        .metricType('cosine')
        .limit(20)
        .execute()
    ```

### Search with Vector Index

See [ANN Index](ann_indexes.md) for more details.
6
docs/src/styles/global.css
Normal file
@@ -0,0 +1,6 @@
:root {
  --md-primary-fg-color: #625eff;
  --md-primary-fg-color--dark: #4338ca;
  --md-text-font: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
  --md-code-font: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
}
64
node/CHANGELOG.md
Normal file
@@ -0,0 +1,64 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.5] - 2023-06-00

### Added

- Support for macOS X86

## [0.1.4] - 2023-06-03

### Added

- Select / Project query API

### Changed

- Deprecated create_index in favor of createIndex

## [0.1.3] - 2023-06-01

### Added

- Support S3 and Google Cloud Storage
- Embedding functions support
- OpenAI embedding function

## [0.1.2] - 2023-05-27

### Added

- Append records API
- Extra query params to the nodejs client
- Create_index API

### Fixed

- bugfix: string columns should be converted to Utf8Array (#94)

## [0.1.1] - 2023-05-16

### Added

- create_table API
- limit parameter for queries
- Typescript / JavaScript examples
- Linux support

## [0.1.0] - 2023-05-16

### Added

- Initial JavaScript / Node.js library for LanceDB
- Read-only API to query LanceDB datasets
- Supports macOS arm only

## [pre-0.1.0]

- Various prototypes / test builds

@@ -41,3 +41,9 @@ To run the linter and have it automatically fix all errors
 ```bash
 npm run lint -- --fix
 ```
+
+To build documentation
+
+```bash
+npx typedoc --plugin typedoc-plugin-markdown --out ../docs/src/javascript src/index.ts
+```
41
node/examples/js-openai/index.js
Normal file
@@ -0,0 +1,41 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict'

async function example () {
  const lancedb = require('vectordb')
  // You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
  const apiKey = process.env.OPENAI_API_KEY
  // The embedding function will create embeddings for the 'text' column
  const embedding = new lancedb.OpenAIEmbeddingFunction('text', apiKey)

  const db = await lancedb.connect('data/sample-lancedb')

  const data = [
    { id: 1, text: 'Black T-Shirt', price: 10 },
    { id: 2, text: 'Leather Jacket', price: 50 }
  ]

  const table = await db.createTable('vectors', data, embedding)
  console.log(await db.tableNames())

  const results = await table
    .search('keeps me warm')
    .limit(1)
    .execute()
  console.log(results[0].text)
}

example().then(_ => { console.log('All done!') })
15
node/examples/js-openai/package.json
Normal file
@@ -0,0 +1,15 @@
{
  "name": "vectordb-example-js-openai",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Lance Devs",
  "license": "Apache-2.0",
  "dependencies": {
    "vectordb": "file:../..",
    "openai": "^3.2.1"
  }
}
122
node/examples/js-youtube-transcripts/index.js
Normal file
@@ -0,0 +1,122 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict'

const lancedb = require('vectordb')
const fs = require('fs/promises')
const readline = require('readline/promises')
const { stdin: input, stdout: output } = require('process')
const { Configuration, OpenAIApi } = require('openai')

// Download file from XYZ
const INPUT_FILE_NAME = 'data/youtube-transcriptions_sample.jsonl';

(async () => {
  // You need to provide an OpenAI API key, here we read it from the OPENAI_API_KEY environment variable
  const apiKey = process.env.OPENAI_API_KEY
  // The embedding function will create embeddings for the 'context' column
  const embedFunction = new lancedb.OpenAIEmbeddingFunction('context', apiKey)

  // Connects to LanceDB
  const db = await lancedb.connect('data/youtube-lancedb')

  // Open the vectors table or create one if it does not exist
  let tbl
  if ((await db.tableNames()).includes('vectors')) {
    tbl = await db.openTable('vectors', embedFunction)
  } else {
    tbl = await createEmbeddingsTable(db, embedFunction)
  }

  // Use the OpenAI Completion API to generate an answer based on the context that LanceDB provides
  const configuration = new Configuration({ apiKey })
  const openai = new OpenAIApi(configuration)
  const rl = readline.createInterface({ input, output })
  try {
    while (true) {
      const query = await rl.question('Prompt: ')
      const results = await tbl
        .search(query)
        .select(['title', 'text', 'context'])
        .limit(3)
        .execute()

      // console.table(results)

      const response = await openai.createCompletion({
        model: 'text-davinci-003',
        prompt: createPrompt(query, results),
        max_tokens: 400,
        temperature: 0,
        top_p: 1,
        frequency_penalty: 0,
        presence_penalty: 0
      })
      console.log(response.data.choices[0].text)
    }
  } catch (err) {
    console.log('Error: ', err)
  } finally {
    rl.close()
  }
  process.exit(1)
})()

async function createEmbeddingsTable (db, embedFunction) {
  console.log(`Creating embeddings from ${INPUT_FILE_NAME}`)
  // read the input file into a JSON array, skipping empty lines
  const lines = (await fs.readFile(INPUT_FILE_NAME, 'utf-8'))
    .toString()
    .split('\n')
    .filter(line => line.length > 0)
    .map(line => JSON.parse(line))

  const data = contextualize(lines, 20, 'video_id')
  return await db.createTable('vectors', data, embedFunction)
}

// Each transcript has a small text column, we include previous transcripts in order to
// have more context information when creating embeddings
function contextualize (rows, contextSize, groupColumn) {
  const grouped = []
  rows.forEach(row => {
    if (!grouped[row[groupColumn]]) {
      grouped[row[groupColumn]] = []
    }
    grouped[row[groupColumn]].push(row)
  })

  const data = []
  Object.keys(grouped).forEach(key => {
    for (let i = 0; i < grouped[key].length; i++) {
      const start = i - contextSize > 0 ? i - contextSize : 0
      grouped[key][i].context = grouped[key].slice(start, i + 1).map(r => r.text).join(' ')
    }
    data.push(...grouped[key])
  })
  return data
}

// Creates a prompt by aggregating all relevant contexts
function createPrompt (query, context) {
  let prompt =
    'Answer the question based on the context below.\n\n' +
    'Context:\n'

  // need to make sure our prompt is not larger than max size
  prompt = prompt + context.map(c => c.context).join('\n\n---\n\n').substring(0, 3750)
  prompt = prompt + `\n\nQuestion: ${query}\nAnswer:`
  return prompt
}
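To make the rolling-window behavior of `contextualize` concrete, here is a short hedged sketch with hypothetical data (it assumes it is appended below the functions in this file, so `contextualize` is in scope):

```typescript
// Hypothetical three-row transcript for one video (not real data).
const sample = [
  { video_id: 'v1', text: 'hello' },
  { video_id: 'v1', text: 'world' },
  { video_id: 'v1', text: 'again' }
]

// With contextSize = 1, each row's context covers at most one previous row:
//   row 0 -> 'hello'
//   row 1 -> 'hello world'
//   row 2 -> 'world again'
console.log(contextualize(sample, 1, 'video_id').map(r => r.context))
```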
15
node/examples/js-youtube-transcripts/package.json
Normal file
@@ -0,0 +1,15 @@
{
  "name": "vectordb-example-js-openai",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Lance Devs",
  "license": "Apache-2.0",
  "dependencies": {
    "vectordb": "file:../..",
    "openai": "^3.2.1"
  }
}
@@ -9,6 +9,6 @@
   "author": "Lance Devs",
   "license": "Apache-2.0",
   "dependencies": {
-    "vectordb": "^0.1.0"
+    "vectordb": "file:../.."
   }
 }
@@ -17,6 +17,6 @@
     "typescript": "*"
   },
   "dependencies": {
-    "vectordb": "^0.1.0"
+    "vectordb": "file:../.."
   }
 }
@@ -1,8 +0,0 @@
-import lancedb
-
-uri = "sample-lancedb"
-db = lancedb.connect(uri)
-table = db.create_table("my_table",
-                        data=[{"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
-                              {"vector": [5.9, 26.5], "item": "bar", "price": 20.0}])
-
750
node/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "vectordb",
-  "version": "0.1.1",
+  "version": "0.1.5",
   "description": " Serverless, low-latency vector database for AI applications",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -9,7 +9,8 @@
     "build": "cargo-cp-artifact --artifact cdylib vectordb-node index.node -- cargo build --message-format=json-render-diagnostics",
     "build-release": "npm run build -- --release",
     "test": "mocha -recursive dist/test",
-    "lint": "eslint src --ext .js,.ts"
+    "lint": "eslint src --ext .js,.ts",
+    "clean": "rm -rf node_modules *.node dist/"
   },
   "repository": {
     "type": "git",
@@ -27,6 +28,7 @@
     "@types/chai": "^4.3.4",
     "@types/mocha": "^10.0.1",
     "@types/node": "^18.16.2",
+    "@types/sinon": "^10.0.15",
     "@types/temp": "^0.9.1",
     "@typescript-eslint/eslint-plugin": "^5.59.1",
     "cargo-cp-artifact": "^0.1",
@@ -37,9 +39,13 @@
     "eslint-plugin-n": "^15.7.0",
     "eslint-plugin-promise": "^6.1.1",
     "mocha": "^10.2.0",
+    "openai": "^3.2.1",
+    "sinon": "^15.1.0",
     "temp": "^0.9.4",
     "ts-node": "^10.9.1",
     "ts-node-dev": "^2.0.0",
+    "typedoc": "^0.24.7",
+    "typedoc-plugin-markdown": "^3.15.3",
     "typescript": "*"
   },
   "dependencies": {
@@ -15,15 +15,16 @@
 import {
   Field,
   Float32,
-  List,
+  List, type ListBuilder,
   makeBuilder,
   RecordBatchFileWriter,
-  Table,
+  Table, Utf8,
   type Vector,
   vectorFromArray
 } from 'apache-arrow'
+import { type EmbeddingFunction } from './index'

-export function convertToTable (data: Array<Record<string, unknown>>): Table {
+export async function convertToTable<T> (data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table> {
   if (data.length === 0) {
     throw new Error('At least one record needs to be provided')
   }
@@ -33,11 +34,7 @@ export function convertToTable (data: Array<Record<string, unknown>>): Table {

   for (const columnsKey of columns) {
     if (columnsKey === 'vector') {
-      const children = new Field<Float32>('item', new Float32())
-      const list = new List(children)
-      const listBuilder = makeBuilder({
-        type: list
-      })
+      const listBuilder = newVectorListBuilder()
       const vectorSize = (data[0].vector as any[]).length
       for (const datum of data) {
         if ((datum[columnsKey] as any[]).length !== vectorSize) {
@@ -52,15 +49,37 @@ export function convertToTable (data: Array<Record<string, unknown>>): Table {
       for (const datum of data) {
         values.push(datum[columnsKey])
       }
+
+      if (columnsKey === embeddings?.sourceColumn) {
+        const vectors = await embeddings.embed(values as T[])
+        const listBuilder = newVectorListBuilder()
+        vectors.map(v => listBuilder.append(v))
+        records.vector = listBuilder.finish().toVector()
+      }
+
+      if (typeof values[0] === 'string') {
+        // `vectorFromArray` converts strings into dictionary vectors, forcing it back to a string column
+        records[columnsKey] = vectorFromArray(values, new Utf8())
+      } else {
         records[columnsKey] = vectorFromArray(values)
       }
     }
+  }

   return new Table(records)
 }

-export async function fromRecordsToBuffer (data: Array<Record<string, unknown>>): Promise<Buffer> {
-  const table = convertToTable(data)
+// Creates a new Arrow ListBuilder that stores a Vector column
+function newVectorListBuilder (): ListBuilder<Float32, any> {
+  const children = new Field<Float32>('item', new Float32())
+  const list = new List(children)
+  return makeBuilder({
+    type: list
+  })
+}
+
+export async function fromRecordsToBuffer<T> (data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Buffer> {
+  const table = await convertToTable(data, embeddings)
   const writer = RecordBatchFileWriter.writeAll(table)
   return Buffer.from(await writer.toUint8Array())
 }
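The `Utf8` branch above deserves a note: left to its defaults, apache-arrow's `vectorFromArray` dictionary-encodes string arrays, which is why the code forces the column back to a plain string type. A minimal standalone sketch of the same idea (illustrative, outside this diff):

```typescript
import { Utf8, vectorFromArray } from 'apache-arrow'

const values = ['foo', 'bar', 'foo']

// Default inference: strings become a dictionary-encoded vector.
const dictVector = vectorFromArray(values)

// Passing an explicit Utf8 type keeps a plain string column.
const utf8Vector = vectorFromArray(values, new Utf8())

console.log(dictVector.type.toString())
console.log(utf8Vector.type.toString())
```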
28
node/src/embedding/embedding_function.ts
Normal file
@@ -0,0 +1,28 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * An embedding function that automatically creates vector representation for a given column.
 */
export interface EmbeddingFunction<T> {
  /**
   * The name of the column that will be used as input for the Embedding Function.
   */
  sourceColumn: string

  /**
   * Creates a vector representation for the given values.
   */
  embed: (data: T[]) => Promise<number[][]>
}
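For a sense of how this interface is meant to be implemented, here is a minimal sketch of a custom embedding function, modeled on the toy `TextEmbedding` class used in the tests further down:

```typescript
import { type EmbeddingFunction } from 'vectordb'

// A toy embedding function: looks up fixed 2-d vectors for known strings.
class TextEmbedding implements EmbeddingFunction<string> {
  sourceColumn: string

  constructor (sourceColumn: string) {
    this.sourceColumn = sourceColumn
  }

  private readonly _map = new Map<string, number[]>([
    ['foo', [2.1, 2.2]],
    ['bar', [3.1, 3.2]]
  ])

  async embed (data: string[]): Promise<number[][]> {
    // Unknown inputs fall back to the zero vector.
    return data.map(d => this._map.get(d) ?? [0.0, 0.0])
  }
}
```

An instance can then be passed as the optional `embeddings` argument to `createTable` or `openTable`.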
51
node/src/embedding/openai.ts
Normal file
@@ -0,0 +1,51 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { type EmbeddingFunction } from '../index'

export class OpenAIEmbeddingFunction implements EmbeddingFunction<string> {
  private readonly _openai: any
  private readonly _modelName: string

  constructor (sourceColumn: string, openAIKey: string, modelName: string = 'text-embedding-ada-002') {
    let openai
    try {
      // eslint-disable-next-line @typescript-eslint/no-var-requires
      openai = require('openai')
    } catch {
      throw new Error('please install openai using npm install openai')
    }

    this.sourceColumn = sourceColumn
    const configuration = new openai.Configuration({
      apiKey: openAIKey
    })
    this._openai = new openai.OpenAIApi(configuration)
    this._modelName = modelName
  }

  async embed (data: string[]): Promise<number[][]> {
    const response = await this._openai.createEmbedding({
      model: this._modelName,
      input: data
    })
    const embeddings: number[][] = []
    for (let i = 0; i < response.data.data.length; i++) {
      embeddings.push(response.data.data[i].embedding as number[])
    }
    return embeddings
  }

  sourceColumn: string
}
@@ -19,16 +19,21 @@ import {
   Vector
 } from 'apache-arrow'
 import { fromRecordsToBuffer } from './arrow'
+import type { EmbeddingFunction } from './embedding/embedding_function'

 // eslint-disable-next-line @typescript-eslint/no-var-requires
-const { databaseNew, databaseTableNames, databaseOpenTable, tableCreate, tableSearch, tableAdd } = require('../native.js')
+const { databaseNew, databaseTableNames, databaseOpenTable, tableCreate, tableSearch, tableAdd, tableCreateVectorIndex } = require('../native.js')
+
+export type { EmbeddingFunction }
+export { OpenAIEmbeddingFunction } from './embedding/openai'

 /**
  * Connect to a LanceDB instance at the given URI
  * @param uri The uri of the database.
  */
 export async function connect (uri: string): Promise<Connection> {
-  return new Connection(uri)
+  const db = await databaseNew(uri)
+  return new Connection(db, uri)
 }

 /**
@@ -38,9 +43,9 @@ export class Connection {
   private readonly _uri: string
   private readonly _db: any

-  constructor (uri: string) {
+  constructor (db: any, uri: string) {
     this._uri = uri
-    this._db = databaseNew(uri)
+    this._db = db
   }

   get uri (): string {
@@ -56,16 +61,49 @@ export class Connection {

   /**
    * Open a table in the database.
+   *
    * @param name The name of the table.
    */
-  async openTable (name: string): Promise<Table> {
+  async openTable (name: string): Promise<Table>
+  /**
+   * Open a table in the database.
+   *
+   * @param name The name of the table.
+   * @param embeddings An embedding function to use on this Table
+   */
+  async openTable<T> (name: string, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
+  async openTable<T> (name: string, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
     const tbl = await databaseOpenTable.call(this._db, name)
+    if (embeddings !== undefined) {
+      return new Table(tbl, name, embeddings)
+    } else {
       return new Table(tbl, name)
+    }
   }

-  async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table> {
-    await tableCreate.call(this._db, name, await fromRecordsToBuffer(data))
-    return await this.openTable(name)
+  /**
+   * Creates a new Table and initialize it with new data.
+   *
+   * @param name The name of the table.
+   * @param data Non-empty Array of Records to be inserted into the Table
+   */
+  async createTable (name: string, data: Array<Record<string, unknown>>): Promise<Table>
+  /**
+   * Creates a new Table and initialize it with new data.
+   *
+   * @param name The name of the table.
+   * @param data Non-empty Array of Records to be inserted into the Table
+   * @param embeddings An embedding function to use on this Table
+   */
+  async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings: EmbeddingFunction<T>): Promise<Table<T>>
+  async createTable<T> (name: string, data: Array<Record<string, unknown>>, embeddings?: EmbeddingFunction<T>): Promise<Table<T>> {
+    const tbl = await tableCreate.call(this._db, name, await fromRecordsToBuffer(data, embeddings))
+    if (embeddings !== undefined) {
+      return new Table(tbl, name, embeddings)
+    } else {
+      return new Table(tbl, name)
+    }
   }

   async createTableArrow (name: string, table: ArrowTable): Promise<Table> {
@@ -75,16 +113,22 @@ export class Connection {
   }
 }

-/**
- * A table in a LanceDB database.
- */
-export class Table {
+export class Table<T = number[]> {
   private readonly _tbl: any
   private readonly _name: string
+  private readonly _embeddings?: EmbeddingFunction<T>

-  constructor (tbl: any, name: string) {
+  constructor (tbl: any, name: string)
+  /**
+   * @param tbl
+   * @param name
+   * @param embeddings An embedding function to use when interacting with this table
+   */
+  constructor (tbl: any, name: string, embeddings: EmbeddingFunction<T>)
+  constructor (tbl: any, name: string, embeddings?: EmbeddingFunction<T>) {
     this._tbl = tbl
     this._name = name
+    this._embeddings = embeddings
   }

   get name (): string {
@@ -92,72 +136,192 @@ export class Table {
   }

   /**
-   * Create a search query to find the nearest neighbors of the given query vector.
-   * @param queryVector The query vector.
+   * Creates a search query to find the nearest neighbors of the given search term
+   * @param query The query search term
    */
-  search (queryVector: number[]): Query {
-    return new Query(this._tbl, queryVector)
+  search (query: T): Query<T> {
+    return new Query(this._tbl, query, this._embeddings)
   }

   /**
-   * Insert records into this Table
-   * @param data Records to be inserted into the Table
+   * Insert records into this Table.
    *
-   * @param mode Append / Overwrite existing records. Default: Append
+   * @param data Records to be inserted into the Table
    * @return The number of rows added to the table
    */
   async add (data: Array<Record<string, unknown>>): Promise<number> {
-    return tableAdd.call(this._tbl, await fromRecordsToBuffer(data), WriteMode.Append.toString())
+    return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Append.toString())
   }

+  /**
+   * Insert records into this Table, replacing its contents.
+   *
+   * @param data Records to be inserted into the Table
+   * @return The number of rows added to the table
+   */
   async overwrite (data: Array<Record<string, unknown>>): Promise<number> {
-    return tableAdd.call(this._tbl, await fromRecordsToBuffer(data), WriteMode.Overwrite.toString())
+    return tableAdd.call(this._tbl, await fromRecordsToBuffer(data, this._embeddings), WriteMode.Overwrite.toString())
+  }
+
+  /**
+   * Create an ANN index on this Table vector index.
+   *
+   * @param indexParams The parameters of this Index, @see VectorIndexParams.
+   */
+  async createIndex (indexParams: VectorIndexParams): Promise<any> {
+    return tableCreateVectorIndex.call(this._tbl, indexParams)
+  }
+
+  /**
+   * @deprecated Use [Table.createIndex]
+   */
+  async create_index (indexParams: VectorIndexParams): Promise<any> {
+    return await this.createIndex(indexParams)
   }
 }

+interface IvfPQIndexConfig {
+  /**
+   * The column to be indexed
+   */
+  column?: string
+
+  /**
+   * A unique name for the index
+   */
+  index_name?: string
+
+  /**
+   * Metric type, L2 or Cosine
+   */
+  metric_type?: MetricType
+
+  /**
+   * The number of partitions of this index
+   */
+  num_partitions?: number
+
+  /**
+   * The max number of iterations for kmeans training.
+   */
+  max_iters?: number
+
+  /**
+   * Train as optimized product quantization.
+   */
+  use_opq?: boolean
+
+  /**
+   * Number of subvectors to build PQ code
+   */
+  num_sub_vectors?: number
+  /**
+   * The number of bits to present one PQ centroid.
+   */
+  num_bits?: number
+
+  /**
+   * Max number of iterations to train OPQ, if `use_opq` is true.
+   */
+  max_opq_iters?: number
+
+  type: 'ivf_pq'
+}
+
+export type VectorIndexParams = IvfPQIndexConfig
+
 /**
  * A builder for nearest neighbor queries for LanceDB.
  */
-export class Query {
+export class Query<T = number[]> {
   private readonly _tbl: any
-  private readonly _query_vector: number[]
+  private readonly _query: T
+  private _queryVector?: number[]
   private _limit: number
-  private readonly _refine_factor?: number
-  private readonly _nprobes: number
-  private readonly _columns?: string[]
+  private _refineFactor?: number
+  private _nprobes: number
+  private _select?: string[]
   private _filter?: string
-  private readonly _metric = 'L2'
+  private _metricType?: MetricType
+  private readonly _embeddings?: EmbeddingFunction<T>

-  constructor (tbl: any, queryVector: number[]) {
+  constructor (tbl: any, query: T, embeddings?: EmbeddingFunction<T>) {
     this._tbl = tbl
-    this._query_vector = queryVector
+    this._query = query
     this._limit = 10
     this._nprobes = 20
-    this._refine_factor = undefined
-    this._columns = undefined
+    this._refineFactor = undefined
+    this._select = undefined
     this._filter = undefined
+    this._metricType = undefined
+    this._embeddings = embeddings
   }

-  limit (value: number): Query {
+  /***
+   * Sets the number of results that will be returned
+   * @param value number of results
+   */
+  limit (value: number): Query<T> {
     this._limit = value
     return this
   }

-  filter (value: string): Query {
+  /**
+   * Refine the results by reading extra elements and re-ranking them in memory.
+   * @param value refine factor to use in this query.
+   */
+  refineFactor (value: number): Query<T> {
+    this._refineFactor = value
+    return this
+  }
+
+  /**
+   * The number of probes used. A higher number makes search more accurate but also slower.
+   * @param value The number of probes used.
+   */
+  nprobes (value: number): Query<T> {
+    this._nprobes = value
+    return this
+  }
+
+  /**
+   * A filter statement to be applied to this query.
+   * @param value A filter in the same format used by a sql WHERE clause.
+   */
+  filter (value: string): Query<T> {
     this._filter = value
     return this
   }

+  /** Return only the specified columns.
+   *
+   * @param value Only select the specified columns. If not specified, all columns will be returned.
+   */
+  select (value: string[]): Query<T> {
+    this._select = value
+    return this
+  }
+
+  /**
+   * The MetricType used for this Query.
+   * @param value The metric to use. @see MetricType for the different options
+   */
+  metricType (value: MetricType): Query<T> {
+    this._metricType = value
+    return this
+  }
+
   /**
    * Execute the query and return the results as an Array of Objects
    */
   async execute<T = Record<string, unknown>> (): Promise<T[]> {
-    let buffer
-    if (this._filter != null) {
-      buffer = await tableSearch.call(this._tbl, this._query_vector, this._limit, this._filter)
+    if (this._embeddings !== undefined) {
+      this._queryVector = (await this._embeddings.embed([this._query]))[0]
     } else {
-      buffer = await tableSearch.call(this._tbl, this._query_vector, this._limit)
+      this._queryVector = this._query as number[]
     }
+
+    const buffer = await tableSearch.call(this._tbl, this)
     const data = tableFromIPC(buffer)
     return data.toArray().map((entry: Record<string, unknown>) => {
       const newObject: Record<string, unknown> = {}
@@ -177,3 +341,18 @@ export enum WriteMode {
   Overwrite = 'overwrite',
   Append = 'append'
 }
+
+/**
+ * Distance metrics type.
+ */
+export enum MetricType {
+  /**
+   * Euclidean distance
+   */
+  L2 = 'l2',
+
+  /**
+   * Cosine distance
+   */
+  Cosine = 'cosine'
+}
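Put together, the additions above (the builder setters, `MetricType`, and `createIndex`) make the client usable roughly like this. This is a hedged sketch, not code from the diff: it assumes a 2-dimensional `vectors` table already exists at the given path, and the index parameters are borrowed from the test further down:

```typescript
const lancedb = require('vectordb')
const { MetricType } = require('vectordb')

async function main (): Promise<void> {
  const db = await lancedb.connect('data/sample-lancedb')
  const tbl = await db.openTable('vectors')

  // Build an IVF_PQ index over the vector column.
  await tbl.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2 })

  // Chain the new builder methods before executing the search.
  const results = await tbl.search([0.1, 0.3])
    .metricType(MetricType.Cosine)
    .nprobes(20)
    .refineFactor(100)
    .select(['name', 'price'])
    .limit(10)
    .execute()
  console.log(results)
}

main().then(() => console.log('done'))
```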
50
node/src/test/embedding/openai.ts
Normal file
@@ -0,0 +1,50 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { describe } from 'mocha'
import { assert } from 'chai'

import { OpenAIEmbeddingFunction } from '../../embedding/openai'

// eslint-disable-next-line @typescript-eslint/no-var-requires
const { OpenAIApi } = require('openai')
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { stub } = require('sinon')

describe('OpenAIEmbeddings', function () {
  const stubValue = {
    data: {
      data: [
        {
          embedding: Array(1536).fill(1.0)
        },
        {
          embedding: Array(1536).fill(2.0)
        }
      ]
    }
  }

  describe('#embed', function () {
    it('should create vector embeddings', async function () {
      const openAIStub = stub(OpenAIApi.prototype, 'createEmbedding').returns(stubValue)
      const f = new OpenAIEmbeddingFunction('text', 'sk-key')
      const vectors = await f.embed(['abc', 'def'])
      assert.isTrue(openAIStub.calledOnce)
      assert.equal(vectors.length, 2)
      assert.deepEqual(vectors[0], stubValue.data.data[0].embedding)
      assert.deepEqual(vectors[1], stubValue.data.data[1].embedding)
    })
  })
})
52
node/src/test/io.ts
Normal file
@@ -0,0 +1,52 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// IO tests

import { describe } from 'mocha'
import { assert } from 'chai'

import * as lancedb from '../index'

describe('LanceDB S3 client', function () {
  if (process.env.TEST_S3_BASE_URL != null) {
    const baseUri = process.env.TEST_S3_BASE_URL
    it('should have a valid url', async function () {
      const uri = `${baseUri}/valid_url`
      const table = await createTestDB(uri, 2, 20)
      const con = await lancedb.connect(uri)
      assert.equal(con.uri, uri)

      const results = await table.search([0.1, 0.3]).limit(5).execute()
      assert.equal(results.length, 5)
    })
  } else {
    describe.skip('Skip S3 test', function () {})
  }
})

async function createTestDB (uri: string, numDimensions: number = 2, numRows: number = 2): Promise<lancedb.Table> {
  const con = await lancedb.connect(uri)

  const data = []
  for (let i = 0; i < numRows; i++) {
    const vector = []
    for (let j = 0; j < numDimensions; j++) {
      vector.push(i + (j * 0.1))
    }
    data.push({ id: i + 1, name: `name_${i}`, price: i + 10, is_active: (i % 2 === 0), vector })
  }

  return await con.createTable('vectors', data)
}
@@ -17,6 +17,7 @@ import { assert } from 'chai'
 import { track } from 'temp'

 import * as lancedb from '../index'
+import { type EmbeddingFunction, MetricType, Query } from '../index'

 describe('LanceDB client', function () {
   describe('when creating a connection to lancedb', function () {
@@ -67,10 +68,26 @@ describe('LanceDB client', function () {
       const uri = await createTestDB()
       const con = await lancedb.connect(uri)
       const table = await con.openTable('vectors')
-      const results = await table.search([0.1, 0.3]).filter('id == 2').execute()
+      const results = await table.search([0.1, 0.1]).filter('id == 2').execute()
       assert.equal(results.length, 1)
       assert.equal(results[0].id, 2)
     })
+
+    it('select only a subset of columns', async function () {
+      const uri = await createTestDB()
+      const con = await lancedb.connect(uri)
+      const table = await con.openTable('vectors')
+      const results = await table.search([0.1, 0.1]).select(['is_active']).execute()
+      assert.equal(results.length, 2)
+      // vector and score are always returned
+      assert.isDefined(results[0].vector)
+      assert.isDefined(results[0].score)
+      assert.isDefined(results[0].is_active)
+
+      assert.isUndefined(results[0].id)
+      assert.isUndefined(results[0].name)
+      assert.isUndefined(results[0].price)
+    })
   })

   describe('when creating a new dataset', function () {
@@ -96,8 +113,8 @@ describe('LanceDB client', function () {
       const con = await lancedb.connect(dir)

       const data = [
-        { id: 1, vector: [0.1, 0.2], price: 10 },
-        { id: 2, vector: [1.1, 1.2], price: 50 }
+        { id: 1, vector: [0.1, 0.2], price: 10, name: 'a' },
+        { id: 2, vector: [1.1, 1.2], price: 50, name: 'b' }
       ]

       const table = await con.createTable('vectors', data)
@@ -105,8 +122,8 @@ describe('LanceDB client', function () {
       assert.equal(results.length, 2)

       const dataAdd = [
-        { id: 3, vector: [2.1, 2.2], price: 10 },
-        { id: 4, vector: [3.1, 3.2], price: 50 }
+        { id: 3, vector: [2.1, 2.2], price: 10, name: 'c' },
+        { id: 4, vector: [3.1, 3.2], price: 50, name: 'd' }
       ]
       await table.add(dataAdd)
       const resultsAdd = await table.search([0.1, 0.3]).execute()
@@ -130,16 +147,78 @@ describe('LanceDB client', function () {
       assert.equal(resultsAdd.length, 2)
     })
   })

+  describe('when creating a vector index', function () {
+    it('overwrite all records in a table', async function () {
+      const uri = await createTestDB(32, 300)
+      const con = await lancedb.connect(uri)
+      const table = await con.openTable('vectors')
+      await table.createIndex({ type: 'ivf_pq', column: 'vector', num_partitions: 2, max_iters: 2 })
+    }).timeout(10_000) // Timeout is high partially because GH macos runner is pretty slow
   })

-async function createTestDB (): Promise<string> {
+  describe('when using a custom embedding function', function () {
+    class TextEmbedding implements EmbeddingFunction<string> {
+      sourceColumn: string
+
+      constructor (targetColumn: string) {
+        this.sourceColumn = targetColumn
+      }
+
+      _embedding_map = new Map<string, number[]>([
+        ['foo', [2.1, 2.2]],
+        ['bar', [3.1, 3.2]]
+      ])
+
+      async embed (data: string[]): Promise<number[][]> {
+        return data.map(datum => this._embedding_map.get(datum) ?? [0.0, 0.0])
+      }
+    }
+
+    it('should encode the original data into embeddings', async function () {
+      const dir = await track().mkdir('lancejs')
+      const con = await lancedb.connect(dir)
+      const embeddings = new TextEmbedding('name')
+
+      const data = [
+        { price: 10, name: 'foo' },
+        { price: 50, name: 'bar' }
+      ]
+      const table = await con.createTable('vectors', data, embeddings)
+      const results = await table.search('foo').execute()
+      assert.equal(results.length, 2)
+    })
+  })
+})
+
+describe('Query object', function () {
+  it('sets custom parameters', async function () {
+    const query = new Query(undefined, [0.1, 0.3])
+      .limit(1)
+      .metricType(MetricType.Cosine)
+      .refineFactor(100)
+      .select(['a', 'b'])
+      .nprobes(20) as Record<string, any>
+    assert.equal(query._limit, 1)
+    assert.equal(query._metricType, MetricType.Cosine)
+    assert.equal(query._refineFactor, 100)
+    assert.equal(query._nprobes, 20)
+    assert.deepEqual(query._select, ['a', 'b'])
+  })
+})
+
+async function createTestDB (numDimensions: number = 2, numRows: number = 2): Promise<string> {
   const dir = await track().mkdir('lancejs')
   const con = await lancedb.connect(dir)

-  const data = [
-    { id: 1, vector: [0.1, 0.2], name: 'foo', price: 10, is_active: true },
-    { id: 2, vector: [1.1, 1.2], name: 'bar', price: 50, is_active: false }
-  ]
+  const data = []
+  for (let i = 0; i < numRows; i++) {
+    const vector = []
+    for (let j = 0; j < numDimensions; j++) {
+      vector.push(i + (j * 0.1))
+    }
+    data.push({ id: i + 1, name: `name_${i}`, price: i + 10, is_active: (i % 2 === 0), vector })
+  }

   await con.createTable('vectors', data)
   return dir
8
python/.bumpversion.cfg
Normal file
@@ -0,0 +1,8 @@
[bumpversion]
current_version = 0.1.8
commit = True
message = [python] Bump version: {current_version} → {new_version}
tag = True
tag_name = python-v{new_version}

[bumpversion:file:pyproject.toml]
@@ -22,8 +22,21 @@ def connect(uri: URI) -> LanceDBConnection:
     uri: str or Path
         The uri of the database.

+    Examples
+    --------
+
+    For a local directory, provide a path for the database:
+
+    >>> import lancedb
+    >>> db = lancedb.connect("~/.lancedb")
+
+    For object storage, use a URI prefix:
+
+    >>> db = lancedb.connect("s3://my-bucket/lancedb")
+
     Returns
     -------
+    conn : LanceDBConnection
         A connection to a LanceDB database.
     """
     return LanceDBConnection(uri)
18
python/lancedb/conftest.py
Normal file
@@ -0,0 +1,18 @@
import builtins
import os

import pytest

# import lancedb so we don't have to in every example
import lancedb


@pytest.fixture(autouse=True)
def doctest_setup(monkeypatch, tmpdir):
    # disable color for doctests so we don't have to include
    # escape codes in docstrings
    monkeypatch.setitem(os.environ, "NO_COLOR", "1")
    # Explicitly set the column width
    monkeypatch.setitem(os.environ, "COLUMNS", "80")
    # Work in a temporary directory
    monkeypatch.chdir(tmpdir)
@@ -13,16 +13,80 @@
 from __future__ import annotations

 import pandas as pd

+from .exceptions import MissingValueError, MissingColumnError
+

 def contextualize(raw_df: pd.DataFrame) -> Contextualizer:
     """Create a Contextualizer object for the given DataFrame.

-    Used to create context windows.
+    Used to create context windows. Context windows are rolling subsets of text
+    data.
+
+    The input text column should already be separated into rows that will be the
+    unit of the window. So to create a context window over tokens, start with
+    a DataFrame with one token per row. To create a context window over sentences,
+    start with a DataFrame with one sentence per row.
+
+    Examples
+    --------
+    >>> from lancedb.context import contextualize
+    >>> import pandas as pd
+    >>> data = pd.DataFrame({
+    ...    'token': ['The', 'quick', 'brown', 'fox', 'jumped', 'over',
+    ...              'the', 'lazy', 'dog', 'I', 'love', 'sandwiches'],
+    ...    'document_id': [1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2]
+    ... })
+
+    ``window`` determines how many rows to include in each window. In our case
+    this is how many tokens, but depending on the input data, it could be sentences,
+    paragraphs, messages, etc.
+
+    >>> contextualize(data).window(3).stride(1).text_col('token').to_df()
+                  token  document_id
+    0   The quick brown            1
+    1   quick brown fox            1
+    2  brown fox jumped            1
+    3   fox jumped over            1
+    4   jumped over the            1
+    5     over the lazy            1
+    6      the lazy dog            1
+    7        lazy dog I            1
+    8        dog I love            1
+    >>> contextualize(data).window(7).stride(1).text_col('token').to_df()
+                                      token  document_id
+    0   The quick brown fox jumped over the            1
+    1  quick brown fox jumped over the lazy            1
+    2    brown fox jumped over the lazy dog            1
+    3        fox jumped over the lazy dog I            1
+    4       jumped over the lazy dog I love            1
+
+    ``stride`` determines how many rows to skip between each window start. This can
+    be used to reduce the total number of windows generated.
+
+    >>> contextualize(data).window(4).stride(2).text_col('token').to_df()
+                       token  document_id
+    0    The quick brown fox            1
+    2  brown fox jumped over            1
+    4   jumped over the lazy            1
+    6         the lazy dog I            1
+
+    ``groupby`` determines how to group the rows. For example, we would like to have
+    context windows that don't cross document boundaries. In this case, we can
+    pass ``document_id`` as the group by.
+
+    >>> contextualize(data).window(4).stride(2).text_col('token').groupby('document_id').to_df()
+                       token  document_id
+    0    The quick brown fox            1
+    2  brown fox jumped over            1
+    4   jumped over the lazy            1
     """
     return Contextualizer(raw_df)


 class Contextualizer:
+    """Create context windows from a DataFrame. See [lancedb.context.contextualize][]."""

     def __init__(self, raw_df):
         self._text_col = None
         self._groupby = None
@@ -78,6 +142,21 @@ class Contextualizer:
     def to_df(self) -> pd.DataFrame:
         """Create the context windows and return a DataFrame."""

+        if self._text_col not in self._raw_df.columns.tolist():
+            raise MissingColumnError(self._text_col)
+
+        if self._window is None or self._window < 1:
+            raise MissingValueError(
+                "The value of window is None or less than 1. Specify the "
+                "window size (number of rows to include in each window)"
+            )
+
+        if self._stride is None or self._stride < 1:
+            raise MissingValueError(
+                "The value of stride is None or less than 1. Specify the "
+                "stride (number of rows to skip between each window)"
+            )
+
         def process_group(grp):
             # For each group, create the text rolling window
             text = grp[self._text_col].values
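Beyond the doctests above, here is a minimal sketch of how the Contextualizer API composes end to end. The sample sentences are hypothetical; the method names come from the docstring in this diff:

    import pandas as pd
    from lancedb.context import contextualize

    # One sentence per row, so each window is a run of consecutive sentences.
    df = pd.DataFrame({
        "text": ["LanceDB is embedded.", "It stores vectors.", "Queries are fast."],
        "document_id": [1, 1, 1],
    })

    # Two-sentence rolling windows that never cross document boundaries.
    windows = (
        contextualize(df)
        .window(2)
        .stride(1)
        .groupby("document_id")
        .text_col("text")
        .to_df()
    )
    print(windows)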
@@ -13,18 +13,46 @@
 from __future__ import annotations

+import os
 from pathlib import Path
-import os

 import pyarrow as pa
+from pyarrow import fs

 from .common import DATA, URI
 from .table import LanceTable
-from .util import get_uri_scheme
+from .util import get_uri_scheme, get_uri_location


 class LanceDBConnection:
     """
     A connection to a LanceDB database.
+
+    Parameters
+    ----------
+    uri: str or Path
+        The root uri of the database.
+
+    Examples
+    --------
+    >>> import lancedb
+    >>> db = lancedb.connect("./.lancedb")
+    >>> db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2},
+    ...                                   {"vector": [0.5, 1.3], "b": 4}])
+    LanceTable(my_table)
+    >>> db.create_table("another_table", data=[{"vector": [0.4, 0.4], "b": 6}])
+    LanceTable(another_table)
+    >>> db.table_names()
+    ['another_table', 'my_table']
+    >>> len(db)
+    2
+    >>> db["my_table"]
+    LanceTable(my_table)
+    >>> "my_table" in db
+    True
+    >>> db.drop_table("my_table")
+    >>> db.drop_table("another_table")
     """

     def __init__(self, uri: URI):
@@ -45,13 +73,27 @@ class LanceDBConnection:

         Returns
         -------
+        list of str
             A list of table names.
         """
-        if get_uri_scheme(self.uri) == "file":
-            return [p.stem for p in Path(self.uri).glob("*.lance")]
-        raise NotImplementedError(
-            "List table_names is only supported for local filesystem for now"
-        )
+        try:
+            filesystem, path = fs.FileSystem.from_uri(self.uri)
+        except pa.ArrowInvalid:
+            raise NotImplementedError("Unsupported scheme: " + self.uri)
+
+        try:
+            paths = filesystem.get_file_info(
+                fs.FileSelector(get_uri_location(self.uri))
+            )
+        except FileNotFoundError:
+            # It is ok if the file does not exist since it will be created
+            paths = []
+        tables = [
+            os.path.splitext(file_info.base_name)[0]
+            for file_info in paths
+            if file_info.extension == "lance"
+        ]
+        return tables

     def __len__(self) -> int:
         return len(self.table_names())
@@ -91,7 +133,73 @@ class LanceDBConnection:

         Returns
         -------
-        A LanceTable object representing the table.
+        LanceTable
+            A reference to the newly created table.
+
+        Examples
+        --------
+
+        Can create with list of tuples or dictionaries:
+
+        >>> import lancedb
+        >>> db = lancedb.connect("./.lancedb")
+        >>> data = [{"vector": [1.1, 1.2], "lat": 45.5, "long": -122.7},
+        ...         {"vector": [0.2, 1.8], "lat": 40.1, "long": -74.1}]
+        >>> db.create_table("my_table", data)
+        LanceTable(my_table)
+        >>> db["my_table"].head()
+        pyarrow.Table
+        vector: fixed_size_list<item: float>[2]
+          child 0, item: float
+        lat: double
+        long: double
+        ----
+        vector: [[[1.1,1.2],[0.2,1.8]]]
+        lat: [[45.5,40.1]]
+        long: [[-122.7,-74.1]]
+
+        You can also pass a pandas DataFrame:
+
+        >>> import pandas as pd
+        >>> data = pd.DataFrame({
+        ...    "vector": [[1.1, 1.2], [0.2, 1.8]],
+        ...    "lat": [45.5, 40.1],
+        ...    "long": [-122.7, -74.1]
+        ... })
+        >>> db.create_table("table2", data)
+        LanceTable(table2)
+        >>> db["table2"].head()
+        pyarrow.Table
+        vector: fixed_size_list<item: float>[2]
+          child 0, item: float
+        lat: double
+        long: double
+        ----
+        vector: [[[1.1,1.2],[0.2,1.8]]]
+        lat: [[45.5,40.1]]
+        long: [[-122.7,-74.1]]
+
+        Data is converted to Arrow before being written to disk. For maximum
+        control over how data is saved, either provide the PyArrow schema to
+        convert to or else provide a PyArrow table directly.
+
+        >>> custom_schema = pa.schema([
+        ...    pa.field("vector", pa.list_(pa.float32(), 2)),
+        ...    pa.field("lat", pa.float32()),
+        ...    pa.field("long", pa.float32())
+        ... ])
+        >>> db.create_table("table3", data, schema = custom_schema)
+        LanceTable(table3)
+        >>> db["table3"].head()
+        pyarrow.Table
+        vector: fixed_size_list<item: float>[2]
+          child 0, item: float
+        lat: float
+        long: float
+        ----
+        vector: [[[1.1,1.2],[0.2,1.8]]]
+        lat: [[45.5,40.1]]
+        long: [[-122.7,-74.1]]
         """
         if data is not None:
             tbl = LanceTable.create(self, name, data, schema, mode=mode)
@@ -112,3 +220,15 @@ class LanceDBConnection:
         A LanceTable object representing the table.
         """
         return LanceTable(self, name)
+
+    def drop_table(self, name: str):
+        """Drop a table from the database.
+
+        Parameters
+        ----------
+        name: str
+            The name of the table.
+        """
+        filesystem, path = pa.fs.FileSystem.from_uri(self.uri)
+        table_path = os.path.join(path, name + ".lance")
+        filesystem.delete_dir(table_path)
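A hedged sketch of how the reworked table_names and the new drop_table behave now that listing goes through pyarrow.fs. The S3 bucket below is hypothetical and assumes credentials are configured out of band:

    import lancedb

    # Local directories still work: pyarrow resolves them to a LocalFileSystem.
    db = lancedb.connect("./.lancedb")
    db.create_table("scratch", data=[{"vector": [1.0, 2.0], "b": 1}])
    assert "scratch" in db.table_names()

    # drop_table deletes the underlying <name>.lance directory.
    db.drop_table("scratch")
    assert "scratch" not in db.table_names()

    # Object stores are now listable too, e.g. (hypothetical bucket):
    #   s3_db = lancedb.connect("s3://my-bucket/lancedb")
    #   print(s3_db.table_names())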
@@ -29,7 +29,31 @@ def with_embeddings(
     wrap_api: bool = True,
     show_progress: bool = False,
     batch_size: int = 1000,
-):
+) -> pa.Table:
+    """Add a vector column to a table using the given embedding function.
+
+    The new column will be called "vector".
+
+    Parameters
+    ----------
+    func : Callable
+        A function that takes a list of strings and returns a list of vectors.
+    data : pa.Table or pd.DataFrame
+        The data to add an embedding column to.
+    column : str, default "text"
+        The name of the column to use as input to the embedding function.
+    wrap_api : bool, default True
+        Whether to wrap the embedding function in a retry and rate limiter.
+    show_progress : bool, default False
+        Whether to show a progress bar.
+    batch_size : int, default 1000
+        The number of row values to pass to each call of the embedding function.
+
+    Returns
+    -------
+    pa.Table
+        The input table with a new column called "vector" containing the embeddings.
+    """
     func = EmbeddingFunction(func)
     if wrap_api:
         func = func.retry().rate_limit()
python/lancedb/exceptions.py
Normal file
22
python/lancedb/exceptions.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
"""Custom exception handling"""
|
||||||
|
|
||||||
|
|
||||||
|
class MissingValueError(ValueError):
|
||||||
|
"""Exception raised when a required value is missing."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class MissingColumnError(KeyError):
|
||||||
|
"""
|
||||||
|
Exception raised when a column name specified is not in
|
||||||
|
the DataFrame object
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, column_name):
|
||||||
|
self.column_name = column_name
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return (
|
||||||
|
f"Error: Column '{self.column_name}' does not exist in the DataFrame object"
|
||||||
|
)
|
||||||
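These exceptions are what Contextualizer.to_df raises in the validation added above; a small sketch of catching them (the DataFrame is illustrative):

    import pandas as pd
    from lancedb.context import contextualize
    from lancedb.exceptions import MissingColumnError, MissingValueError

    df = pd.DataFrame({"token": ["a", "b", "c"]})

    try:
        # "sentence" is not a column of df, so to_df() raises MissingColumnError.
        contextualize(df).window(2).stride(1).text_col("sentence").to_df()
    except MissingColumnError as e:
        print(e)

    try:
        # window was never set, so to_df() raises MissingValueError.
        contextualize(df).stride(1).text_col("token").to_df()
    except MissingValueError as e:
        print(e)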
135
python/lancedb/fts.py
Normal file
@@ -0,0 +1,135 @@
# Copyright 2023 LanceDB Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Full text search index using tantivy-py"""
import os
from typing import List, Tuple

import pyarrow as pa

try:
    import tantivy
except ImportError:
    raise ImportError(
        "Please install tantivy-py `pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985` to use the full text search feature."
    )

from .table import LanceTable


def create_index(index_path: str, text_fields: List[str]) -> tantivy.Index:
    """
    Create a new Index (not populated)

    Parameters
    ----------
    index_path : str
        Path to the index directory
    text_fields : List[str]
        List of text fields to index

    Returns
    -------
    index : tantivy.Index
        The index object (not yet populated)
    """
    # Declaring our schema.
    schema_builder = tantivy.SchemaBuilder()
    # special field that we'll populate with row_id
    schema_builder.add_integer_field("doc_id", stored=True)
    # data fields
    for name in text_fields:
        schema_builder.add_text_field(name, stored=True)
    schema = schema_builder.build()
    os.makedirs(index_path, exist_ok=True)
    index = tantivy.Index(schema, path=index_path)
    return index


def populate_index(index: tantivy.Index, table: LanceTable, fields: List[str]) -> int:
    """
    Populate an index with data from a LanceTable

    Parameters
    ----------
    index : tantivy.Index
        The index object
    table : LanceTable
        The table to index
    fields : List[str]
        List of fields to index

    Returns
    -------
    int
        The number of rows indexed
    """
    # first check the fields exist and are string or large string type
    for name in fields:
        f = table.schema.field(name)  # raises KeyError if not found
        if not pa.types.is_string(f.type) and not pa.types.is_large_string(f.type):
            raise TypeError(f"Field {name} is not a string type")

    # create a tantivy writer
    writer = index.writer()
    # write data into index
    dataset = table.to_lance()
    row_id = 0
    for b in dataset.to_batches(columns=fields):
        for i in range(b.num_rows):
            doc = tantivy.Document()
            doc.add_integer("doc_id", row_id)
            for name in fields:
                doc.add_text(name, b[name][i].as_py())
            writer.add_document(doc)
            row_id += 1
    # commit changes
    writer.commit()
    return row_id


def search_index(
    index: tantivy.Index, query: str, limit: int = 10
) -> Tuple[Tuple[int], Tuple[float]]:
    """
    Search an index for a query

    Parameters
    ----------
    index : tantivy.Index
        The index object
    query : str
        The query string
    limit : int
        The maximum number of results to return

    Returns
    -------
    ids_and_score: list[tuple[int], tuple[float]]
        A tuple of two tuples, the first containing the document ids
        and the second containing the scores
    """
    searcher = index.searcher()
    query = index.parse_query(query)
    # get top results
    results = searcher.search(query, limit)
    if results.count == 0:
        return tuple(), tuple()
    return tuple(
        zip(
            *[
                (searcher.doc(doc_address)["doc_id"][0], score)
                for score, doc_address in results.hits
            ]
        )
    )
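Putting the three fts helpers together; a sketch assuming tantivy-py is installed and using a hypothetical path (table.create_fts_index, added later in this diff, wraps the same steps):

    import lancedb
    from lancedb.fts import create_index, populate_index, search_index

    db = lancedb.connect("/tmp/fts_demo")  # hypothetical location
    table = db.create_table("docs", data=[
        {"vector": [0.0, 0.0], "text": "a quick brown fox"},
        {"vector": [1.0, 1.0], "text": "a lazy dog"},
    ])

    index = create_index("/tmp/fts_demo/docs_index", ["text"])
    populate_index(index, table, ["text"])
    index.reload()  # make the committed documents visible to searchers

    row_ids, scores = search_index(index, "fox", limit=5)
    # doc_id values are row positions in the Lance dataset, so take() maps back:
    print(table.to_lance().take(list(row_ids), columns=["text"]))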
@@ -11,9 +11,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from __future__ import annotations
+from typing import Literal

 import numpy as np
 import pandas as pd
+import pyarrow as pa

 from .common import VECTOR_COLUMN_NAME

@@ -21,6 +23,24 @@ from .common import VECTOR_COLUMN_NAME
 class LanceQueryBuilder:
     """
     A builder for nearest neighbor queries for LanceDB.
+
+    Examples
+    --------
+    >>> import lancedb
+    >>> data = [{"vector": [1.1, 1.2], "b": 2},
+    ...         {"vector": [0.5, 1.3], "b": 4},
+    ...         {"vector": [0.4, 0.4], "b": 6},
+    ...         {"vector": [0.4, 0.4], "b": 10}]
+    >>> db = lancedb.connect("./.lancedb")
+    >>> table = db.create_table("my_table", data=data)
+    >>> (table.search([0.4, 0.4])
+    ...    .metric("cosine")
+    ...    .where("b < 10")
+    ...    .select(["b"])
+    ...    .limit(2)
+    ...    .to_df())
+       b      vector  score
+    0  6  [0.4, 0.4]    0.0
     """

     def __init__(self, table: "lancedb.table.LanceTable", query: np.ndarray):
@@ -43,6 +63,7 @@ class LanceQueryBuilder:

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._limit = limit
@@ -58,6 +79,7 @@ class LanceQueryBuilder:

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._columns = columns
@@ -73,21 +95,23 @@ class LanceQueryBuilder:

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._where = where
         return self

-    def metric(self, metric: str) -> LanceQueryBuilder:
+    def metric(self, metric: Literal["L2", "cosine"]) -> LanceQueryBuilder:
         """Set the distance metric to use.

         Parameters
         ----------
-        metric: str
-            The distance metric to use. By default "l2" is used.
+        metric: "L2" or "cosine"
+            The distance metric to use. By default "L2" is used.

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._metric = metric
@@ -96,6 +120,12 @@ class LanceQueryBuilder:
     def nprobes(self, nprobes: int) -> LanceQueryBuilder:
         """Set the number of probes to use.

+        Higher values will yield better recall (more likely to find vectors if
+        they exist) at the expense of latency.
+
+        See discussion in [Querying an ANN Index][querying-an-ann-index] for
+        tuning advice.
+
         Parameters
         ----------
         nprobes: int
@@ -103,13 +133,20 @@ class LanceQueryBuilder:

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._nprobes = nprobes
         return self

     def refine_factor(self, refine_factor: int) -> LanceQueryBuilder:
-        """Set the refine factor to use.
+        """Set the refine factor to use, increasing the number of vectors sampled.
+
+        As an example, a refine factor of 2 will sample 2x as many vectors as
+        requested, re-rank them, and return the top half most relevant results.
+
+        See discussion in [Querying an ANN Index][querying-an-ann-index] for
+        tuning advice.

         Parameters
         ----------
@@ -118,6 +155,7 @@ class LanceQueryBuilder:

         Returns
         -------
+        LanceQueryBuilder
             The LanceQueryBuilder object.
         """
         self._refine_factor = refine_factor
@@ -131,7 +169,6 @@ class LanceQueryBuilder:
         vector and the returned vector.
         """
         ds = self._table.to_lance()
-        # TODO indexed search
         tbl = ds.to_table(
             columns=self._columns,
             filter=self._where,
@@ -145,3 +182,28 @@ class LanceQueryBuilder:
             },
         )
         return tbl.to_pandas()
+
+
+class LanceFtsQueryBuilder(LanceQueryBuilder):
+    def to_df(self) -> pd.DataFrame:
+        try:
+            import tantivy
+        except ImportError:
+            raise ImportError(
+                "Please install tantivy-py `pip install tantivy@git+https://github.com/quickwit-oss/tantivy-py#164adc87e1a033117001cf70e38c82a53014d985` to use the full text search feature."
+            )
+
+        from .fts import search_index
+
+        # get the index path
+        index_path = self._table._get_fts_index_path()
+        # open the index
+        index = tantivy.Index.open(index_path)
+        # get the scores and doc ids
+        row_ids, scores = search_index(index, self._query, self._limit)
+        if len(row_ids) == 0:
+            return pd.DataFrame()
+        scores = pa.array(scores)
+        output_tbl = self._table.to_lance().take(row_ids, columns=self._columns)
+        output_tbl = output_tbl.append_column("score", scores)
+        return output_tbl.to_pandas()
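How the new tuning knobs compose on a vector query; the values are illustrative, not recommendations, and the sketch assumes the my_table from the docstring example already exists:

    import lancedb

    db = lancedb.connect("./.lancedb")
    table = db.open_table("my_table")

    df = (
        table.search([0.4, 0.4])
        .metric("cosine")     # now typed as Literal["L2", "cosine"]
        .nprobes(20)          # probe more partitions: better recall, more latency
        .refine_factor(10)    # oversample 10x, re-rank, keep the best results
        .limit(5)
        .to_df()
    )
    print(df)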
@@ -14,7 +14,9 @@
 from __future__ import annotations

 import os
+import shutil
 from functools import cached_property
+from typing import List, Union

 import lance
 import numpy as np
@@ -24,7 +26,8 @@ from lance import LanceDataset
 from lance.vector import vec_to_table

 from .common import DATA, VEC, VECTOR_COLUMN_NAME
-from .query import LanceQueryBuilder
+from .query import LanceFtsQueryBuilder, LanceQueryBuilder
+from .util import get_uri_scheme


 def _sanitize_data(data, schema):
@@ -44,6 +47,40 @@ def _sanitize_data(data, schema):
 class LanceTable:
     """
     A table in a LanceDB database.
+
+    Examples
+    --------
+
+    Create using [LanceDBConnection.create_table][lancedb.LanceDBConnection.create_table]
+    (more examples in that method's documentation).
+
+    >>> import lancedb
+    >>> db = lancedb.connect("./.lancedb")
+    >>> table = db.create_table("my_table", data=[{"vector": [1.1, 1.2], "b": 2}])
+    >>> table.head()
+    pyarrow.Table
+    vector: fixed_size_list<item: float>[2]
+      child 0, item: float
+    b: int64
+    ----
+    vector: [[[1.1,1.2]]]
+    b: [[2]]
+
+    Can append new data with [LanceTable.add][lancedb.table.LanceTable.add].
+
+    >>> table.add([{"vector": [0.5, 1.3], "b": 4}])
+    2
+
+    Can query the table with [LanceTable.search][lancedb.table.LanceTable.search].
+
+    >>> table.search([0.4, 0.4]).select(["b"]).to_df()
+       b      vector  score
+    0  4  [0.5, 1.3]   0.82
+    1  2  [1.1, 1.2]   1.13
+
+    Search queries are much faster when an index is created. See
+    [LanceTable.create_index][lancedb.table.LanceTable.create_index].
     """

     def __init__(
@@ -61,7 +98,12 @@ class LanceTable:

     @property
     def schema(self) -> pa.Schema:
-        """Return the schema of the table."""
+        """Return the schema of the table.
+
+        Returns
+        -------
+        pa.Schema
+            A PyArrow schema object."""
         return self._dataset.schema

     def list_versions(self):
@@ -69,12 +111,39 @@ class LanceTable:
         return self._dataset.versions()

     @property
-    def version(self):
+    def version(self) -> int:
         """Get the current version of the table"""
         return self._dataset.version

     def checkout(self, version: int):
-        """Checkout a version of the table"""
+        """Checkout a version of the table. This is an in-place operation.
+
+        This allows viewing previous versions of the table.
+
+        Parameters
+        ----------
+        version : int
+            The version to checkout.
+
+        Examples
+        --------
+        >>> import lancedb
+        >>> db = lancedb.connect("./.lancedb")
+        >>> table = db.create_table("my_table", [{"vector": [1.1, 0.9], "type": "vector"}])
+        >>> table.version
+        1
+        >>> table.to_pandas()
+               vector    type
+        0  [1.1, 0.9]  vector
+        >>> table.add([{"vector": [0.5, 0.2], "type": "vector"}])
+        2
+        >>> table.version
+        2
+        >>> table.checkout(1)
+        >>> table.to_pandas()
+               vector    type
+        0  [1.1, 0.9]  vector
+        """
         max_ver = max([v["version"] for v in self._dataset.versions()])
         if version < 1 or version > max_ver:
             raise ValueError(f"Invalid version {version}")
@@ -95,11 +164,20 @@ class LanceTable:
         return self._dataset.head(n)

     def to_pandas(self) -> pd.DataFrame:
-        """Return the table as a pandas DataFrame."""
+        """Return the table as a pandas DataFrame.
+
+        Returns
+        -------
+        pd.DataFrame
+        """
         return self.to_arrow().to_pandas()

     def to_arrow(self) -> pa.Table:
-        """Return the table as a pyarrow Table."""
+        """Return the table as a pyarrow Table.
+
+        Returns
+        -------
+        pa.Table"""
         return self._dataset.to_table()

     @property
@@ -130,6 +208,27 @@ class LanceTable:
         )
         self._reset_dataset()

+    def create_fts_index(self, field_names: Union[str, List[str]]):
+        """Create a full-text search index on the table.
+
+        Warning - this API is highly experimental and is highly likely to change
+        in the future.
+
+        Parameters
+        ----------
+        field_names: str or list of str
+            The name(s) of the field to index.
+        """
+        from .fts import create_index, populate_index
+
+        if isinstance(field_names, str):
+            field_names = [field_names]
+        index = create_index(self._get_fts_index_path(), field_names)
+        populate_index(index, self, field_names)
+
+    def _get_fts_index_path(self):
+        return os.path.join(self._dataset_uri, "_indices", "tantivy")
+
     @cached_property
     def _dataset(self) -> LanceDataset:
         return lance.dataset(self._dataset_uri, version=self._version)
@@ -151,14 +250,15 @@ class LanceTable:

         Returns
         -------
-        The number of vectors added to the table.
+        int
+            The number of vectors in the table.
         """
         data = _sanitize_data(data, self.schema)
         lance.write_dataset(data, self._dataset_uri, mode=mode)
         self._reset_dataset()
         return len(self)

-    def search(self, query: VEC) -> LanceQueryBuilder:
+    def search(self, query: Union[VEC, str]) -> LanceQueryBuilder:
         """Create a search query to find the nearest neighbors
         of the given query vector.

@@ -169,11 +269,16 @@ class LanceTable:

         Returns
         -------
-        A LanceQueryBuilder object representing the query.
+        LanceQueryBuilder
+            A query builder object representing the query.
             Once executed, the query returns selected columns, the vector,
             and also the "score" column which is the distance between the query
             vector and the returned vector.
         """
+        if isinstance(query, str):
+            # fts
+            return LanceFtsQueryBuilder(self, query)
+
         if isinstance(query, list):
             query = np.array(query)
         if isinstance(query, np.ndarray):
@@ -225,8 +330,7 @@ def _sanitize_vector_column(data: pa.Table, vector_column_name: str) -> pa.Table
         vector_column_name: str
             The name of the vector column.
     """
-    i = data.column_names.index(vector_column_name)
-    if i < 0:
+    if vector_column_name not in data.column_names:
         raise ValueError(f"Missing vector column: {vector_column_name}")
     vec_arr = data[vector_column_name].combine_chunks()
     if pa.types.is_fixed_size_list(vec_arr.type):
@@ -238,4 +342,6 @@ def _sanitize_vector_column(data: pa.Table, vector_column_name: str) -> pa.Table
         values = values.cast(pa.float32())
     list_size = len(values) / len(data)
     vec_arr = pa.FixedSizeListArray.from_arrays(values, list_size)
-    return data.set_column(i, vector_column_name, vec_arr)
+    return data.set_column(
+        data.column_names.index(vector_column_name), vector_column_name, vec_arr
+    )
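With the pieces above in place, a string query routes to LanceFtsQueryBuilder while a vector query keeps the ANN path; a sketch with hypothetical data and path:

    import lancedb

    db = lancedb.connect("/tmp/table_fts_demo")  # hypothetical location
    table = db.create_table("docs", data=[
        {"vector": [0.0, 0.0], "text": "a quick brown fox"},
        {"vector": [1.0, 1.0], "text": "a lazy dog"},
    ])

    # Builds and populates a tantivy index under <table>.lance/_indices/tantivy.
    table.create_fts_index("text")

    # String query -> full text search; list query -> vector search.
    print(table.search("fox").limit(5).select(["text"]).to_df())
    print(table.search([0.1, 0.1]).limit(1).to_df())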
@@ -41,3 +41,23 @@ def get_uri_scheme(uri: str) -> str:
         # So we add special handling here for schemes that are a single character
         scheme = "file"
     return scheme
+
+
+def get_uri_location(uri: str) -> str:
+    """
+    Get the location of a URI. If the parameter is not a url, assumes it is just a path
+
+    Parameters
+    ----------
+    uri : str
+        The URI to parse.
+
+    Returns
+    -------
+    str: Location part of the URL, without scheme
+    """
+    parsed = urlparse(uri)
+    if not parsed.netloc:
+        return parsed.path
+    else:
+        return parsed.netloc + parsed.path
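The behavior of the new helper in both cases, as a couple of checked examples (the bucket name is hypothetical; this is pure string parsing):

    from lancedb.util import get_uri_location, get_uri_scheme

    # Plain paths have no netloc, so the location is the path itself.
    assert get_uri_location("/data/lancedb") == "/data/lancedb"

    # For object-store URIs the bucket (netloc) stays in the location.
    assert get_uri_location("s3://my-bucket/lancedb") == "my-bucket/lancedb"
    assert get_uri_scheme("s3://my-bucket/lancedb") == "s3"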
@@ -1,7 +1,7 @@
 [project]
 name = "lancedb"
-version = "0.1.2"
+version = "0.1.8"
-dependencies = ["pylance>=0.4.6", "ratelimiter", "retry", "tqdm"]
+dependencies = ["pylance>=0.4.20", "ratelimiter", "retry", "tqdm"]
 description = "lancedb"
 authors = [
     { name = "LanceDB Devs", email = "dev@lancedb.com" },
@@ -33,11 +33,11 @@ classifiers = [
 ]

 [project.urls]
-repository = "https://github.com/eto-ai/lancedb"
+repository = "https://github.com/lancedb/lancedb"

 [project.optional-dependencies]
 tests = [
-    "pytest"
+    "pytest", "pytest-mock", "doctest"
 ]
 dev = [
     "ruff", "pre-commit", "black"
@@ -97,3 +97,26 @@ def test_create_mode(tmp_path):
     )
     tbl = db.create_table("test", data=new_data, mode="overwrite")
     assert tbl.to_pandas().item.tolist() == ["fizz", "buzz"]
+
+
+def test_delete_table(tmp_path):
+    db = lancedb.connect(tmp_path)
+    data = pd.DataFrame(
+        {
+            "vector": [[3.1, 4.1], [5.9, 26.5]],
+            "item": ["foo", "bar"],
+            "price": [10.0, 20.0],
+        }
+    )
+    db.create_table("test", data=data)
+
+    with pytest.raises(Exception):
+        db.create_table("test", data=data)
+
+    assert db.table_names() == ["test"]
+
+    db.drop_table("test")
+    assert db.table_names() == []
+
+    db.create_table("test", data=data)
+    assert db.table_names() == ["test"]
@@ -14,7 +14,6 @@ import sys

 import numpy as np
 import pyarrow as pa

 from lancedb.embeddings import with_embeddings
91
python/tests/test_fts.py
Normal file
@@ -0,0 +1,91 @@
# Copyright 2023 LanceDB Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random

import lancedb.fts
import numpy as np
import pandas as pd
import pytest
import tantivy

import lancedb as ldb


@pytest.fixture
def table(tmp_path) -> ldb.table.LanceTable:
    db = ldb.connect(tmp_path)
    vectors = [np.random.randn(128) for _ in range(100)]

    nouns = ("puppy", "car", "rabbit", "girl", "monkey")
    verbs = ("runs", "hits", "jumps", "drives", "barfs")
    adv = ("crazily.", "dutifully.", "foolishly.", "merrily.", "occasionally.")
    adj = ("adorable", "clueless", "dirty", "odd", "stupid")
    text = [
        " ".join(
            [
                nouns[random.randrange(0, 5)],
                verbs[random.randrange(0, 5)],
                adv[random.randrange(0, 5)],
                adj[random.randrange(0, 5)],
            ]
        )
        for _ in range(100)
    ]
    table = db.create_table(
        "test", data=pd.DataFrame({"vector": vectors, "text": text, "text2": text})
    )
    return table


def test_create_index(tmp_path):
    index = ldb.fts.create_index(str(tmp_path / "index"), ["text"])
    assert isinstance(index, tantivy.Index)
    assert os.path.exists(str(tmp_path / "index"))


def test_populate_index(tmp_path, table):
    index = ldb.fts.create_index(str(tmp_path / "index"), ["text"])
    assert ldb.fts.populate_index(index, table, ["text"]) == len(table)


def test_search_index(tmp_path, table):
    index = ldb.fts.create_index(str(tmp_path / "index"), ["text"])
    ldb.fts.populate_index(index, table, ["text"])
    index.reload()
    results = ldb.fts.search_index(index, query="puppy", limit=10)
    assert len(results) == 2
    assert len(results[0]) == 10  # row_ids
    assert len(results[1]) == 10  # scores


def test_create_index_from_table(tmp_path, table):
    table.create_fts_index("text")
    df = table.search("puppy").limit(10).select(["text"]).to_df()
    assert len(df) == 10
    assert "text" in df.columns


def test_create_index_multiple_columns(tmp_path, table):
    table.create_fts_index(["text", "text2"])
    df = table.search("puppy").limit(10).to_df()
    assert len(df) == 10
    assert "text" in df.columns
    assert "text2" in df.columns


def test_empty_rs(tmp_path, table, mocker):
    table.create_fts_index(["text", "text2"])
    mocker.patch("lancedb.fts.search_index", return_value=([], []))
    df = table.search("puppy").limit(10).to_df()
    assert len(df) == 0
50
python/tests/test_io.py
Normal file
@@ -0,0 +1,50 @@
# Copyright 2023 LanceDB Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pytest

import lancedb

# You need to set up AWS credentials and a base path to run this test. Example:
# AWS_PROFILE=default TEST_S3_BASE_URL=s3://my_bucket/dataset pytest tests/test_io.py


@pytest.mark.skipif(
    (os.environ.get("TEST_S3_BASE_URL") is None),
    reason="please setup s3 base url",
)
def test_s3_io():
    db = lancedb.connect(os.environ.get("TEST_S3_BASE_URL"))
    assert db.table_names() == []

    table = db.create_table(
        "test",
        data=[
            {"vector": [3.1, 4.1], "item": "foo", "price": 10.0},
            {"vector": [5.9, 26.5], "item": "bar", "price": 20.0},
        ],
    )
    rs = table.search([100, 100]).limit(1).to_df()
    assert len(rs) == 1
    assert rs["item"].iloc[0] == "bar"

    rs = table.search([100, 100]).where("price < 15").limit(2).to_df()
    assert len(rs) == 1
    assert rs["item"].iloc[0] == "foo"

    assert db.table_names() == ["test"]
    assert "test" in db
    assert len(db) == 1

    assert db.open_table("test").name == db["test"].name
@@ -17,7 +17,6 @@ import pandas as pd
 import pandas.testing as tm
 import pyarrow as pa
 import pytest

 from lancedb.query import LanceQueryBuilder


@@ -31,23 +30,17 @@ class MockTable:

 @pytest.fixture
 def table(tmp_path) -> MockTable:
-    df = pd.DataFrame(
+    df = pa.table(
         {
-            "vector": [[1, 2], [3, 4]],
-            "id": [1, 2],
-            "str_field": ["a", "b"],
-            "float_field": [1.0, 2.0],
+            "vector": pa.array(
+                [[1, 2], [3, 4]], type=pa.list_(pa.float32(), list_size=2)
+            ),
+            "id": pa.array([1, 2]),
+            "str_field": pa.array(["a", "b"]),
+            "float_field": pa.array([1.0, 2.0]),
         }
     )
-    schema = pa.schema(
-        [
-            pa.field("vector", pa.list_(pa.float32(), list_size=2)),
-            pa.field("id", pa.int32()),
-            pa.field("str_field", pa.string()),
-            pa.field("float_field", pa.float64()),
-        ]
-    )
-    lance.write_dataset(df, tmp_path, schema)
+    lance.write_dataset(df, tmp_path)
     return MockTable(tmp_path)


@@ -66,7 +59,7 @@ def test_query_builder_with_filter(table):
 def test_query_builder_with_metric(table):
     query = [4, 8]
     df_default = LanceQueryBuilder(table, query).to_df()
-    df_l2 = LanceQueryBuilder(table, query).metric("l2").to_df()
+    df_l2 = LanceQueryBuilder(table, query).metric("L2").to_df()
     tm.assert_frame_equal(df_default, df_l2)

     df_cosine = LanceQueryBuilder(table, query).metric("cosine").limit(1).to_df()
@@ -16,7 +16,6 @@ from pathlib import Path
 import pandas as pd
 import pyarrow as pa
 import pytest

 from lancedb.table import LanceTable
@@ -15,7 +15,7 @@ arrow-ipc = "37.0"
 arrow-schema = "37.0"
 once_cell = "1"
 futures = "0.3"
-lance = "0.4.3"
+lance = "0.4.17"
 vectordb = { path = "../../vectordb" }
 tokio = { version = "1.23", features = ["rt-multi-thread"] }
 neon = {version = "0.10.1", default-features = false, features = ["channel-api", "napi-6", "promise-api", "task-api"] }
15
rust/ffi/node/src/index.rs
Normal file
@@ -0,0 +1,15 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

pub mod vector;
128
rust/ffi/node/src/index/vector.rs
Normal file
@@ -0,0 +1,128 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::convert::TryFrom;

use lance::index::vector::ivf::IvfBuildParams;
use lance::index::vector::pq::PQBuildParams;
use lance::index::vector::MetricType;
use neon::context::FunctionContext;
use neon::prelude::*;

use vectordb::index::vector::{IvfPQIndexBuilder, VectorIndexBuilder};

use crate::{runtime, JsTable};

pub(crate) fn table_create_vector_index(mut cx: FunctionContext) -> JsResult<JsPromise> {
    let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
    let index_params = cx.argument::<JsObject>(0)?;
    let index_params_builder = get_index_params_builder(&mut cx, index_params).unwrap();

    let rt = runtime(&mut cx)?;
    let channel = cx.channel();

    let (deferred, promise) = cx.promise();
    let table = js_table.table.clone();

    rt.block_on(async move {
        let add_result = table
            .lock()
            .unwrap()
            .create_index(&index_params_builder)
            .await;

        deferred.settle_with(&channel, move |mut cx| {
            add_result
                .map(|_| cx.undefined())
                .or_else(|err| cx.throw_error(err.to_string()))
        });
    });
    Ok(promise)
}

fn get_index_params_builder(
    cx: &mut FunctionContext,
    obj: Handle<JsObject>,
) -> Result<impl VectorIndexBuilder, String> {
    let idx_type = obj
        .get::<JsString, _, _>(cx, "type")
        .map_err(|t| t.to_string())?
        .value(cx);

    match idx_type.as_str() {
        "ivf_pq" => {
            let mut index_builder: IvfPQIndexBuilder = IvfPQIndexBuilder::new();
            let mut pq_params = PQBuildParams::default();

            obj.get_opt::<JsString, _, _>(cx, "column")
                .map_err(|t| t.to_string())?
                .map(|s| index_builder.column(s.value(cx)));

            obj.get_opt::<JsString, _, _>(cx, "index_name")
                .map_err(|t| t.to_string())?
                .map(|s| index_builder.index_name(s.value(cx)));

            obj.get_opt::<JsString, _, _>(cx, "metric_type")
                .map_err(|t| t.to_string())?
                .map(|s| MetricType::try_from(s.value(cx).as_str()))
                .map(|mt| {
                    let metric_type = mt.unwrap();
                    index_builder.metric_type(metric_type);
                    pq_params.metric_type = metric_type;
                });

            let num_partitions = obj
                .get_opt::<JsNumber, _, _>(cx, "num_partitions")
                .map_err(|t| t.to_string())?
                .map(|s| s.value(cx) as usize);

            let max_iters = obj
                .get_opt::<JsNumber, _, _>(cx, "max_iters")
                .map_err(|t| t.to_string())?
                .map(|s| s.value(cx) as usize);

            num_partitions.map(|np| {
                let max_iters = max_iters.unwrap_or(50);
                let ivf_params = IvfBuildParams {
                    num_partitions: np,
                    max_iters,
                };
                index_builder.ivf_params(ivf_params)
            });

            obj.get_opt::<JsBoolean, _, _>(cx, "use_opq")
                .map_err(|t| t.to_string())?
                .map(|s| pq_params.use_opq = s.value(cx));

            obj.get_opt::<JsNumber, _, _>(cx, "num_sub_vectors")
                .map_err(|t| t.to_string())?
                .map(|s| pq_params.num_sub_vectors = s.value(cx) as usize);

            obj.get_opt::<JsNumber, _, _>(cx, "num_bits")
                .map_err(|t| t.to_string())?
                .map(|s| pq_params.num_bits = s.value(cx) as usize);

            obj.get_opt::<JsNumber, _, _>(cx, "max_iters")
                .map_err(|t| t.to_string())?
                .map(|s| pq_params.max_iters = s.value(cx) as usize);

            obj.get_opt::<JsNumber, _, _>(cx, "max_opq_iters")
                .map_err(|t| t.to_string())?
                .map(|s| pq_params.max_opq_iters = s.value(cx) as usize);

            Ok(index_builder)
        }
        t => Err(format!("{} is not a valid index type", t).to_string()),
    }
}
rust/ffi/node/src/lib.rs
@@ -13,6 +13,7 @@
 // limitations under the License.

 use std::collections::HashMap;
+use std::convert::TryFrom;
 use std::ops::Deref;
 use std::sync::{Arc, Mutex};

@@ -21,6 +22,7 @@ use arrow_ipc::writer::FileWriter;
 use futures::{TryFutureExt, TryStreamExt};
 use lance::arrow::RecordBatchBuffer;
 use lance::dataset::WriteMode;
+use lance::index::vector::MetricType;
 use neon::prelude::*;
 use neon::types::buffer::TypedArray;
 use once_cell::sync::OnceCell;
@@ -34,17 +36,18 @@ use crate::arrow::arrow_buffer_to_record_batch;

 mod arrow;
 mod convert;
+mod index;

 struct JsDatabase {
     database: Arc<Database>,
 }

+impl Finalize for JsDatabase {}
+
 struct JsTable {
     table: Arc<Mutex<Table>>,
 }

-impl Finalize for JsDatabase {}
-
 impl Finalize for JsTable {}

 fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
@@ -53,23 +56,46 @@ fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> {
     RUNTIME.get_or_try_init(|| Runtime::new().or_else(|err| cx.throw_error(err.to_string())))
 }

-fn database_new(mut cx: FunctionContext) -> JsResult<JsBox<JsDatabase>> {
+fn database_new(mut cx: FunctionContext) -> JsResult<JsPromise> {
     let path = cx.argument::<JsString>(0)?.value(&mut cx);
-    let db = JsDatabase {
-        database: Arc::new(Database::connect(path).or_else(|err| cx.throw_error(err.to_string()))?),
-    };
-    Ok(cx.boxed(db))
+
+    let rt = runtime(&mut cx)?;
+    let channel = cx.channel();
+    let (deferred, promise) = cx.promise();
+
+    rt.spawn(async move {
+        let database = Database::connect(&path).await;
+
+        deferred.settle_with(&channel, move |mut cx| {
+            let db = JsDatabase {
+                database: Arc::new(database.or_else(|err| cx.throw_error(err.to_string()))?),
+            };
+            Ok(cx.boxed(db))
+        });
+    });
+    Ok(promise)
 }

-fn database_table_names(mut cx: FunctionContext) -> JsResult<JsArray> {
+fn database_table_names(mut cx: FunctionContext) -> JsResult<JsPromise> {
     let db = cx
         .this()
         .downcast_or_throw::<JsBox<JsDatabase>, _>(&mut cx)?;
-    let tables = db
-        .database
-        .table_names()
-        .or_else(|err| cx.throw_error(err.to_string()))?;
-    convert::vec_str_to_array(&tables, &mut cx)
+
+    let rt = runtime(&mut cx)?;
+    let (deferred, promise) = cx.promise();
+    let channel = cx.channel();
+    let database = db.database.clone();
+
+    rt.spawn(async move {
+        let tables_rst = database.table_names().await;
+
+        deferred.settle_with(&channel, move |mut cx| {
+            let tables = tables_rst.or_else(|err| cx.throw_error(err.to_string()))?;
+            convert::vec_str_to_array(&tables, &mut cx)
+        });
+    });
+    Ok(promise)
 }

 fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {
@@ -84,10 +110,12 @@ fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {

     let (deferred, promise) = cx.promise();
     rt.spawn(async move {
-        let table_rst = database.open_table(table_name).await;
+        let table_rst = database.open_table(&table_name).await;

         deferred.settle_with(&channel, move |mut cx| {
-            let table = Arc::new(Mutex::new(table_rst.or_else(|err| cx.throw_error(err.to_string()))?));
+            let table = Arc::new(Mutex::new(
+                table_rst.or_else(|err| cx.throw_error(err.to_string()))?,
+            ));
             Ok(cx.boxed(JsTable { table }))
         });
     });
@@ -96,15 +124,43 @@ fn database_open_table(mut cx: FunctionContext) -> JsResult<JsPromise> {

 fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
     let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
-    let query_vector = cx.argument::<JsArray>(0)?; //. .as_value(&mut cx);
-    let limit = cx.argument::<JsNumber>(1)?.value(&mut cx);
-    let filter = cx.argument_opt(2).map(|f| f.downcast_or_throw::<JsString, _>(&mut cx).unwrap().value(&mut cx));
+    let query_obj = cx.argument::<JsObject>(0)?;
+
+    let limit = query_obj
+        .get::<JsNumber, _, _>(&mut cx, "_limit")?
+        .value(&mut cx);
+    let select = query_obj
+        .get_opt::<JsArray, _, _>(&mut cx, "_select")?
+        .map(|arr| {
+            let js_array = arr.deref();
+            let mut projection_vec: Vec<String> = Vec::new();
+            for i in 0..js_array.len(&mut cx) {
+                let entry: Handle<JsString> = js_array.get(&mut cx, i).unwrap();
+                projection_vec.push(entry.value(&mut cx));
+            }
+            projection_vec
+        });
+    let filter = query_obj
+        .get_opt::<JsString, _, _>(&mut cx, "_filter")?
+        .map(|s| s.value(&mut cx));
+    let refine_factor = query_obj
+        .get_opt::<JsNumber, _, _>(&mut cx, "_refineFactor")?
+        .map(|s| s.value(&mut cx))
+        .map(|i| i as u32);
+    let nprobes = query_obj
+        .get::<JsNumber, _, _>(&mut cx, "_nprobes")?
+        .value(&mut cx) as usize;
+    let metric_type = query_obj
+        .get_opt::<JsString, _, _>(&mut cx, "_metricType")?
+        .map(|s| s.value(&mut cx))
+        .map(|s| MetricType::try_from(s.as_str()).unwrap());

     let rt = runtime(&mut cx)?;
     let channel = cx.channel();

     let (deferred, promise) = cx.promise();
     let table = js_table.table.clone();
+    let query_vector = query_obj.get::<JsArray, _, _>(&mut cx, "_queryVector")?;
     let query = convert::js_array_to_vec(query_vector.deref(), &mut cx);

     rt.spawn(async move {
@@ -113,7 +169,11 @@ fn table_search(mut cx: FunctionContext) -> JsResult<JsPromise> {
             .unwrap()
             .search(Float32Array::from(query))
             .limit(limit as usize)
-            .filter(filter);
+            .refine_factor(refine_factor)
+            .nprobes(nprobes)
+            .filter(filter)
+            .metric_type(metric_type)
+            .select(select);
         let record_batch_stream = builder.execute();
         let results = record_batch_stream
             .and_then(|stream| stream.try_collect::<Vec<_>>().map_err(Error::from))
@@ -161,10 +221,12 @@ fn table_create(mut cx: FunctionContext) -> JsResult<JsPromise> {

     rt.block_on(async move {
         let batch_reader: Box<dyn RecordBatchReader> = Box::new(RecordBatchBuffer::new(batches));
-        let table_rst = database.create_table(table_name, batch_reader).await;
+        let table_rst = database.create_table(&table_name, batch_reader).await;

         deferred.settle_with(&channel, move |mut cx| {
-            let table = Arc::new(Mutex::new(table_rst.or_else(|err| cx.throw_error(err.to_string()))?));
+            let table = Arc::new(Mutex::new(
+                table_rst.or_else(|err| cx.throw_error(err.to_string()))?,
+            ));
             Ok(cx.boxed(JsTable { table }))
         });
     });
@@ -178,9 +240,7 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
         ("overwrite", WriteMode::Overwrite),
     ]);

-    let js_table = cx
-        .this()
-        .downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
+    let js_table = cx.this().downcast_or_throw::<JsBox<JsTable>, _>(&mut cx)?;
     let buffer = cx.argument::<JsBuffer>(0)?;
     let write_mode = cx.argument::<JsString>(1)?.value(&mut cx);
     let batches = arrow_buffer_to_record_batch(buffer.as_slice(&mut cx));
@@ -204,7 +264,6 @@ fn table_add(mut cx: FunctionContext) -> JsResult<JsPromise> {
     Ok(promise)
 }

-
 #[neon::main]
 fn main(mut cx: ModuleContext) -> NeonResult<()> {
     cx.export_function("databaseNew", database_new)?;
@@ -213,5 +272,9 @@ fn main(mut cx: ModuleContext) -> NeonResult<()> {
     cx.export_function("tableSearch", table_search)?;
     cx.export_function("tableCreate", table_create)?;
     cx.export_function("tableAdd", table_add)?;
+    cx.export_function(
+        "tableCreateVectorIndex",
+        index::vector::table_create_vector_index,
+    )?;
     Ok(())
 }
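Every handler above now follows the same shape: grab the shared tokio runtime, create a deferred/promise pair, spawn the async work, and settle the promise through the Neon channel. Distilled to a minimal sketch (the async body is a stand-in; runtime is the helper defined in this file):

// Minimal sketch of the spawn-then-settle pattern used by each handler above.
fn example_async_handler(mut cx: FunctionContext) -> JsResult<JsPromise> {
    let rt = runtime(&mut cx)?;
    let channel = cx.channel();
    let (deferred, promise) = cx.promise();

    rt.spawn(async move {
        // Any async work can go here; its Result is moved into the callback.
        let result: Result<String, std::io::Error> = Ok("done".to_string());

        deferred.settle_with(&channel, move |mut cx| {
            let value = result.or_else(|err| cx.throw_error(err.to_string()))?;
            Ok(cx.string(value))
        });
    });
    Ok(promise)
}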
rust/vectordb/Cargo.toml
@@ -10,9 +10,13 @@ repository = "https://github.com/lancedb/lancedb"

 [dependencies]
 arrow-array = "37.0"
+arrow-data = "37.0"
 arrow-schema = "37.0"
-lance = "0.4.3"
+object_store = "0.5.6"
+snafu = "0.7.4"
+lance = "0.4.17"
 tokio = { version = "1.23", features = ["rt-multi-thread"] }

 [dev-dependencies]
 tempfile = "3.5.0"
+rand = { version = "0.8.3", features = ["small_rng"] }
rust/vectordb/src/database.rs
@@ -12,16 +12,20 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use arrow_array::RecordBatchReader;
 use std::fs::create_dir_all;
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
+use std::path::Path;

-use crate::error::Result;
+use arrow_array::RecordBatchReader;
+use lance::io::object_store::ObjectStore;
+use snafu::prelude::*;
+
+use crate::error::{CreateDirSnafu, Result};
 use crate::table::Table;

 pub struct Database {
-    pub(crate) path: Arc<PathBuf>,
+    object_store: ObjectStore,
+
+    pub(crate) uri: String,
 }

 const LANCE_EXTENSION: &str = "lance";
@@ -37,26 +41,38 @@ impl Database {
     /// # Returns
     ///
     /// * A [Database] object.
-    pub fn connect<P: AsRef<Path>>(path: P) -> Result<Database> {
-        if !path.as_ref().try_exists()? {
-            create_dir_all(&path)?;
+    pub async fn connect(uri: &str) -> Result<Database> {
+        let object_store = ObjectStore::new(uri).await?;
+        if object_store.is_local() {
+            Self::try_create_dir(uri).context(CreateDirSnafu { path: uri })?;
         }
         Ok(Database {
-            path: Arc::new(path.as_ref().to_path_buf()),
+            uri: uri.to_string(),
+            object_store,
         })
     }

+    /// Try to create a local directory to store the lancedb dataset
+    fn try_create_dir(path: &str) -> core::result::Result<(), std::io::Error> {
+        let path = Path::new(path);
+        if !path.try_exists()? {
+            create_dir_all(&path)?;
+        }
+        Ok(())
+    }
+
     /// Get the names of all tables in the database.
     ///
     /// # Returns
     ///
     /// * A [Vec<String>] with all table names.
-    pub fn table_names(&self) -> Result<Vec<String>> {
+    pub async fn table_names(&self) -> Result<Vec<String>> {
         let f = self
-            .path
-            .read_dir()?
-            .flatten()
-            .map(|dir_entry| dir_entry.path())
+            .object_store
+            .read_dir("/")
+            .await?
+            .iter()
+            .map(|fname| Path::new(fname))
             .filter(|path| {
                 let is_lance = path
                     .extension()
@@ -76,10 +92,10 @@ impl Database {

     pub async fn create_table(
         &self,
-        name: String,
+        name: &str,
         batches: Box<dyn RecordBatchReader>,
     ) -> Result<Table> {
-        Table::create(self.path.clone(), name, batches).await
+        Table::create(&self.uri, name, batches).await
     }

     /// Open a table in the database.
@@ -90,8 +106,8 @@ impl Database {
     /// # Returns
     ///
     /// * A [Table] object.
-    pub async fn open_table(&self, name: String) -> Result<Table> {
-        Table::open(self.path.clone(), name).await
+    pub async fn open_table(&self, name: &str) -> Result<Table> {
+        Table::open(&self.uri, name).await
     }
 }

@@ -105,10 +121,10 @@ mod tests {
     #[tokio::test]
     async fn test_connect() {
         let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
-        let db = Database::connect(&path_buf);
+        let uri = tmp_dir.path().to_str().unwrap();
+        let db = Database::connect(uri).await.unwrap();

-        assert_eq!(db.unwrap().path.as_path(), path_buf.as_path())
+        assert_eq!(db.uri, uri);
     }

     #[tokio::test]
@@ -118,10 +134,16 @@ mod tests {
         create_dir_all(tmp_dir.path().join("table2.lance")).unwrap();
         create_dir_all(tmp_dir.path().join("invalidlance")).unwrap();

-        let db = Database::connect(&tmp_dir.into_path()).unwrap();
-        let tables = db.table_names().unwrap();
+        let uri = tmp_dir.path().to_str().unwrap();
+        let db = Database::connect(uri).await.unwrap();
+        let tables = db.table_names().await.unwrap();
         assert_eq!(tables.len(), 2);
         assert!(tables.contains(&String::from("table1")));
         assert!(tables.contains(&String::from("table2")));
     }
+
+    #[tokio::test]
+    async fn test_connect_s3() {
+        // let db = Database::connect("s3://bucket/path/to/database").await.unwrap();
+    }
 }
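Taken together, these changes make the entry point fully async and URI-based. A minimal sketch of the new flow, assuming the vectordb crate as of this diff and a tokio runtime (the path is illustrative):

use vectordb::database::Database;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Local paths get their directory created on demand; object-store URIs
    // such as s3:// skip the local-directory step entirely.
    let db = Database::connect("/tmp/lancedb-demo").await?;
    for name in db.table_names().await? {
        println!("table: {}", name);
    }
    Ok(())
}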
rust/vectordb/src/error.rs
@@ -12,32 +12,50 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-#[derive(Debug)]
-pub enum Error {
-    IO(String),
-    Lance(String),
-}
+use snafu::Snafu;

-impl std::fmt::Display for Error {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let (catalog, message) = match self {
-            Self::IO(s) => ("I/O", s.as_str()),
-            Self::Lance(s) => ("Lance", s.as_str()),
-        };
-        write!(f, "LanceDBError({catalog}): {message}")
-    }
+#[derive(Debug, Snafu)]
+#[snafu(visibility(pub(crate)))]
+pub enum Error {
+    #[snafu(display("LanceDBError: Invalid table name: {name}"))]
+    InvalidTableName { name: String },
+    #[snafu(display("LanceDBError: Table '{name}' was not found"))]
+    TableNotFound { name: String },
+    #[snafu(display("LanceDBError: Table '{name}' already exists"))]
+    TableAlreadyExists { name: String },
+    #[snafu(display("LanceDBError: Unable to create lance dataset at {path}: {source}"))]
+    CreateDir {
+        path: String,
+        source: std::io::Error,
+    },
+    #[snafu(display("LanceDBError: {message}"))]
+    Store { message: String },
+    #[snafu(display("LanceDBError: {message}"))]
+    Lance { message: String },
 }

 pub type Result<T> = std::result::Result<T, Error>;

-impl From<std::io::Error> for Error {
-    fn from(e: std::io::Error) -> Self {
-        Self::IO(e.to_string())
-    }
-}
-
-impl From<lance::Error> for Error {
-    fn from(e: lance::Error) -> Self {
-        Self::Lance(e.to_string())
+impl From<lance::Error> for Error {
+    fn from(e: lance::Error) -> Self {
+        Self::Lance {
+            message: e.to_string(),
+        }
+    }
+}
+
+impl From<object_store::Error> for Error {
+    fn from(e: object_store::Error) -> Self {
+        Self::Store {
+            message: e.to_string(),
+        }
+    }
+}
+
+impl From<object_store::path::Error> for Error {
+    fn from(e: object_store::path::Error) -> Self {
+        Self::Store {
+            message: e.to_string(),
+        }
     }
 }
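With snafu in place, callers can branch on structured variants instead of parsing message strings. A hypothetical caller sketch (the function name and handling policy are illustrative, not part of the diff):

use vectordb::database::Database;
use vectordb::error::Error;

async fn open_or_report(db: &Database, name: &str) {
    match db.open_table(name).await {
        Ok(table) => println!("opened {}", table),
        Err(Error::TableNotFound { name }) => println!("{} does not exist yet", name),
        Err(e) => eprintln!("unexpected error: {}", e),
    }
}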
rust/vectordb/src/index.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

pub mod vector;
rust/vectordb/src/index/vector.rs (new file, 163 lines)
@@ -0,0 +1,163 @@
// Copyright 2023 Lance Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use lance::index::vector::ivf::IvfBuildParams;
use lance::index::vector::pq::PQBuildParams;
use lance::index::vector::{MetricType, VectorIndexParams};

pub trait VectorIndexBuilder {
    fn get_column(&self) -> Option<String>;
    fn get_index_name(&self) -> Option<String>;
    fn build(&self) -> VectorIndexParams;
}

pub struct IvfPQIndexBuilder {
    column: Option<String>,
    index_name: Option<String>,
    metric_type: Option<MetricType>,
    ivf_params: Option<IvfBuildParams>,
    pq_params: Option<PQBuildParams>,
}

impl IvfPQIndexBuilder {
    pub fn new() -> IvfPQIndexBuilder {
        IvfPQIndexBuilder {
            column: None,
            index_name: None,
            metric_type: None,
            ivf_params: None,
            pq_params: None,
        }
    }
}

impl IvfPQIndexBuilder {
    pub fn column(&mut self, column: String) -> &mut IvfPQIndexBuilder {
        self.column = Some(column);
        self
    }

    pub fn index_name(&mut self, index_name: String) -> &mut IvfPQIndexBuilder {
        self.index_name = Some(index_name);
        self
    }

    pub fn metric_type(&mut self, metric_type: MetricType) -> &mut IvfPQIndexBuilder {
        self.metric_type = Some(metric_type);
        self
    }

    pub fn ivf_params(&mut self, ivf_params: IvfBuildParams) -> &mut IvfPQIndexBuilder {
        self.ivf_params = Some(ivf_params);
        self
    }

    pub fn pq_params(&mut self, pq_params: PQBuildParams) -> &mut IvfPQIndexBuilder {
        self.pq_params = Some(pq_params);
        self
    }
}

impl VectorIndexBuilder for IvfPQIndexBuilder {
    fn get_column(&self) -> Option<String> {
        self.column.clone()
    }

    fn get_index_name(&self) -> Option<String> {
        self.index_name.clone()
    }

    fn build(&self) -> VectorIndexParams {
        let ivf_params = self.ivf_params.clone().unwrap_or(IvfBuildParams::default());
        let pq_params = self.pq_params.clone().unwrap_or(PQBuildParams::default());

        VectorIndexParams::with_ivf_pq_params(pq_params.metric_type, ivf_params, pq_params)
    }
}

#[cfg(test)]
mod tests {
    use lance::index::vector::ivf::IvfBuildParams;
    use lance::index::vector::pq::PQBuildParams;
    use lance::index::vector::{MetricType, StageParams};

    use crate::index::vector::{IvfPQIndexBuilder, VectorIndexBuilder};

    #[test]
    fn test_builder_no_params() {
        let index_builder = IvfPQIndexBuilder::new();
        assert!(index_builder.get_column().is_none());
        assert!(index_builder.get_index_name().is_none());

        let index_params = index_builder.build();
        assert_eq!(index_params.stages.len(), 2);
        if let StageParams::Ivf(ivf_params) = index_params.stages.get(0).unwrap() {
            let default = IvfBuildParams::default();
            assert_eq!(ivf_params.num_partitions, default.num_partitions);
            assert_eq!(ivf_params.max_iters, default.max_iters);
        } else {
            panic!("Expected first stage to be ivf")
        }

        if let StageParams::PQ(pq_params) = index_params.stages.get(1).unwrap() {
            assert_eq!(pq_params.use_opq, false);
        } else {
            panic!("Expected second stage to be pq")
        }
    }

    #[test]
    fn test_builder_all_params() {
        let mut index_builder = IvfPQIndexBuilder::new();

        index_builder
            .column("c".to_owned())
            .metric_type(MetricType::Cosine)
            .index_name("index".to_owned());

        assert_eq!(index_builder.column.clone().unwrap(), "c");
        assert_eq!(index_builder.metric_type.unwrap(), MetricType::Cosine);
        assert_eq!(index_builder.index_name.clone().unwrap(), "index");

        let ivf_params = IvfBuildParams::new(500);
        let mut pq_params = PQBuildParams::default();
        pq_params.use_opq = true;
        pq_params.max_iters = 1;
        pq_params.num_bits = 8;
        pq_params.num_sub_vectors = 50;
        pq_params.metric_type = MetricType::Cosine;
        pq_params.max_opq_iters = 2;
        index_builder.ivf_params(ivf_params);
        index_builder.pq_params(pq_params);

        let index_params = index_builder.build();
        assert_eq!(index_params.stages.len(), 2);
        if let StageParams::Ivf(ivf_params) = index_params.stages.get(0).unwrap() {
            assert_eq!(ivf_params.num_partitions, 500);
        } else {
            panic!("Expected first stage to be ivf")
        }

        if let StageParams::PQ(pq_params) = index_params.stages.get(1).unwrap() {
            assert_eq!(pq_params.use_opq, true);
            assert_eq!(pq_params.max_iters, 1);
            assert_eq!(pq_params.num_bits, 8);
            assert_eq!(pq_params.num_sub_vectors, 50);
            assert_eq!(pq_params.metric_type, MetricType::Cosine);
            assert_eq!(pq_params.max_opq_iters, 2);
        } else {
            panic!("Expected second stage to be pq")
        }
    }
}
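A minimal usage sketch for the builder (values are illustrative). Note one subtlety visible in build() above: the metric type that reaches VectorIndexParams is read from the PQ params, so a caller who wants cosine distance should set it there as well:

use lance::index::vector::ivf::IvfBuildParams;
use lance::index::vector::pq::PQBuildParams;
use lance::index::vector::{MetricType, VectorIndexParams};
use vectordb::index::vector::{IvfPQIndexBuilder, VectorIndexBuilder};

fn cosine_ivf_pq() -> VectorIndexParams {
    let mut builder = IvfPQIndexBuilder::new();

    // build() reads the metric type from the PQ params, so set it there.
    let mut pq_params = PQBuildParams::default();
    pq_params.metric_type = MetricType::Cosine;

    builder
        .column("vector".to_string())
        .index_name("ann_idx".to_string())
        .ivf_params(IvfBuildParams::new(256))
        .pq_params(pq_params);

    builder.build()
}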
rust/vectordb/src/lib.rs
@@ -14,5 +14,6 @@

 pub mod database;
 pub mod error;
+pub mod index;
 pub mod query;
 pub mod table;
rust/vectordb/src/query.rs
@@ -27,9 +27,10 @@ pub struct Query {
     pub query_vector: Float32Array,
     pub limit: usize,
     pub filter: Option<String>,
+    pub select: Option<Vec<String>>,
     pub nprobes: usize,
     pub refine_factor: Option<u32>,
-    pub metric_type: MetricType,
+    pub metric_type: Option<MetricType>,
     pub use_index: bool,
 }

@@ -51,9 +52,10 @@ impl Query {
             limit: 10,
             nprobes: 20,
             refine_factor: None,
-            metric_type: MetricType::L2,
+            metric_type: None,
             use_index: false,
-            filter: None
+            filter: None,
+            select: None,
         }
     }

@@ -71,10 +73,13 @@ impl Query {
             self.limit,
         )?;
         scanner.nprobs(self.nprobes);
-        scanner.distance_metric(self.metric_type);
         scanner.use_index(self.use_index);
+        self.select
+            .as_ref()
+            .map(|p| scanner.project(p.as_slice()));
         self.filter.as_ref().map(|f| scanner.filter(f));
         self.refine_factor.map(|rf| scanner.refine(rf));
+        self.metric_type.map(|mt| scanner.distance_metric(mt));
         Ok(scanner.try_into_stream().await?)
     }

@@ -123,7 +128,7 @@ impl Query {
     /// # Arguments
     ///
     /// * `metric_type` - The distance metric to use. By default [MetricType::L2] is used.
-    pub fn metric_type(mut self, metric_type: MetricType) -> Query {
+    pub fn metric_type(mut self, metric_type: Option<MetricType>) -> Query {
         self.metric_type = metric_type;
         self
     }
@@ -138,10 +143,23 @@ impl Query {
         self
     }

+    /// A filter statement to be applied to this query.
+    ///
+    /// # Arguments
+    ///
+    /// * `filter` - A filter expression in the same format as a SQL WHERE clause.
     pub fn filter(mut self, filter: Option<String>) -> Query {
         self.filter = filter;
         self
     }
+
+    /// Return only the specified columns.
+    ///
+    /// Only select the specified columns. If not specified, all columns will be returned.
+    pub fn select(mut self, columns: Option<Vec<String>>) -> Query {
+        self.select = columns;
+        self
+    }
 }

 #[cfg(test)]
@@ -174,14 +192,14 @@ mod tests {
             .limit(100)
             .nprobes(1000)
             .use_index(true)
-            .metric_type(MetricType::Cosine)
+            .metric_type(Some(MetricType::Cosine))
             .refine_factor(Some(999));

         assert_eq!(query.query_vector, new_vector);
         assert_eq!(query.limit, 100);
         assert_eq!(query.nprobes, 1000);
         assert_eq!(query.use_index, true);
-        assert_eq!(query.metric_type, MetricType::Cosine);
+        assert_eq!(query.metric_type, Some(MetricType::Cosine));
         assert_eq!(query.refine_factor, Some(999));
     }
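A short sketch of a query using the new optional knobs (assumes a Table opened as in the tests; the vector, column names, and filter are illustrative):

use arrow_array::Float32Array;
use lance::index::vector::MetricType;
use vectordb::query::Query;
use vectordb::table::Table;

fn nearest_ids(table: &Table) -> Query {
    table
        .search(Float32Array::from_iter_values([0.1, 0.2]))
        .limit(5)
        .nprobes(10)
        .metric_type(Some(MetricType::Cosine)) // None keeps the default metric
        .select(Some(vec!["id".to_string()])) // None returns every column
        .filter(Some("id > 100".to_string()))
}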
rust/vectordb/src/table.rs
@@ -12,26 +12,35 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use std::path::PathBuf;
+use std::path::Path;
 use std::sync::Arc;

 use arrow_array::{Float32Array, RecordBatchReader};
 use lance::dataset::{Dataset, WriteMode, WriteParams};
+use lance::index::IndexType;
+use snafu::prelude::*;

-use crate::error::{Error, Result};
+use crate::error::{Error, InvalidTableNameSnafu, Result};
+use crate::index::vector::VectorIndexBuilder;
 use crate::query::Query;

 pub const VECTOR_COLUMN_NAME: &str = "vector";

 pub const LANCE_FILE_EXTENSION: &str = "lance";

 /// A table in a LanceDB database.
+#[derive(Debug)]
 pub struct Table {
     name: String,
-    path: String,
+    uri: String,
     dataset: Arc<Dataset>,
 }

+impl std::fmt::Display for Table {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "Table({})", self.name)
+    }
+}
+
 impl Table {
     /// Opens an existing Table
     ///
@@ -43,18 +52,28 @@ impl Table {
     /// # Returns
     ///
     /// * A [Table] object.
-    pub async fn open(base_path: Arc<PathBuf>, name: String) -> Result<Self> {
-        let ds_path = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
-        let ds_uri = ds_path
-            .to_str()
-            .ok_or(Error::IO(format!("Unable to find table {}", name)))?;
-        let dataset = Dataset::open(ds_uri).await?;
-        let table = Table {
-            name,
-            path: ds_uri.to_string(),
-            dataset: Arc::new(dataset),
-        };
-        Ok(table)
+    pub async fn open(base_uri: &str, name: &str) -> Result<Self> {
+        let path = Path::new(base_uri);
+
+        let table_uri = path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
+        let uri = table_uri
+            .as_path()
+            .to_str()
+            .context(InvalidTableNameSnafu { name })?;
+
+        let dataset = Dataset::open(&uri).await.map_err(|e| match e {
+            lance::Error::DatasetNotFound { .. } => Error::TableNotFound {
+                name: name.to_string(),
+            },
+            e => Error::Lance {
+                message: e.to_string(),
+            },
+        })?;
+        Ok(Table {
+            name: name.to_string(),
+            uri: uri.to_string(),
+            dataset: Arc::new(dataset),
+        })
     }

     /// Creates a new Table
@@ -69,18 +88,52 @@ impl Table {
     ///
     /// * A [Table] object.
     pub async fn create(
-        base_path: Arc<PathBuf>,
-        name: String,
+        base_uri: &str,
+        name: &str,
         mut batches: Box<dyn RecordBatchReader>,
     ) -> Result<Self> {
-        let ds_path = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
-        let path = ds_path
-            .to_str()
-            .ok_or(Error::IO(format!("Unable to find table {}", name)))?;
-        let dataset =
-            Arc::new(Dataset::write(&mut batches, path, Some(WriteParams::default())).await?);
-        Ok(Table { name, path: path.to_string(), dataset })
+        let base_path = Path::new(base_uri);
+        let table_uri = base_path.join(format!("{}.{}", name, LANCE_FILE_EXTENSION));
+        let uri = table_uri
+            .as_path()
+            .to_str()
+            .context(InvalidTableNameSnafu { name })?
+            .to_string();
+        let dataset = Dataset::write(&mut batches, &uri, Some(WriteParams::default()))
+            .await
+            .map_err(|e| match e {
+                lance::Error::DatasetAlreadyExists { .. } => Error::TableAlreadyExists {
+                    name: name.to_string(),
+                },
+                e => Error::Lance {
+                    message: e.to_string(),
+                },
+            })?;
+        Ok(Table {
+            name: name.to_string(),
+            uri,
+            dataset: Arc::new(dataset),
+        })
+    }
+
+    /// Create index on the table.
+    pub async fn create_index(&mut self, index_builder: &impl VectorIndexBuilder) -> Result<()> {
+        use lance::index::DatasetIndexExt;
+
+        let dataset = self
+            .dataset
+            .create_index(
+                &[index_builder
+                    .get_column()
+                    .unwrap_or(VECTOR_COLUMN_NAME.to_string())
+                    .as_str()],
+                IndexType::Vector,
+                index_builder.get_index_name(),
+                &index_builder.build(),
+            )
+            .await?;
+        self.dataset = Arc::new(dataset);
+        Ok(())
     }

     /// Insert records into this Table
@@ -95,12 +148,12 @@ impl Table {
     pub async fn add(
         &mut self,
         mut batches: Box<dyn RecordBatchReader>,
-        write_mode: Option<WriteMode>
+        write_mode: Option<WriteMode>,
     ) -> Result<usize> {
         let mut params = WriteParams::default();
         params.mode = write_mode.unwrap_or(WriteMode::Append);

-        self.dataset = Arc::new(Dataset::write(&mut batches, self.path.as_str(), Some(params)).await?);
+        self.dataset = Arc::new(Dataset::write(&mut batches, &self.uri, Some(params)).await?);
         Ok(batches.count())
     }

@@ -125,56 +178,84 @@ impl Table {

 #[cfg(test)]
 mod tests {
-    use arrow_array::{Float32Array, Int32Array, RecordBatch, RecordBatchReader};
+    use std::sync::Arc;
+
+    use arrow_array::{
+        Array, FixedSizeListArray, Float32Array, Int32Array, RecordBatch, RecordBatchReader,
+    };
+    use arrow_data::ArrayDataBuilder;
     use arrow_schema::{DataType, Field, Schema};
     use lance::arrow::RecordBatchBuffer;
     use lance::dataset::{Dataset, WriteMode};
-    use std::sync::Arc;
+    use lance::index::vector::ivf::IvfBuildParams;
+    use lance::index::vector::pq::PQBuildParams;
+    use rand::Rng;
     use tempfile::tempdir;

-    use crate::table::Table;
+    use super::*;
+    use crate::index::vector::IvfPQIndexBuilder;

-    #[tokio::test]
-    async fn test_new_table_not_exists() {
-        let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
-
-        let table = Table::open(Arc::new(path_buf), "test".to_string()).await;
-        assert!(table.is_err());
-    }
-
     #[tokio::test]
     async fn test_open() {
         let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
+        let dataset_path = tmp_dir.path().join("test.lance");
+        let uri = tmp_dir.path().to_str().unwrap();

         let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
-        Dataset::write(
-            &mut batches,
-            path_buf.join("test.lance").to_str().unwrap(),
-            None,
-        )
-        .await
-        .unwrap();
+        Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
+            .await
+            .unwrap();

-        let table = Table::open(Arc::new(path_buf), "test".to_string())
-            .await
-            .unwrap();
+        let table = Table::open(uri, "test").await.unwrap();

         assert_eq!(table.name, "test")
     }

+    #[tokio::test]
+    async fn test_open_not_found() {
+        let tmp_dir = tempdir().unwrap();
+        let uri = tmp_dir.path().to_str().unwrap();
+        let table = Table::open(uri, "test").await;
+        assert!(matches!(table.unwrap_err(), Error::TableNotFound { .. }));
+    }
+
+    #[test]
+    fn test_object_store_path() {
+        use std::path::Path as StdPath;
+        let p = StdPath::new("s3://bucket/path/to/file");
+        let c = p.join("subfile");
+        assert_eq!(c.to_str().unwrap(), "s3://bucket/path/to/file/subfile");
+    }
+
+    #[tokio::test]
+    async fn test_create_already_exists() {
+        let tmp_dir = tempdir().unwrap();
+        let uri = tmp_dir.path().to_str().unwrap();
+
+        let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
+        let schema = batches.schema().clone();
+        Table::create(&uri, "test", batches).await.unwrap();
+
+        let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
+        let result = Table::create(&uri, "test", batches).await;
+        assert!(matches!(
+            result.unwrap_err(),
+            Error::TableAlreadyExists { .. }
+        ));
+    }
+
     #[tokio::test]
     async fn test_add() {
         let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
+        let uri = tmp_dir.path().to_str().unwrap();

         let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
         let schema = batches.schema().clone();
-        let mut table = Table::create(Arc::new(path_buf), "test".to_string(), batches).await.unwrap();
+        let mut table = Table::create(&uri, "test", batches).await.unwrap();
         assert_eq!(table.count_rows().await.unwrap(), 10);

-        let new_batches: Box<dyn RecordBatchReader> = Box::new(RecordBatchBuffer::new(vec![RecordBatch::try_new(
-            schema,
-            vec![Arc::new(Int32Array::from_iter_values(100..110))],
-        )
+        let new_batches: Box<dyn RecordBatchReader> =
+            Box::new(RecordBatchBuffer::new(vec![RecordBatch::try_new(
+                schema,
+                vec![Arc::new(Int32Array::from_iter_values(100..110))],
+            )
@@ -188,19 +269,24 @@ mod tests {
     #[tokio::test]
     async fn test_add_overwrite() {
         let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
+        let uri = tmp_dir.path().to_str().unwrap();

         let batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
         let schema = batches.schema().clone();
-        let mut table = Table::create(Arc::new(path_buf), "test".to_string(), batches).await.unwrap();
+        let mut table = Table::create(uri, "test", batches).await.unwrap();
         assert_eq!(table.count_rows().await.unwrap(), 10);

-        let new_batches: Box<dyn RecordBatchReader> = Box::new(RecordBatchBuffer::new(vec![RecordBatch::try_new(
-            schema,
-            vec![Arc::new(Int32Array::from_iter_values(100..110))],
-        ).unwrap()]));
+        let new_batches: Box<dyn RecordBatchReader> =
+            Box::new(RecordBatchBuffer::new(vec![RecordBatch::try_new(
+                schema,
+                vec![Arc::new(Int32Array::from_iter_values(100..110))],
+            )
+            .unwrap()]));

-        table.add(new_batches, Some(WriteMode::Overwrite)).await.unwrap();
+        table
+            .add(new_batches, Some(WriteMode::Overwrite))
+            .await
+            .unwrap();
         assert_eq!(table.count_rows().await.unwrap(), 10);
         assert_eq!(table.name, "test");
     }
@@ -208,20 +294,15 @@ mod tests {
     #[tokio::test]
     async fn test_search() {
         let tmp_dir = tempdir().unwrap();
-        let path_buf = tmp_dir.into_path();
+        let dataset_path = tmp_dir.path().join("test.lance");
+        let uri = tmp_dir.path().to_str().unwrap();

         let mut batches: Box<dyn RecordBatchReader> = Box::new(make_test_batches());
-        Dataset::write(
-            &mut batches,
-            path_buf.join("test.lance").to_str().unwrap(),
-            None,
-        )
-        .await
-        .unwrap();
+        Dataset::write(&mut batches, dataset_path.to_str().unwrap(), None)
+            .await
+            .unwrap();

-        let table = Table::open(Arc::new(path_buf), "test".to_string())
-            .await
-            .unwrap();
+        let table = Table::open(uri, "test").await.unwrap();

         let vector = Float32Array::from_iter_values([0.1, 0.2]);
         let query = table.search(vector.clone());
@@ -236,4 +317,72 @@ mod tests {
         )
         .unwrap()])
     }
+
+    #[tokio::test]
+    async fn test_create_index() {
+        use arrow_array::RecordBatch;
+        use arrow_schema::{DataType, Field, Schema as ArrowSchema};
+        use rand;
+        use std::iter::repeat_with;
+
+        use arrow_array::Float32Array;
+
+        let tmp_dir = tempdir().unwrap();
+        let uri = tmp_dir.path().to_str().unwrap();
+
+        let dimension = 16;
+        let schema = Arc::new(ArrowSchema::new(vec![Field::new(
+            "embeddings",
+            DataType::FixedSizeList(
+                Arc::new(Field::new("item", DataType::Float32, true)),
+                dimension,
+            ),
+            false,
+        )]));
+
+        let mut rng = rand::thread_rng();
+        let float_arr = Float32Array::from(
+            repeat_with(|| rng.gen::<f32>())
+                .take(512 * dimension as usize)
+                .collect::<Vec<f32>>(),
+        );
+
+        let vectors = Arc::new(create_fixed_size_list(float_arr, dimension).unwrap());
+        let batches = RecordBatchBuffer::new(vec![RecordBatch::try_new(
+            schema.clone(),
+            vec![vectors.clone()],
+        )
+        .unwrap()]);
+
+        let reader: Box<dyn RecordBatchReader + Send> = Box::new(batches);
+        let mut table = Table::create(uri, "test", reader).await.unwrap();
+
+        let mut i = IvfPQIndexBuilder::new();
+
+        let index_builder = i
+            .column("embeddings".to_string())
+            .index_name("my_index".to_string())
+            .ivf_params(IvfBuildParams::new(256))
+            .pq_params(PQBuildParams::default());
+
+        table.create_index(index_builder).await.unwrap();
+
+        assert_eq!(table.dataset.load_indices().await.unwrap().len(), 1);
+        assert_eq!(table.count_rows().await.unwrap(), 512);
+        assert_eq!(table.name, "test");
+    }
+
+    fn create_fixed_size_list<T: Array>(values: T, list_size: i32) -> Result<FixedSizeListArray> {
+        let list_type = DataType::FixedSizeList(
+            Arc::new(Field::new("item", values.data_type().clone(), true)),
+            list_size,
+        );
+        let data = ArrayDataBuilder::new(list_type)
+            .len(values.len() / list_size as usize)
+            .add_child_data(values.into_data())
+            .build()
+            .unwrap();
+
+        Ok(FixedSizeListArray::from(data))
+    }
 }
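Pulling the pieces of this change set together, a hedged end-to-end sketch (assumes the vectordb crate as of this diff; the path, table name, and column name are illustrative):

use arrow_array::RecordBatchReader;
use vectordb::database::Database;
use vectordb::error::Result;
use vectordb::index::vector::IvfPQIndexBuilder;

async fn demo(batches: Box<dyn RecordBatchReader>) -> Result<()> {
    let db = Database::connect("/tmp/lancedb-demo").await?;
    let mut table = db.create_table("vectors", batches).await?;

    let mut builder = IvfPQIndexBuilder::new();
    builder.column("vector".to_string()); // lance defaults cover the rest
    table.create_index(&builder).await?;
    Ok(())
}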